UPDATE: frequency-domain operations and 2 test scripts
sebastianVP -
r1778:799ea06d41f3
@@ -0,0 +1,386
1 #!python
2 '''
Test script: time-domain (pulse-pair) processing chain for SOPHy weather-radar data.
3 '''
4
5 import os, sys, json, argparse
6 import datetime
7 import time
8
9 from schainpy.controller import Project
10
11 PATH = "/home/pc-igp-179/Documentos/SOPHy"
12
13
14 PARAM = {
15 'S': {'zmin': -80, 'zmax':-45, 'colormap': 'jet' , 'label': 'Power', 'wrname': 'power','cb_label': 'dBm', 'ch':0},
16 'SNR':{'zmin': -10, 'zmax': 15, 'colormap': 'jet' , 'label': 'SNR', 'wrname': 'snr','cb_label': 'dB', 'ch':0},
17 'V': {'zmin': -12, 'zmax': 12, 'colormap': 'sophy_v', 'label': 'Velocity', 'wrname': 'velocity', 'cb_label': 'm/s', 'ch':0},
18 'R': {'zmin': 0.5, 'zmax': 1 , 'colormap': 'sophy_r', 'label': 'RhoHV', 'wrname':'rhoHV', 'cb_label': '', 'ch':0},
19 'P': {'zmin': -180,'zmax': 180,'colormap': 'sophy_p', 'label': 'PhiDP', 'wrname':'phiDP' , 'cb_label': 'degrees', 'ch':0},
20 'D': {'zmin': -9 , 'zmax': 12, 'colormap': 'sophy_d', 'label': 'ZDR','wrname':'differential_reflectivity' , 'cb_label': 'dB','ch':0},
21     'Z': {'zmin': -20, 'zmax': 80, 'colormap': 'sophy_z', 'label': 'Reflectivity', 'wrname':'reflectivity', 'cb_label': 'dBZ','ch':0},
22 'W': {'zmin': 0 , 'zmax': 12, 'colormap': 'sophy_w', 'label': 'Spectral Width', 'wrname':'spectral_width', 'cb_label': 'm/s', 'ch':0}
23 }
24
25 META = ['heightList', 'data_azi', 'data_ele', 'mode_op', 'latitude', 'longitude', 'altitude', 'heading', 'radar_name',
26 'institution', 'contact', 'h0', 'range_unit', 'prf', 'prf_unit', 'variable', 'variable_unit', 'n_pulses',
27 'pulse1_range', 'pulse1_width', 'pulse2_width', 'pulse1_repetitions', 'pulse2_repetitions', 'pulse_width_unit',
28 'snr_threshold', 'data_noise']
29
30
31 def max_index(r, sample_rate, ipp, h0,ipp_km):
32
33 return int(sample_rate*ipp*1e6 * r / ipp_km) + int(sample_rate*ipp*1e6 * -h0 / ipp_km)
34
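# Worked example (a sketch, assuming sample_rate comes in MHz and ipp in seconds):
# with sample_rate = 20, ipp_km = 60 (so ipp = 60*2/300000 = 4e-4 s) and h0 = -1.33 km,
# one IPP spans 20e6 * 4e-4 = 8000 samples, i.e. ~133 samples per km of range, so
# max_index(6.0, 20, 4e-4, -1.33, 60) -> 800 + 177 = 977, the gate index for 6 km.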
35
36 def main(args):
37 experiment = args.experiment
38
39     with open(os.path.join(PATH, experiment, 'experiment.json')) as fp:
40         conf = json.load(fp)
41
42 ipp_km = conf['usrp_tx']['ipp']
43 bottom = conf['pedestal']['bottom']
44     ipp = ipp_km * 2 / 300000  # IPP in seconds (two-way travel time at 3e5 km/s)
45 sample_rate = conf['usrp_rx']['sample_rate']
46 speed_axis = conf['pedestal']['speed']
47
48 if args.angles:
49 angles = args.angles
50 else:
51 angles = conf['pedestal']['table']
52
53
54
55 start_date = conf['name'].split('@')[1].split('T')[0].replace('-', '/')
56 end_date = start_date
57 if args.start_time:
58 start_time = args.start_time
59 else:
60 start_time = conf['name'].split('@')[1].split('T')[1].replace('-', ':')
61
62 if args.end_time:
63 end_time = args.end_time
64 else:
65 end_time = '23:59:59'
66
67
68 if args.label:
69 label = '-{}'.format(args.label)
70 else:
71 label = ''
72
73 path_plots = os.path.join(PATH, experiment, 'plots{}'.format(label))
74 path_save = os.path.join(PATH, experiment, 'param{}'.format(label))
75
76
77 time_offset = args.time_offset
78 parameters = args.parameters
79
80     N = int(1.0/(abs(speed_axis[0])*ipp))  # profiles per block (~one degree of pedestal rotation, if speed is in deg/s)
81     RMIX = 6.0        # range (km) up to which pulse-1 data is used when merging
82     H0 = -1.33        # first-height / range-correction offset (km)
83     MASK = args.mask  # SNR threshold used for masking
84
85 path = os.path.join(PATH, experiment, 'rawdata')
86 path_ped = os.path.join(PATH, experiment, 'position')
87
88 proyecto = Project()
89 proyecto.setup(id = '2', name='Test_2025', description="PRUEBA")
90
91 lectura = proyecto.addReadUnit(datatype='DigitalRFReader',
92 path=path,
93                                    startDate=start_date,
94                                    endDate=end_date,
95 startTime= start_time,
96 endTime= end_time,
97 delay=30,
98 #set=0,
99 online=0,
100 walk=1,
101 ippKm = ipp_km,
102 getByBlock = 1,
103 nProfileBlocks = N
104 )
105
106 n_pulses = 1
107 pulse_1_width = conf['usrp_tx']['pulse_1']
108 pulse_1_repetitions = conf['usrp_tx']['repetitions_1']
109 pulse_2_width = conf['usrp_tx']['pulse_2']
110 pulse_2_repetitions = conf['usrp_tx']['repetitions_2']
111
112 if '1' in args.pulses:
113 voltage1 = proyecto.addProcUnit(
114 datatype='VoltageProc',
115 inputId=lectura.getId()
116 )
117
118 op = voltage1.addOperation(
119 name='ProfileSelector'
120 )
121 op.addParameter(
122 name='profileRangeList',
123 value='0,{}'.format(conf['usrp_tx']['repetitions_1']-1)
124 )
125
126 if conf['usrp_tx']['code_type_1'] != 'None':
127 codes = [ c.strip() for c in conf['usrp_tx']['code_1'].split(',')]
128 code = []
129 for c in codes:
130 code.append([int(x) for x in c])
131 op = voltage1.addOperation(name='Decoder', optype='other')
132 op.addParameter(name='code', value=code)
133 op.addParameter(name='nCode', value=len(code), format='int')
134 op.addParameter(name='nBaud', value=len(code[0]), format='int')
135 ncode = len(code)
136 else:
137 ncode = 1
138 code = ['0']
139
140 op = voltage1.addOperation(name='CohInt', optype='other')
141 op.addParameter(name='n', value=ncode, format='int')
142
143 op = voltage1.addOperation(name='setH0')
144 op.addParameter(name='h0', value=H0, format='float')
145
146 if args.range > 0:
147 op = voltage1.addOperation(name='selectHeights')
148 op.addParameter(name='minIndex', value=max_index(0, sample_rate, ipp, H0,ipp_km), format='int')
149 op.addParameter(name='maxIndex', value=max_index(args.range, sample_rate, ipp, H0,ipp_km), format='int')
150
151
152 op = voltage1.addOperation(name='PulsePair_vRF', optype='other')
153         op.addParameter(name='n', value=int(conf['usrp_tx']['repetitions_1'])//ncode, format='int')
154
155 if args.rmDC:
156 op.addParameter(name='removeDC', value=1, format='int')
157
158 proc1 = proyecto.addProcUnit(datatype='ParametersProc', inputId=voltage1.getId())
159 proc1.addParameter(name='runNextUnit', value=True)
160
161 opObj10 = proc1.addOperation(name="WeatherRadar")
162 opObj10.addParameter(name='CR_Flag',value=True)
163 print(1, len(code[0]))
164 opObj10.addParameter(name='tauW',value=(1e-6/sample_rate)*len(code[0]))
165 opObj10.addParameter(name='Pt',value=200)
166 opObj10.addParameter(name='min_index',value=max_index(0, sample_rate, ipp, H0,ipp_km))
167
168
169 op = proc1.addOperation(name='PedestalInformation')
170 op.addParameter(name='path', value=path_ped, format='str')
171 op.addParameter(name='interval', value='0.04')
172 op.addParameter(name='time_offset', value=time_offset)
173 op.addParameter(name='mode', value=args.mode)
174
175 op = proc1.addOperation(name='Block360')
176 op.addParameter(name='attr_data', value='data_param')
177 op.addParameter(name='runNextOp', value=True)
178 op.addParameter(name='angles', value=angles)
179 op.addParameter(name='heading', value=conf['heading'])
180
181
182 if '2' in args.pulses:
183 voltage2 = proyecto.addProcUnit(
184 datatype='VoltageProc',
185 inputId=lectura.getId()
186 )
187
188 op = voltage2.addOperation(
189 name='ProfileSelector'
190 )
191 op.addParameter(
192 name='profileRangeList',
193 value='{},{}'.format(conf['usrp_tx']['repetitions_1'], conf['usrp_tx']['repetitions_1']+conf['usrp_tx']['repetitions_2']-1)
194 )
195
196         if conf['usrp_tx']['code_type_2'] != 'None':
197 codes = [ c.strip() for c in conf['usrp_tx']['code_2'].split(',')]
198 code = []
199 for c in codes:
200 code.append([int(x) for x in c])
201 op = voltage2.addOperation(name='Decoder', optype='other')
202 op.addParameter(name='code', value=code)
203 op.addParameter(name='nCode', value=len(code), format='int')
204 op.addParameter(name='nBaud', value=len(code[0]), format='int')
205
206 op = voltage2.addOperation(name='CohInt', optype='other')
207 op.addParameter(name='n', value=len(code), format='int')
208
209 ncode = len(code)
210 else:
211             ncode = 1
                code = ['0']  # keep len(code[0]) valid below when pulse 2 is uncoded
212
213 op = voltage2.addOperation(name='setH0')
214 op.addParameter(name='h0', value=H0, format='float')
215
216 if args.range > 0:
217 op = voltage2.addOperation(name='selectHeights')
218 op.addParameter(name='minIndex', value=max_index(0, sample_rate, ipp, H0,ipp_km), format='int')
219 op.addParameter(name='maxIndex', value=max_index(args.range, sample_rate, ipp, H0,ipp_km), format='int')
220
221
222 op = voltage2.addOperation(name='PulsePair_vRF', optype='other')
223         op.addParameter(name='n', value=int(conf['usrp_tx']['repetitions_2'])//ncode, format='int')
224
225 proc2 = proyecto.addProcUnit(datatype='ParametersProc', inputId=voltage2.getId())
226 proc2.addParameter(name='runNextUnit', value=True)
227
228 opObj10 = proc2.addOperation(name="WeatherRadar")
229 opObj10.addParameter(name='CR_Flag',value=True,format='bool')
230 print(2, len(code[0]))
231 opObj10.addParameter(name='tauW',value=(1e-6/sample_rate)*len(code[0]))
232 opObj10.addParameter(name='Pt',value=200)
233 opObj10.addParameter(name='min_index',value=max_index(RMIX, sample_rate, ipp, H0,ipp_km))
234
235 op = proc2.addOperation(name='PedestalInformation')
236 op.addParameter(name='path', value=path_ped, format='str')
237 op.addParameter(name='interval', value='0.04')
238 op.addParameter(name='time_offset', value=time_offset)
239 op.addParameter(name='mode', value=args.mode)
240 op.addParameter(name='heading', value=conf['heading'])
241
242 op = proc2.addOperation(name='Block360')
243 op.addParameter(name='attr_data', value='data_param')
244 op.addParameter(name='runNextOp', value=True)
245 op.addParameter(name='angles', value=angles)
246 op.addParameter(name='heading', value=conf['heading'])
247
248 if '1' in args.pulses and '2' in args.pulses:
249 merge = proyecto.addProcUnit(datatype='MergeProc', inputId=[proc1.getId(), proc2.getId()])
250 merge.addParameter(name='attr_data', value='data_param')
251 merge.addParameter(name='mode', value='7')
252 merge.addParameter(name='index', value=max_index(RMIX, sample_rate, ipp, H0,ipp_km))
253
254 elif '1' in args.pulses:
255 merge = proc1
256 elif '2' in args.pulses:
257 merge = proc2
258
259 for param in parameters:
260
261 if args.plot:
262 op= merge.addOperation(name='WeatherParamsPlot')
263 if args.save:
264 op.addParameter(name='save', value=path_plots, format='str')
265 op.addParameter(name='save_period', value=-1)
266 op.addParameter(name='show', value=args.show)
267 op.addParameter(name='channels', value='0,')
268 op.addParameter(name='zmin', value=PARAM[param]['zmin'], format='int')
269 op.addParameter(name='zmax', value=PARAM[param]['zmax'], format='int')
270 op.addParameter(name='yrange', value=20, format='int')
271 op.addParameter(name='xrange', value=args.range, format='int')
272 op.addParameter(name='attr_data', value=param, format='str')
273 op.addParameter(name='labels', value=[[PARAM[param]['label']], [PARAM[param]['label']]])
274 op.addParameter(name='save_code', value=param)
275 op.addParameter(name='cb_label', value=PARAM[param]['cb_label'])
276 op.addParameter(name='colormap', value=PARAM[param]['colormap'])
277 op.addParameter(name='bgcolor', value='black')
278 op.addParameter(name='localtime', value=False)
279 op.addParameter(name='shapes', value='./shapes')
280 op.addParameter(name='latitude', value=conf['latitude'], format='float')
281 op.addParameter(name='longitude', value=conf['longitude'], format='float')
282 op.addParameter(name='map', value=True)
283
284 if MASK: op.addParameter(name='mask', value=MASK, format='float')
285 if args.server:
286 op.addParameter(name='server', value='190.187.237.239:4444')
287 op.addParameter(name='exp_code', value='400')
288
289 desc = {
290 'Data': {
291 'data_param': {PARAM[param]['wrname']: ['H', 'V']},
292 'utctime': 'time'
293 },
294 'Metadata': {
295 'heightList': 'range',
296 'data_azi': 'azimuth',
297 'data_ele': 'elevation',
298 'mode_op': 'scan_type',
299 'h0': 'range_correction',
300 'dataPP_NOISE': 'noise',
301 }
302 }
303
304 if args.save:
305 writer = merge.addOperation(name='HDFWriter')
306 writer.addParameter(name='path', value=path_save, format='str')
307 writer.addParameter(name='Reset', value=True)
308 writer.addParameter(name='setType', value='weather')
309             writer.addParameter(name='setChannel', value='0')  # new parameter: choose channel 0 (H) or channel 1 (V)
310 writer.addParameter(name='description', value=json.dumps(desc))
311 writer.addParameter(name='blocksPerFile', value='1',format='int')
312 writer.addParameter(name='metadataList', value=','.join(META))
313 writer.addParameter(name='dataList', value='data_param,utctime')
314 writer.addParameter(name='weather_var', value=param)
315 writer.addParameter(name='mask', value=MASK, format='float')
316 writer.addParameter(name='localtime', value=False)
317 # meta
318 writer.addParameter(name='latitude', value=conf['latitude'])
319 writer.addParameter(name='longitude', value=conf['longitude'])
320 writer.addParameter(name='altitude', value=conf['altitude'])
321 writer.addParameter(name='heading', value=conf['heading'])
322 writer.addParameter(name='radar_name', value='SOPHy')
323 writer.addParameter(name='institution', value='IGP')
324 writer.addParameter(name='contact', value='dscipion@igp.gob.pe')
325 writer.addParameter(name='created_by', value='Signal Chain (https://pypi.org/project/schainpy/)')
326 writer.addParameter(name='range_unit', value='km')
327 writer.addParameter(name='prf', value=1/ipp)
328 writer.addParameter(name='prf_unit', value='hertz')
329 writer.addParameter(name='variable', value=PARAM[param]['label'])
330 writer.addParameter(name='variable_unit', value=PARAM[param]['cb_label'])
331 writer.addParameter(name='n_pulses', value=n_pulses)
332 writer.addParameter(name='pulse1_range', value=RMIX)
333 writer.addParameter(name='pulse1_width', value=pulse_1_width)
334 writer.addParameter(name='pulse2_width', value=pulse_2_width)
335 writer.addParameter(name='pulse1_repetitions', value=pulse_1_repetitions)
336 writer.addParameter(name='pulse2_repetitions', value=pulse_2_repetitions)
337 writer.addParameter(name='pulse_width_unit', value='microseconds')
338 writer.addParameter(name='snr_threshold', value=MASK)
339 writer.addParameter(name='cr_hv', value=[67.41,67.17]) #new parameter
340
341
342 return proyecto
343
344
345
346
347 if __name__ == '__main__':
348
349 parser = argparse.ArgumentParser(description='Script to process SOPHy data.')
350 parser.add_argument('experiment',
351 help='Experiment name')
352 parser.add_argument('--parameters', nargs='*', default=['S'],
353                         help='Variables to process: S, SNR, V, W, Z, D, P, R')
354 parser.add_argument('--pulses', nargs='*', default=['1', '2'],
355                         help='Pulses to process: 1, 2')
356 parser.add_argument('--range', default=60, type=float,
357 help='Max range to plot')
358 parser.add_argument('--server', action='store_true',
359 help='Send to realtime')
360 parser.add_argument('--start_time', default='',
361 help='Set start time.')
362 parser.add_argument('--end_time', default='',
363 help='Set end time.')
364 parser.add_argument('--rmDC', action='store_true',
365 help='Apply remove DC.')
366 parser.add_argument('--time_offset', default=0,
367 help='Fix time offset')
368 parser.add_argument('--mode', default=None,
369 help='Type of scan')
370 parser.add_argument('--angles', nargs='*', default=[], type=int,
371 help='Angles to process')
372 parser.add_argument('--plot', action='store_true',
373 help='Create plot files')
374 parser.add_argument('--save', action='store_true',
375 help='Create output files')
376 parser.add_argument('--show', action='store_true',
377 help='Show matplotlib plot.')
378 parser.add_argument('--mask', default=0.36, type=float,
379 help='Filter mask over SNR')
380 parser.add_argument('--label', default='',
381 help='Label for plot & param folder')
382
383 args = parser.parse_args()
384
385 proyecto= main(args)
386 proyecto.start()
\ No newline at end of file
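A hedged sketch of driving the pulse-pair script above programmatically instead of through its CLI; the experiment name and option values below are hypothetical and simply mirror the argparse defaults:

import argparse

# Hypothetical experiment folder under PATH containing experiment.json, rawdata/ and position/.
args = argparse.Namespace(
    experiment='test_scan@2025-01-06T10-00-00',
    parameters=['Z', 'V'], pulses=['1', '2'], range=60.0,
    server=False, start_time='', end_time='', rmDC=False,
    time_offset=0, mode=None, angles=[], plot=True, save=True,
    show=False, mask=0.36, label='',
)

proyecto = main(args)   # builds the reader -> voltage -> parameters -> merge chain above
proyecto.start()        # runs the schainpy project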
@@ -0,0 +1,381
1 import os, sys, json, argparse
2 import multiprocessing
3 import datetime
4 import time
5
6 PATH = "/home/pc-igp-179/Documentos/SOPHy"
7
8 PARAM = {
9 'S': {'zmin': -80, 'zmax':-45, 'colormap': 'jet' , 'label': 'Power', 'wrname': 'power','cb_label': 'dBm', 'ch':0},
10 'SNR':{'zmin': -10, 'zmax': 15, 'colormap': 'jet' , 'label': 'SNR', 'wrname': 'snr','cb_label': 'dB', 'ch':0},
11 'V': {'zmin': -12, 'zmax': 12, 'colormap': 'sophy_v', 'label': 'Velocity', 'wrname': 'velocity', 'cb_label': 'm/s', 'ch':0},
12 'R': {'zmin': 0.5, 'zmax': 1 , 'colormap': 'sophy_r', 'label': 'RhoHV', 'wrname':'rhoHV', 'cb_label': '', 'ch':0},
13 'P': {'zmin': -180,'zmax': 180,'colormap': 'sophy_p', 'label': 'PhiDP', 'wrname':'phiDP' , 'cb_label': 'degrees', 'ch':0},
14 'D': {'zmin': -9 , 'zmax': 12, 'colormap': 'sophy_d', 'label': 'ZDR','wrname':'differential_reflectivity' , 'cb_label': 'dB','ch':0},
15     'Z': {'zmin': -20, 'zmax': 80, 'colormap': 'sophy_z', 'label': 'Reflectivity', 'wrname':'reflectivity', 'cb_label': 'dBZ','ch':0},
16 'W': {'zmin': 0 , 'zmax': 12, 'colormap': 'sophy_w', 'label': 'Spectral Width', 'wrname':'spectral_width', 'cb_label': 'm/s', 'ch':0}
17 }
18
19 META = ['heightList', 'data_azi', 'data_ele', 'mode_op', 'latitude', 'longitude', 'altitude', 'heading', 'radar_name',
20 'institution', 'contact', 'h0', 'range_unit', 'prf', 'prf_unit', 'variable', 'variable_unit', 'n_pulses',
21 'pulse1_range', 'pulse1_width', 'pulse2_width', 'pulse1_repetitions', 'pulse2_repetitions', 'pulse_width_unit',
22 'snr_threshold', 'data_noise']
23
24
25 def max_index(r, sample_rate, ipp, h0,ipp_km):
26
27 return int(sample_rate*ipp*1e6 * r / ipp_km) + int(sample_rate*ipp*1e6 * -h0 / ipp_km)
28
29 def main(args):
30
31 experiment = args.experiment
32     with open(os.path.join(PATH, experiment, 'experiment.json')) as fp:
33         conf = json.load(fp)
34
35 ipp_km = conf['usrp_tx']['ipp']
36 bottom = conf['pedestal']['bottom']
37     ipp = ipp_km * 2 / 300000  # IPP in seconds (two-way travel time at 3e5 km/s)
38 sample_rate = conf['usrp_rx']['sample_rate']
39 speed_axis = conf['pedestal']['speed']
40 if args.angles:
41 angles = args.angles
42 else:
43 angles = conf['pedestal']['table']
44 time_offset = args.time_offset
45 parameters = args.parameters
46 start_date = conf['name'].split('@')[1].split('T')[0].replace('-', '/')
47 end_date = start_date
48 if args.start_time:
49 start_time = args.start_time
50 else:
51 start_time = conf['name'].split('@')[1].split('T')[1].replace('-', ':')
52
53 if args.end_time:
54 end_time = args.end_time
55 else:
56 end_time = '23:59:59'
57
58 N = int(1.0/(abs(speed_axis[0])*ipp))
59
60 path = os.path.join(PATH, experiment, 'rawdata')
61
62 path_ped = os.path.join(PATH, experiment, 'position')
63 if args.label:
64 label = '-{}'.format(args.label)
65 else:
66 label = ''
67 path_plots = os.path.join(PATH, experiment, 'plots{}'.format(label))
68 path_save = os.path.join(PATH, experiment, 'param{}'.format(label))
69     RMIX = 6.0        # range (km) up to which pulse-1 data is used when merging
70     H0 = -1.33        # first-height / range-correction offset (km)
71     MASK = args.mask  # SNR threshold used for masking
72
73 from schainpy.controller import Project
74
75 project = Project()
76 project.setup(id='1', name='Sophy', description='sophy proc')
77
78 reader = project.addReadUnit(datatype='DigitalRFReader',
79 path=path,
80 startDate=start_date,
81 endDate=end_date,
82 startTime=start_time,
83 endTime=end_time,
84 delay=0,
85 online=args.online,
86 walk=0,
87 ippKm = ipp_km,
88 getByBlock = 1,
89 nProfileBlocks = N,
90 )
91
92
93 n_pulses = 1
94 pulse_1_width = conf['usrp_tx']['pulse_1']
95 pulse_1_repetitions = conf['usrp_tx']['repetitions_1']
96 pulse_2_width = conf['usrp_tx']['pulse_2']
97 pulse_2_repetitions = conf['usrp_tx']['repetitions_2']
98
99 if '1' in args.pulses:
100 voltage1 = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())
101
102 op = voltage1.addOperation(name='ProfileSelector')
103 op.addParameter(name='profileRangeList', value='0,{}'.format(conf['usrp_tx']['repetitions_1']-1))
104
105 if conf['usrp_tx']['code_type_1'] != 'None':
106 codes = [ c.strip() for c in conf['usrp_tx']['code_1'].split(',')]
107 code = []
108 for c in codes:
109 code.append([int(x) for x in c])
110 op = voltage1.addOperation(name='Decoder', optype='other')
111 op.addParameter(name='code', value=code)
112 op.addParameter(name='nCode', value=len(code), format='int')
113 op.addParameter(name='nBaud', value=len(code[0]), format='int')
114 ncode = len(code)
115 else:
116 ncode = 1
117 code = ['0']
118
119
120 op = voltage1.addOperation(name='CohInt', optype='other')
121 op.addParameter(name='n', value=ncode, format='int')
122
123 op = voltage1.addOperation(name='setH0')
124 op.addParameter(name='h0', value=H0, format='float')
125
126 if args.range > 0:
127 op = voltage1.addOperation(name='selectHeights')
128 op.addParameter(name='minIndex', value=max_index(0, sample_rate, ipp, H0,ipp_km), format='int')
129 op.addParameter(name='maxIndex', value=max_index(args.range, sample_rate, ipp, H0,ipp_km), format='int')
130
131
132
133 #op = voltage1.addOperation(name='PulsePair_vRF', optype='other')
134 #op.addParameter(name='n', value=int(conf['usrp_tx']['repetitions_1'])/ncode, format='int')
135
136 procUnitConfObjA = project.addProcUnit(datatype='SpectraProc', inputId=voltage1.getId())
137         procUnitConfObjA.addParameter(name='nFFTPoints', value=int(conf['usrp_tx']['repetitions_1'])//ncode, format='int')
138         procUnitConfObjA.addParameter(name='nProfiles', value=int(conf['usrp_tx']['repetitions_1'])//ncode, format='int')
139
140
141 #opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
142 #opObj11.addParameter(name='frequency', value='9.345e9', format='float')
143 #procUnitConfObjA.addOperation(name='removeDC')
144 #if args.rmDC:
145 # op.addParameter(name='removeDC', value=1, format='int')
146
147 proc1 = project.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjA.getId())
148 proc1.addOperation(name='SpectralMoments')
149
150 proc1.addParameter(name='runNextUnit', value=True)
151
152 opObj10 = proc1.addOperation(name="WeatherRadar")
153 opObj10.addParameter(name='CR_Flag',value=True)
154 #print(1, len(code[0]))
155 opObj10.addParameter(name='tauW',value=(1e-6/sample_rate)*len(code[0]))
156 opObj10.addParameter(name='Pt',value=200)
157 opObj10.addParameter(name='min_index',value=max_index(0, sample_rate, ipp, H0,ipp_km))
158
159
160 op = proc1.addOperation(name='PedestalInformation')
161 op.addParameter(name='path', value=path_ped, format='str')
162 op.addParameter(name='interval', value='0.04')
163 op.addParameter(name='time_offset', value=time_offset)
164 op.addParameter(name='mode', value=args.mode)
165
166 op = proc1.addOperation(name='Block360')
167 op.addParameter(name='attr_data', value='data_param')
168 op.addParameter(name='runNextOp', value=True)
169 op.addParameter(name='angles', value=angles)
170 op.addParameter(name='heading', value=conf['heading'])
171
172
173 if '2' in args.pulses:
174 voltage2 = project.addProcUnit(datatype='VoltageProc', inputId=reader.getId())
175
176 op = voltage2.addOperation(name='ProfileSelector')
177 op.addParameter(name='profileRangeList', value='{},{}'.format(conf['usrp_tx']['repetitions_1'], conf['usrp_tx']['repetitions_1']+conf['usrp_tx']['repetitions_2']-1))
178
179         if conf['usrp_tx']['code_type_2'] != 'None':
180 codes = [ c.strip() for c in conf['usrp_tx']['code_2'].split(',')]
181 code = []
182 for c in codes:
183 code.append([int(x) for x in c])
184 op = voltage2.addOperation(name='Decoder', optype='other')
185 op.addParameter(name='code', value=code)
186 op.addParameter(name='nCode', value=len(code), format='int')
187 op.addParameter(name='nBaud', value=len(code[0]), format='int')
188
189 op = voltage2.addOperation(name='CohInt', optype='other')
190 op.addParameter(name='n', value=len(code), format='int')
191
192 ncode = len(code)
193 else:
194             ncode = 1
                code = ['0']  # keep len(code[0]) valid below when pulse 2 is uncoded
195
196 op = voltage2.addOperation(name='setH0')
197 op.addParameter(name='h0', value=H0, format='float')
198
199 if args.range > 0:
200 op = voltage2.addOperation(name='selectHeights')
201 op.addParameter(name='minIndex', value=max_index(0, sample_rate, ipp, H0,ipp_km), format='int')
202 op.addParameter(name='maxIndex', value=max_index(args.range, sample_rate, ipp, H0,ipp_km), format='int')
203
204
205
206
207 procUnitConfObjB = project.addProcUnit(datatype='SpectraProc', inputId=voltage2.getId())
208         procUnitConfObjB.addParameter(name='nFFTPoints', value=int(conf['usrp_tx']['repetitions_2'])//ncode, format='int')
209         procUnitConfObjB.addParameter(name='nProfiles', value=int(conf['usrp_tx']['repetitions_2'])//ncode, format='int')
210
211
212
213 #opObj11 = procUnitConfObjB.addOperation(name='setRadarFrequency')
214 #opObj11.addParameter(name='frequency', value='9.345e9', format='float')
215 #procUnitConfObjB.addOperation(name='removeDC')
216
217 #if args.rmDC:
218 # op.addParameter(name='removeDC', value=1, format='int')
219
220 proc2 = project.addProcUnit(datatype='ParametersProc', inputId=procUnitConfObjB.getId())
221 proc2.addOperation(name='SpectralMoments')
222 proc2.addParameter(name='runNextUnit', value=True)
223
224 opObj10 = proc2.addOperation(name="WeatherRadar")
225 opObj10.addParameter(name='CR_Flag',value=True,format='bool')
226 opObj10.addParameter(name='tauW',value=(1e-6/sample_rate)*len(code[0]))
227 opObj10.addParameter(name='Pt',value=200)
228 opObj10.addParameter(name='min_index',value=max_index(RMIX, sample_rate, ipp, H0,ipp_km))
229
230 op = proc2.addOperation(name='PedestalInformation')
231 op.addParameter(name='path', value=path_ped, format='str')
232 op.addParameter(name='interval', value='0.04')
233 op.addParameter(name='time_offset', value=time_offset)
234 op.addParameter(name='mode', value=args.mode)
235 op.addParameter(name='heading', value=conf['heading'])
236
237 op = proc2.addOperation(name='Block360')
238 op.addParameter(name='attr_data', value='data_param')
239 op.addParameter(name='runNextOp', value=True)
240 op.addParameter(name='angles', value=angles)
241 op.addParameter(name='heading', value=conf['heading'])
242
243
244 if '1' in args.pulses and '2' in args.pulses:
245 merge = project.addProcUnit(datatype='MergeProc', inputId=[proc1.getId(), proc2.getId()])
246 merge.addParameter(name='attr_data', value='data_param')
247 merge.addParameter(name='mode', value='7')
248 merge.addParameter(name='index', value=max_index(RMIX, sample_rate, ipp, H0,ipp_km))
249
250 elif '1' in args.pulses:
251 merge = proc1
252 elif '2' in args.pulses:
253 merge = proc2
254
255
256 for param in parameters:
257
258 if args.plot:
259 op= merge.addOperation(name='WeatherParamsPlot')
260 if args.save:
261 op.addParameter(name='save', value=path_plots, format='str')
262 op.addParameter(name='save_period', value=-1)
263 op.addParameter(name='show', value=args.show)
264 op.addParameter(name='channels', value='0,')
265 op.addParameter(name='zmin', value=PARAM[param]['zmin'], format='int')
266 op.addParameter(name='zmax', value=PARAM[param]['zmax'], format='int')
267 op.addParameter(name='yrange', value=20, format='int')
268 op.addParameter(name='xrange', value=args.range, format='int')
269 op.addParameter(name='attr_data', value=param, format='str')
270 op.addParameter(name='labels', value=[[PARAM[param]['label']], [PARAM[param]['label']]])
271 op.addParameter(name='save_code', value=param)
272 op.addParameter(name='cb_label', value=PARAM[param]['cb_label'])
273 op.addParameter(name='colormap', value=PARAM[param]['colormap'])
274 op.addParameter(name='bgcolor', value='black')
275 op.addParameter(name='localtime', value=False)
276 op.addParameter(name='shapes', value='./shapes')
277 op.addParameter(name='latitude', value=conf['latitude'], format='float')
278 op.addParameter(name='longitude', value=conf['longitude'], format='float')
279 op.addParameter(name='map', value=True)
280
281 if MASK: op.addParameter(name='mask', value=MASK, format='float')
282 #if args.server:
283 # op.addParameter(name='server', value='190.187.237.239:4444')
284 # op.addParameter(name='exp_code', value='400')
285
286 desc = {
287 'Data': {
288 'data_param': {PARAM[param]['wrname']: ['H', 'V']},
289 'utctime': 'time'
290 },
291 'Metadata': {
292 'heightList': 'range',
293 'data_azi': 'azimuth',
294 'data_ele': 'elevation',
295 'mode_op': 'scan_type',
296 'h0': 'range_correction',
297 'dataPP_NOISE': 'noise',
298 }
299 }
300
301 if args.save:
302 writer = merge.addOperation(name='HDFWriter')
303 writer.addParameter(name='path', value=path_save, format='str')
304 writer.addParameter(name='Reset', value=True)
305 writer.addParameter(name='setType', value='weather')
306             writer.addParameter(name='setChannel', value='0')  # new parameter: choose channel 0 (H) or channel 1 (V)
307 writer.addParameter(name='description', value=json.dumps(desc))
308 writer.addParameter(name='blocksPerFile', value='1',format='int')
309 writer.addParameter(name='metadataList', value=','.join(META))
310 writer.addParameter(name='dataList', value='data_param,utctime')
311 writer.addParameter(name='weather_var', value=param)
312 writer.addParameter(name='mask', value=MASK, format='float')
313 writer.addParameter(name='localtime', value=False)
314 # meta
315 writer.addParameter(name='latitude', value=conf['latitude'])
316 writer.addParameter(name='longitude', value=conf['longitude'])
317 writer.addParameter(name='altitude', value=conf['altitude'])
318 writer.addParameter(name='heading', value=conf['heading'])
319 writer.addParameter(name='radar_name', value='SOPHy')
320 writer.addParameter(name='institution', value='IGP')
321 writer.addParameter(name='contact', value='dscipion@igp.gob.pe')
322 writer.addParameter(name='created_by', value='Signal Chain (https://pypi.org/project/schainpy/)')
323 writer.addParameter(name='range_unit', value='km')
324 writer.addParameter(name='prf', value=1/ipp)
325 writer.addParameter(name='prf_unit', value='hertz')
326 writer.addParameter(name='variable', value=PARAM[param]['label'])
327 writer.addParameter(name='variable_unit', value=PARAM[param]['cb_label'])
328 writer.addParameter(name='n_pulses', value=n_pulses)
329 writer.addParameter(name='pulse1_range', value=RMIX)
330 writer.addParameter(name='pulse1_width', value=pulse_1_width)
331 writer.addParameter(name='pulse2_width', value=pulse_2_width)
332 writer.addParameter(name='pulse1_repetitions', value=pulse_1_repetitions)
333 writer.addParameter(name='pulse2_repetitions', value=pulse_2_repetitions)
334 writer.addParameter(name='pulse_width_unit', value='microseconds')
335 writer.addParameter(name='snr_threshold', value=MASK)
336 writer.addParameter(name='cr_hv', value=[67.41,67.17]) #new parameter
337
338 return project
339
340 if __name__ == '__main__':
341
342 parser = argparse.ArgumentParser(description='Script to process SOPHy data.')
343 parser.add_argument('experiment',
344 help='Experiment name')
345 parser.add_argument('--parameters', nargs='*', default=['S'],
346                         help='Variables to process: S, SNR, V, W, Z, D, P, R')
347 parser.add_argument('--pulses', nargs='*', default=['1', '2'],
348                         help='Pulses to process: 1, 2')
349 parser.add_argument('--angles', nargs='*', default=[], type=int,
350 help='Angles to process')
351 parser.add_argument('--time_offset', default=0,
352 help='Fix time offset')
353 parser.add_argument('--range', default=60, type=float,
354 help='Max range to plot')
355
356 parser.add_argument('--save', action='store_true',
357 help='Create output files')
358 parser.add_argument('--plot', action='store_true',
359 help='Create plot files')
360 parser.add_argument('--show', action='store_true',
361 help='Show matplotlib plot.')
362 parser.add_argument('--online', action='store_true',
363 help='Set online mode.')
364 parser.add_argument('--server', action='store_true',
365 help='Send to realtime')
366 parser.add_argument('--start_time', default='',
367 help='Set start time.')
368 parser.add_argument('--end_time', default='',
369 help='Set end time.')
370 parser.add_argument('--label', default='',
371 help='Label for plot & param folder')
372 parser.add_argument('--mode', default=None,
373 help='Type of scan')
374 #parser.add_argument('--rmDC', action='store_true',
375 # help='Apply remove DC.')
376 parser.add_argument('--mask', default=0, type=float,
377 help='Set SNR threshold.')
378 args = parser.parse_args()
379
380 project = main(args)
381 project.start()
\ No newline at end of file
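The key change relative to the first script is that this one works in the frequency domain: SpectraProc plus SpectralMoments replace PulsePair_vRF. A minimal sketch of just that stage is shown below; the path, dates, block size and FFT length are placeholders, not values taken from a real experiment.json:

from schainpy.controller import Project

p = Project()
p.setup(id='99', name='spectra_sketch', description='frequency-domain stage only')
reader = p.addReadUnit(datatype='DigitalRFReader', path='/tmp/rawdata',  # hypothetical path
                       startDate='2025/01/06', endDate='2025/01/06',
                       startTime='00:00:00', endTime='23:59:59',
                       online=0, walk=0, getByBlock=1, nProfileBlocks=360)
volt = p.addProcUnit(datatype='VoltageProc', inputId=reader.getId())
spec = p.addProcUnit(datatype='SpectraProc', inputId=volt.getId())
spec.addParameter(name='nFFTPoints', value=128, format='int')  # assumed FFT length
spec.addParameter(name='nProfiles', value=128, format='int')
par = p.addProcUnit(datatype='ParametersProc', inputId=spec.getId())
par.addOperation(name='SpectralMoments')  # power, velocity, width and SNR from the spectra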
@@ -1,769 +1,784
1 1 import os
2 2 import datetime
3 3 import warnings
4 4 import numpy
5 5 from mpl_toolkits.axisartist.grid_finder import FixedLocator, DictFormatter
6 6 from matplotlib.patches import Circle
7 7 from cartopy.feature import ShapelyFeature
8 8 import cartopy.io.shapereader as shpreader
9 9
10 10 from schainpy.model.graphics.jroplot_base import Plot, plt, ccrs
11 11 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot, SpectraCutPlot
12 12 from schainpy.utils import log
13 13 from schainpy.model.graphics.plotting_codes import cb_tables
14 14
15 15
16 16 EARTH_RADIUS = 6.3710e3
17 17
18 18
19 19 def antenna_to_cartesian(ranges, azimuths, elevations):
20 20 """
21 21 Return Cartesian coordinates from antenna coordinates.
22 22
23 23 Parameters
24 24 ----------
25 25 ranges : array
26 26 Distances to the center of the radar gates (bins) in kilometers.
27 27 azimuths : array
28 28 Azimuth angle of the radar in degrees.
29 29 elevations : array
30 30 Elevation angle of the radar in degrees.
31 31
32 32 Returns
33 33 -------
34 34 x, y, z : array
35 35 Cartesian coordinates in meters from the radar.
36 36
37 37 Notes
38 38 -----
39 39 The calculation for Cartesian coordinate is adapted from equations
40 40 2.28(b) and 2.28(c) of Doviak and Zrnic [1]_ assuming a
41 41 standard atmosphere (4/3 Earth's radius model).
42 42
43 43 .. math::
44 44
45 45 z = \\sqrt{r^2+R^2+2*r*R*sin(\\theta_e)} - R
46 46
47 47 s = R * arcsin(\\frac{r*cos(\\theta_e)}{R+z})
48 48
49 49 x = s * sin(\\theta_a)
50 50
51 51 y = s * cos(\\theta_a)
52 52
53 53 Where r is the distance from the radar to the center of the gate,
54 54 :math:`\\theta_a` is the azimuth angle, :math:`\\theta_e` is the
55 55 elevation angle, s is the arc length, and R is the effective radius
56 56 of the earth, taken to be 4/3 the mean radius of earth (6371 km).
57 57
58 58 References
59 59 ----------
60 60 .. [1] Doviak and Zrnic, Doppler Radar and Weather Observations, Second
61 61 Edition, 1993, p. 21.
62 62
63 63 """
64 64 theta_e = numpy.deg2rad(elevations) # elevation angle in radians.
65 65 theta_a = numpy.deg2rad(azimuths) # azimuth angle in radians.
66 66 R = 6371.0 * 1000.0 * 4.0 / 3.0 # effective radius of earth in meters.
67 67 r = ranges * 1000.0 # distances to gates in meters.
68 68
69 69 z = (r ** 2 + R ** 2 + 2.0 * r * R * numpy.sin(theta_e)) ** 0.5 - R
70 70 s = R * numpy.arcsin(r * numpy.cos(theta_e) / (R + z)) # arc length in m.
71 71 x = s * numpy.sin(theta_a)
72 72 y = s * numpy.cos(theta_a)
73 73 return x, y, z
74 74
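# Example (sketch): a gate 30 km out at azimuth 90 deg and elevation 3 deg maps to
# roughly x ~ 2.995e4 m (east of the radar), y ~ 0 m, z ~ 1.62e3 m above it:
# x, y, z = antenna_to_cartesian(numpy.array([30.0]), numpy.array([90.0]), numpy.array([3.0]))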
75 75 def cartesian_to_geographic_aeqd(x, y, lon_0, lat_0, R=EARTH_RADIUS):
76 76 """
77 77 Azimuthal equidistant Cartesian to geographic coordinate transform.
78 78
79 79 Transform a set of Cartesian/Cartographic coordinates (x, y) to
80 80 geographic coordinate system (lat, lon) using a azimuthal equidistant
81 81 map projection [1]_.
82 82
83 83 .. math::
84 84
85 85 lat = \\arcsin(\\cos(c) * \\sin(lat_0) +
86 86 (y * \\sin(c) * \\cos(lat_0) / \\rho))
87 87
88 88 lon = lon_0 + \\arctan2(
89 89 x * \\sin(c),
90 90 \\rho * \\cos(lat_0) * \\cos(c) - y * \\sin(lat_0) * \\sin(c))
91 91
92 92 \\rho = \\sqrt(x^2 + y^2)
93 93
94 94 c = \\rho / R
95 95
96 96 Where x, y are the Cartesian position from the center of projection;
97 97 lat, lon the corresponding latitude and longitude; lat_0, lon_0 are the
98 98 latitude and longitude of the center of the projection; R is the radius of
99 99 the earth (defaults to ~6371 km). lon is adjusted to be between -180 and
100 100 180.
101 101
102 102 Parameters
103 103 ----------
104 104 x, y : array-like
105 105 Cartesian coordinates in the same units as R, typically meters.
106 106 lon_0, lat_0 : float
107 107 Longitude and latitude, in degrees, of the center of the projection.
108 108 R : float, optional
109 109 Earth radius in the same units as x and y. The default value is in
110 110 units of meters.
111 111
112 112 Returns
113 113 -------
114 114 lon, lat : array
115 115 Longitude and latitude of Cartesian coordinates in degrees.
116 116
117 117 References
118 118 ----------
119 119 .. [1] Snyder, J. P. Map Projections--A Working Manual. U. S. Geological
120 120 Survey Professional Paper 1395, 1987, pp. 191-202.
121 121
122 122 """
123 123 x = numpy.atleast_1d(numpy.asarray(x))
124 124 y = numpy.atleast_1d(numpy.asarray(y))
125 125
126 126 lat_0_rad = numpy.deg2rad(lat_0)
127 127 lon_0_rad = numpy.deg2rad(lon_0)
128 128
129 129 rho = numpy.sqrt(x*x + y*y)
130 130 c = rho / R
131 131
132 132 with warnings.catch_warnings():
133 133 # division by zero may occur here but is properly addressed below so
134 134 # the warnings can be ignored
135 135 warnings.simplefilter("ignore", RuntimeWarning)
136 136 lat_rad = numpy.arcsin(numpy.cos(c) * numpy.sin(lat_0_rad) +
137 137 y * numpy.sin(c) * numpy.cos(lat_0_rad) / rho)
138 138 lat_deg = numpy.rad2deg(lat_rad)
139 139 # fix cases where the distance from the center of the projection is zero
140 140 lat_deg[rho == 0] = lat_0
141 141
142 142 x1 = x * numpy.sin(c)
143 143 x2 = rho*numpy.cos(lat_0_rad)*numpy.cos(c) - y*numpy.sin(lat_0_rad)*numpy.sin(c)
144 144 lon_rad = lon_0_rad + numpy.arctan2(x1, x2)
145 145 lon_deg = numpy.rad2deg(lon_rad)
146 146 # Longitudes should be from -180 to 180 degrees
147 147 lon_deg[lon_deg > 180] -= 360.
148 148 lon_deg[lon_deg < -180] += 360.
149 149
150 150 return lon_deg, lat_deg
151 151
152 152 def antenna_to_geographic(ranges, azimuths, elevations, site):
153 153
154 154 x, y, z = antenna_to_cartesian(numpy.array(ranges), numpy.array(azimuths), numpy.array(elevations))
155 155 lon, lat = cartesian_to_geographic_aeqd(x, y, site[0], site[1], R=6370997.)
156 156
157 157 return lon, lat
158 158
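# Example (sketch, with a hypothetical radar site at lon=-80.64, lat=-5.17):
# lon, lat = antenna_to_geographic([10, 20, 30], [45, 45, 45], [2, 2, 2], (-80.64, -5.17))
# returns gate positions a fraction of a degree to the north-east of the site.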
159 159 def ll2xy(lat1, lon1, lat2, lon2):
160 160
161 161 p = 0.017453292519943295
162 162 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
163 163 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
164 164 r = 12742 * numpy.arcsin(numpy.sqrt(a))
165 165 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
166 166 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
167 167 theta = -theta + numpy.pi/2
168 168 return r*numpy.cos(theta), r*numpy.sin(theta)
169 169
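# Example (sketch, hypothetical coordinates): ll2xy(-5.17, -80.64, -5.17, -80.55) describes a
# point due east of the first one and returns roughly (10.0, 0.0), i.e. (east_km, north_km).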
170 170
171 171 def km2deg(km):
172 172 '''
173 173 Convert distance in km to degrees
174 174 '''
175 175
176 176 return numpy.rad2deg(km/EARTH_RADIUS)
177 177
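# Example: km2deg(60) ~ 0.54 degrees (60 km of arc on a 6371 km sphere).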
178 178
179 179
180 180 class SpectralMomentsPlot(SpectraPlot):
181 181 '''
182 182 Plot for Spectral Moments
183 183 '''
184 184 CODE = 'spc_moments'
185 185 # colormap = 'jet'
186 186 # plot_type = 'pcolor'
187 187
188 188 class DobleGaussianPlot(SpectraPlot):
189 189 '''
190 190 Plot for Double Gaussian Plot
191 191 '''
192 192 CODE = 'gaussian_fit'
193 193 # colormap = 'jet'
194 194 # plot_type = 'pcolor'
195 195
196 196 class DoubleGaussianSpectraCutPlot(SpectraCutPlot):
197 197 '''
198 198 Plot SpectraCut with Double Gaussian Fit
199 199 '''
200 200 CODE = 'cut_gaussian_fit'
201 201
202 202 class SnrPlot(RTIPlot):
203 203 '''
204 204 Plot for SNR Data
205 205 '''
206 206
207 207 CODE = 'snr'
208 208 colormap = 'jet'
209 209
210 210 def update(self, dataOut):
211 211
212 212 data = {
213 213 'snr': 10*numpy.log10(dataOut.data_snr)
214 214 }
215 215
216 216 return data, {}
217 217
218 218 class DopplerPlot(RTIPlot):
219 219 '''
220 220 Plot for DOPPLER Data (1st moment)
221 221 '''
222 222
223 223 CODE = 'dop'
224 224 colormap = 'jet'
225 225
226 226 def update(self, dataOut):
227 227
228 228 data = {
229 229 'dop': 10*numpy.log10(dataOut.data_dop)
230 230 }
231 231
232 232 return data, {}
233 233
234 234 class PowerPlot(RTIPlot):
235 235 '''
236 236 Plot for Power Data (0 moment)
237 237 '''
238 238
239 239 CODE = 'pow'
240 240 colormap = 'jet'
241 241
242 242 def update(self, dataOut):
243 243 data = {
244 244 'pow': 10*numpy.log10(dataOut.data_pow/dataOut.normFactor)
245 245 }
246 246 return data, {}
247 247
248 248 class SpectralWidthPlot(RTIPlot):
249 249 '''
250 250 Plot for Spectral Width Data (2nd moment)
251 251 '''
252 252
253 253 CODE = 'width'
254 254 colormap = 'jet'
255 255
256 256 def update(self, dataOut):
257 257
258 258 data = {
259 259 'width': dataOut.data_width
260 260 }
261 261
262 262 return data, {}
263 263
264 264 class SkyMapPlot(Plot):
265 265 '''
266 266 Plot for meteors detection data
267 267 '''
268 268
269 269 CODE = 'param'
270 270
271 271 def setup(self):
272 272
273 273 self.ncols = 1
274 274 self.nrows = 1
275 275 self.width = 7.2
276 276 self.height = 7.2
277 277 self.nplots = 1
278 278 self.xlabel = 'Zonal Zenith Angle (deg)'
279 279 self.ylabel = 'Meridional Zenith Angle (deg)'
280 280 self.polar = True
281 281 self.ymin = -180
282 282 self.ymax = 180
283 283 self.colorbar = False
284 284
285 285 def plot(self):
286 286
287 287 arrayParameters = numpy.concatenate(self.data['param'])
288 288 error = arrayParameters[:, -1]
289 289 indValid = numpy.where(error == 0)[0]
290 290 finalMeteor = arrayParameters[indValid, :]
291 291 finalAzimuth = finalMeteor[:, 3]
292 292 finalZenith = finalMeteor[:, 4]
293 293
294 294 x = finalAzimuth * numpy.pi / 180
295 295 y = finalZenith
296 296
297 297 ax = self.axes[0]
298 298
299 299 if ax.firsttime:
300 300 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
301 301 else:
302 302 ax.plot.set_data(x, y)
303 303
304 304 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
305 305 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
306 306 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
307 307 dt2,
308 308 len(x))
309 309 self.titles[0] = title
310 310
311 311
312 312 class GenericRTIPlot(Plot):
313 313 '''
314 314 Plot for data_xxxx object
315 315 '''
316 316
317 317 CODE = 'param'
318 318 colormap = 'viridis'
319 319 plot_type = 'pcolorbuffer'
320 320
321 321 def setup(self):
322 322 self.xaxis = 'time'
323 323 self.ncols = 1
324 324 self.nrows = self.data.shape('param')[0]
325 325 self.nplots = self.nrows
326 326 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
327 327
328 328 if not self.xlabel:
329 329 self.xlabel = 'Time'
330 330
331 331 self.ylabel = 'Range [km]'
332 332 if not self.titles:
333 333 self.titles = ['Param {}'.format(x) for x in range(self.nrows)]
334 334
335 335 def update(self, dataOut):
336 336
337 337 data = {
338 338 'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
339 339 }
340 340
341 341 meta = {}
342 342
343 343 return data, meta
344 344
345 345 def plot(self):
346 346 # self.data.normalize_heights()
347 347 self.x = self.data.times
348 348 self.y = self.data.yrange
349 349 self.z = self.data['param']
350 350 self.z = 10*numpy.log10(self.z)
351 351 self.z = numpy.ma.masked_invalid(self.z)
352 352
353 353 if self.decimation is None:
354 354 x, y, z = self.fill_gaps(self.x, self.y, self.z)
355 355 else:
356 356 x, y, z = self.fill_gaps(*self.decimate())
357 357
358 358 for n, ax in enumerate(self.axes):
359 359
360 360 self.zmax = self.zmax if self.zmax is not None else numpy.max(
361 361 self.z[n])
362 362 self.zmin = self.zmin if self.zmin is not None else numpy.min(
363 363 self.z[n])
364 364
365 365 if ax.firsttime:
366 366 if self.zlimits is not None:
367 367 self.zmin, self.zmax = self.zlimits[n]
368 368
369 369 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
370 370 vmin=self.zmin,
371 371 vmax=self.zmax,
372 372 cmap=self.cmaps[n]
373 373 )
374 374 else:
375 375 if self.zlimits is not None:
376 376 self.zmin, self.zmax = self.zlimits[n]
377 377 ax.collections.remove(ax.collections[0])
378 378 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
379 379 vmin=self.zmin,
380 380 vmax=self.zmax,
381 381 cmap=self.cmaps[n]
382 382 )
383 383
384 384
385 385 class PolarMapPlot(Plot):
386 386 '''
387 387 Plot for weather radar
388 388 '''
389 389
390 390 CODE = 'param'
391 391 colormap = 'seismic'
392 392
393 393 def setup(self):
394 394 self.ncols = 1
395 395 self.nrows = 1
396 396 self.width = 9
397 397 self.height = 8
398 398 self.mode = self.data.meta['mode']
399 399 if self.channels is not None:
400 400 self.nplots = len(self.channels)
401 401 self.nrows = len(self.channels)
402 402 else:
403 403 self.nplots = self.data.shape(self.CODE)[0]
404 404 self.nrows = self.nplots
405 405 self.channels = list(range(self.nplots))
406 406 if self.mode == 'E':
407 407 self.xlabel = 'Longitude'
408 408 self.ylabel = 'Latitude'
409 409 else:
410 410 self.xlabel = 'Range (km)'
411 411 self.ylabel = 'Height (km)'
412 412 self.bgcolor = 'white'
413 413 self.cb_labels = self.data.meta['units']
414 414 self.lat = self.data.meta['latitude']
415 415 self.lon = self.data.meta['longitude']
416 416 self.xmin, self.xmax = float(
417 417 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
418 418 self.ymin, self.ymax = float(
419 419 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
420 420 # self.polar = True
421 421
422 422 def plot(self):
423 423
424 424 for n, ax in enumerate(self.axes):
425 425 data = self.data['param'][self.channels[n]]
426 426
427 427 zeniths = numpy.linspace(
428 428 0, self.data.meta['max_range'], data.shape[1])
429 429 if self.mode == 'E':
430 430 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
431 431 r, theta = numpy.meshgrid(zeniths, azimuths)
432 432 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
433 433 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
434 434 x = km2deg(x) + self.lon
435 435 y = km2deg(y) + self.lat
436 436 else:
437 437 azimuths = numpy.radians(self.data.yrange)
438 438 r, theta = numpy.meshgrid(zeniths, azimuths)
439 439 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
440 440 self.y = zeniths
441 441
442 442 if ax.firsttime:
443 443 if self.zlimits is not None:
444 444 self.zmin, self.zmax = self.zlimits[n]
445 445 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
446 446 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
447 447 vmin=self.zmin,
448 448 vmax=self.zmax,
449 449 cmap=self.cmaps[n])
450 450 else:
451 451 if self.zlimits is not None:
452 452 self.zmin, self.zmax = self.zlimits[n]
453 453 ax.collections.remove(ax.collections[0])
454 454 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
455 455 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
456 456 vmin=self.zmin,
457 457 vmax=self.zmax,
458 458 cmap=self.cmaps[n])
459 459
460 460 if self.mode == 'A':
461 461 continue
462 462
463 463 # plot district names
464 464 f = open('/data/workspace/schain_scripts/distrito.csv')
465 465 for line in f:
466 466 label, lon, lat = [s.strip() for s in line.split(',') if s]
467 467 lat = float(lat)
468 468 lon = float(lon)
469 469 # ax.plot(lon, lat, '.b', ms=2)
470 470 ax.text(lon, lat, label.decode('utf8'), ha='center',
471 471 va='bottom', size='8', color='black')
472 472
473 473 # plot limites
474 474 limites = []
475 475 tmp = []
476 476 for line in open('/data/workspace/schain_scripts/lima.csv'):
477 477 if '#' in line:
478 478 if tmp:
479 479 limites.append(tmp)
480 480 tmp = []
481 481 continue
482 482 values = line.strip().split(',')
483 483 tmp.append((float(values[0]), float(values[1])))
484 484 for points in limites:
485 485 ax.add_patch(
486 486 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
487 487
488 488 # plot Cuencas
489 489 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
490 490 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
491 491 values = [line.strip().split(',') for line in f]
492 492 points = [(float(s[0]), float(s[1])) for s in values]
493 493 ax.add_patch(Polygon(points, ec='b', fc='none'))
494 494
495 495 # plot grid
496 496 for r in (15, 30, 45, 60):
497 497 ax.add_artist(plt.Circle((self.lon, self.lat),
498 498 km2deg(r), color='0.6', fill=False, lw=0.2))
499 499 ax.text(
500 500 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
501 501 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
502 502 '{}km'.format(r),
503 503 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
504 504
505 505 if self.mode == 'E':
506 506 title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
507 507 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
508 508 else:
509 509 title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
510 510 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
511 511
512 512 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
513 513 self.titles = ['{} {}'.format(
514 514 self.data.parameters[x], title) for x in self.channels]
515 515
516
517
516 518 class WeatherParamsPlot(Plot):
517
519
518 520 plot_type = 'scattermap'
519 521 buffering = False
520 522
521 523 def setup(self):
522 524
523 525 self.ncols = 1
524 526 self.nrows = 1
525 527 self.nplots= 1
526 528
527 529 if self.channels is not None:
528 530 self.nplots = len(self.channels)
529 531 self.ncols = len(self.channels)
530 532 else:
531 533 self.nplots = self.data.shape(self.CODE)[0]
532 534 self.ncols = self.nplots
533 535 self.channels = list(range(self.nplots))
534 536
535 537 self.colorbar=True
536 538 if len(self.channels)>1:
537 539 self.width = 12
538 540 else:
539 541 self.width =8
540 542 self.height =7
541 543 self.ini =0
542 544 self.len_azi =0
543 545 self.buffer_ini = None
544 546 self.buffer_ele = None
545 547 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.1})
546 548 self.flag =0
547 549 self.indicador= 0
548 550 self.last_data_ele = None
549 551 self.val_mean = None
550 552
551 553 def update(self, dataOut):
552 554
553 555 vars = {
554 556 'S' : 0,
555 557 'V' : 1,
556 558 'W' : 2,
557 559 'SNR' : 3,
558 560 'Z' : 4,
559 561 'D' : 5,
560 562 'P' : 6,
561 563 'R' : 7,
562 564 }
563 565
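        # data_param is indexed as data_param[channel, variable, height]; e.g. when
        # attr_data[0] == 'Z', the reflectivity plane used below is data_param[:, 4, :].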
564 566 data = {}
565 567 meta = {}
566 568
567 if hasattr(dataOut, 'nFFTPoints'):
568 factor = dataOut.normFactor
569 else:
570 factor = 1
569 ##if hasattr(dataOut, 'nFFTPoints'):
570         ## factor = dataOut.normFactor*10.0 # scaling consideration between pulse pair and FFT
571 ##else:
572 ## factor = 1
571 573
572 574 if hasattr(dataOut, 'dparam'):
573 575 tmp = getattr(dataOut, 'data_param')
574 576 else:
575 577 #print("-------------------self.attr_data[0]",self.attr_data[0])
576 578 if 'S' in self.attr_data[0]:
577 579 if self.attr_data[0]=='S':
578 tmp = 10*numpy.log10(10.0*getattr(dataOut, 'data_param')[:,0,:]/(factor))
580                 tmp = 10*numpy.log10(10.0*getattr(dataOut, 'data_param')[:,0,:]) ## /(factor)) factor no longer applied here; it is applied in jroproc_parameters
579 581 if self.attr_data[0]=='SNR':
580 582 tmp = 10*numpy.log10(getattr(dataOut, 'data_param')[:,3,:])
581 583 else:
582 584 tmp = getattr(dataOut, 'data_param')[:,vars[self.attr_data[0]],:]
583 585
584 586 if self.mask:
585 587 mask = dataOut.data_param[:,3,:] < self.mask
586 588 tmp[mask] = numpy.nan
587 589 mask = numpy.nansum((tmp, numpy.roll(tmp, 1),numpy.roll(tmp, -1)), axis=0) == tmp
588 590 tmp[mask] = numpy.nan
591
592 ####################################################################
593         # Parameter data (pulse-pair or spectra) can be saved to a .npy file for debugging
594         ##elapsed_time = time.time() - self.start_time
595         ##filename = f'{dataOut.inputUnit}_{self.attr_data[0]}_{elapsed_time:.0f}.npy' # unique name with timestamp
596
597         # Save the array to a new file
598         ##with open(filename, 'wb') as f:
599         ##    numpy.save(f, tmp)
600
601         ##print("File created:", filename)
602
589 603
604 #####################################################################
590 605 r = dataOut.heightList
591 606 delta_height = r[1]-r[0]
592 607 valid = numpy.where(r>=0)[0]
593 608 data['r'] = numpy.arange(len(valid))*delta_height
594 609
595 610 data['data'] = [0, 0]
596 611
597 612 try:
598 613 data['data'][0] = tmp[0][:,valid]
599 614 data['data'][1] = tmp[1][:,valid]
600 615 except:
601 616 data['data'][0] = tmp[0][:,valid]
602 617 data['data'][1] = tmp[0][:,valid]
603 618
604 619 if dataOut.mode_op == 'PPI':
605 620 self.CODE = 'PPI'
606 621 self.title = self.CODE
607 622 elif dataOut.mode_op == 'RHI':
608 623 self.CODE = 'RHI'
609 624 self.title = self.CODE
610 625
611 626 data['azi'] = dataOut.data_azi
612 627 data['ele'] = dataOut.data_ele
613 628
614 629 if isinstance(dataOut.mode_op, bytes):
615 630 try:
616 631 dataOut.mode_op = dataOut.mode_op.decode()
617 632 except:
618 633 dataOut.mode_op = str(dataOut.mode_op, 'utf-8')
619 634 data['mode_op'] = dataOut.mode_op
620 635 self.mode = dataOut.mode_op
621 636
622 637 return data, meta
623 638
624 639 def plot(self):
625 640 data = self.data[-1]
626 641 z = data['data']
627 642 r = data['r']
628 643 self.titles = []
629 644
630 645 self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
631 646 self.zmin = self.zmin if self.zmin is not None else numpy.nanmin(z)
632 647
633 648 if isinstance(data['mode_op'], bytes):
634 649 data['mode_op'] = data['mode_op'].decode()
635 650
636 651 if data['mode_op'] == 'RHI':
637 652 r, theta = numpy.meshgrid(r, numpy.radians(data['ele']))
638 653 len_aux = int(data['azi'].shape[0]/4)
639 654 mean = numpy.mean(data['azi'][len_aux:-len_aux])
640 655 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
641 656 if self.yrange:
642 657 self.ylabel= 'Height [km]'
643 658 self.xlabel= 'Distance from radar [km]'
644 659 self.ymax = self.yrange
645 660 self.ymin = 0
646 661 self.xmax = self.xrange if self.xrange else numpy.nanmax(r)
647 662 self.xmin = -self.xrange if self.xrange else -numpy.nanmax(r)
648 663 self.setrhilimits = False
649 664 else:
650 665 self.ymin = 0
651 666 self.ymax = numpy.nanmax(r)
652 667 self.xmin = -numpy.nanmax(r)
653 668 self.xmax = numpy.nanmax(r)
654 669
655 670 elif data['mode_op'] == 'PPI':
656 671 r, theta = numpy.meshgrid(r, -numpy.radians(data['azi'])+numpy.pi/2)
657 672 len_aux = int(data['ele'].shape[0]/4)
658 673 mean = numpy.mean(data['ele'][len_aux:-len_aux])
659 674 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(mean)), r*numpy.sin(
660 675 theta)*numpy.cos(numpy.radians(mean))
661 676 x = km2deg(x) + self.longitude
662 677 y = km2deg(y) + self.latitude
663 678 if self.xrange:
664 679 self.ylabel= 'Latitude'
665 680 self.xlabel= 'Longitude'
666 681
667 682 self.xmin = km2deg(-self.xrange) + self.longitude
668 683 self.xmax = km2deg(self.xrange) + self.longitude
669 684
670 685 self.ymin = km2deg(-self.xrange) + self.latitude
671 686 self.ymax = km2deg(self.xrange) + self.latitude
672 687 else:
673 688 self.xmin = km2deg(-numpy.nanmax(r)) + self.longitude
674 689 self.xmax = km2deg(numpy.nanmax(r)) + self.longitude
675 690
676 691 self.ymin = km2deg(-numpy.nanmax(r)) + self.latitude
677 692 self.ymax = km2deg(numpy.nanmax(r)) + self.latitude
678 693
679 694 self.clear_figures()
680 695
681 696 if data['mode_op'] == 'PPI':
682 697 axes = self.axes['PPI']
683 698 else:
684 699 axes = self.axes['RHI']
685 700
686 701 if self.colormap in cb_tables:
687 702 norm = cb_tables[self.colormap]['norm']
688 703 else:
689 704 norm = None
690 705
691 706 for i, ax in enumerate(axes):
692 707
693 708 if norm is None:
694 709 ax.plt = ax.pcolormesh(x, y, z[i], cmap=self.colormap, vmin=self.zmin, vmax=self.zmax)
695 710 else:
696 711 ax.plt = ax.pcolormesh(x, y, z[i], cmap=self.colormap, norm=norm)
697 712
698 713 if data['mode_op'] == 'RHI':
699 714 len_aux = int(data['azi'].shape[0]/4)
700 715 mean = numpy.mean(data['azi'][len_aux:-len_aux])
701 716 if len(self.channels) !=1:
702 717 self.titles = ['RHI {} at AZ: {} CH {}'.format(self.labels[x], str(round(mean,1)), x) for x in self.channels]
703 718 else:
704 719 self.titles = ['RHI {} at AZ: {} CH {}'.format(self.labels[0], str(round(mean,1)), self.channels[0])]
705 720 elif data['mode_op'] == 'PPI':
706 721 len_aux = int(data['ele'].shape[0]/4)
707 722 mean = numpy.mean(data['ele'][len_aux:-len_aux])
708 723 if len(self.channels) !=1:
709 724 self.titles = ['PPI {} at EL: {} CH {}'.format(self.labels[x], str(round(mean,1)), x) for x in self.channels]
710 725 else:
711 726 self.titles = ['PPI {} at EL: {} CH {}'.format(self.labels[0], str(round(mean,1)), self.channels[0])]
712 727 self.mode_value = round(mean,1)
713 728
714 729 if data['mode_op'] == 'PPI':
715 730 if self.map:
716 731 gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
717 732 linewidth=1, color='gray', alpha=0.5, linestyle='--')
718 733 gl.xlabel_style = {'size': 8}
719 734 gl.ylabel_style = {'size': 8}
720 735 gl.xlabels_top = False
721 736 gl.ylabels_right = False
722 737 shape_d = os.path.join(self.shapes,'Distritos/PER_adm3.shp')
723 738 shape_p = os.path.join(self.shapes,'PER_ADM2/PER_ADM2.shp')
724 739 capitales = os.path.join(self.shapes,'CAPITALES/cap_distrito.shp')
725 740 vias = os.path.join(self.shapes,'Carreteras/VIAS_NACIONAL_250000.shp')
726 741 reader_d = shpreader.BasicReader(shape_d, encoding='latin1')
727 742 reader_p = shpreader.BasicReader(shape_p, encoding='latin1')
728 743 reader_c = shpreader.BasicReader(capitales, encoding='latin1')
729 744 reader_v = shpreader.BasicReader(vias, encoding='latin1')
730 745 caps = [x for x in reader_c.records() if x.attributes['DEPARTA']=='PIURA' and x.attributes['CATEGORIA']=='CIUDAD']
731 746 districts = [x for x in reader_d.records() if x.attributes['NAME_1']=='Piura']
732 747 provs = [x for x in reader_p.records()]
733 748 vias = [x for x in reader_v.records()]
734 749
735 750 # Display limits and streets
736 751 shape_feature = ShapelyFeature([x.geometry for x in districts], ccrs.PlateCarree(), facecolor="none", edgecolor='grey', lw=0.5)
737 752 ax.add_feature(shape_feature)
738 753 shape_feature = ShapelyFeature([x.geometry for x in provs], ccrs.PlateCarree(), facecolor="none", edgecolor='white', lw=1)
739 754 ax.add_feature(shape_feature)
740 755 shape_feature = ShapelyFeature([x.geometry for x in vias], ccrs.PlateCarree(), facecolor="none", edgecolor='yellow', lw=1)
741 756 ax.add_feature(shape_feature)
742 757
743 758 for cap in caps:
744 759 if cap.attributes['NOMBRE'] in ('PIURA', 'SULLANA', 'PAITA', 'SECHURA', 'TALARA'):
745 760 ax.text(cap.attributes['X'], cap.attributes['Y'], cap.attributes['NOMBRE'], size=8, color='white', weight='bold')
746 761 elif cap.attributes['NOMBRE'] in ('NEGRITOS', 'SAN LUCAS', 'QUERECOTILLO', 'TAMBO GRANDE', 'CHULUCANAS', 'CATACAOS', 'LA UNION'):
747 762 ax.text(cap.attributes['X'], cap.attributes['Y'], cap.attributes['NOMBRE'].title(), size=7, color='white')
748 763 else:
749 764 ax.grid(color='grey', alpha=0.5, linestyle='--', linewidth=1)
750 765
751 766 if self.xrange<=10:
752 767 ranges = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
753 768 elif self.xrange<=30:
754 769 ranges = [5, 10, 15, 20, 25, 30, 35]
755 770 elif self.xrange<=60:
756 771 ranges = [10, 20, 30, 40, 50, 60]
758 773 else: # also covers xrange > 100; the original 'elif self.xrange<=100' left ranges undefined beyond 100 km
758 773 ranges = [15, 30, 45, 60, 75, 90]
759 774
760 775 for R in ranges:
761 776 if R <= self.xrange:
762 777 circle = Circle((self.longitude, self.latitude), km2deg(R), facecolor='none',
763 778 edgecolor='skyblue', linewidth=1, alpha=0.5)
764 779 ax.add_patch(circle)
765 780 ax.text(km2deg(R)*numpy.cos(numpy.radians(45))+self.longitude,
766 781 km2deg(R)*numpy.sin(numpy.radians(45))+self.latitude,
767 782 '{}km'.format(R), color='skyblue', size=7)
768 783 elif data['mode_op'] == 'RHI':
769 784 ax.grid(color='grey', alpha=0.5, linestyle='--', linewidth=1)
@@ -1,863 +1,865
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 # SUBCHANNELS INSTEAD OF CHANNELS
7 7 # BENCHMARKS -> PROBLEMS WITH LARGE FILES -> INCONSISTENT OVER TIME
8 8 # VERSION UPDATE
9 9 # HEADERS
10 10 # WRITING MODULE
11 11 # METADATA
12 12
13 13 import os
14 14 import time
15 15 import datetime
16 16 import numpy
17 17 import timeit
18 18 from fractions import Fraction
19 19 from time import time
20 20 from time import sleep
21 21
22 22 import schainpy.admin
23 23 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
24 24 from schainpy.model.data.jrodata import Voltage
25 25 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
26 26
27 27 import pickle
28 28 try:
29 29 os.environ["HDF5_USE_FILE_LOCKING"] = "FALSE"
30 30 import digital_rf
31 31 except:
32 32 pass
33 33
34 34
35 35 class DigitalRFReader(ProcessingUnit):
36 36 '''
37 37 classdocs
38 38 '''
39 39
40 40 def __init__(self):
41 41 '''
42 42 Constructor
43 43 '''
44 44
45 45 ProcessingUnit.__init__(self)
46 46
47 47 self.dataOut = Voltage()
48 48 self.__printInfo = True
49 49 self.__flagDiscontinuousBlock = False
50 50 self.__bufferIndex = 9999999
51 51 self.__codeType = 0
52 52 self.__ippKm = None
53 53 self.__nCode = None
54 54 self.__nBaud = None
55 55 self.__code = None
56 56 self.dtype = None
57 57 self.oldAverage = None
58 58 self.path = None
59 59
60 60 def close(self):
61 61 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
62 62 return
63 63
64 64 def __getCurrentSecond(self):
65 65
66 66 return self.__thisUnixSample / self.__sample_rate
67 67
68 68 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
69 69
70 70 def __setFileHeader(self):
71 71 '''
72 72 Every parameter of the dataOut object (header, no data) is initialized in this method
73 73 '''
74 74 ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
75 75 if not self.getByBlock:
76 76 nProfiles = 1.0 / ippSeconds # Number of profiles in one second
77 77 else:
78 78 nProfiles = self.nProfileBlocks # Number of profiles in one block
79 79
80 80 try:
81 81 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
82 82 self.__radarControllerHeader)
83 83 except:
84 84 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
85 85 txA=0,
86 86 txB=0,
87 87 nWindows=1,
88 88 nHeights=self.__nSamples,
89 89 firstHeight=self.__firstHeigth,
90 90 deltaHeight=self.__deltaHeigth,
91 91 codeType=self.__codeType,
92 92 nCode=self.__nCode, nBaud=self.__nBaud,
93 93 code=self.__code)
94 94
95 95 try:
96 96 self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
97 97 except:
98 98 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
99 99 nProfiles=nProfiles,
100 100 nChannels=len(
101 101 self.__channelList),
102 102 adcResolution=14)
103 103 self.dataOut.type = "Voltage"
104 104
105 105 self.dataOut.data = None
106 106
107 107 self.dataOut.dtype = self.dtype
108 108
109 109 # self.dataOut.nChannels = 0
110 110
111 111 # self.dataOut.nHeights = 0
112 112
113 113 self.dataOut.nProfiles = int(nProfiles)
114 114
115 115 self.dataOut.heightList = self.__firstHeigth + \
116 116 numpy.arange(self.__nSamples, dtype=numpy.float_) * \
117 117 self.__deltaHeigth
118 118
119 119 #self.dataOut.channelList = list(range(self.__num_subchannels))
120 120 self.dataOut.channelList = list(range(len(self.__channelList)))
121 121 if not self.getByBlock:
122 122
123 123 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
124 124 else:
125 125 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights*self.nProfileBlocks
126 126
127 127 # self.dataOut.channelIndexList = None
128 128
129 129 self.dataOut.flagNoData = True
130 130 if not self.getByBlock:
131 131 self.dataOut.flagDataAsBlock = False
132 132 else:
133 133 self.dataOut.flagDataAsBlock = True
134 134 # Set to TRUE if the data is discontinuous
135 135 self.dataOut.flagDiscontinuousBlock = False
136 136
137 137 self.dataOut.utctime = None
138 138
139 139 # timezone like jroheader, difference in minutes between UTC and localtime
140 140 self.dataOut.timeZone = self.__timezone / 60
141 141
142 142 self.dataOut.dstFlag = 0
143 143
144 144 self.dataOut.errorCount = 0
145 145
146 146 try:
147 147 self.dataOut.nCohInt = self.fixed_metadata_dict.get(
148 148 'nCohInt', self.nCohInt)
149 149
150 150 # assume the data is already decoded
151 151 self.dataOut.flagDecodeData = self.fixed_metadata_dict.get(
152 152 'flagDecodeData', self.flagDecodeData)
153 153
154 154 # assume the data has not been flipped
155 155 self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']
156 156
157 157 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
158 158
159 159 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
160 160 except:
161 161 pass
162 162
163 163 self.dataOut.ippSeconds = ippSeconds
164 164
165 165 # Time interval between profiles
166 166 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
167 167
168 168 self.dataOut.frequency = self.__frequency
169 169
170 170 self.dataOut.realtime = self.__online
171 171
172 172 def findDatafiles(self, path, startDate=None, endDate=None):
173 173
174 174 if not os.path.isdir(path):
175 175 return []
176 176
177 177 try:
178 178 digitalReadObj = digital_rf.DigitalRFReader(
179 179 path, load_all_metadata=True)
180 180 except:
181 181 digitalReadObj = digital_rf.DigitalRFReader(path)
182 182
183 183 channelNameList = digitalReadObj.get_channels()
184 184
185 185 if not channelNameList:
186 186 return []
187 187
188 188 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
189 189
190 190 sample_rate = metadata_dict['sample_rate'][0]
191 191
192 192 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
193 193
194 194 try:
195 195 timezone = this_metadata_file['timezone'].value
196 196 except:
197 197 timezone = 0
198 198
199 199 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
200 200 channelNameList[0]) / sample_rate - timezone
201 201
202 202 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
203 203 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
204 204
205 205 if not startDate:
206 206 startDate = startDatetime.date()
207 207
208 208 if not endDate:
209 209 endDate = endDatatime.date()
210 210
211 211 dateList = []
212 212
213 213 thisDatetime = startDatetime
214 214
215 215 while(thisDatetime <= endDatatime):
216 216
217 217 thisDate = thisDatetime.date()
218 218
219 219 if thisDate < startDate:
220 220 continue
221 221
222 222 if thisDate > endDate:
223 223 break
224 224
225 225 dateList.append(thisDate)
226 226 thisDatetime += datetime.timedelta(1)
227 227
228 228 return dateList
229 229
230 230 def setup(self, path=None,
231 231 startDate=None,
232 232 endDate=None,
233 233 startTime=datetime.time(0, 0, 0),
234 234 endTime=datetime.time(23, 59, 59),
235 235 channelList=None,
236 236 nSamples=None,
237 237 online=False,
238 238 delay=60,
239 239 buffer_size=1024,
240 240 ippKm=None,
241 241 nCohInt=1,
242 242 nCode=1,
243 243 nBaud=1,
244 244 flagDecodeData=False,
245 245 code=numpy.ones((1, 1), dtype=int),
246 246 getByBlock=0,
247 247 nProfileBlocks=1,
248 248 **kwargs):
249 249 '''
250 250 In this method we should set all initial parameters.
251 251
252 252 Inputs:
253 253 path
254 254 startDate
255 255 endDate
256 256 startTime
257 257 endTime
258 258 set
259 259 expLabel
260 260 ext
261 261 online
262 262 delay
263 263 '''
264 264 self.path = path
265 265 self.nCohInt = nCohInt
266 266 self.flagDecodeData = flagDecodeData
267 267 self.i = 0
268 268
269 269 self.getByBlock = getByBlock
270 270 self.nProfileBlocks = nProfileBlocks
271 271 if online:
272 272 print('Waiting for RF data..')
273 273 sleep(40)
274 274
275 275 if not os.path.isdir(path):
276 276 raise ValueError("[Reading] Directory %s does not exist" % path)
277 277
278 278 #print("path",path)
279 279 try:
280 280 self.digitalReadObj = digital_rf.DigitalRFReader(
281 281 path, load_all_metadata=True)
282 282 except:
283 283 self.digitalReadObj = digital_rf.DigitalRFReader(path)
284 284
285 285 channelNameList = self.digitalReadObj.get_channels()
286 286
287 287 if not channelNameList:
288 288 raise ValueError("[Reading] Directory %s does not have any files" % path)
289 289
290 290 if not channelList:
291 291 channelList = list(range(len(channelNameList)))
292 292
293 293 ########## Reading metadata ######################
294 294
295 295 top_properties = self.digitalReadObj.get_properties(
296 296 channelNameList[channelList[0]])
297 297
298 298 self.__num_subchannels = top_properties['num_subchannels']
299 299 self.__sample_rate = 1.0 * \
300 300 top_properties['sample_rate_numerator'] / \
301 301 top_properties['sample_rate_denominator']
302 302 # self.__samples_per_file = top_properties['samples_per_file'][0]
303 303 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # 0.15 km/us = c/2, so this is the range resolution in km for the given sample rate
304 304
305 305 this_metadata_file = self.digitalReadObj.get_digital_metadata(
306 306 channelNameList[channelList[0]])
307 307 metadata_bounds = this_metadata_file.get_bounds()
308 308 self.fixed_metadata_dict = this_metadata_file.read(
309 309 metadata_bounds[0])[metadata_bounds[0]] # GET FIRST HEADER
310 310
311 311 try:
312 312 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
313 313 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
314 314 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
315 315 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
316 316 except:
317 317 pass
318 318
319 319 self.__frequency = None
320 320
321 321 self.__frequency = self.fixed_metadata_dict.get('frequency', 1)
322 322
323 self.__frequency = 9.345e9 # hard-coded override of the frequency read from metadata (9.345 GHz, X band)
324
323 325 self.__timezone = self.fixed_metadata_dict.get('timezone', 18000)
324 326
325 327 try:
326 328 nSamples = self.fixed_metadata_dict['nSamples']
327 329 except:
328 330 nSamples = None
329 331
330 332 self.__firstHeigth = 0
331 333
332 334 try:
333 335 codeType = self.__radarControllerHeader['codeType']
334 336 except:
335 337 codeType = 0
336 338
337 339 try:
338 340 if codeType:
339 341 nCode = self.__radarControllerHeader['nCode']
340 342 nBaud = self.__radarControllerHeader['nBaud']
341 343 code = self.__radarControllerHeader['code']
342 344 except:
343 345 pass
344 346
345 347 if not ippKm:
346 348 try:
347 349 # seconds to km
348 350 ippKm = self.__radarControllerHeader['ipp']
349 351 except:
350 352 ippKm = None
351 353 ####################################################
352 354 self.__ippKm = ippKm
353 355 startUTCSecond = None
354 356 endUTCSecond = None
355 357
356 358 if startDate:
357 359 startDatetime = datetime.datetime.combine(startDate, startTime)
358 360 startUTCSecond = (
359 361 startDatetime - datetime.datetime(1970, 1, 1)).total_seconds()# + self.__timezone
360 362
361 363 if endDate:
362 364 endDatetime = datetime.datetime.combine(endDate, endTime)
363 365 endUTCSecond = (endDatetime - datetime.datetime(1970,
364 366 1, 1)).total_seconds()# + self.__timezone
365 367 start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])
366 368 if start_index is None or end_index is None:
367 369 print("Error: no data found, start_index: ", start_index, ", end_index: ", end_index)
368 370 #return 0
369 371 if not startUTCSecond:
370 372 startUTCSecond = start_index / self.__sample_rate
371 373 if start_index > startUTCSecond * self.__sample_rate:
372 374 startUTCSecond = start_index / self.__sample_rate
373 375
374 376 if not endUTCSecond:
375 377 endUTCSecond = end_index / self.__sample_rate
376 378
377 379 if end_index < endUTCSecond * self.__sample_rate:
378 380 endUTCSecond = end_index / self.__sample_rate #Check UTC and LT time
379 381
380 382 if not nSamples:
381 383 if not ippKm:
382 384 raise ValueError("[Reading] nSamples or ippKm should be defined")
383 385 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
384 386
385 387 channelBoundList = []
386 388 channelNameListFiltered = []
387 389
388 390 for thisIndexChannel in channelList:
389 391 thisChannelName = channelNameList[thisIndexChannel]
390 392 start_index, end_index = self.digitalReadObj.get_bounds(
391 393 thisChannelName)
392 394 channelBoundList.append((start_index, end_index))
393 395 channelNameListFiltered.append(thisChannelName)
394 396
395 397 self.profileIndex = 0
396 398 self.i = 0
397 399 self.__delay = delay
398 400
399 401 self.__codeType = codeType
400 402 self.__nCode = nCode
401 403 self.__nBaud = nBaud
402 404 self.__code = code
403 405
404 406 self.__datapath = path
405 407 self.__online = online
406 408 self.__channelList = channelList
407 409 self.__channelNameList = channelNameListFiltered
408 410 self.__channelBoundList = channelBoundList
409 411 self.__nSamples = nSamples
410 412 if self.getByBlock:
411 413 nSamples = nSamples*nProfileBlocks
412 414
413 415
414 416 self.__samples_to_read = int(nSamples) # FIXED: NOW 40
415 417 self.__nChannels = len(self.__channelList)
416 418 #print("------------------------------------------")
417 419 #print("self.__samples_to_read",self.__samples_to_read)
418 420 #print("self.__nSamples",self.__nSamples)
419 421 # they are equal, so the buffer_index evaluates to 0
420 422 self.__startUTCSecond = startUTCSecond
421 423 self.__endUTCSecond = endUTCSecond
422 424
423 425 self.__timeInterval = 1.0 * self.__samples_to_read / \
424 426 self.__sample_rate # Time interval
425 427
426 428 if online:
427 429 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
428 430 startUTCSecond = numpy.floor(endUTCSecond)
429 431
430 432 # because in the other method the first thing done is to add samples_to_read
431 433 self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
432 434
433 435 #self.__data_buffer = numpy.zeros(
434 436 # (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
435 437 print("samplestoread",self.__samples_to_read)
436 438 self.__data_buffer = numpy.zeros((int(len(channelList)), self.__samples_to_read), dtype=numpy.complex_)
437 439
438 440
439 441 self.__setFileHeader()
440 442 self.isConfig = True
441 443
442 444 print("[Reading] Digital RF Data was found from %s to %s " % (
443 445 datetime.datetime.utcfromtimestamp(
444 446 self.__startUTCSecond - self.__timezone),
445 447 datetime.datetime.utcfromtimestamp(
446 448 self.__endUTCSecond - self.__timezone)
447 449 ))
448 450
449 451 print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
450 452 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)))
451 453 self.oldAverage = None
452 454 self.count = 0
453 455 self.executionTime = 0
454 456
455 457 def __reload(self):
456 458 # print
457 459 # print "%s not in range [%s, %s]" %(
458 460 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
459 461 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
460 462 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
461 463 # )
462 464 print("[Reading] reloading metadata ...")
463 465
464 466 try:
465 467 self.digitalReadObj.reload(complete_update=True)
466 468 except:
467 469 self.digitalReadObj = digital_rf.DigitalRFReader(self.path)
468 470
469 471 start_index, end_index = self.digitalReadObj.get_bounds(
470 472 self.__channelNameList[self.__channelList[0]])
471 473
472 474 if start_index > self.__startUTCSecond * self.__sample_rate:
473 475 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
474 476
475 477 if end_index > self.__endUTCSecond * self.__sample_rate:
476 478 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
477 479 print()
478 480 print("[Reading] New timerange found [%s, %s] " % (
479 481 datetime.datetime.utcfromtimestamp(
480 482 self.__startUTCSecond - self.__timezone),
481 483 datetime.datetime.utcfromtimestamp(
482 484 self.__endUTCSecond - self.__timezone)
483 485 ))
484 486
485 487 return True
486 488
487 489 return False
488 490
489 491 def timeit(self, toExecute):
490 492 t0 = time() # 'time' is the function imported with 'from time import time', which shadows the 'time' module
491 493 toExecute()
492 494 self.executionTime = time() - t0
493 495 if self.oldAverage is None:
494 496 self.oldAverage = self.executionTime
495 497 self.oldAverage = (self.executionTime + self.count *
496 498 self.oldAverage) / (self.count + 1.0)
497 499 self.count = self.count + 1.0
498 500 return
499 501
500 502 def __readNextBlock(self, seconds=30, volt_scale=1/20000.0):
501 503 '''
502 504 NOTE: WEATHER RADAR APPLICATION
503 505 VALUES OBTAINED WITH THE USRP, volt_scale = 1, direct connection to the Rx channel.
504 506 
505 507 MAXIMUM
506 508 9886 -> 0.980 Vpp
507 509 4939 -> 0.480 Vpp
508 510 14825 -> 1.440 Vpp
509 511 18129 -> 1.940 Vpp
510 512 To convert to the corresponding voltage value, divide by 20000
511 513 to obtain the corresponding amplitude of the IQ input.
512 514 volt_scale = (1/20000.0)
513 515 '''
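# Worked example of the scaling described above (a sketch, using the first
# table entry): an ADC reading of 9886 counts times volt_scale = 1/20000.0
# gives ~0.494 V of IQ amplitude, i.e. roughly the listed 0.980 V peak-to-peak.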
514 516 # Set the next data
515 517 self.__flagDiscontinuousBlock = False
516 518 self.__thisUnixSample += self.__samples_to_read
517 519
518 520 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
519 521 print ("[Reading] There is no more data in the selected time range")
520 522 if self.__online:
521 523 sleep(3)
522 524 self.__reload()
523 525 else:
524 526 return False
525 527
526 528 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
527 529 return False
528 530 self.__thisUnixSample -= self.__samples_to_read
529 531
530 532 indexChannel = 0
531 533
532 534 dataOk = False
533 535
534 536 for thisChannelName in self.__channelNameList: # TODO: MULTIPLE CHANNELS?
535 537 for indexSubchannel in range(self.__num_subchannels):
536 538 try:
537 539 t0 = time()
538 540 #print("thisUNixSample",self.__thisUnixSample)
539 541 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
540 542 self.__samples_to_read,
541 543 thisChannelName, sub_channel=indexSubchannel)
542 544 #print("result--------------",result)
543 545 self.executionTime = time() - t0
544 546 if self.oldAverage is None:
545 547 self.oldAverage = self.executionTime
546 548 self.oldAverage = (
547 549 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
548 550 self.count = self.count + 1.0
549 551
550 552 except IOError as e:
551 553 # read next profile
552 554 self.__flagDiscontinuousBlock = True
553 555 print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
554 556 bot = 0
555 557 while(self.__flagDiscontinuousBlock):
556 558 bot +=1
557 559 self.__thisUnixSample += self.__samples_to_read
558 560 try:
559 561 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample, self.__samples_to_read, thisChannelName, sub_channel=indexSubchannel)
560 562 self.__flagDiscontinuousBlock=False
561 563 print("Searching.. N°: ",bot,"Success",self.__thisUnixSample)
562 564 except:
563 565 print("Searching...N°: ",bot,"Fail", self.__thisUnixSample)
564 566 if self.__flagDiscontinuousBlock==True:
565 567 break
566 568 else:
567 569 print("New data index found...",self.__thisUnixSample)
568 570 #break
569 571
570 572 if result.shape[0] != self.__samples_to_read:
571 573 self.__flagDiscontinuousBlock = True
572 574 print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
573 575 result.shape[0],
574 576 self.__samples_to_read))
575 577 break
576 578
577 579 self.__data_buffer[indexChannel, :] = result * volt_scale
578 580 indexChannel+=1
579 581
580 582 dataOk = True
581 583
582 584 self.__utctime = self.__thisUnixSample / self.__sample_rate
583 585
584 586 if not dataOk:
585 587 return False
586 588
587 589 print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
588 590 self.__samples_to_read,
589 591 self.__timeInterval))
590 592
591 593 self.__bufferIndex = 0
592 594
593 595 return True
594 596
595 597 def __isBufferEmpty(self):
596 598
597 599 return self.__bufferIndex > self.__samples_to_read - self.__nSamples # 40960 - 40
598 600
599 601 def getData(self, seconds=30, nTries=5):
600 602 '''
601 603 This method gets the data from the files and puts it into the dataOut object.
602 604 
603 605 In addition, it increases the buffer counter by one.
604 606 
605 607 Return:
606 608 data : returns one voltage profile (heights * channels) copied from the
607 609 buffer. If there are no more files to read, it returns None.
608 610
609 611 Affected:
610 612 self.dataOut
611 613 self.profileIndex
612 614 self.flagDiscontinuousBlock
613 615 self.flagIsNewBlock
614 616 '''
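# Sketch of the buffer handling below (descriptive comment, not original code):
# in profile mode __samples_to_read == __nSamples, so each call returns the
# slice __data_buffer[:, __bufferIndex:__bufferIndex + __nSamples] (one profile)
# and advances __bufferIndex by __nSamples; in block mode (getByBlock) the whole
# buffer of nProfileBlocks * nSamples samples is reshaped and returned at once.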
615 617 #print("getdata")
616 618 err_counter = 0
617 619 self.dataOut.flagNoData = True
618 620
619 621
620 622 if self.__isBufferEmpty():
621 623 #print("hi")
622 624 self.__flagDiscontinuousBlock = False
623 625
624 626 while True:
625 627 if self.__readNextBlock():
626 628 break
627 629 if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
628 630 raise schainpy.admin.SchainError('Error')
629 631 return
630 632
631 633 if self.__flagDiscontinuousBlock:
632 634 raise schainpy.admin.SchainError('discontinuous block found')
633 635 return
634 636
635 637 if not self.__online:
636 638 raise schainpy.admin.SchainError('Online?')
637 639 return
638 640
639 641 err_counter += 1
640 642 if err_counter > nTries:
641 643 raise schainpy.admin.SchainError('Max retries reached')
642 644 return
643 645
644 646 print('[Reading] waiting %d seconds to read a new block' % seconds)
645 647 sleep(seconds)
646 648
647 649
648 650 if not self.getByBlock:
649 651
650 652 #print("self.__bufferIndex",self.__bufferIndex)# this value is apparently always zero
651 653 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
652 654 self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
653 655 self.dataOut.flagNoData = False
654 656 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
655 657 self.dataOut.profileIndex = self.profileIndex
656 658
657 659 self.__bufferIndex += self.__nSamples
658 660 self.profileIndex += 1
659 661
660 662 if self.profileIndex == self.dataOut.nProfiles:
661 663 self.profileIndex = 0
662 664 else:
663 665 # NOTE: readNextBlock and __isBufferEmpty( should be added here
664 666 self.dataOut.flagNoData = False
665 667 buffer = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex + self.__samples_to_read]
666 668 buffer = buffer.reshape((self.__nChannels, self.nProfileBlocks, int(self.__samples_to_read/self.nProfileBlocks)))
667 669 self.dataOut.nProfileBlocks = self.nProfileBlocks
668 670 self.dataOut.data = buffer
669 671 self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
670 672 self.profileIndex += self.__samples_to_read
671 673 self.__bufferIndex += self.__samples_to_read
672 674 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
673 675 return True
674 676
675 677
676 678 def printInfo(self):
677 679 '''
678 680 '''
679 681 if self.__printInfo == False:
680 682 return
681 683
682 684 # self.systemHeaderObj.printInfo()
683 685 # self.radarControllerHeaderObj.printInfo()
684 686
685 687 self.__printInfo = False
686 688
687 689 def printNumberOfBlock(self):
688 690 '''
689 691 '''
690 692 return
691 693 # print self.profileIndex
692 694
693 695 def run(self, **kwargs):
694 696 '''
695 697 This method will be called many times so here you should put all your code
696 698 '''
697 699
698 700 if not self.isConfig:
699 701 self.setup(**kwargs)
700 702
701 703 self.getData(seconds=self.__delay)
702 704
703 705 return
704 706
705 707 @MPDecorator
706 708 class DigitalRFWriter(Operation):
707 709 '''
708 710 classdocs
709 711 '''
710 712
711 713 def __init__(self, **kwargs):
712 714 '''
713 715 Constructor
714 716 '''
715 717 Operation.__init__(self, **kwargs)
716 718 self.metadata_dict = {}
717 719 self.dataOut = None
718 720 self.dtype = None
719 721 self.oldAverage = 0
720 722
721 723 def setHeader(self):
722 724
723 725 self.metadata_dict['frequency'] = self.dataOut.frequency
724 726 self.metadata_dict['timezone'] = self.dataOut.timeZone
725 727 self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
726 728 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
727 729 self.metadata_dict['heightList'] = self.dataOut.heightList
728 730 self.metadata_dict['channelList'] = self.dataOut.channelList
729 731 self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
730 732 self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
731 733 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
732 734 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
733 735 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
734 736 self.metadata_dict['type'] = self.dataOut.type
735 737 self.metadata_dict['flagDataAsBlock']= getattr(
736 738 self.dataOut, 'flagDataAsBlock', None) # chequear
737 739
738 740 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
739 741 '''
740 742 In this method we should set all initial parameters.
741 743 Input:
742 744 dataOut: input data, which will also be the output data
743 745 '''
744 746 self.setHeader()
745 747 self.__ippSeconds = dataOut.ippSeconds
746 748 self.__deltaH = dataOut.getDeltaH()
747 749 self.__sample_rate = 1e6 * 0.15 / self.__deltaH
748 750 self.__dtype = dataOut.dtype
749 751 if len(dataOut.dtype) == 2:
750 752 self.__dtype = dataOut.dtype[0]
751 753 self.__nSamples = dataOut.systemHeaderObj.nSamples
752 754 self.__nProfiles = dataOut.nProfiles
753 755
754 756 if self.dataOut.type != 'Voltage':
755 757 raise ValueError('Digital RF cannot be used with this data type')
756 758 self.arr_data = numpy.ones((1, dataOut.nFFTPoints * len(
757 759 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
758 760 else:
759 761 self.arr_data = numpy.ones((self.__nSamples, len(
760 762 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
761 763
762 764 file_cadence_millisecs = 1000
763 765
764 766 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
765 767 sample_rate_numerator = int(sample_rate_fraction.numerator)
766 768 sample_rate_denominator = int(sample_rate_fraction.denominator)
767 769 start_global_index = dataOut.utctime * self.__sample_rate
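# Descriptive note (not original code): digital_rf expects the sample rate as an
# exact rational number, hence the Fraction above, and start_global_index is the
# starting position expressed in samples since the Unix epoch (utctime * sample_rate).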
768 770
769 771 uuid = 'prueba'
770 772 compression_level = 0
771 773 checksum = False
772 774 is_complex = True
773 775 num_subchannels = len(dataOut.channelList)
774 776 is_continuous = True
775 777 marching_periods = False
776 778
777 779 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
778 780 fileCadence, start_global_index,
779 781 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
780 782 is_complex, num_subchannels, is_continuous, marching_periods)
781 783 metadata_dir = os.path.join(path, 'metadata')
782 784 os.makedirs(metadata_dir, exist_ok=True)
783 785 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, # 236, file_cadence_millisecs / 1000
784 786 sample_rate_numerator, sample_rate_denominator,
785 787 metadataFile)
786 788 self.isConfig = True
787 789 self.currentSample = 0
788 790 self.oldAverage = 0
789 791 self.count = 0
790 792 return
791 793
792 794 def writeMetadata(self):
793 795 start_idx = self.__sample_rate * self.dataOut.utctime
794 796
795 797 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
796 798 )
797 799 self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
798 800 )
799 801 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
800 802 )
801 803 self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
802 804 return
803 805
804 806 def timeit(self, toExecute):
805 807 t0 = time()
806 808 toExecute()
807 809 self.executionTime = time() - t0
808 810 if self.oldAverage is None:
809 811 self.oldAverage = self.executionTime
810 812 self.oldAverage = (self.executionTime + self.count *
811 813 self.oldAverage) / (self.count + 1.0)
812 814 self.count = self.count + 1.0
813 815 return
814 816
815 817 def writeData(self):
816 818 if self.dataOut.type != 'Voltage':
817 819 raise ValueError('Digital RF cannot be used with this data type')
818 820 for channel in self.dataOut.channelList:
819 821 for i in range(self.dataOut.nFFTPoints):
820 822 self.arr_data[1][channel * self.dataOut.nFFTPoints +
821 823 i]['r'] = self.dataOut.data[channel][i].real
822 824 self.arr_data[1][channel * self.dataOut.nFFTPoints +
823 825 i]['i'] = self.dataOut.data[channel][i].imag
824 826 else:
825 827 for i in range(self.dataOut.systemHeaderObj.nSamples):
826 828 for channel in self.dataOut.channelList:
827 829 self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
828 830 self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag
829 831
830 832 def f(): return self.digitalWriteObj.rf_write(self.arr_data)
831 833 self.timeit(f)
832 834
833 835 return
834 836
835 837 def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=1000, dirCadence=36000, metadataCadence=1, **kwargs):
836 838 '''
837 839 This method will be called many times so here you should put all your code
838 840 Inputs:
839 841 dataOut: object with the data
840 842 '''
841 843 # print dataOut.__dict__
842 844 self.dataOut = dataOut
843 845 if not self.isConfig:
844 846 self.setup(dataOut, path, frequency, fileCadence,
845 847 dirCadence, metadataCadence, **kwargs)
846 848 self.writeMetadata()
847 849
848 850 self.writeData()
849 851
850 852 ## self.currentSample += 1
851 853 # if self.dataOut.flagDataAsBlock or self.currentSample == 1:
852 854 # self.writeMetadata()
853 855 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
854 856
855 857 return dataOut # this return does not appear in version 2.7
856 858
857 859 def close(self):
858 860 print('[Writing] - Closing files ')
859 861 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
860 862 try:
861 863 self.digitalWriteObj.close()
862 864 except:
863 865 pass
@@ -1,4500 +1,4507
1 1
2 2 import os
3 3 import time
4 4 import math
5 5
6 6 import re
7 7 import datetime
8 8 import copy
9 9 import sys
10 10 import importlib
11 11 import itertools
12 12
13 13 from multiprocessing import Pool, TimeoutError
14 14 from multiprocessing.pool import ThreadPool
15 15 import numpy
16 16 import glob
17 17 import scipy
18 18 import h5py
19 19 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state parameters
20 20 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
21 21 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
22 22 from numpy import asarray as ar, exp # scipy.asarray/scipy.exp were removed from recent SciPy; numpy provides the same functions
23 23 from scipy.optimize import curve_fit
24 24 from schainpy.utils import log
25 25 import schainpy.admin
26 26 import warnings
27 27 from scipy import optimize, interpolate, signal, stats, ndimage
28 28 from scipy.optimize.optimize import OptimizeWarning
29 29 warnings.filterwarnings('ignore')
30 30
31
32 31 SPEED_OF_LIGHT = 299792458
33 32
34 33 '''solving pickling issue'''
35 34
36 35 def _pickle_method(method):
37 36 func_name = method.__func__.__name__
38 37 obj = method.__self__
39 38 cls = method.__self__.__class__
40 39 return _unpickle_method, (func_name, obj, cls)
41 40
42 41 def _unpickle_method(func_name, obj, cls):
43 42 for cls in cls.mro():
44 43 try:
45 44 func = cls.__dict__[func_name]
46 45 except KeyError:
47 46 pass
48 47 else:
49 48 break
50 49 return func.__get__(obj, cls)
51 50
52 51 def isNumber(str):
53 52 try:
54 53 float(str)
55 54 return True
56 55 except:
57 56 return False
58 57
59 58 class ParametersProc(ProcessingUnit):
60 59
61 60 METHODS = {}
62 61 nSeconds = None
63 62
64 63 def __init__(self):
65 64 ProcessingUnit.__init__(self)
66 65
67 66 # self.objectDict = {}
68 67 self.buffer = None
69 68 self.firstdatatime = None
70 69 self.profIndex = 0
71 70 self.dataOut = Parameters()
73 72 self.setupReq = False # Add this to all processing units
73 72
74 73 def __updateObjFromInput(self):
75 74
76 75 self.dataOut.inputUnit = self.dataIn.type
77 76
78 77 self.dataOut.timeZone = self.dataIn.timeZone
79 78 self.dataOut.dstFlag = self.dataIn.dstFlag
80 79 self.dataOut.errorCount = self.dataIn.errorCount
81 80 self.dataOut.useLocalTime = self.dataIn.useLocalTime
82 81
83 82 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
84 83 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
85 84 self.dataOut.channelList = self.dataIn.channelList
86 85 self.dataOut.heightList = self.dataIn.heightList
87 86 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
88 87 # self.dataOut.nHeights = self.dataIn.nHeights
89 88 # self.dataOut.nChannels = self.dataIn.nChannels
90 89 # self.dataOut.nBaud = self.dataIn.nBaud
91 90 # self.dataOut.nCode = self.dataIn.nCode
92 91 # self.dataOut.code = self.dataIn.code
93 92 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
94 93 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
95 94 # self.dataOut.utctime = self.firstdatatime
96 95 self.dataOut.utctime = self.dataIn.utctime
97 96 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assume the data is already decoded
98 97 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assume the data has not been flipped
99 98 self.dataOut.nCohInt = self.dataIn.nCohInt
100 99 # self.dataOut.nIncohInt = 1
101 100 # self.dataOut.ippSeconds = self.dataIn.ippSeconds
102 101 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
103 102 self.dataOut.timeInterval1 = self.dataIn.timeInterval
104 103 self.dataOut.heightList = self.dataIn.heightList
105 104 self.dataOut.frequency = self.dataIn.frequency
106 105 # self.dataOut.noise = self.dataIn.noise
107 106 self.dataOut.runNextUnit = self.dataIn.runNextUnit
108 107 self.dataOut.h0 = self.dataIn.h0
109 108
110 109 def run(self, runNextUnit = 0):
111 110
112 111 self.dataIn.runNextUnit = runNextUnit
113 112 #print("HOLA MUNDO SOY YO")
114 113 #---------------------- Voltage Data ---------------------------
115 114
116 115 if self.dataIn.type == "Voltage":
117 116
118 117 self.__updateObjFromInput()
119 118 self.dataOut.data_pre = self.dataIn.data.copy()
120 119 self.dataOut.flagNoData = False
121 120 self.dataOut.utctimeInit = self.dataIn.utctime
122 121 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
123 122
124 123 if hasattr(self.dataIn, 'flagDataAsBlock'):
125 124 self.dataOut.flagDataAsBlock = self.dataIn.flagDataAsBlock
126 125
127 126 if hasattr(self.dataIn, 'profileIndex'):
128 127 self.dataOut.profileIndex = self.dataIn.profileIndex
129 128
130 129 if hasattr(self.dataIn, 'dataPP_POW'):
131 130 self.dataOut.dataPP_POW = self.dataIn.dataPP_POW
132 131
133 132 if hasattr(self.dataIn, 'dataPP_POWER'):
134 133 self.dataOut.dataPP_POWER = self.dataIn.dataPP_POWER
135 134
136 135 if hasattr(self.dataIn, 'dataPP_DOP'):
137 136 self.dataOut.dataPP_DOP = self.dataIn.dataPP_DOP
138 137
139 138 if hasattr(self.dataIn, 'dataPP_SNR'):
140 139 self.dataOut.dataPP_SNR = self.dataIn.dataPP_SNR
141 140
142 141 if hasattr(self.dataIn, 'dataPP_WIDTH'):
143 142 self.dataOut.dataPP_WIDTH = self.dataIn.dataPP_WIDTH
144 143
145 144 if hasattr(self.dataIn, 'dataPP_CCF'):
146 145 self.dataOut.dataPP_CCF = self.dataIn.dataPP_CCF
147 146
148 147 if hasattr(self.dataIn, 'dataPP_NOISE'):
149 148 self.dataOut.dataPP_NOISE = self.dataIn.dataPP_NOISE
150 149
151 150 if hasattr(self.dataIn, 'flagAskMode'):
152 151 self.dataOut.flagAskMode = self.dataIn.flagAskMode
153 152
154 153 return
155 154
156 155 #---------------------- Spectra Data ---------------------------
157 156
158 157 if self.dataIn.type == "Spectra":
159 158 #print("que paso en spectra")
160 159 self.dataOut.data_pre = [self.dataIn.data_spc, self.dataIn.data_cspc]
161 160 self.dataOut.data_spc = self.dataIn.data_spc
162 161 self.dataOut.data_cspc = self.dataIn.data_cspc
163 162 self.dataOut.nProfiles = self.dataIn.nProfiles
164 163 self.dataOut.nIncohInt = self.dataIn.nIncohInt
165 164 self.dataOut.nFFTPoints = self.dataIn.nFFTPoints
166 165 self.dataOut.ippFactor = self.dataIn.ippFactor
167 166 self.dataOut.abscissaList = self.dataIn.getVelRange(1)
168 167 self.dataOut.spc_noise = self.dataIn.getNoise()
169 168 self.dataOut.spc_range = (self.dataIn.getFreqRange(1) , self.dataIn.getAcfRange(1) , self.dataIn.getVelRange(1))
170 169 # self.dataOut.normFactor = self.dataIn.normFactor
171 170 self.dataOut.pairsList = self.dataIn.pairsList
172 171 self.dataOut.groupList = self.dataIn.pairsList
173 172 self.dataOut.flagNoData = False
174 173
175 174 if hasattr(self.dataIn, 'flagDataAsBlock'):
176 175 self.dataOut.flagDataAsBlock = self.dataIn.flagDataAsBlock
177 176
178 177 if hasattr(self.dataIn, 'ChanDist'): #Distances of receiver channels
179 178 self.dataOut.ChanDist = self.dataIn.ChanDist
180 179 else: self.dataOut.ChanDist = None
181 180
182 181 #if hasattr(self.dataIn, 'VelRange'): #Velocities range
183 182 # self.dataOut.VelRange = self.dataIn.VelRange
184 183 #else: self.dataOut.VelRange = None
185 184
186 185 if hasattr(self.dataIn, 'RadarConst'): #Radar Constant
187 186 self.dataOut.RadarConst = self.dataIn.RadarConst
188 187
189 188 if hasattr(self.dataIn, 'NPW'): #NPW
190 189 self.dataOut.NPW = self.dataIn.NPW
191 190
192 191 if hasattr(self.dataIn, 'COFA'): #COFA
193 192 self.dataOut.COFA = self.dataIn.COFA
194 193
195 194
196 195
197 196 #---------------------- Correlation Data ---------------------------
198 197
199 198 if self.dataIn.type == "Correlation":
200 199 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.dataIn.splitFunctions()
201 200
202 201 self.dataOut.data_pre = (self.dataIn.data_cf[acf_ind,:], self.dataIn.data_cf[ccf_ind,:,:])
203 202 self.dataOut.normFactor = (self.dataIn.normFactor[acf_ind,:], self.dataIn.normFactor[ccf_ind,:])
204 203 self.dataOut.groupList = (acf_pairs, ccf_pairs)
205 204
206 205 self.dataOut.abscissaList = self.dataIn.lagRange
207 206 self.dataOut.noise = self.dataIn.noise
208 207 self.dataOut.data_snr = self.dataIn.SNR
209 208 self.dataOut.flagNoData = False
210 209 self.dataOut.nAvg = self.dataIn.nAvg
211 210
212 211 #---------------------- Parameters Data ---------------------------
213 212
214 213 if self.dataIn.type == "Parameters":
215 214 self.dataOut.copy(self.dataIn)
216 215 self.dataOut.flagNoData = False
217 216 #print("yo si entre")
218 217
219 218 return True
220 219
221 220 self.__updateObjFromInput()
222 221 #print("yo si entre2")
223 222
224 223 self.dataOut.utctimeInit = self.dataIn.utctime
225 224 self.dataOut.paramInterval = self.dataIn.timeInterval
226 225 #print("soy spectra ",self.dataOut.utctimeInit)
227 226 return
228 227
229 228
230 229 def target(tups):
231 230
232 231 obj, args = tups
233 232
234 233 return obj.FitGau(args)
235 234
236 235 class RemoveWideGC(Operation):
237 236 ''' This class removes the wide clutter and replaces it with simple interpolated points.
238 237 This mainly applies to CLAIRE radar
239 238
240 239 ClutterWidth : Width to look for the clutter peak
241 240
242 241 Input:
243 242
244 243 self.dataOut.data_pre : SPC and CSPC
245 244 self.dataOut.spc_range : To select wind and rainfall velocities
246 245
247 246 Affected:
248 247
249 248 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
250 249
251 250 Written by D. Scipión 25.02.2021
252 251 '''
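# Minimal usage sketch (a hedged example, not part of the original file), assuming
# the usual schainpy controller API and a Parameters processing unit named 'proc'
# already attached in the project:
#
#     op = proc.addOperation(name='RemoveWideGC', optype='other')
#     op.addParameter(name='ClutterWidth', value='3.0', format='float')
#
# ClutterWidth only controls how far around zero velocity the clutter peak is
# searched for; the replacement itself is the interpolation done in run() below.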
253 252 def __init__(self):
254 253 Operation.__init__(self)
255 254 self.i = 0
256 255 self.ich = 0
257 256 self.ir = 0
258 257
259 258 def run(self, dataOut, ClutterWidth=2.5):
260 259 # print ('Entering RemoveWideGC ... ')
261 260
262 261 self.spc = dataOut.data_pre[0].copy()
263 262 self.spc_out = dataOut.data_pre[0].copy()
264 263 self.Num_Chn = self.spc.shape[0]
265 264 self.Num_Hei = self.spc.shape[2]
266 265 VelRange = dataOut.spc_range[2][:-1]
267 266 dv = VelRange[1]-VelRange[0]
268 267
269 268 # Find the velocities that corresponds to zero
270 269 gc_values = numpy.squeeze(numpy.where(numpy.abs(VelRange) <= ClutterWidth))
271 270
272 271 # Removing novalid data from the spectra
273 272 for ich in range(self.Num_Chn) :
274 273 for ir in range(self.Num_Hei) :
275 274 # Estimate the noise at each range
276 275 HSn = hildebrand_sekhon(self.spc[ich,:,ir],dataOut.nIncohInt)
277 276
278 277 # Removing the noise floor at each range
279 278 novalid = numpy.where(self.spc[ich,:,ir] < HSn)
280 279 self.spc[ich,novalid,ir] = HSn
281 280
282 281 junk = numpy.append(numpy.insert(numpy.squeeze(self.spc[ich,gc_values,ir]),0,HSn),HSn)
283 282 j1index = numpy.squeeze(numpy.where(numpy.diff(junk)>0))
284 283 j2index = numpy.squeeze(numpy.where(numpy.diff(junk)<0))
285 284 if ((numpy.size(j1index)<=1) | (numpy.size(j2index)<=1)) :
286 285 continue
287 286 junk3 = numpy.squeeze(numpy.diff(j1index))
288 287 junk4 = numpy.squeeze(numpy.diff(j2index))
289 288
290 289 valleyindex = j2index[numpy.where(junk4>1)]
291 290 peakindex = j1index[numpy.where(junk3>1)]
292 291
293 292 isvalid = numpy.squeeze(numpy.where(numpy.abs(VelRange[gc_values[peakindex]]) <= 2.5*dv))
294 293 if numpy.size(isvalid) == 0 :
295 294 continue
296 295 if numpy.size(isvalid) >1 :
297 296 vindex = numpy.argmax(self.spc[ich,gc_values[peakindex[isvalid]],ir])
298 297 isvalid = isvalid[vindex]
299 298
300 299 # clutter peak
301 300 gcpeak = peakindex[isvalid]
302 301 vl = numpy.where(valleyindex < gcpeak)
303 302 if numpy.size(vl) == 0:
304 303 continue
305 304 gcvl = valleyindex[vl[0][-1]]
306 305 vr = numpy.where(valleyindex > gcpeak)
307 306 if numpy.size(vr) == 0:
308 307 continue
309 308 gcvr = valleyindex[vr[0][0]]
310 309
311 310 # Removing the clutter
312 311 interpindex = numpy.array([gc_values[gcvl], gc_values[gcvr]])
313 312 gcindex = gc_values[gcvl+1:gcvr-1]
314 313 self.spc_out[ich,gcindex,ir] = numpy.interp(VelRange[gcindex],VelRange[interpindex],self.spc[ich,interpindex,ir])
315 314
316 315 dataOut.data_pre[0] = self.spc_out
317 316 #print ('Leaving RemoveWideGC ... ')
318 317 return dataOut
319 318
320 319 class SpectralFilters(Operation):
321 320 ''' This class replaces the novalid values with noise for each channel.
322 321 This applies to CLAIRE RADAR
323 322
324 323 PositiveLimit : RightLimit of novalid data
325 324 NegativeLimit : LeftLimit of novalid data
326 325
327 326 Input:
328 327
329 328 self.dataOut.data_pre : SPC and CSPC
330 329 self.dataOut.spc_range : To select wind and rainfall velocities
331 330
332 331 Affected:
333 332
334 333 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
335 334
336 335 Written by D. Scipión 29.01.2021
337 336 '''
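# Minimal usage sketch (a hedged example, not part of the original file), assuming
# the same controller API as above; the velocity limits are illustrative values:
#
#     op = proc.addOperation(name='SpectralFilters', optype='other')
#     op.addParameter(name='NegativeLimit', value='-1.5', format='float')
#     op.addParameter(name='PositiveLimit', value='1.5', format='float')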
338 337 def __init__(self):
339 338 Operation.__init__(self)
340 339 self.i = 0
341 340
342 341 def run(self, dataOut, NegativeLimit, PositiveLimit):
343 342
344 343 self.spc = dataOut.data_pre[0].copy()
345 344 self.Num_Chn = self.spc.shape[0]
346 345 VelRange = dataOut.spc_range[2]
347 346
348 347 # novalid corresponds to data within the Negative and PositiveLimit
349 348 # (fix: the original code used novalid below without ever defining it)
350 349 novalid = numpy.where((VelRange >= NegativeLimit) & (VelRange <= PositiveLimit))[0]
351 350 # Removing novalid data from the spectra
352 351 for i in range(self.Num_Chn):
353 352 self.spc[i,novalid,:] = dataOut.noise[i]
354 353 dataOut.data_pre[0] = self.spc
355 354 return dataOut
356 355
357 356 class GaussianFit(Operation):
358 357
359 358 '''
360 359 Function that fits one and two generalized Gaussians (gg) based
361 360 on the PSD shape across a "power band" identified from the cumsum of
362 361 the measured spectrum minus noise.
363 362
364 363 Input:
365 364 self.dataOut.data_pre : SelfSpectra
366 365
367 366 Output:
368 367 self.dataOut.SPCparam : SPC_ch1, SPC_ch2
369 368
370 369 '''
371 370 def __init__(self):
372 371 Operation.__init__(self)
373 372 self.i=0
374 373
375 374
376 375 # def run(self, dataOut, num_intg=7, pnoise=1., SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the ftt points
377 376 def run(self, dataOut, SNRdBlimit=-9, method='generalized'):
378 377 """This routine will find a couple of generalized Gaussians to a power spectrum
379 378 methods: generalized, squared
380 379 input: spc
381 380 output:
382 381 noise, amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1
383 382 """
384 383 print ('Entering ',method,' double Gaussian fit')
385 384 self.spc = dataOut.data_pre[0].copy()
386 385 self.Num_Hei = self.spc.shape[2]
387 386 self.Num_Bin = self.spc.shape[1]
388 387 self.Num_Chn = self.spc.shape[0]
389 388
390 389 start_time = time.time()
391 390
392 391 pool = Pool(processes=self.Num_Chn)
393 392 args = [(dataOut.spc_range[2], ich, dataOut.spc_noise[ich], dataOut.nIncohInt, SNRdBlimit) for ich in range(self.Num_Chn)]
394 393 objs = [self for __ in range(self.Num_Chn)]
395 394 attrs = list(zip(objs, args))
396 395 DGauFitParam = pool.map(target, attrs)
397 396 # Parameters:
398 397 # 0. Noise, 1. Amplitude, 2. Shift, 3. Width 4. Power
399 398 dataOut.DGauFitParams = numpy.asarray(DGauFitParam)
400 399
401 400 # Double Gaussian Curves
402 401 gau0 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
403 402 gau0[:] = numpy.NaN
404 403 gau1 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
405 404 gau1[:] = numpy.NaN
406 405 x_mtr = numpy.transpose(numpy.tile(dataOut.getVelRange(1)[:-1], (self.Num_Hei,1)))
407 406 for iCh in range(self.Num_Chn):
408 407 N0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,0]] * self.Num_Bin))
409 408 N1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,1]] * self.Num_Bin))
410 409 A0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,0]] * self.Num_Bin))
411 410 A1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,1]] * self.Num_Bin))
412 411 v0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,0]] * self.Num_Bin))
413 412 v1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,1]] * self.Num_Bin))
414 413 s0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,0]] * self.Num_Bin))
415 414 s1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,1]] * self.Num_Bin))
416 415 if method == 'generalized':
417 416 p0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,0]] * self.Num_Bin))
418 417 p1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,1]] * self.Num_Bin))
419 418 elif method == 'squared':
420 419 p0 = 2.
421 420 p1 = 2.
422 421 gau0[iCh] = A0*numpy.exp(-0.5*numpy.abs((x_mtr-v0)/s0)**p0)+N0
423 422 gau1[iCh] = A1*numpy.exp(-0.5*numpy.abs((x_mtr-v1)/s1)**p1)+N1
424 423 dataOut.GaussFit0 = gau0
425 424 dataOut.GaussFit1 = gau1
426 425
427 426 print('Leaving ',method ,' double Gaussian fit')
428 427 return dataOut
429 428
430 429 def FitGau(self, X):
431 430 # print('Entering FitGau')
432 431 # Assigning the variables
433 432 Vrange, ch, wnoise, num_intg, SNRlimit = X
434 433 # Noise Limits
435 434 noisebl = wnoise * 0.9
436 435 noisebh = wnoise * 1.1
437 436 # Radar Velocity
438 437 Va = max(Vrange)
439 438 deltav = Vrange[1] - Vrange[0]
440 439 x = numpy.arange(self.Num_Bin)
441 440
442 441 # print ('stop 0')
443 442
444 443 # 5 parameters, 2 Gaussians
445 444 DGauFitParam = numpy.zeros([5, self.Num_Hei,2])
446 445 DGauFitParam[:] = numpy.NaN
447 446
448 447 # SPCparam = []
449 448 # SPC_ch1 = numpy.zeros([self.Num_Bin,self.Num_Hei])
450 449 # SPC_ch2 = numpy.zeros([self.Num_Bin,self.Num_Hei])
451 450 # SPC_ch1[:] = 0 #numpy.NaN
452 451 # SPC_ch2[:] = 0 #numpy.NaN
453 452 # print ('stop 1')
454 453 for ht in range(self.Num_Hei):
455 454 # print (ht)
456 455 # print ('stop 2')
457 456 # Spectra at each range
458 457 spc = numpy.asarray(self.spc)[ch,:,ht]
459 458 snr = ( spc.mean() - wnoise ) / wnoise
460 459 snrdB = 10.*numpy.log10(snr)
461 460
462 461 #print ('stop 3')
463 462 if snrdB < SNRlimit :
464 463 # snr = numpy.NaN
465 464 # SPC_ch1[:,ht] = 0#numpy.NaN
466 465 # SPC_ch1[:,ht] = 0#numpy.NaN
467 466 # SPCparam = (SPC_ch1,SPC_ch2)
468 467 # print ('SNR less than SNRth')
469 468 continue
470 469 # wnoise = hildebrand_sekhon(spc,num_intg)
471 470 # print ('stop 2.01')
472 471 #############################################
473 472 # normalizing spc and noise
474 473 # This part differs from gg1
475 474 # spc_norm_max = max(spc) #commented by D. Scipión 19.03.2021
476 475 #spc = spc / spc_norm_max
477 476 # pnoise = pnoise #/ spc_norm_max #commented by D. Scipión 19.03.2021
478 477 #############################################
479 478
480 479 # print ('stop 2.1')
481 480 fatspectra=1.0
482 481 # noise per channel.... we might want to use the noise at each range
483 482
484 483 # wnoise = noise_ #/ spc_norm_max #commented by D. Scipión 19.03.2021
485 484 #wnoise,stdv,i_max,index =enoise(spc,num_intg) #noise estimate using Hildebrand Sekhon, only wnoise is used
486 485 #if wnoise>1.1*pnoise: # to be tested later
487 486 # wnoise=pnoise
488 487 # noisebl = wnoise*0.9
489 488 # noisebh = wnoise*1.1
490 489 spc = spc - wnoise # signal
491 490
492 491 # print ('stop 2.2')
493 492 minx = numpy.argmin(spc)
494 493 #spcs=spc.copy()
495 494 spcs = numpy.roll(spc,-minx)
496 495 cum = numpy.cumsum(spcs)
497 496 # tot_noise = wnoise * self.Num_Bin #64;
498 497
499 498 # print ('stop 2.3')
500 499 # snr = sum(spcs) / tot_noise
501 500 # snrdB = 10.*numpy.log10(snr)
502 501 #print ('stop 3')
503 502 # if snrdB < SNRlimit :
504 503 # snr = numpy.NaN
505 504 # SPC_ch1[:,ht] = 0#numpy.NaN
506 505 # SPC_ch1[:,ht] = 0#numpy.NaN
507 506 # SPCparam = (SPC_ch1,SPC_ch2)
508 507 # print ('SNR less than SNRth')
509 508 # continue
510 509
511 510
512 511 #if snrdB<-18 or numpy.isnan(snrdB) or num_intg<4:
513 512 # return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
514 513 # print ('stop 4')
515 514 cummax = max(cum)
516 515 epsi = 0.08 * fatspectra # cumsum to narrow down the energy region
517 516 cumlo = cummax * epsi
518 517 cumhi = cummax * (1-epsi)
519 518 powerindex = numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cum<cumhi))[0])
520 519
521 520 # print ('stop 5')
522 521 if len(powerindex) < 1:# case for powerindex 0
523 522 # print ('powerindex < 1')
524 523 continue
525 524 powerlo = powerindex[0]
526 525 powerhi = powerindex[-1]
527 526 powerwidth = powerhi-powerlo
528 527 if powerwidth <= 1:
529 528 # print('powerwidth <= 1')
530 529 continue
531 530
532 531 # print ('stop 6')
533 532 firstpeak = powerlo + powerwidth/10.# first gaussian energy location
534 533 secondpeak = powerhi - powerwidth/10. #second gaussian energy location
535 534 midpeak = (firstpeak + secondpeak)/2.
536 535 firstamp = spcs[int(firstpeak)]
537 536 secondamp = spcs[int(secondpeak)]
538 537 midamp = spcs[int(midpeak)]
539 538
540 539 y_data = spc + wnoise
541 540
542 541 ''' single Gaussian '''
543 542 shift0 = numpy.mod(midpeak+minx, self.Num_Bin )
544 543 width0 = powerwidth/4.#Initialization entire power of spectrum divided by 4
545 544 power0 = 2.
546 545 amplitude0 = midamp
547 546 state0 = [shift0,width0,amplitude0,power0,wnoise]
548 547 bnds = ((0,self.Num_Bin-1),(1,powerwidth),(0,None),(0.5,3.),(noisebl,noisebh))
549 548 lsq1 = fmin_l_bfgs_b(self.misfit1, state0, args=(y_data,x,num_intg), bounds=bnds, approx_grad=True)
550 549 # print ('stop 7.1')
551 550 # print (bnds)
552 551
553 552 chiSq1=lsq1[1]
554 553
555 554 # print ('stop 8')
556 555 if fatspectra<1.0 and powerwidth<4:
557 556 choice=0
558 557 Amplitude0=lsq1[0][2]
559 558 shift0=lsq1[0][0]
560 559 width0=lsq1[0][1]
561 560 p0=lsq1[0][3]
562 561 Amplitude1=0.
563 562 shift1=0.
564 563 width1=0.
565 564 p1=0.
566 565 noise=lsq1[0][4]
567 566 #return (numpy.array([shift0,width0,Amplitude0,p0]),
568 567 # numpy.array([shift1,width1,Amplitude1,p1]),noise,snrdB,chiSq1,6.,sigmas1,[None,]*9,choice)
569 568
570 569 # print ('stop 9')
571 570 ''' two Gaussians '''
572 571 #shift0=numpy.mod(firstpeak+minx,64); shift1=numpy.mod(secondpeak+minx,64)
573 572 shift0 = numpy.mod(firstpeak+minx, self.Num_Bin )
574 573 shift1 = numpy.mod(secondpeak+minx, self.Num_Bin )
575 574 width0 = powerwidth/6.
576 575 width1 = width0
577 576 power0 = 2.
578 577 power1 = power0
579 578 amplitude0 = firstamp
580 579 amplitude1 = secondamp
581 580 state0 = [shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,wnoise]
582 581 #bnds=((0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
583 582 bnds=((0,self.Num_Bin-1),(1,powerwidth/2.),(0,None),(0.5,3.),(0,self.Num_Bin-1),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
584 583 #bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(0.1,0.5))
585 584
586 585 # print ('stop 10')
587 586 lsq2 = fmin_l_bfgs_b( self.misfit2 , state0 , args=(y_data,x,num_intg) , bounds=bnds , approx_grad=True )
588 587
589 588 # print ('stop 11')
590 589 chiSq2 = lsq2[1]
591 590
592 591 # print ('stop 12')
593 592
594 593 oneG = (chiSq1<5 and chiSq1/chiSq2<2.0) and (abs(lsq2[0][0]-lsq2[0][4])<(lsq2[0][1]+lsq2[0][5])/3. or abs(lsq2[0][0]-lsq2[0][4])<10)
595 594
596 595 # print ('stop 13')
597 596 if snrdB>-12: # when SNR is strong pick the peak with least shift (LOS velocity) error
598 597 if oneG:
599 598 choice = 0
600 599 else:
601 600 w1 = lsq2[0][1]; w2 = lsq2[0][5]
602 601 a1 = lsq2[0][2]; a2 = lsq2[0][6]
603 602 p1 = lsq2[0][3]; p2 = lsq2[0][7]
604 603 s1 = (2**(1+1./p1))*scipy.special.gamma(1./p1)/p1
605 604 s2 = (2**(1+1./p2))*scipy.special.gamma(1./p2)/p2
606 605 gp1 = a1*w1*s1; gp2 = a2*w2*s2 # power content of each generalized Gaussian with proper p scaling
607 606
608 607 if gp1>gp2:
609 608 if a1>0.7*a2:
610 609 choice = 1
611 610 else:
612 611 choice = 2
613 612 elif gp2>gp1:
614 613 if a2>0.7*a1:
615 614 choice = 2
616 615 else:
617 616 choice = 1
618 617 else:
619 618 choice = numpy.argmax([a1,a2])+1
620 619 #else:
621 620 #choice=argmin([std2a,std2b])+1
622 621
623 622 else: # with low SNR go to the most energetic peak
624 623 choice = numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
625 624
626 625 # print ('stop 14')
627 626 shift0 = lsq2[0][0]
628 627 vel0 = Vrange[0] + shift0 * deltav
629 628 shift1 = lsq2[0][4]
630 629 # vel1=Vrange[0] + shift1 * deltav
631 630
632 631 # max_vel = 1.0
633 632 # Va = max(Vrange)
634 633 # deltav = Vrange[1]-Vrange[0]
635 634 # print ('stop 15')
636 635 #first peak will be 0, second peak will be 1
637 636 # if vel0 > -1.0 and vel0 < max_vel : #first peak is in the correct range # Commented by D.Scipión 19.03.2021
638 637 if vel0 > -Va and vel0 < Va : #first peak is in the correct range
639 638 shift0 = lsq2[0][0]
640 639 width0 = lsq2[0][1]
641 640 Amplitude0 = lsq2[0][2]
642 641 p0 = lsq2[0][3]
643 642
644 643 shift1 = lsq2[0][4]
645 644 width1 = lsq2[0][5]
646 645 Amplitude1 = lsq2[0][6]
647 646 p1 = lsq2[0][7]
648 647 noise = lsq2[0][8]
649 648 else:
650 649 shift1 = lsq2[0][0]
651 650 width1 = lsq2[0][1]
652 651 Amplitude1 = lsq2[0][2]
653 652 p1 = lsq2[0][3]
654 653
655 654 shift0 = lsq2[0][4]
656 655 width0 = lsq2[0][5]
657 656 Amplitude0 = lsq2[0][6]
658 657 p0 = lsq2[0][7]
659 658 noise = lsq2[0][8]
660 659
661 660 if Amplitude0<0.05: # in case the peak is noise
662 661 shift0,width0,Amplitude0,p0 = 4*[numpy.NaN]
663 662 if Amplitude1<0.05:
664 663 shift1,width1,Amplitude1,p1 = 4*[numpy.NaN]
665 664
666 665 # print ('stop 16 ')
667 666 # SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0)/width0)**p0)
668 667 # SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1)/width1)**p1)
669 668 # SPCparam = (SPC_ch1,SPC_ch2)
670 669
671 670 DGauFitParam[0,ht,0] = noise
672 671 DGauFitParam[0,ht,1] = noise
673 672 DGauFitParam[1,ht,0] = Amplitude0
674 673 DGauFitParam[1,ht,1] = Amplitude1
675 674 DGauFitParam[2,ht,0] = Vrange[0] + shift0 * deltav
676 675 DGauFitParam[2,ht,1] = Vrange[0] + shift1 * deltav
677 676 DGauFitParam[3,ht,0] = width0 * deltav
678 677 DGauFitParam[3,ht,1] = width1 * deltav
679 678 DGauFitParam[4,ht,0] = p0
680 679 DGauFitParam[4,ht,1] = p1
681 680
682 681 # print (DGauFitParam.shape)
683 682 # print ('Leaving FitGau')
684 683 return DGauFitParam
685 684 # return SPCparam
686 685 # return GauSPC
687 686
688 687 def y_model1(self,x,state):
689 688 shift0, width0, amplitude0, power0, noise = state
690 689 model0 = amplitude0*numpy.exp(-0.5*abs((x - shift0)/width0)**power0)
691 690 model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
692 691 model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
693 692 return model0 + model0u + model0d + noise
694 693
695 694 def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist
696 695 shift0, width0, amplitude0, power0, shift1, width1, amplitude1, power1, noise = state
697 696 model0 = amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
698 697 model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
699 698 model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
700 699
701 700 model1 = amplitude1*numpy.exp(-0.5*abs((x - shift1)/width1)**power1)
702 701 model1u = amplitude1*numpy.exp(-0.5*abs((x - shift1 - self.Num_Bin)/width1)**power1)
703 702 model1d = amplitude1*numpy.exp(-0.5*abs((x - shift1 + self.Num_Bin)/width1)**power1)
704 703 return model0 + model0u + model0d + model1 + model1u + model1d + noise
705 704
706 705 def misfit1(self,state,y_data,x,num_intg): # This function compares how close the real data is to the model data; the closer it is, the better.
707 706
708 707 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model1(x,state)))**2)#/(64-5.) # /(64-5.) can be commented
709 708
710 709 def misfit2(self,state,y_data,x,num_intg):
711 710 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model2(x,state)))**2)#/(64-9.)
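# --- Illustrative, commented-out sketch (reviewer annotation, not part of the original file).
# It shows what y_model1/misfit1 compute for a toy spectrum: a generalized Gaussian plus two
# Nyquist-shifted replicas and a noise floor, compared to the data with a log-domain squared
# error scaled by the number of incoherent integrations. Num_Bin=64 and the state values below
# are assumptions for the example only.
#
# import numpy
# Num_Bin = 64
# x = numpy.arange(Num_Bin)
# shift0, width0, amplitude0, power0, noise = 20.0, 4.0, 1.0, 2.0, 0.1
# model = (amplitude0*numpy.exp(-0.5*abs((x - shift0)/width0)**power0)
#          + amplitude0*numpy.exp(-0.5*abs((x - shift0 - Num_Bin)/width0)**power0)
#          + amplitude0*numpy.exp(-0.5*abs((x - shift0 + Num_Bin)/width0)**power0)
#          + noise)
# y_data = model * numpy.random.lognormal(sigma=0.1, size=Num_Bin)   # noisy "measurement"
# num_intg = 30
# misfit = num_intg*sum((numpy.log(y_data) - numpy.log(model))**2)
# # fmin_l_bfgs_b minimizes this misfit over [shift, width, amplitude, power, noise]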
712 711
713 712
714 713
715 714 class PrecipitationProc(Operation):
716 715
717 716 '''
718 717 Operation that estimates the reflectivity factor (Z) and the rainfall rate (R)
719 718
720 719 Input:
721 720 self.dataOut.data_pre : SelfSpectra
722 721
723 722 Output:
724 723
725 724 self.dataOut.data_output : Reflectivity factor, rainfall Rate
726 725
727 726
728 727 Parameters affected:
729 728 '''
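# --- Hedged usage sketch (reviewer annotation, commented out): one possible way to wire this
# operation into a schainpy Project, assuming the usual controller API (addProcUnit /
# addOperation / addParameter, as used elsewhere in this changeset). The unit names, ids and
# parameter values are illustrative placeholders, not the project's actual configuration.
#
# param_unit = project.addProcUnit(datatype='ParametersProc', inputId=spectra_unit.getId())
# op = param_unit.addOperation(name='PrecipitationProc', optype='other')
# op.addParameter(name='SNRdBlimit', value='-30', format='float')
# op.addParameter(name='Altitude', value='3350', format='float')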
730 729
731 730 def __init__(self):
732 731 Operation.__init__(self)
733 732 self.i=0
734 733
735 734 def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
736 735 tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km2 = 0.93, Altitude=3350,SNRdBlimit=-30):
737 736
738 737 # print ('Entering PrecipitationProc ... ')
739 738
740 739 if radar == "MIRA35C" :
741 740
742 741 self.spc = dataOut.data_pre[0].copy()
743 742 self.Num_Hei = self.spc.shape[2]
744 743 self.Num_Bin = self.spc.shape[1]
745 744 self.Num_Chn = self.spc.shape[0]
746 745 Ze = self.dBZeMODE2(dataOut)
747 746
748 747 else:
749 748
750 749 self.spc = dataOut.data_pre[0].copy()
751 750
752 751 # NOTE: THE TX PULSE RANGE MUST BE REMOVED
753 752 self.spc[:,:,0:7]= numpy.NaN
754 753
755 754 self.Num_Hei = self.spc.shape[2]
756 755 self.Num_Bin = self.spc.shape[1]
757 756 self.Num_Chn = self.spc.shape[0]
758 757
759 758 VelRange = dataOut.spc_range[2]
760 759
761 760 ''' The RADAR constant is obtained '''
762 761
763 762 self.Pt = Pt
764 763 self.Gt = Gt
765 764 self.Gr = Gr
766 765 self.Lambda = Lambda
767 766 self.aL = aL
768 767 self.tauW = tauW
769 768 self.ThetaT = ThetaT
770 769 self.ThetaR = ThetaR
771 770 self.GSys = 10**(36.63/10) # LNA gain 36.63 dB
772 771 self.lt = 10**(1.67/10) # Tx cable loss 1.67 dB
773 772 self.lr = 10**(5.73/10) # Rx cable loss 5.73 dB
774 773
775 774 Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
776 775 Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR)
777 776 RadarConstant = 10e-26 * Numerator / Denominator #
778 777 ExpConstant = 10**(40/10) # Experimental constant
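# --- Illustrative, commented-out sketch: evaluating the radar-constant expression above with
# the default run() arguments. This is purely a numeric restatement of the two lines above;
# the speed of light is written out here only so the sketch is self-contained.
#
# import numpy
# SPEED_OF_LIGHT = 3e8                                   # approximate value for the sketch
# Pt, Gt, Gr, Lambda, aL = 5000, 295.1209, 70.7945, 0.6741, 2.5118
# tauW, ThetaT, ThetaR = 4e-06, 0.1656317, 0.36774087
# num = (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2)
# den = Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR
# radar_constant = 10e-26 * num / den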
779 778
780 779 SignalPower = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
781 780 for i in range(self.Num_Chn):
782 781 SignalPower[i,:,:] = self.spc[i,:,:] - dataOut.noise[i]
783 782 SignalPower[numpy.where(SignalPower < 0)] = 1e-20
784 783
785 784 SPCmean = numpy.mean(SignalPower, 0)
786 785 Pr = SPCmean[:,:]/dataOut.normFactor
787 786
788 787 # Declaring auxiliary variables
789 788 Range = dataOut.heightList*1000. #Range in m
790 789 # replicate the heightlist to obtain a matrix [Num_Bin,Num_Hei]
791 790 rMtrx = numpy.transpose(numpy.transpose([dataOut.heightList*1000.] * self.Num_Bin))
792 791 zMtrx = rMtrx+Altitude
793 792 # replicate the VelRange to obtain a matrix [Num_Bin,Num_Hei]
794 793 VelMtrx = numpy.transpose(numpy.tile(VelRange[:-1], (self.Num_Hei,1)))
795 794
796 795 # height dependence of air density, Foote and Du Toit (1969)
797 796 delv_z = 1 + 3.68e-5 * zMtrx + 1.71e-9 * zMtrx**2
798 797 VMtrx = VelMtrx / delv_z #Normalized velocity
799 798 VMtrx[numpy.where(VMtrx> 9.6)] = numpy.NaN
800 799 # Diameter is related to the fall speed of falling drops
801 800 D_Vz = -1.667 * numpy.log( 0.9369 - 0.097087 * VMtrx ) # D in [mm]
802 801 # Only valid for D>= 0.16 mm
803 802 D_Vz[numpy.where(D_Vz < 0.16)] = numpy.NaN
804 803
805 804 #Calculate Radar Reflectivity ETAn
806 805 ETAn = (RadarConstant *ExpConstant) * Pr * rMtrx**2 #Reflectivity (ETA)
807 806 ETAd = ETAn * 6.18 * numpy.exp( -0.6 * D_Vz ) * delv_z
808 807 # Radar Cross Section
809 808 sigmaD = Km2 * (D_Vz * 1e-3 )**6 * numpy.pi**5 / Lambda**4
810 809 # Drop Size Distribution
811 810 DSD = ETAn / sigmaD
812 811 # Equivalent reflectivity
813 812 Ze_eqn = numpy.nansum( DSD * D_Vz**6 ,axis=0)
814 813 Ze_org = numpy.nansum(ETAn * Lambda**4, axis=0) / (1e-18*numpy.pi**5 * Km2) # [mm^6 /m^3]
815 814 # RainFall Rate
816 815 RR = 0.0006*numpy.pi * numpy.nansum( D_Vz**3 * DSD * VelMtrx ,0) #mm/hr
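# --- Illustrative, commented-out sketch of the chain above for a single spectral bin:
# observed fall speed -> air-density-normalized speed -> drop diameter (the D(V) line above is
# the inverse of the Atlas et al. (1973) fall-speed relation) -> sixth-moment contribution to
# Ze and third-moment contribution to the rain rate. The numbers are toy values, not results.
#
# import numpy
# v_obs = 4.2                                        # measured Doppler fall speed, m/s
# delv_z = 1.05                                      # air-density correction at this height
# v_norm = v_obs / delv_z
# D = -1.667*numpy.log(0.9369 - 0.097087*v_norm)     # drop diameter in mm, valid for D >= 0.16 mm
# dsd_bin = 100.0                                    # toy DSD value, drops m^-3 mm^-1
# ze_contrib = dsd_bin * D**6                        # contribution to Ze [mm^6 m^-3]
# rr_contrib = 0.0006*numpy.pi * D**3 * dsd_bin * v_obs   # contribution to RR [mm/hr]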
817 816
818 817 # Censoring the data
819 818 # Removing data below the SNR threshold; the SNR should be considered per channel
820 819 SNRth = 10**(SNRdBlimit/10) #-30dB
821 820 novalid = numpy.where((dataOut.data_snr[0,:] <SNRth) | (dataOut.data_snr[1,:] <SNRth) | (dataOut.data_snr[2,:] <SNRth)) # AND condition. Maybe OR condition better
822 821 W = numpy.nanmean(dataOut.data_dop,0)
823 822 W[novalid] = numpy.NaN
824 823 Ze_org[novalid] = numpy.NaN
825 824 RR[novalid] = numpy.NaN
826 825
827 826 dataOut.data_output = RR[8]
828 827 dataOut.data_param = numpy.ones([3,self.Num_Hei])
829 828 dataOut.channelList = [0,1,2]
830 829
831 830 dataOut.data_param[0]=10*numpy.log10(Ze_org)
832 831 dataOut.data_param[1]=-W
833 832 dataOut.data_param[2]=RR
834 833
835 834 # print ('Leaving PrecipitationProc ... ')
836 835 return dataOut
837 836
838 837 def dBZeMODE2(self, dataOut): # Processing for MIRA35C
839 838
840 839 NPW = dataOut.NPW
841 840 COFA = dataOut.COFA
842 841
843 842 SNR = numpy.array([self.spc[0,:,:] / NPW[0]]) #, self.spc[1,:,:] / NPW[1]])
844 843 RadarConst = dataOut.RadarConst
845 844 #frequency = 34.85*10**9
846 845
847 846 ETA = numpy.zeros(([self.Num_Chn ,self.Num_Hei]))
848 847 data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN
849 848
850 849 ETA = numpy.sum(SNR,1)
851 850
852 851 ETA = numpy.where(ETA != 0. , ETA, numpy.NaN)
853 852
854 853 Ze = numpy.ones([self.Num_Chn, self.Num_Hei] )
855 854
856 855 for r in range(self.Num_Hei):
857 856
858 857 Ze[0,r] = ( ETA[0,r] ) * COFA[0,r][0] * RadarConst * ((r/5000.)**2)
859 858 #Ze[1,r] = ( ETA[1,r] ) * COFA[1,r][0] * RadarConst * ((r/5000.)**2)
860 859
861 860 return Ze
862 861
863 862 # def GetRadarConstant(self):
864 863 #
865 864 # """
866 865 # Constants:
867 866 #
868 867 # Pt: Transmission Power dB 5kW 5000
869 868 # Gt: Transmission Gain dB 24.7 dB 295.1209
870 869 # Gr: Reception Gain dB 18.5 dB 70.7945
871 870 # Lambda: Wavelength m 0.6741 m 0.6741
872 871 # aL: Attenuation losses dB 4dB 2.5118
873 872 # tauW: Width of transmission pulse s 4us 4e-6
874 873 # ThetaT: Transmission antenna beam angle rad 0.1656317 rad 0.1656317
875 874 # ThetaR: Reception antenna beam angle rad 0.36774087 rad 0.36774087
876 875 #
877 876 # """
878 877 #
879 878 # Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
880 879 # Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR)
881 880 # RadarConstant = Numerator / Denominator
882 881 #
883 882 # return RadarConstant
884 883
885 884
886 885
887 886 class FullSpectralAnalysis(Operation):
888 887
889 888 """
890 889 Function that implements Full Spectral Analysis technique.
891 890
892 891 Input:
893 892 self.dataOut.data_pre : SelfSpectra and CrossSpectra data
894 893 self.dataOut.groupList : Pairlist of channels
895 894 self.dataOut.ChanDist : Physical distance between receivers
896 895
897 896
898 897 Output:
899 898
900 899 self.dataOut.data_output : Zonal wind, Meridional wind, and Vertical wind
901 900
902 901
903 902 Parameters affected: Winds, height range, SNR
904 903
905 904 """
906 905 def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRdBlimit=-30,
907 906 minheight=None, maxheight=None, NegativeLimit=None, PositiveLimit=None):
908 907
909 908 spc = dataOut.data_pre[0].copy()
910 909 cspc = dataOut.data_pre[1]
911 910 nHeights = spc.shape[2]
912 911
913 912 # first_height = 0.75 #km (ref: data header 20170822)
914 913 # resolution_height = 0.075 #km
915 914 '''
916 915 finding height range. check this when radar parameters are changed!
917 916 '''
918 917 if maxheight is not None:
919 918 # range_max = math.ceil((maxheight - first_height) / resolution_height) # theoretical
920 919 range_max = math.ceil(13.26 * maxheight - 3) # empirical, works better
921 920 else:
922 921 range_max = nHeights
923 922 if minheight is not None:
924 923 # range_min = int((minheight - first_height) / resolution_height) # theoretical
925 924 range_min = int(13.26 * minheight - 5) # empirical, works better
926 925 if range_min < 0:
927 926 range_min = 0
928 927 else:
929 928 range_min = 0
930 929
931 930 pairsList = dataOut.groupList
932 931 if dataOut.ChanDist is not None :
933 932 ChanDist = dataOut.ChanDist
934 933 else:
935 934 ChanDist = numpy.array([[Xi01, Eta01],[Xi02,Eta02],[Xi12,Eta12]])
936 935
937 936 # 4 variables: zonal, meridional, vertical, and average SNR
938 937 data_param = numpy.zeros([4,nHeights]) * numpy.NaN
939 938 velocityX = numpy.zeros([nHeights]) * numpy.NaN
940 939 velocityY = numpy.zeros([nHeights]) * numpy.NaN
941 940 velocityZ = numpy.zeros([nHeights]) * numpy.NaN
942 941
943 942 dbSNR = 10*numpy.log10(numpy.average(dataOut.data_snr,0))
944 943
945 944 '''***********************************************WIND ESTIMATION**************************************'''
946 945 for Height in range(nHeights):
947 946
948 947 if Height >= range_min and Height < range_max:
949 948 # error_code will be useful in future analysis
950 949 [Vzon,Vmer,Vver, error_code] = self.WindEstimation(spc[:,:,Height], cspc[:,:,Height], pairsList,
951 950 ChanDist, Height, dataOut.noise, dataOut.spc_range, dbSNR[Height], SNRdBlimit, NegativeLimit, PositiveLimit,dataOut.frequency)
952 951
953 952 if abs(Vzon) < 100. and abs(Vmer) < 100.:
954 953 velocityX[Height] = Vzon
955 954 velocityY[Height] = -Vmer
956 955 velocityZ[Height] = Vver
957 956
958 957 # Censoring data with SNR threshold
959 958 dbSNR [dbSNR < SNRdBlimit] = numpy.NaN
960 959
961 960 data_param[0] = velocityX
962 961 data_param[1] = velocityY
963 962 data_param[2] = velocityZ
964 963 data_param[3] = dbSNR
965 964 dataOut.data_param = data_param
966 965 return dataOut
967 966
968 967 def moving_average(self,x, N=2):
969 968 """ convolution for smoothenig data. note that last N-1 values are convolution with zeroes """
970 969 return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):]
971 970
972 971 def gaus(self,xSamples,Amp,Mu,Sigma):
973 972 return Amp * numpy.exp(-0.5*((xSamples - Mu)/Sigma)**2)
974 973
975 974 def Moments(self, ySamples, xSamples):
976 975 Power = numpy.nanmean(ySamples) # Power, 0th Moment
977 976 yNorm = ySamples / numpy.nansum(ySamples)
978 977 RadVel = numpy.nansum(xSamples * yNorm) # Radial Velocity, 1st Moment
979 978 Sigma2 = numpy.nansum(yNorm * (xSamples - RadVel)**2) # Spectral Width, 2nd Moment
980 979 StdDev = numpy.sqrt(numpy.abs(Sigma2)) # Standard deviation, spectral width
981 980 return numpy.array([Power,RadVel,StdDev])
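# --- Illustrative, commented-out sketch: Moments() applied to a synthetic Gaussian spectrum.
# The 0th/1st/2nd moments approximately recover the mean power, the peak position and the
# width used to build the input. Toy values only.
#
# import numpy
# x = numpy.linspace(-10, 10, 128)                       # velocity or frequency axis
# y = numpy.exp(-0.5*((x - 2.0)/1.5)**2) + 1e-3          # peak at 2.0, width 1.5, small floor
# power = numpy.nanmean(y)
# ynorm = y / numpy.nansum(y)
# radvel = numpy.nansum(x*ynorm)                         # close to 2.0
# stddev = numpy.sqrt(numpy.abs(numpy.nansum(ynorm*(x - radvel)**2)))   # close to 1.5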
982 981
983 982 def StopWindEstimation(self, error_code):
984 983 Vzon = numpy.NaN
985 984 Vmer = numpy.NaN
986 985 Vver = numpy.NaN
987 986 return Vzon, Vmer, Vver, error_code
988 987
989 988 def AntiAliasing(self, interval, maxstep):
990 989 """
991 990 function to prevent errors from aliased values when computing phaseslope
992 991 """
993 992 antialiased = numpy.zeros(len(interval))
994 993 copyinterval = interval.copy()
995 994
996 995 antialiased[0] = copyinterval[0]
997 996
998 997 for i in range(1,len(antialiased)):
999 998 step = interval[i] - interval[i-1]
1000 999 if step > maxstep:
1001 1000 copyinterval -= 2*numpy.pi
1002 1001 antialiased[i] = copyinterval[i]
1003 1002 elif step < maxstep*(-1):
1004 1003 copyinterval += 2*numpy.pi
1005 1004 antialiased[i] = copyinterval[i]
1006 1005 else:
1007 1006 antialiased[i] = copyinterval[i].copy()
1008 1007
1009 1008 return antialiased
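# --- Illustrative, commented-out sketch: AntiAliasing() applied to a phase series with one
# 2*pi wrap. Any step larger than maxstep shifts the rest of the series by +/- 2*pi so that
# the later slope fit sees a continuous phase. Toy values only.
#
# import numpy
# wrapped = numpy.array([2.6, 2.9, 3.1, -3.0, -2.7])     # drops by ~2*pi after 3.1
# # AntiAliasing(wrapped, maxstep=4.5) would return approximately
# # [2.6, 2.9, 3.1, 3.28, 3.58]                          # the tail lifted by +2*pi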
1010 1009
1011 1010 def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit, NegativeLimit, PositiveLimit, radfreq):
1012 1011 """
1013 1012 Function that Calculates Zonal, Meridional and Vertical wind velocities.
1014 1013 Initial Version by E. Bocanegra updated by J. Zibell until Nov. 2019.
1015 1014
1016 1015 Input:
1017 1016 spc, cspc : self-spectra and cross-spectra data. In Briggs notation, roughly S_i*(S_i)_conj and S_i*(S_j)_conj respectively.
1018 1017 pairsList : Pairlist of channels
1019 1018 ChanDist : array of xi_ij and eta_ij
1020 1019 Height : height at which data is processed
1021 1020 noise : noise in [channels] format for specific height
1022 1021 AbbsisaRange : range of the frequencies or velocities
1023 1022 dbSNR, SNRlimit : signal to noise ratio in db, lower limit
1024 1023
1025 1024 Output:
1026 1025 Vzon, Vmer, Vver : wind velocities
1027 1026 error_code : int that states where code is terminated
1028 1027
1029 1028 0 : no error detected
1030 1029 1 : Gaussian of mean spc exceeds widthlimit
1031 1030 2 : no Gaussian of mean spc found
1032 1031 3 : SNR too low or velocity too high -> e.g. precipitation
1033 1032 4 : at least one Gaussian of cspc exceeds widthlimit
1034 1033 5 : zero out of three cspc Gaussian fits converged
1035 1034 6 : phase slope fit could not be found
1036 1035 7 : arrays used to fit phase have different length
1037 1036 8 : frequency range is either too short (len <= 5) or very long (> 30% of cspc)
1038 1037
1039 1038 """
1040 1039
1041 1040 error_code = 0
1042 1041
1043 1042 nChan = spc.shape[0]
1044 1043 nProf = spc.shape[1]
1045 1044 nPair = cspc.shape[0]
1046 1045
1047 1046 SPC_Samples = numpy.zeros([nChan, nProf]) # for normalized spc values for one height
1048 1047 CSPC_Samples = numpy.zeros([nPair, nProf], dtype=numpy.complex_) # for normalized cspc values
1049 1048 phase = numpy.zeros([nPair, nProf]) # phase between channels
1050 1049 PhaseSlope = numpy.zeros(nPair) # slope of the phases, channelwise
1051 1050 PhaseInter = numpy.zeros(nPair) # intercept to the slope of the phases, channelwise
1052 1051 xFrec = AbbsisaRange[0][:-1] # frequency range
1053 1052 xVel = AbbsisaRange[2][:-1] # velocity range
1054 1053 xSamples = xFrec # the frequency range is taken
1055 1054 delta_x = xSamples[1] - xSamples[0] # delta_f or delta_x
1056 1055
1057 1056 # only consider velocities between NegativeLimit and PositiveLimit
1058 1057 if (NegativeLimit is None):
1059 1058 NegativeLimit = numpy.min(xVel)
1060 1059 if (PositiveLimit is None):
1061 1060 PositiveLimit = numpy.max(xVel)
1062 1061 xvalid = numpy.where((xVel > NegativeLimit) & (xVel < PositiveLimit))
1063 1062 xSamples_zoom = xSamples[xvalid]
1064 1063
1065 1064 '''Getting Eij and Nij'''
1066 1065 Xi01, Xi02, Xi12 = ChanDist[:,0]
1067 1066 Eta01, Eta02, Eta12 = ChanDist[:,1]
1068 1067
1069 1068 # spwd limit - updated by D. Scipión 30.03.2021
1070 1069 widthlimit = 10
1071 1070 '''************************* SPC is normalized ********************************'''
1072 1071 spc_norm = spc.copy()
1073 1072 # For each channel
1074 1073 for i in range(nChan):
1075 1074 spc_sub = spc_norm[i,:] - noise[i] # only the signal power
1076 1075 SPC_Samples[i] = spc_sub / (numpy.nansum(spc_sub) * delta_x)
1077 1076
1078 1077 '''********************** FITTING MEAN SPC GAUSSIAN **********************'''
1079 1078
1080 1079 """ the gaussian of the mean: first subtract noise, then normalize. this is legal because
1081 1080 you only fit the curve and don't need the absolute value of height for calculation,
1082 1081 only for estimation of width. for normalization of cross spectra, you need initial,
1083 1082 unnormalized self-spectra With noise.
1084 1083
1085 1084 Technically, you don't even need to normalize the self-spectra, as you only need the
1086 1085 width of the peak. However, it was left this way. Note that the normalization has a flaw:
1087 1086 due to subtraction of the noise, some values are below zero. Raw "spc" values should be
1088 1087 >= 0, as it is the modulus squared of the signals (complex * it's conjugate)
1089 1088 """
1090 1089 # initial conditions
1091 1090 popt = [1e-10,0,1e-10]
1092 1091 # Spectra average
1093 1092 SPCMean = numpy.average(SPC_Samples,0)
1094 1093 # Moments in frequency
1095 1094 SPCMoments = self.Moments(SPCMean[xvalid], xSamples_zoom)
1096 1095
1097 1096 # Gauss Fit SPC in frequency domain
1098 1097 if dbSNR > SNRlimit: # only if SNR > SNRth
1099 1098 try:
1100 1099 popt,pcov = curve_fit(self.gaus,xSamples_zoom,SPCMean[xvalid],p0=SPCMoments)
1101 1100 if popt[2] <= 0 or popt[2] > widthlimit: # CONDITION
1102 1101 return self.StopWindEstimation(error_code = 1)
1103 1102 FitGauss = self.gaus(xSamples_zoom,*popt)
1104 1103 except :#RuntimeError:
1105 1104 return self.StopWindEstimation(error_code = 2)
1106 1105 else:
1107 1106 return self.StopWindEstimation(error_code = 3)
1108 1107
1109 1108 '''***************************** CSPC Normalization *************************
1110 1109 The self-spectra are used to normalize the cross-spectra. Peaks from precipitation
1111 1110 influence the norm, which is not desired. First, a range is identified where the
1112 1111 wind peak is estimated -> sum_wind is the sum of those frequencies. Next, the area
1113 1112 around it gets cut off and the values are replaced by the mean determined by the boundary
1114 1113 data -> sum_noise (spc is not normalized here, that's why the noise is important)
1115 1114
1116 1115 The sums are then added and multiplied by range/datapoints, because you need
1117 1116 an integral and not a sum for normalization.
1118 1117
1119 1118 A norm is found according to Briggs 92.
1120 1119 '''
1121 1120 # for each pair
1122 1121 for i in range(nPair):
1123 1122 cspc_norm = cspc[i,:].copy()
1124 1123 chan_index0 = pairsList[i][0]
1125 1124 chan_index1 = pairsList[i][1]
1126 1125 CSPC_Samples[i] = cspc_norm / (numpy.sqrt(numpy.nansum(spc_norm[chan_index0])*numpy.nansum(spc_norm[chan_index1])) * delta_x)
1127 1126 phase[i] = numpy.arctan2(CSPC_Samples[i].imag, CSPC_Samples[i].real)
1128 1127
1129 1128 CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPC_Samples[0,xvalid]), xSamples_zoom),
1130 1129 self.Moments(numpy.abs(CSPC_Samples[1,xvalid]), xSamples_zoom),
1131 1130 self.Moments(numpy.abs(CSPC_Samples[2,xvalid]), xSamples_zoom)])
1132 1131
1133 1132 popt01, popt02, popt12 = [1e-10,0,1e-10], [1e-10,0,1e-10] ,[1e-10,0,1e-10]
1134 1133 FitGauss01, FitGauss02, FitGauss12 = numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples))
1135 1134
1136 1135 '''*******************************FIT GAUSS CSPC************************************'''
1137 1136 try:
1138 1137 popt01,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[0][xvalid]),p0=CSPCmoments[0])
1139 1138 if popt01[2] > widthlimit: # CONDITION
1140 1139 return self.StopWindEstimation(error_code = 4)
1141 1140 popt02,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[1][xvalid]),p0=CSPCmoments[1])
1142 1141 if popt02[2] > widthlimit: # CONDITION
1143 1142 return self.StopWindEstimation(error_code = 4)
1144 1143 popt12,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[2][xvalid]),p0=CSPCmoments[2])
1145 1144 if popt12[2] > widthlimit: # CONDITION
1146 1145 return self.StopWindEstimation(error_code = 4)
1147 1146
1148 1147 FitGauss01 = self.gaus(xSamples_zoom, *popt01)
1149 1148 FitGauss02 = self.gaus(xSamples_zoom, *popt02)
1150 1149 FitGauss12 = self.gaus(xSamples_zoom, *popt12)
1151 1150 except:
1152 1151 return self.StopWindEstimation(error_code = 5)
1153 1152
1154 1153
1155 1154 '''************* Getting Fij ***************'''
1156 1155 # x-axis point of the gaussian where the center is located from GaussFit of spectra
1157 1156 GaussCenter = popt[1]
1158 1157 ClosestCenter = xSamples_zoom[numpy.abs(xSamples_zoom-GaussCenter).argmin()]
1159 1158 PointGauCenter = numpy.where(xSamples_zoom==ClosestCenter)[0][0]
1160 1159
1161 1160 # Point where e^-1 is located in the gaussian
1162 1161 PeMinus1 = numpy.max(FitGauss) * numpy.exp(-1)
1163 1162 FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # The closest point to "PeMinus1" in "FitGauss"
1164 1163 PointFij = numpy.where(FitGauss==FijClosest)[0][0]
1165 1164 Fij = numpy.abs(xSamples_zoom[PointFij] - xSamples_zoom[PointGauCenter])
1166 1165
1167 1166 '''********** Taking frequency ranges from mean SPCs **********'''
1168 1167 GauWidth = popt[2] * 3/2 # Bandwidth of Gau01
1169 1168 Range = numpy.empty(2)
1170 1169 Range[0] = GaussCenter - GauWidth
1171 1170 Range[1] = GaussCenter + GauWidth
1172 1171 # Point in x-axis where the bandwidth is located (min:max)
1173 1172 ClosRangeMin = xSamples_zoom[numpy.abs(xSamples_zoom-Range[0]).argmin()]
1174 1173 ClosRangeMax = xSamples_zoom[numpy.abs(xSamples_zoom-Range[1]).argmin()]
1175 1174 PointRangeMin = numpy.where(xSamples_zoom==ClosRangeMin)[0][0]
1176 1175 PointRangeMax = numpy.where(xSamples_zoom==ClosRangeMax)[0][0]
1177 1176 Range = numpy.array([ PointRangeMin, PointRangeMax ])
1178 1177 FrecRange = xSamples_zoom[ Range[0] : Range[1] ]
1179 1178
1180 1179 '''************************** Getting Phase Slope ***************************'''
1181 1180 for i in range(nPair):
1182 1181 if len(FrecRange) > 5:
1183 1182 PhaseRange = phase[i, xvalid[0][Range[0]:Range[1]]].copy()
1184 1183 mask = ~numpy.isnan(FrecRange) & ~numpy.isnan(PhaseRange)
1185 1184 if len(FrecRange) == len(PhaseRange):
1186 1185 try:
1187 1186 slope, intercept, _, _, _ = stats.linregress(FrecRange[mask], self.AntiAliasing(PhaseRange[mask], 4.5))
1188 1187 PhaseSlope[i] = slope
1189 1188 PhaseInter[i] = intercept
1190 1189 except:
1191 1190 return self.StopWindEstimation(error_code = 6)
1192 1191 else:
1193 1192 return self.StopWindEstimation(error_code = 7)
1194 1193 else:
1195 1194 return self.StopWindEstimation(error_code = 8)
1196 1195
1197 1196 '''*** Constants A-H correspond to the convention as in Briggs and Vincent 1992 ***'''
1198 1197
1199 1198 '''Getting constant C'''
1200 1199 cC=(Fij*numpy.pi)**2
1201 1200
1202 1201 '''****** Getting constants F and G ******'''
1203 1202 MijEijNij = numpy.array([[Xi02,Eta02], [Xi12,Eta12]])
1204 1203 # MijEijNij = numpy.array([[Xi01,Eta01], [Xi02,Eta02], [Xi12,Eta12]])
1205 1204 # MijResult0 = (-PhaseSlope[0] * cC) / (2*numpy.pi)
1206 1205 MijResult1 = (-PhaseSlope[1] * cC) / (2*numpy.pi)
1207 1206 MijResult2 = (-PhaseSlope[2] * cC) / (2*numpy.pi)
1208 1207 # MijResults = numpy.array([MijResult0, MijResult1, MijResult2])
1209 1208 MijResults = numpy.array([MijResult1, MijResult2])
1210 1209 (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
1211 1210
1212 1211 '''****** Getting constants A, B and H ******'''
1213 1212 W01 = numpy.nanmax( FitGauss01 )
1214 1213 W02 = numpy.nanmax( FitGauss02 )
1215 1214 W12 = numpy.nanmax( FitGauss12 )
1216 1215
1217 1216 WijResult01 = ((cF * Xi01 + cG * Eta01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi / cC))
1218 1217 WijResult02 = ((cF * Xi02 + cG * Eta02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi / cC))
1219 1218 WijResult12 = ((cF * Xi12 + cG * Eta12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi / cC))
1220 1219 WijResults = numpy.array([WijResult01, WijResult02, WijResult12])
1221 1220
1222 1221 WijEijNij = numpy.array([ [Xi01**2, Eta01**2, 2*Xi01*Eta01] , [Xi02**2, Eta02**2, 2*Xi02*Eta02] , [Xi12**2, Eta12**2, 2*Xi12*Eta12] ])
1223 1222 (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
1224 1223
1225 1224 VxVy = numpy.array([[cA,cH],[cH,cB]])
1226 1225 VxVyResults = numpy.array([-cF,-cG])
1227 1226 (Vmer,Vzon) = numpy.linalg.solve(VxVy, VxVyResults)
1228 1227 Vver = -SPCMoments[1]*SPEED_OF_LIGHT/(2*radfreq)
1229 1228 error_code = 0
1230 1229
1231 1230 return Vzon, Vmer, Vver, error_code
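# --- Illustrative, commented-out sketch of the final linear algebra above (Briggs & Vincent
# 1992 convention): F,G come from a 2x2 solve on the phase slopes, A,B,H from a 3x3 solve on
# the log-amplitude terms, and the horizontal wind from one more 2x2 solve. The matrices below
# are toy numbers with the same shapes as MijEijNij, WijEijNij and VxVy.
#
# import numpy
# MijEijNij = numpy.array([[1.5, 2.5], [0.0, -5.0]])     # [[Xi02,Eta02],[Xi12,Eta12]]
# MijResults = numpy.array([0.8, -0.3])
# cF, cG = numpy.linalg.solve(MijEijNij, MijResults)
# WijEijNij = numpy.array([[2.25, 6.25, 7.5],
#                          [2.25, 6.25, -7.5],
#                          [0.0, 25.0, 0.0]])            # rows: [Xi**2, Eta**2, 2*Xi*Eta]
# WijResults = numpy.array([0.4, 0.5, 0.6])
# cA, cB, cH = numpy.linalg.solve(WijEijNij, WijResults)
# Vmer, Vzon = numpy.linalg.solve(numpy.array([[cA, cH], [cH, cB]]),
#                                 numpy.array([-cF, -cG]))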
1232 1231
1233 1232 class SpectralMoments(Operation):
1234 1233
1235 1234 '''
1236 1235 Function SpectralMoments()
1237 1236
1238 1237 Calculates moments (power, mean, standard deviation) and SNR of the signal
1239 1238
1240 1239 Type of dataIn: Spectra
1241 1240
1242 1241 Configuration Parameters:
1243 1242
1244 1243 dirCosx : Direction cosine in the X axis
1245 1244 dirCosy : Direction cosine in the Y axis
1246 1245
1247 1246 elevation :
1248 1247 azimuth :
1249 1248
1250 1249 Input:
1251 1250 channelList : simple channel list to select e.g. [2,3,7]
1252 1251 self.dataOut.data_pre : Spectral data
1253 1252 self.dataOut.abscissaList : List of frequencies
1254 1253 self.dataOut.noise : Noise level per channel
1255 1254
1256 1255 Affected:
1257 1256 self.dataOut.moments : Parameters per channel
1258 1257 self.dataOut.data_snr : SNR per channel
1259 1258
1260 1259 '''
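# --- Hedged usage sketch (reviewer annotation, commented out): typical wiring of this
# operation, assuming the usual addOperation/addParameter controller API. With wradar=True the
# per-bin power uses the noise-subtracted definition (signal_power) instead of the total
# power; see __calculateMoments below. Names and values are illustrative.
#
# op = param_unit.addOperation(name='SpectralMoments', optype='other')
# op.addParameter(name='wradar', value='1', format='bool')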
1261 1260
1262 1261 def run(self, dataOut,wradar=False):
1263 1262
1264 1263 data = dataOut.data_pre[0]
1265 1264 absc = dataOut.abscissaList[:-1]
1266 1265 noise = dataOut.noise
1267 1266 nChannel = data.shape[0]
1268 1267 data_param = numpy.zeros((nChannel, 4, data.shape[2]))
1269 1268
1270 1269 for ind in range(nChannel):
1271 1270 data_param[ind,:,:] = self.__calculateMoments( data[ind,:,:] , absc , noise[ind],wradar=wradar )
1272 1271
1273 1272 dataOut.moments = data_param[:,1:,:]
1274 1273 dataOut.data_snr = data_param[:,0]
1275 1274 dataOut.data_pow = data_param[:,1]
1276 1275 dataOut.data_dop = data_param[:,2]
1277 1276 dataOut.data_width = data_param[:,3]
1278 1277 return dataOut
1279 1278
1280 1279 def __calculateMoments(self, oldspec, oldfreq, n0,
1281 1280 nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None,wradar=None):
1282 1281
1283 1282 if (nicoh is None): nicoh = 1
1284 1283 if (graph is None): graph = 0
1285 1284 if (smooth is None): smooth = 0
1286 1285 elif (smooth < 3): smooth = 0
1287 1286
1288 1287 if (type1 is None): type1 = 0
1289 1288 if (fwindow is None): fwindow = numpy.zeros(oldfreq.size) + 1
1290 1289 if (snrth is None): snrth = -3
1291 1290 if (dc is None): dc = 0
1292 1291 if (aliasing is None): aliasing = 0
1293 1292 if (oldfd is None): oldfd = 0
1294 1293 if (wwauto is None): wwauto = 0
1295 1294
1296 1295 if (n0 < 1.e-20): n0 = 1.e-20
1297 1296
1298 1297 freq = oldfreq
1299 1298 vec_power = numpy.zeros(oldspec.shape[1])
1300 1299 vec_fd = numpy.zeros(oldspec.shape[1])
1301 1300 vec_w = numpy.zeros(oldspec.shape[1])
1302 1301 vec_snr = numpy.zeros(oldspec.shape[1])
1303 1302
1304 1303 # oldspec = numpy.ma.masked_invalid(oldspec)
1305 1304 for ind in range(oldspec.shape[1]):
1306 1305
1307 1306 spec = oldspec[:,ind]
1308 1307 aux = spec*fwindow
1309 1308 max_spec = aux.max()
1310 1309 m = aux.tolist().index(max_spec)
1311 1310
1312 1311 # Smooth
1313 1312 if (smooth == 0):
1314 1313 spec2 = spec
1315 1314 else:
1316 1315 spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1317 1316
1318 1317 # Moments Estimation
1319 1318 bb = spec2[numpy.arange(m,spec2.size)]
1320 1319 bb = (bb<n0).nonzero()
1321 1320 bb = bb[0]
1322 1321
1323 1322 ss = spec2[numpy.arange(0,m + 1)]
1324 1323 ss = (ss<n0).nonzero()
1325 1324 ss = ss[0]
1326 1325
1327 1326 if (bb.size == 0):
1328 1327 bb0 = spec.size - 1 - m
1329 1328 else:
1330 1329 bb0 = bb[0] - 1
1331 1330 if (bb0 < 0):
1332 1331 bb0 = 0
1333 1332
1334 1333 if (ss.size == 0):
1335 1334 ss1 = 1
1336 1335 else:
1337 1336 ss1 = max(ss) + 1
1338 1337
1339 1338 if (ss1 > m):
1340 1339 ss1 = m
1341 1340
1342 1341 #valid = numpy.arange(int(m + bb0 - ss1 + 1)) + ss1
1343 1342 valid = numpy.arange(1,oldspec.shape[0]) # valid: full profile, same as pulse-pair
1344 1343 signal_power = ((spec2[valid] - n0) * fwindow[valid]).mean() # D. Scipión added with correct definition
1345 1344 total_power = (spec2[valid] * fwindow[valid]).mean() # D. Scipión added with correct definition
1346 1345 power = ((spec2[valid] - n0) * fwindow[valid]).sum()
1347 1346 fd = ((spec2[valid]- n0)*freq[valid] * fwindow[valid]).sum() / power
1348 1347 w = numpy.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum() / power)
1349 1348 snr = (spec2.mean()-n0)/n0
1350 1349 if (snr < 1.e-20) :
1351 1350 snr = 1.e-20
1352 1351
1353 1352 # vec_power[ind] = power #D. Scipión replaced with the line below
1354 1353 if wradar ==False:
1355 1354 vec_power[ind] = total_power
1356 1355 else:
1357 1356 vec_power[ind] = signal_power
1358 1357
1359 1358 vec_fd[ind] = fd
1360 1359 vec_w[ind] = w
1361 1360 vec_snr[ind] = snr
1362 1361 return numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
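# --- Illustrative, commented-out sketch of the per-profile moment formulas used above
# (noise-subtracted, window-weighted). Toy 1-D spectrum only.
#
# import numpy
# freq = numpy.linspace(-5, 5, 64)
# spec = numpy.exp(-0.5*((freq - 1.0)/0.8)**2) + 0.05    # peak at 1.0 plus noise floor n0
# n0, fwin = 0.05, numpy.ones(64)
# power = ((spec - n0)*fwin).sum()
# fd = ((spec - n0)*freq*fwin).sum() / power             # ~1.0 (Doppler shift)
# w = numpy.sqrt(((spec - n0)*fwin*(freq - fd)**2).sum() / power)   # ~0.8 (spectral width)
# snr = (spec.mean() - n0) / n0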
1363 1362
1364 1363 #------------------ Get SA Parameters --------------------------
1365 1364
1366 1365 def GetSAParameters(self):
1367 1366 # SA in the frequency domain
1368 1367 pairslist = self.dataOut.groupList
1369 1368 num_pairs = len(pairslist)
1370 1369
1371 1370 vel = self.dataOut.abscissaList
1372 1371 spectra = self.dataOut.data_pre
1373 1372 cspectra = self.dataIn.data_cspc
1374 1373 delta_v = vel[1] - vel[0]
1375 1374
1376 1375 #Calculating the power spectrum
1377 1376 spc_pow = numpy.sum(spectra, 3)*delta_v
1378 1377 #Normalizing Spectra
1379 1378 norm_spectra = spectra/spc_pow
1380 1379 #Calculating the norm_spectra at peak
1381 1380 max_spectra = numpy.max(norm_spectra, 3)
1382 1381
1383 1382 #Normalizing Cross Spectra
1384 1383 norm_cspectra = numpy.zeros(cspectra.shape)
1385 1384
1386 1385 for i in range(num_pairs):
1387 1386 norm_cspectra[i,:,:] = cspectra[i,:,:]/numpy.sqrt(spc_pow[pairslist[i][0],:]*spc_pow[pairslist[i][1],:])
1388 1387
1389 1388 max_cspectra = numpy.max(norm_cspectra,2)
1390 1389 max_cspectra_index = numpy.argmax(norm_cspectra, 2)
1391 1390
1392 1391 for i in range(num_pairs):
1393 1392 cspc_par[i,:,:] = __calculateMoments(norm_cspectra)
1394 1393 #------------------- Get Lags ----------------------------------
1395 1394
1396 1395 class SALags(Operation):
1397 1396 '''
1398 1397 Function SALags()
1399 1398
1400 1399 Input:
1401 1400 self.dataOut.data_pre
1402 1401 self.dataOut.abscissaList
1403 1402 self.dataOut.noise
1404 1403 self.dataOut.normFactor
1405 1404 self.dataOut.data_snr
1406 1405 self.dataOut.groupList
1407 1406 self.dataOut.nChannels
1408 1407
1409 1408 Affected:
1410 1409 self.dataOut.data_param
1411 1410
1412 1411 '''
1413 1412 def run(self, dataOut):
1414 1413 data_acf = dataOut.data_pre[0]
1415 1414 data_ccf = dataOut.data_pre[1]
1416 1415 normFactor_acf = dataOut.normFactor[0]
1417 1416 normFactor_ccf = dataOut.normFactor[1]
1418 1417 pairs_acf = dataOut.groupList[0]
1419 1418 pairs_ccf = dataOut.groupList[1]
1420 1419
1421 1420 nHeights = dataOut.nHeights
1422 1421 absc = dataOut.abscissaList
1423 1422 noise = dataOut.noise
1424 1423 SNR = dataOut.data_snr
1425 1424 nChannels = dataOut.nChannels
1426 1425 # pairsList = dataOut.groupList
1427 1426 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairsList, nChannels)
1428 1427
1429 1428 for l in range(len(pairs_acf)):
1430 1429 data_acf[l,:,:] = data_acf[l,:,:]/normFactor_acf[l,:]
1431 1430
1432 1431 for l in range(len(pairs_ccf)):
1433 1432 data_ccf[l,:,:] = data_ccf[l,:,:]/normFactor_ccf[l,:]
1434 1433
1435 1434 dataOut.data_param = numpy.zeros((len(pairs_ccf)*2 + 1, nHeights))
1436 1435 dataOut.data_param[:-1,:] = self.__calculateTaus(data_acf, data_ccf, absc)
1437 1436 dataOut.data_param[-1,:] = self.__calculateLag1Phase(data_acf, absc)
1438 1437 return
1439 1438
1440 1439 # def __getPairsAutoCorr(self, pairsList, nChannels):
1441 1440 #
1442 1441 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1443 1442 #
1444 1443 # for l in range(len(pairsList)):
1445 1444 # firstChannel = pairsList[l][0]
1446 1445 # secondChannel = pairsList[l][1]
1447 1446 #
1448 1447 # #Getting autocorrelation pairs
1449 1448 # if firstChannel == secondChannel:
1450 1449 # pairsAutoCorr[firstChannel] = int(l)
1451 1450 #
1452 1451 # pairsAutoCorr = pairsAutoCorr.astype(int)
1453 1452 #
1454 1453 # pairsCrossCorr = range(len(pairsList))
1455 1454 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1456 1455 #
1457 1456 # return pairsAutoCorr, pairsCrossCorr
1458 1457
1459 1458 def __calculateTaus(self, data_acf, data_ccf, lagRange):
1460 1459
1461 1460 lag0 = data_acf.shape[1]//2
1462 1461 # Autocorrelation function
1463 1462 mean_acf = stats.nanmean(data_acf, axis = 0)
1464 1463
1465 1464 # Getting the TauCross index
1466 1465 ind_ccf = data_ccf.argmax(axis = 1)
1467 1466 # Getting the TauAuto index
1468 1467 ind_acf = numpy.zeros(ind_ccf.shape,dtype = 'int')
1469 1468 ccf_lag0 = data_ccf[:,lag0,:]
1470 1469
1471 1470 for i in range(ccf_lag0.shape[0]):
1472 1471 ind_acf[i,:] = numpy.abs(mean_acf - ccf_lag0[i,:]).argmin(axis = 0)
1473 1472
1474 1473 # Getting TauCross and TauAuto
1475 1474 tau_ccf = lagRange[ind_ccf]
1476 1475 tau_acf = lagRange[ind_acf]
1477 1476
1478 1477 Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0])
1479 1478
1480 1479 tau_ccf[Nan1,Nan2] = numpy.nan
1481 1480 tau_acf[Nan1,Nan2] = numpy.nan
1482 1481 tau = numpy.vstack((tau_ccf,tau_acf))
1483 1482
1484 1483 return tau
1485 1484
1486 1485 def __calculateLag1Phase(self, data, lagTRange):
1487 1486 data1 = stats.nanmean(data, axis = 0)
1488 1487 lag1 = numpy.where(lagTRange == 0)[0][0] + 1
1489 1488
1490 1489 phase = numpy.angle(data1[lag1,:])
1491 1490
1492 1491 return phase
1493 1492
1494 1493 class SpectralFitting(Operation):
1495 1494 '''
1496 1495 Function SpectralFitting()
1497 1496
1498 1497 Input:
1499 1498 Output:
1500 1499 Variables modified:
1501 1500 '''
1502 1501
1503 1502 def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
1504 1503
1505 1504
1506 1505 if path != None:
1507 1506 sys.path.append(path)
1508 1507 self.dataOut.library = importlib.import_module(file)
1509 1508
1510 1509 #To be inserted as a parameter
1511 1510 groupArray = numpy.array(groupList)
1512 1511 # groupArray = numpy.array([[0,1],[2,3]])
1513 1512 self.dataOut.groupList = groupArray
1514 1513
1515 1514 nGroups = groupArray.shape[0]
1516 1515 nChannels = self.dataIn.nChannels
1517 1516 nHeights=self.dataIn.heightList.size
1518 1517
1519 1518 #Parameters Array
1520 1519 self.dataOut.data_param = None
1521 1520
1522 1521 #Set constants
1523 1522 constants = self.dataOut.library.setConstants(self.dataIn)
1524 1523 self.dataOut.constants = constants
1525 1524 M = self.dataIn.normFactor
1526 1525 N = self.dataIn.nFFTPoints
1527 1526 ippSeconds = self.dataIn.ippSeconds
1528 1527 K = self.dataIn.nIncohInt
1529 1528 pairsArray = numpy.array(self.dataIn.pairsList)
1530 1529
1531 1530 #List of possible combinations
1532 1531 listComb = list(itertools.combinations(numpy.arange(groupArray.shape[1]),2))
1533 1532 indCross = numpy.zeros(len(listComb), dtype = 'int')
1534 1533
1535 1534 if getSNR:
1536 1535 listChannels = groupArray.reshape((groupArray.size))
1537 1536 listChannels.sort()
1538 1537 noise = self.dataIn.getNoise()
1539 1538 self.dataOut.data_snr = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels])
1540 1539
1541 1540 for i in range(nGroups):
1542 1541 coord = groupArray[i,:]
1543 1542
1544 1543 #Input data array
1545 1544 data = self.dataIn.data_spc[coord,:,:]/(M*N)
1546 1545 data = data.reshape((data.shape[0]*data.shape[1],data.shape[2]))
1547 1546
1548 1547 #Cross Spectra data array for Covariance Matrices
1549 1548 ind = 0
1550 1549 for pairs in listComb:
1551 1550 pairsSel = numpy.array([coord[pairs[0]],coord[pairs[1]]])
1552 1551 indCross[ind] = int(numpy.where(numpy.all(pairsArray == pairsSel, axis = 1))[0][0])
1553 1552 ind += 1
1554 1553 dataCross = self.dataIn.data_cspc[indCross,:,:]/(M*N)
1555 1554 dataCross = dataCross**2/K
1556 1555
1557 1556 for h in range(nHeights):
1558 1557
1559 1558 #Input
1560 1559 d = data[:,h]
1561 1560
1562 1561 #Covariance Matrix
1563 1562 D = numpy.diag(d**2/K)
1564 1563 ind = 0
1565 1564 for pairs in listComb:
1566 1565 #Coordinates in Covariance Matrix
1567 1566 x = pairs[0]
1568 1567 y = pairs[1]
1569 1568 #Channel Index
1570 1569 S12 = dataCross[ind,:,h]
1571 1570 D12 = numpy.diag(S12)
1572 1571 #Completing Covariance Matrix with Cross Spectra
1573 1572 D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
1574 1573 D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
1575 1574 ind += 1
1576 1575 Dinv=numpy.linalg.inv(D)
1577 1576 L=numpy.linalg.cholesky(Dinv)
1578 1577 LT=L.T
1579 1578
1580 1579 dp = numpy.dot(LT,d)
1581 1580
1582 1581 #Initial values
1583 1582 data_spc = self.dataIn.data_spc[coord,:,h]
1584 1583
1585 1584 if (h>0)and(error1[3]<5):
1586 1585 p0 = self.dataOut.data_param[i,:,h-1]
1587 1586 else:
1588 1587 p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i))
1589 1588
1590 1589 try:
1591 1590 #Least Squares
1592 1591 minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
1593 1592 # minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
1594 1593 #Chi square error
1595 1594 error0 = numpy.sum(infodict['fvec']**2)/(2*N)
1596 1595 #Error with Jacobian
1597 1596 error1 = self.dataOut.library.errorFunction(minp,constants,LT)
1598 1597 except:
1599 1598 minp = p0*numpy.nan
1600 1599 error0 = numpy.nan
1601 1600 error1 = p0*numpy.nan
1602 1601
1603 1602 #Save
1604 1603 if self.dataOut.data_param is None:
1605 1604 self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
1606 1605 self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
1607 1606
1608 1607 self.dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
1609 1608 self.dataOut.data_param[i,:,h] = minp
1610 1609 return
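# --- Illustrative, commented-out sketch of the fitting scheme above: the covariance matrix D
# is inverted, its Cholesky factor whitens both the data and the model, and optimize.leastsq
# minimizes the whitened residual LT*(d - f(p)). The one-parameter model below is a toy
# stand-in for library.modelFunction, used only to show the mechanics.
#
# import numpy
# from scipy import optimize
# d = numpy.array([1.0, 2.1, 2.9])                      # "measured" spectrum samples
# D = numpy.diag(d**2/10.)                              # diagonal covariance (K = 10)
# LT = numpy.linalg.cholesky(numpy.linalg.inv(D)).T
# model = lambda p: p[0]*numpy.array([1.0, 2.0, 3.0])   # toy model
# resid = lambda p: numpy.dot(LT, d) - numpy.dot(LT, model(p))
# minp, ier = optimize.leastsq(resid, x0=[0.5])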
1611 1610
1612 1611 def __residFunction(self, p, dp, LT, constants):
1613 1612
1614 1613 fm = self.dataOut.library.modelFunction(p, constants)
1615 1614 fmp=numpy.dot(LT,fm)
1616 1615
1617 1616 return dp-fmp
1618 1617
1619 1618 def __getSNR(self, z, noise):
1620 1619
1621 1620 avg = numpy.average(z, axis=1)
1622 1621 SNR = (avg.T-noise)/noise
1623 1622 SNR = SNR.T
1624 1623 return SNR
1625 1624
1626 1625 def __chisq(p,chindex,hindex):
1627 1626 #similar to Resid but calculates CHI**2
1628 1627 [LT,d,fm]=setupLTdfm(p,chindex,hindex)
1629 1628 dp=numpy.dot(LT,d)
1630 1629 fmp=numpy.dot(LT,fm)
1631 1630 chisq=numpy.dot((dp-fmp).T,(dp-fmp))
1632 1631 return chisq
1633 1632
1634 1633 class WindProfiler(Operation):
1635 1634
1636 1635 __isConfig = False
1637 1636
1638 1637 __initime = None
1639 1638 __lastdatatime = None
1640 1639 __integrationtime = None
1641 1640
1642 1641 __buffer = None
1643 1642
1644 1643 __dataReady = False
1645 1644
1646 1645 __firstdata = None
1647 1646
1648 1647 n = None
1649 1648
1650 1649 def __init__(self):
1651 1650 Operation.__init__(self)
1652 1651
1653 1652 def __calculateCosDir(self, elev, azim):
1654 1653 zen = (90 - elev)*numpy.pi/180
1655 1654 azim = azim*numpy.pi/180
1656 1655 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
1657 1656 cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2)
1658 1657
1659 1658 signX = numpy.sign(numpy.cos(azim))
1660 1659 signY = numpy.sign(numpy.sin(azim))
1661 1660
1662 1661 cosDirX = numpy.copysign(cosDirX, signX)
1663 1662 cosDirY = numpy.copysign(cosDirY, signY)
1664 1663 return cosDirX, cosDirY
1665 1664
1666 1665 def __calculateAngles(self, theta_x, theta_y, azimuth):
1667 1666
1668 1667 dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2)
1669 1668 zenith_arr = numpy.arccos(dir_cosw)
1670 1669 azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180
1671 1670
1672 1671 dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr)
1673 1672 dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr)
1674 1673
1675 1674 return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw
1676 1675
1677 1676 def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly):
1678 1677
1679 1678 #
1680 1679 if horOnly:
1681 1680 A = numpy.c_[dir_cosu,dir_cosv]
1682 1681 else:
1683 1682 A = numpy.c_[dir_cosu,dir_cosv,dir_cosw]
1684 1683 A = numpy.asmatrix(A)
1685 1684 A1 = numpy.linalg.inv(A.transpose()*A)*A.transpose()
1686 1685
1687 1686 return A1
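# --- Illustrative, commented-out sketch: the DBS solve above is a plain least-squares
# inversion of radial_velocities = A * (u, v, w), with A built from the direction cosines of
# each beam. Toy three-beam geometry below.
#
# import numpy
# dir_cosu = numpy.array([0.26, 0.0, 0.0])     # east-tilted, north-tilted, vertical beams
# dir_cosv = numpy.array([0.0, 0.26, 0.0])
# dir_cosw = numpy.array([0.97, 0.97, 1.0])
# A = numpy.c_[dir_cosu, dir_cosv, dir_cosw]
# A1 = numpy.linalg.inv(A.T @ A) @ A.T          # same pseudo-inverse as above
# vrad = numpy.array([3.0, -1.5, 0.2])          # one radial velocity per beam
# u, v, w = A1 @ vrad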
1688 1687
1689 1688 def __correctValues(self, heiRang, phi, velRadial, SNR):
1690 1689 listPhi = phi.tolist()
1691 1690 maxid = listPhi.index(max(listPhi))
1692 1691 minid = listPhi.index(min(listPhi))
1693 1692
1694 1693 rango = list(range(len(phi)))
1695 1694 # rango = numpy.delete(rango,maxid)
1696 1695
1697 1696 heiRang1 = heiRang*math.cos(phi[maxid])
1698 1697 heiRangAux = heiRang*math.cos(phi[minid])
1699 1698 indOut = (heiRang1 < heiRangAux[0]).nonzero()
1700 1699 heiRang1 = numpy.delete(heiRang1,indOut)
1701 1700
1702 1701 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
1703 1702 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
1704 1703
1705 1704 for i in rango:
1706 1705 x = heiRang*math.cos(phi[i])
1707 1706 y1 = velRadial[i,:]
1708 1707 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
1709 1708
1710 1709 x1 = heiRang1
1711 1710 y11 = f1(x1)
1712 1711
1713 1712 y2 = SNR[i,:]
1714 1713 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
1715 1714 y21 = f2(x1)
1716 1715
1717 1716 velRadial1[i,:] = y11
1718 1717 SNR1[i,:] = y21
1719 1718
1720 1719 return heiRang1, velRadial1, SNR1
1721 1720
1722 1721 def __calculateVelUVW(self, A, velRadial):
1723 1722
1724 1723 # Matrix operation
1725 1724 # velUVW = numpy.zeros((velRadial.shape[1],3))
1726 1725 # for ind in range(velRadial.shape[1]):
1727 1726 # velUVW[ind,:] = numpy.dot(A,velRadial[:,ind])
1728 1727 # velUVW = velUVW.transpose()
1729 1728 velUVW = numpy.zeros((A.shape[0],velRadial.shape[1]))
1730 1729 velUVW[:,:] = numpy.dot(A,velRadial)
1731 1730
1732 1731
1733 1732 return velUVW
1734 1733
1735 1734 # def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0):
1736 1735
1737 1736 def techniqueDBS(self, kwargs):
1738 1737 """
1739 1738 Function that implements Doppler Beam Swinging (DBS) technique.
1740 1739
1741 1740 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1742 1741 Direction correction (if necessary), Ranges and SNR
1743 1742
1744 1743 Output: Winds estimation (Zonal, Meridional and Vertical)
1745 1744
1746 1745 Parameters affected: Winds, height range, SNR
1747 1746 """
1748 1747 velRadial0 = kwargs['velRadial']
1749 1748 heiRang = kwargs['heightList']
1750 1749 SNR0 = kwargs['SNR']
1751 1750
1752 1751 if 'dirCosx' in kwargs and 'dirCosy' in kwargs:
1753 1752 theta_x = numpy.array(kwargs['dirCosx'])
1754 1753 theta_y = numpy.array(kwargs['dirCosy'])
1755 1754 else:
1756 1755 elev = numpy.array(kwargs['elevation'])
1757 1756 azim = numpy.array(kwargs['azimuth'])
1758 1757 theta_x, theta_y = self.__calculateCosDir(elev, azim)
1759 1758 azimuth = kwargs['correctAzimuth']
1760 1759 if 'horizontalOnly' in kwargs:
1761 1760 horizontalOnly = kwargs['horizontalOnly']
1762 1761 else: horizontalOnly = False
1763 1762 if 'correctFactor' in kwargs:
1764 1763 correctFactor = kwargs['correctFactor']
1765 1764 else: correctFactor = 1
1766 1765 if 'channelList' in kwargs:
1767 1766 channelList = kwargs['channelList']
1768 1767 if len(channelList) == 2:
1769 1768 horizontalOnly = True
1770 1769 arrayChannel = numpy.array(channelList)
1771 1770 param = param[arrayChannel,:,:]
1772 1771 theta_x = theta_x[arrayChannel]
1773 1772 theta_y = theta_y[arrayChannel]
1774 1773
1775 1774 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
1776 1775 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
1777 1776 A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly)
1778 1777
1779 1778 # Computation of the velocity components with DBS
1780 1779 winds = self.__calculateVelUVW(A,velRadial1)
1781 1780
1782 1781 return winds, heiRang1, SNR1
1783 1782
1784 1783 def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None):
1785 1784
1786 1785 nPairs = len(pairs_ccf)
1787 1786 posx = numpy.asarray(posx)
1788 1787 posy = numpy.asarray(posy)
1789 1788
1790 1789 # Inverse rotation to align with the azimuth
1791 1790 if azimuth!= None:
1792 1791 azimuth = azimuth*math.pi/180
1793 1792 posx1 = posx*math.cos(azimuth) + posy*math.sin(azimuth)
1794 1793 posy1 = -posx*math.sin(azimuth) + posy*math.cos(azimuth)
1795 1794 else:
1796 1795 posx1 = posx
1797 1796 posy1 = posy
1798 1797
1799 1798 # Distance computation
1800 1799 distx = numpy.zeros(nPairs)
1801 1800 disty = numpy.zeros(nPairs)
1802 1801 dist = numpy.zeros(nPairs)
1803 1802 ang = numpy.zeros(nPairs)
1804 1803
1805 1804 for i in range(nPairs):
1806 1805 distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]]
1807 1806 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
1808 1807 dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2)
1809 1808 ang[i] = numpy.arctan2(disty[i],distx[i])
1810 1809
1811 1810 return distx, disty, dist, ang
1812 1811 #Matrix computation
1813 1812 # nPairs = len(pairs)
1814 1813 # ang1 = numpy.zeros((nPairs, 2, 1))
1815 1814 # dist1 = numpy.zeros((nPairs, 2, 1))
1816 1815 #
1817 1816 # for j in range(nPairs):
1818 1817 # dist1[j,0,0] = dist[pairs[j][0]]
1819 1818 # dist1[j,1,0] = dist[pairs[j][1]]
1820 1819 # ang1[j,0,0] = ang[pairs[j][0]]
1821 1820 # ang1[j,1,0] = ang[pairs[j][1]]
1822 1821 #
1823 1822 # return distx,disty, dist1,ang1
1824 1823
1825 1824
1826 1825 def __calculateVelVer(self, phase, lagTRange, _lambda):
1827 1826
1828 1827 Ts = lagTRange[1] - lagTRange[0]
1829 1828 velW = -_lambda*phase/(4*math.pi*Ts)
1830 1829
1831 1830 return velW
1832 1831
1833 1832 def __calculateVelHorDir(self, dist, tau1, tau2, ang):
1834 1833 nPairs = tau1.shape[0]
1835 1834 nHeights = tau1.shape[1]
1836 1835 vel = numpy.zeros((nPairs,3,nHeights))
1837 1836 dist1 = numpy.reshape(dist, (dist.size,1))
1838 1837
1839 1838 angCos = numpy.cos(ang)
1840 1839 angSin = numpy.sin(ang)
1841 1840
1842 1841 vel0 = dist1*tau1/(2*tau2**2)
1843 1842 vel[:,0,:] = (vel0*angCos).sum(axis = 1)
1844 1843 vel[:,1,:] = (vel0*angSin).sum(axis = 1)
1845 1844
1846 1845 ind = numpy.where(numpy.isinf(vel))
1847 1846 vel[ind] = numpy.nan
1848 1847
1849 1848 return vel
1850 1849
1851 1850 # def __getPairsAutoCorr(self, pairsList, nChannels):
1852 1851 #
1853 1852 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1854 1853 #
1855 1854 # for l in range(len(pairsList)):
1856 1855 # firstChannel = pairsList[l][0]
1857 1856 # secondChannel = pairsList[l][1]
1858 1857 #
1859 1858 # #Getting autocorrelation pairs
1860 1859 # if firstChannel == secondChannel:
1861 1860 # pairsAutoCorr[firstChannel] = int(l)
1862 1861 #
1863 1862 # pairsAutoCorr = pairsAutoCorr.astype(int)
1864 1863 #
1865 1864 # pairsCrossCorr = range(len(pairsList))
1866 1865 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1867 1866 #
1868 1867 # return pairsAutoCorr, pairsCrossCorr
1869 1868
1870 1869 # def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor):
1871 1870 def techniqueSA(self, kwargs):
1872 1871
1873 1872 """
1874 1873 Function that implements Spaced Antenna (SA) technique.
1875 1874
1876 1875 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1877 1876 Direction correction (if necessary), Ranges and SNR
1878 1877
1879 1878 Output: Winds estimation (Zonal, Meridional and Vertical)
1880 1879
1881 1880 Parameters affected: Winds
1882 1881 """
1883 1882 position_x = kwargs['positionX']
1884 1883 position_y = kwargs['positionY']
1885 1884 azimuth = kwargs['azimuth']
1886 1885
1887 1886 if 'correctFactor' in kwargs:
1888 1887 correctFactor = kwargs['correctFactor']
1889 1888 else:
1890 1889 correctFactor = 1
1891 1890
1892 1891 groupList = kwargs['groupList']
1893 1892 pairs_ccf = groupList[1]
1894 1893 tau = kwargs['tau']
1895 1894 _lambda = kwargs['_lambda']
1896 1895
1897 1896 #Cross Correlation pairs obtained
1898 1897 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels)
1899 1898 # pairsArray = numpy.array(pairsList)[pairsCrossCorr]
1900 1899 # pairsSelArray = numpy.array(pairsSelected)
1901 1900 # pairs = []
1902 1901 #
1903 1902 # #Wind estimation pairs obtained
1904 1903 # for i in range(pairsSelArray.shape[0]/2):
1905 1904 # ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0]
1906 1905 # ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0]
1907 1906 # pairs.append((ind1,ind2))
1908 1907
1909 1908 indtau = tau.shape[0]//2
1910 1909 tau1 = tau[:indtau,:]
1911 1910 tau2 = tau[indtau:-1,:]
1912 1911 # tau1 = tau1[pairs,:]
1913 1912 # tau2 = tau2[pairs,:]
1914 1913 phase1 = tau[-1,:]
1915 1914
1916 1915 #---------------------------------------------------------------------
1917 1916 # Direct method
1918 1917 distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth)
1919 1918 winds = self.__calculateVelHorDir(dist, tau1, tau2, ang)
1920 1919 winds = stats.nanmean(winds, axis=0)
1921 1920 #---------------------------------------------------------------------
1922 1921 # General method
1923 1922 # distx, disty, dist = self.calculateDistance(position_x,position_y,pairsCrossCorr, pairsList, azimuth)
1924 1923 # #Correlation-function coefficients
1925 1924 # F,G,A,B,H = self.calculateCoef(tau1,tau2,distx,disty,n)
1926 1925 # #Velocity computation
1927 1926 # winds = self.calculateVelUV(F,G,A,B,H)
1928 1927
1929 1928 #---------------------------------------------------------------------
1930 1929 winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda)
1931 1930 winds = correctFactor*winds
1932 1931 return winds
1933 1932
1934 1933 def __checkTime(self, currentTime, paramInterval, outputInterval):
1935 1934
1936 1935 dataTime = currentTime + paramInterval
1937 1936 deltaTime = dataTime - self.__initime
1938 1937
1939 1938 if deltaTime >= outputInterval or deltaTime < 0:
1940 1939 self.__dataReady = True
1941 1940 return
1942 1941
1943 1942 def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax):
1944 1943 '''
1945 1944 Function that implements winds estimation technique with detected meteors.
1946 1945
1947 1946 Input: Detected meteors, minimum number of meteors required for wind estimation
1948 1947
1949 1948 Output: Winds estimation (Zonal and Meridional)
1950 1949
1951 1950 Parameters affected: Winds
1952 1951 '''
1953 1952 #Settings
1954 1953 nInt = (heightMax - heightMin)/2
1955 1954 nInt = int(nInt)
1956 1955 winds = numpy.zeros((2,nInt))*numpy.nan
1957 1956
1958 1957 #Filter errors
1959 1958 error = numpy.where(arrayMeteor[:,-1] == 0)[0]
1960 1959 finalMeteor = arrayMeteor[error,:]
1961 1960
1962 1961 #Meteor Histogram
1963 1962 finalHeights = finalMeteor[:,2]
1964 1963 hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax))
1965 1964 nMeteorsPerI = hist[0]
1966 1965 heightPerI = hist[1]
1967 1966
1968 1967 #Sort of meteors
1969 1968 indSort = finalHeights.argsort()
1970 1969 finalMeteor2 = finalMeteor[indSort,:]
1971 1970
1972 1971 # Calculating winds
1973 1972 ind1 = 0
1974 1973 ind2 = 0
1975 1974
1976 1975 for i in range(nInt):
1977 1976 nMet = nMeteorsPerI[i]
1978 1977 ind1 = ind2
1979 1978 ind2 = ind1 + nMet
1980 1979
1981 1980 meteorAux = finalMeteor2[ind1:ind2,:]
1982 1981
1983 1982 if meteorAux.shape[0] >= meteorThresh:
1984 1983 vel = meteorAux[:, 6]
1985 1984 zen = meteorAux[:, 4]*numpy.pi/180
1986 1985 azim = meteorAux[:, 3]*numpy.pi/180
1987 1986
1988 1987 n = numpy.cos(zen)
1989 1988 # m = (1 - n**2)/(1 - numpy.tan(azim)**2)
1990 1989 # l = m*numpy.tan(azim)
1991 1990 l = numpy.sin(zen)*numpy.sin(azim)
1992 1991 m = numpy.sin(zen)*numpy.cos(azim)
1993 1992
1994 1993 A = numpy.vstack((l, m)).transpose()
1995 1994 A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose())
1996 1995 windsAux = numpy.dot(A1, vel)
1997 1996
1998 1997 winds[0,i] = windsAux[0]
1999 1998 winds[1,i] = windsAux[1]
2000 1999
2001 2000 return winds, heightPerI[:-1]
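# --- Illustrative, commented-out sketch of the per-bin solve above: each meteor contributes
# one radial velocity and a row (l, m) of horizontal direction cosines, and (u, v) is the
# least-squares solution. Toy values only.
#
# import numpy
# zen = numpy.radians([30., 40., 35., 25.])
# azim = numpy.radians([10., 100., 200., 300.])
# vel = numpy.array([5.1, -2.3, -4.8, 3.0])            # radial velocities (m/s)
# l = numpy.sin(zen)*numpy.sin(azim)
# m = numpy.sin(zen)*numpy.cos(azim)
# A = numpy.vstack((l, m)).T
# A1 = numpy.dot(numpy.linalg.inv(numpy.dot(A.T, A)), A.T)
# u, v = numpy.dot(A1, vel)                            # zonal, meridional estimate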
2002 2001
2003 2002 def techniqueNSM_SA(self, **kwargs):
2004 2003 metArray = kwargs['metArray']
2005 2004 heightList = kwargs['heightList']
2006 2005 timeList = kwargs['timeList']
2007 2006
2008 2007 rx_location = kwargs['rx_location']
2009 2008 groupList = kwargs['groupList']
2010 2009 azimuth = kwargs['azimuth']
2011 2010 dfactor = kwargs['dfactor']
2012 2011 k = kwargs['k']
2013 2012
2014 2013 azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth)
2015 2014 d = dist*dfactor
2016 2015 #Phase calculation
2017 2016 metArray1 = self.__getPhaseSlope(metArray, heightList, timeList)
2018 2017
2019 2018 metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities
2020 2019
2021 2020 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2022 2021 azimuth1 = azimuth1*numpy.pi/180
2023 2022
2024 2023 for i in range(heightList.size):
2025 2024 h = heightList[i]
2026 2025 indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0]
2027 2026 metHeight = metArray1[indH,:]
2028 2027 if metHeight.shape[0] >= 2:
2029 2028 velAux = numpy.asmatrix(metHeight[:,-2]).T #Radial Velocities
2030 2029 iazim = metHeight[:,1].astype(int)
2031 2030 azimAux = numpy.asmatrix(azimuth1[iazim]).T #Azimuths
2032 2031 A = numpy.hstack((numpy.cos(azimAux),numpy.sin(azimAux)))
2033 2032 A = numpy.asmatrix(A)
2034 2033 A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose()
2035 2034 velHor = numpy.dot(A1,velAux)
2036 2035
2037 2036 velEst[i,:] = numpy.squeeze(velHor)
2038 2037 return velEst
2039 2038
2040 2039 def __getPhaseSlope(self, metArray, heightList, timeList):
2041 2040 meteorList = []
2042 2041 #utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2
2043 2042 #Putting back together the meteor matrix
2044 2043 utctime = metArray[:,0]
2045 2044 uniqueTime = numpy.unique(utctime)
2046 2045
2047 2046 phaseDerThresh = 0.5
2048 2047 ippSeconds = timeList[1] - timeList[0]
2049 2048 sec = numpy.where(timeList>1)[0][0]
2050 2049 nPairs = metArray.shape[1] - 6
2051 2050 nHeights = len(heightList)
2052 2051
2053 2052 for t in uniqueTime:
2054 2053 metArray1 = metArray[utctime==t,:]
2055 2054 # phaseDerThresh = numpy.pi/4 #reduce the phase threshold
2056 2055 tmet = metArray1[:,1].astype(int)
2057 2056 hmet = metArray1[:,2].astype(int)
2058 2057
2059 2058 metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1))
2060 2059 metPhase[:,:] = numpy.nan
2061 2060 metPhase[:,hmet,tmet] = metArray1[:,6:].T
2062 2061
2063 2062 #Delete short trails
2064 2063 metBool = ~numpy.isnan(metPhase[0,:,:])
2065 2064 heightVect = numpy.sum(metBool, axis = 1)
2066 2065 metBool[heightVect<sec,:] = False
2067 2066 metPhase[:,heightVect<sec,:] = numpy.nan
2068 2067
2069 2068 #Derivative
2070 2069 metDer = numpy.abs(metPhase[:,:,1:] - metPhase[:,:,:-1])
2071 2070 phDerAux = numpy.dstack((numpy.full((nPairs,nHeights,1), False, dtype=bool),metDer > phaseDerThresh))
2072 2071 metPhase[phDerAux] = numpy.nan
2073 2072
2074 2073 #--------------------------METEOR DETECTION -----------------------------------------
2075 2074 indMet = numpy.where(numpy.any(metBool,axis=1))[0]
2076 2075
2077 2076 for p in numpy.arange(nPairs):
2078 2077 phase = metPhase[p,:,:]
2079 2078 phDer = metDer[p,:,:]
2080 2079
2081 2080 for h in indMet:
2082 2081 height = heightList[h]
2083 2082 phase1 = phase[h,:] #82
2084 2083 phDer1 = phDer[h,:]
2085 2084
2086 2085 phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap
2087 2086
2088 2087 indValid = numpy.where(~numpy.isnan(phase1))[0]
2089 2088 initMet = indValid[0]
2090 2089 endMet = 0
2091 2090
2092 2091 for i in range(len(indValid)-1):
2093 2092
2094 2093 #Time difference
2095 2094 inow = indValid[i]
2096 2095 inext = indValid[i+1]
2097 2096 idiff = inext - inow
2098 2097 #Phase difference
2099 2098 phDiff = numpy.abs(phase1[inext] - phase1[inow])
2100 2099
2101 2100 if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor
2102 2101 sizeTrail = inow - initMet + 1
2103 2102 if sizeTrail>3*sec: #Skip too-short meteors
2104 2103 x = numpy.arange(initMet,inow+1)*ippSeconds
2105 2104 y = phase1[initMet:inow+1]
2106 2105 ynnan = ~numpy.isnan(y)
2107 2106 x = x[ynnan]
2108 2107 y = y[ynnan]
2109 2108 slope, intercept, r_value, p_value, std_err = stats.linregress(x,y)
2110 2109 ylin = x*slope + intercept
2111 2110 rsq = r_value**2
2112 2111 if rsq > 0.5:
2113 2112 vel = slope#*height*1000/(k*d)
2114 2113 estAux = numpy.array([utctime,p,height, vel, rsq])
2115 2114 meteorList.append(estAux)
2116 2115 initMet = inext
2117 2116 metArray2 = numpy.array(meteorList)
2118 2117
2119 2118 return metArray2
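# Illustrative sketch (standalone): __getPhaseSlope above estimates one radial velocity per
# meteor trail by unwrapping the CCF phase and fitting a straight line; trails are kept only
# when r**2 > 0.5. The values below are hypothetical.
import numpy
from scipy import stats
ippSeconds = 0.01                                   # hypothetical sampling step (s)
x = numpy.arange(50)*ippSeconds
true_slope = 5.0                                    # hypothetical phase slope (rad/s)
y = numpy.unwrap(numpy.angle(numpy.exp(1j*true_slope*x)))
slope, intercept, r_value, p_value, std_err = stats.linregress(x, y)
rsq = r_value**2                                    # ~1.0 here, so this trail would be kept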
2120 2119
2121 2120 def __calculateAzimuth1(self, rx_location, pairslist, azimuth0):
2122 2121
2123 2122 azimuth1 = numpy.zeros(len(pairslist))
2124 2123 dist = numpy.zeros(len(pairslist))
2125 2124
2126 2125 for i in range(len(rx_location)):
2127 2126 ch0 = pairslist[i][0]
2128 2127 ch1 = pairslist[i][1]
2129 2128
2130 2129 diffX = rx_location[ch0][0] - rx_location[ch1][0]
2131 2130 diffY = rx_location[ch0][1] - rx_location[ch1][1]
2132 2131 azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi
2133 2132 dist[i] = numpy.sqrt(diffX**2 + diffY**2)
2134 2133
2135 2134 azimuth1 -= azimuth0
2136 2135 return azimuth1, dist
2137 2136
2138 2137 def techniqueNSM_DBS(self, **kwargs):
2139 2138 metArray = kwargs['metArray']
2140 2139 heightList = kwargs['heightList']
2141 2140 timeList = kwargs['timeList']
2142 2141 azimuth = kwargs['azimuth']
2143 2142 theta_x = numpy.array(kwargs['theta_x'])
2144 2143 theta_y = numpy.array(kwargs['theta_y'])
2145 2144
2146 2145 utctime = metArray[:,0]
2147 2146 cmet = metArray[:,1].astype(int)
2148 2147 hmet = metArray[:,3].astype(int)
2149 2148 SNRmet = metArray[:,4]
2150 2149 vmet = metArray[:,5]
2151 2150 spcmet = metArray[:,6]
2152 2151
2153 2152 nChan = numpy.max(cmet) + 1
2154 2153 nHeights = len(heightList)
2155 2154
2156 2155 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
2157 2156 hmet = heightList[hmet]
2158 2157 h1met = hmet*numpy.cos(zenith_arr[cmet]) #Corrected heights
2159 2158
2160 2159 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2161 2160
2162 2161 for i in range(nHeights - 1):
2163 2162 hmin = heightList[i]
2164 2163 hmax = heightList[i + 1]
2165 2164
2166 2165 thisH = (h1met>=hmin) & (h1met<hmax) & (cmet!=2) & (SNRmet>8) & (vmet<50) & (spcmet<10)
2167 2166 indthisH = numpy.where(thisH)
2168 2167
2169 2168 if numpy.size(indthisH) > 3:
2170 2169
2171 2170 vel_aux = vmet[thisH]
2172 2171 chan_aux = cmet[thisH]
2173 2172 cosu_aux = dir_cosu[chan_aux]
2174 2173 cosv_aux = dir_cosv[chan_aux]
2175 2174 cosw_aux = dir_cosw[chan_aux]
2176 2175
2177 2176 nch = numpy.size(numpy.unique(chan_aux))
2178 2177 if nch > 1:
2179 2178 A = self.__calculateMatA(cosu_aux, cosv_aux, cosw_aux, True)
2180 2179 velEst[i,:] = numpy.dot(A,vel_aux)
2181 2180
2182 2181 return velEst
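# Illustrative sketch (standalone): the DBS branch above keeps an echo only when it passes
# the combined SNR, radial-velocity and spectral-width thresholds. Hypothetical values below.
import numpy
SNRmet = numpy.array([12., 5., 20.])
vmet = numpy.array([10., 60., -30.])
spcmet = numpy.array([4., 2., 12.])
keep = (SNRmet > 8) & (vmet < 50) & (spcmet < 10)   # -> [True, False, False]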
2183 2182
2184 2183 def run(self, dataOut, technique, nHours=1, hmin=70, hmax=110, **kwargs):
2185 2184
2186 2185 param = dataOut.data_param
2187 2186 if dataOut.abscissaList is not None:
2188 2187 absc = dataOut.abscissaList[:-1]
2189 2188 # noise = dataOut.noise
2190 2189 heightList = dataOut.heightList
2191 2190 SNR = dataOut.data_snr
2192 2191
2193 2192 if technique == 'DBS':
2194 2193
2195 2194 kwargs['velRadial'] = param[:,1,:] #Radial velocity
2196 2195 kwargs['heightList'] = heightList
2197 2196 kwargs['SNR'] = SNR
2198 2197
2199 2198 dataOut.data_output, dataOut.heightList, dataOut.data_snr = self.techniqueDBS(kwargs) #DBS Function
2200 2199 dataOut.utctimeInit = dataOut.utctime
2201 2200 dataOut.outputInterval = dataOut.paramInterval
2202 2201
2203 2202 elif technique == 'SA':
2204 2203
2205 2204 #Parameters
2206 2205 # position_x = kwargs['positionX']
2207 2206 # position_y = kwargs['positionY']
2208 2207 # azimuth = kwargs['azimuth']
2209 2208 #
2210 2209 # if kwargs.has_key('crosspairsList'):
2211 2210 # pairs = kwargs['crosspairsList']
2212 2211 # else:
2213 2212 # pairs = None
2214 2213 #
2215 2214 # if kwargs.has_key('correctFactor'):
2216 2215 # correctFactor = kwargs['correctFactor']
2217 2216 # else:
2218 2217 # correctFactor = 1
2219 2218
2220 2219 # tau = dataOut.data_param
2221 2220 # _lambda = dataOut.C/dataOut.frequency
2222 2221 # pairsList = dataOut.groupList
2223 2222 # nChannels = dataOut.nChannels
2224 2223
2225 2224 kwargs['groupList'] = dataOut.groupList
2226 2225 kwargs['tau'] = dataOut.data_param
2227 2226 kwargs['_lambda'] = dataOut.C/dataOut.frequency
2228 2227 # dataOut.data_output = self.techniqueSA(pairs, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, absc, correctFactor)
2229 2228 dataOut.data_output = self.techniqueSA(kwargs)
2230 2229 dataOut.utctimeInit = dataOut.utctime
2231 2230 dataOut.outputInterval = dataOut.timeInterval
2232 2231
2233 2232 elif technique == 'Meteors':
2234 2233 dataOut.flagNoData = True
2235 2234 self.__dataReady = False
2236 2235
2237 2236 if 'nHours' in kwargs:
2238 2237 nHours = kwargs['nHours']
2239 2238 else:
2240 2239 nHours = 1
2241 2240
2242 2241 if 'meteorsPerBin' in kwargs:
2243 2242 meteorThresh = kwargs['meteorsPerBin']
2244 2243 else:
2245 2244 meteorThresh = 6
2246 2245
2247 2246 if 'hmin' in kwargs:
2248 2247 hmin = kwargs['hmin']
2249 2248 else: hmin = 70
2250 2249 if 'hmax' in kwargs:
2251 2250 hmax = kwargs['hmax']
2252 2251 else: hmax = 110
2253 2252
2254 2253 dataOut.outputInterval = nHours*3600
2255 2254
2256 2255 if self.__isConfig == False:
2257 2256 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2258 2257 #Get Initial LTC time
2259 2258 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2260 2259 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2261 2260
2262 2261 self.__isConfig = True
2263 2262
2264 2263 if self.__buffer is None:
2265 2264 self.__buffer = dataOut.data_param
2266 2265 self.__firstdata = copy.copy(dataOut)
2267 2266
2268 2267 else:
2269 2268 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2270 2269
2271 2270 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2272 2271
2273 2272 if self.__dataReady:
2274 2273 dataOut.utctimeInit = self.__initime
2275 2274
2276 2275 self.__initime += dataOut.outputInterval #to erase time offset
2277 2276
2278 2277 dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax)
2279 2278 dataOut.flagNoData = False
2280 2279 self.__buffer = None
2281 2280
2282 2281 elif technique == 'Meteors1':
2283 2282 dataOut.flagNoData = True
2284 2283 self.__dataReady = False
2285 2284
2286 2285 if 'nMins' in kwargs:
2287 2286 nMins = kwargs['nMins']
2288 2287 else: nMins = 20
2289 2288 if 'rx_location' in kwargs:
2290 2289 rx_location = kwargs['rx_location']
2291 2290 else: rx_location = [(0,1),(1,1),(1,0)]
2292 2291 if 'azimuth' in kwargs:
2293 2292 azimuth = kwargs['azimuth']
2294 2293 else: azimuth = 51.06
2295 2294 if 'dfactor' in kwargs:
2296 2295 dfactor = kwargs['dfactor']
2297 2296 if 'mode' in kwargs:
2298 2297 mode = kwargs['mode']
2299 2298 else: mode = 'SA'
2300 2299 if 'theta_x' in kwargs:
2301 2300 theta_x = kwargs['theta_x']
2302 2301 if 'theta_y' in kwargs:
2303 2302 theta_y = kwargs['theta_y']
2304 2303
2305 2304 #Remove this later
2306 2305 if dataOut.groupList is None:
2307 2306 dataOut.groupList = [(0,1),(0,2),(1,2)]
2308 2307 groupList = dataOut.groupList
2309 2308 C = 3e8
2310 2309 freq = 50e6
2311 2310 lamb = C/freq
2312 2311 k = 2*numpy.pi/lamb
2313 2312
2314 2313 timeList = dataOut.abscissaList
2315 2314 heightList = dataOut.heightList
2316 2315
2317 2316 if self.__isConfig == False:
2318 2317 dataOut.outputInterval = nMins*60
2319 2318 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2320 2319 #Get Initial LTC time
2321 2320 initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2322 2321 minuteAux = initime.minute
2323 2322 minuteNew = int(numpy.floor(minuteAux/nMins)*nMins)
2324 2323 self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2325 2324
2326 2325 self.__isConfig = True
2327 2326
2328 2327 if self.__buffer is None:
2329 2328 self.__buffer = dataOut.data_param
2330 2329 self.__firstdata = copy.copy(dataOut)
2331 2330
2332 2331 else:
2333 2332 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2334 2333
2335 2334 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2336 2335
2337 2336 if self.__dataReady:
2338 2337 dataOut.utctimeInit = self.__initime
2339 2338 self.__initime += dataOut.outputInterval #to erase time offset
2340 2339
2341 2340 metArray = self.__buffer
2342 2341 if mode == 'SA':
2343 2342 dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList)
2344 2343 elif mode == 'DBS':
2345 2344 dataOut.data_output = self.techniqueNSM_DBS(metArray=metArray,heightList=heightList,timeList=timeList, azimuth=azimuth, theta_x=theta_x, theta_y=theta_y)
2346 2345 dataOut.data_output = dataOut.data_output.T
2347 2346 dataOut.flagNoData = False
2348 2347 self.__buffer = None
2349 2348
2350 2349 return
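# Illustrative sketch (standalone): the Meteors/Meteors1 branches above accumulate data_param
# until the output window ends; the window start is snapped to the top of the hour (or to an
# nMins boundary) in epoch seconds. The timestamp below is hypothetical.
import datetime
utctime = 1600000000.0                              # hypothetical epoch seconds
t = datetime.datetime.utcfromtimestamp(utctime)
initime = (t.replace(minute=0, second=0, microsecond=0) - datetime.datetime(1970, 1, 1)).total_seconds()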
2351 2350
2352 2351 class EWDriftsEstimation(Operation):
2353 2352
2354 2353 def __init__(self):
2355 2354 Operation.__init__(self)
2356 2355
2357 2356 def __correctValues(self, heiRang, phi, velRadial, SNR):
2358 2357 listPhi = phi.tolist()
2359 2358 maxid = listPhi.index(max(listPhi))
2360 2359 minid = listPhi.index(min(listPhi))
2361 2360
2362 2361 rango = list(range(len(phi)))
2363 2362 # rango = numpy.delete(rango,maxid)
2364 2363
2365 2364 heiRang1 = heiRang*math.cos(phi[maxid])
2366 2365 heiRangAux = heiRang*math.cos(phi[minid])
2367 2366 indOut = (heiRang1 < heiRangAux[0]).nonzero()
2368 2367 heiRang1 = numpy.delete(heiRang1,indOut)
2369 2368
2370 2369 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
2371 2370 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
2372 2371
2373 2372 for i in rango:
2374 2373 x = heiRang*math.cos(phi[i])
2375 2374 y1 = velRadial[i,:]
2376 2375 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
2377 2376
2378 2377 x1 = heiRang1
2379 2378 y11 = f1(x1)
2380 2379
2381 2380 y2 = SNR[i,:]
2382 2381 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
2383 2382 y21 = f2(x1)
2384 2383
2385 2384 velRadial1[i,:] = y11
2386 2385 SNR1[i,:] = y21
2387 2386
2388 2387 return heiRang1, velRadial1, SNR1
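# Illustrative sketch (standalone): __correctValues above re-grids each beam onto a common set
# of zenith-corrected heights using cubic interpolation. The profile below is hypothetical.
import numpy
from scipy import interpolate
hei = numpy.linspace(90., 120., 31)                 # hypothetical heights (km)
vel = numpy.sin(hei/10.)                            # hypothetical radial velocity profile
hei1 = hei*numpy.cos(numpy.radians(2.5))            # zenith-corrected heights
hei1 = hei1[hei1 >= hei[0]]                         # keep only heights inside the original range
f1 = interpolate.interp1d(hei, vel, kind='cubic')
vel1 = f1(hei1)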
2389 2388
2390 2389 def run(self, dataOut, zenith, zenithCorrection):
2391 2390 heiRang = dataOut.heightList
2392 2391 velRadial = dataOut.data_param[:,3,:]
2393 2392 SNR = dataOut.data_snr
2394 2393
2395 2394 zenith = numpy.array(zenith)
2396 2395 zenith -= zenithCorrection
2397 2396 zenith *= numpy.pi/180
2398 2397
2399 2398 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
2400 2399
2401 2400 alp = zenith[0]
2402 2401 bet = zenith[1]
2403 2402
2404 2403 w_w = velRadial1[0,:]
2405 2404 w_e = velRadial1[1,:]
2406 2405
2407 2406 w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
2408 2407 u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
2409 2408
2410 2409 winds = numpy.vstack((u,w))
2411 2410
2412 2411 dataOut.heightList = heiRang1
2413 2412 dataOut.data_output = winds
2414 2413 dataOut.data_snr = SNR1
2415 2414
2416 2415 dataOut.utctimeInit = dataOut.utctime
2417 2416 dataOut.outputInterval = dataOut.timeInterval
2418 2417 return
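# Consistency check (standalone, hypothetical values): with the radial velocities modeled as
# w_w = w*cos(alp) + u*sin(alp) and w_e = w*cos(bet) + u*sin(bet), the two expressions in
# run() above recover u and w exactly.
import numpy
alp, bet = numpy.radians(-2.5), numpy.radians(2.5)
u_true, w_true = 50.0, 0.5
w_w = w_true*numpy.cos(alp) + u_true*numpy.sin(alp)
w_e = w_true*numpy.cos(bet) + u_true*numpy.sin(bet)
w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))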
2419 2418
2420 2419 #--------------- Non Specular Meteor ----------------
2421 2420
2422 2421 class NonSpecularMeteorDetection(Operation):
2423 2422
2424 2423 def run(self, dataOut, mode, SNRthresh=8, phaseDerThresh=0.5, cohThresh=0.8, allData = False):
2425 2424 data_acf = dataOut.data_pre[0]
2426 2425 data_ccf = dataOut.data_pre[1]
2427 2426 pairsList = dataOut.groupList[1]
2428 2427
2429 2428 lamb = dataOut.C/dataOut.frequency
2430 2429 tSamp = dataOut.ippSeconds*dataOut.nCohInt
2431 2430 paramInterval = dataOut.paramInterval
2432 2431
2433 2432 nChannels = data_acf.shape[0]
2434 2433 nLags = data_acf.shape[1]
2435 2434 nProfiles = data_acf.shape[2]
2436 2435 nHeights = dataOut.nHeights
2437 2436 nCohInt = dataOut.nCohInt
2438 2437 sec = numpy.round(nProfiles/dataOut.paramInterval)
2439 2438 heightList = dataOut.heightList
2440 2439 ippSeconds = dataOut.ippSeconds*dataOut.nCohInt*dataOut.nAvg
2441 2440 utctime = dataOut.utctime
2442 2441
2443 2442 dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds)
2444 2443
2445 2444 #------------------------ SNR --------------------------------------
2446 2445 power = data_acf[:,0,:,:].real
2447 2446 noise = numpy.zeros(nChannels)
2448 2447 SNR = numpy.zeros(power.shape)
2449 2448 for i in range(nChannels):
2450 2449 noise[i] = hildebrand_sekhon(power[i,:], nCohInt)
2451 2450 SNR[i] = (power[i]-noise[i])/noise[i]
2452 2451 SNRm = numpy.nanmean(SNR, axis = 0)
2453 2452 SNRdB = 10*numpy.log10(SNR)
2454 2453
2455 2454 if mode == 'SA':
2456 2455 dataOut.groupList = dataOut.groupList[1]
2457 2456 nPairs = data_ccf.shape[0]
2458 2457 #---------------------- Coherence and Phase --------------------------
2459 2458 phase = numpy.zeros(data_ccf[:,0,:,:].shape)
2460 2459 # phase1 = numpy.copy(phase)
2461 2460 coh1 = numpy.zeros(data_ccf[:,0,:,:].shape)
2462 2461
2463 2462 for p in range(nPairs):
2464 2463 ch0 = pairsList[p][0]
2465 2464 ch1 = pairsList[p][1]
2466 2465 ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:])
2467 2466 phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
2468 2467 # phase1[p,:,:] = numpy.angle(ccf) #median filter
2469 2468 coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
2470 2469 # coh1[p,:,:] = numpy.abs(ccf) #median filter
2471 2470 coh = numpy.nanmax(coh1, axis = 0)
2472 2471 # struc = numpy.ones((5,1))
2473 2472 # coh = ndimage.morphology.grey_dilation(coh, size=(10,1))
2474 2473 #---------------------- Radial Velocity ----------------------------
2475 2474 phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0)
2476 2475 velRad = phaseAux*lamb/(4*numpy.pi*tSamp)
2477 2476
2478 2477 if allData:
2479 2478 boolMetFin = ~numpy.isnan(SNRm)
2480 2479 # coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2481 2480 else:
2482 2481 #------------------------ Meteor mask ---------------------------------
2483 2482 # #SNR mask
2484 2483 # boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB))
2485 2484 #
2486 2485 # #Erase small objects
2487 2486 # boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
2488 2487 #
2489 2488 # auxEEJ = numpy.sum(boolMet1,axis=0)
2490 2489 # indOver = auxEEJ>nProfiles*0.8 #Use this later
2491 2490 # indEEJ = numpy.where(indOver)[0]
2492 2491 # indNEEJ = numpy.where(~indOver)[0]
2493 2492 #
2494 2493 # boolMetFin = boolMet1
2495 2494 #
2496 2495 # if indEEJ.size > 0:
2497 2496 # boolMet1[:,indEEJ] = False #Erase heights with EEJ
2498 2497 #
2499 2498 # boolMet2 = coh > cohThresh
2500 2499 # boolMet2 = self.__erase_small(boolMet2, 2*sec,5)
2501 2500 #
2502 2501 # #Final Meteor mask
2503 2502 # boolMetFin = boolMet1|boolMet2
2504 2503
2505 2504 #Coherence mask
2506 2505 boolMet1 = coh > 0.75
2507 2506 struc = numpy.ones((30,1))
2508 2507 boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc)
2509 2508
2510 2509 #Derivative mask
2511 2510 derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2512 2511 boolMet2 = derPhase < 0.2
2513 2512 # boolMet2 = ndimage.morphology.binary_opening(boolMet2)
2514 2513 # boolMet2 = ndimage.morphology.binary_closing(boolMet2, structure = numpy.ones((10,1)))
2515 2514 boolMet2 = ndimage.median_filter(boolMet2,size=5)
2516 2515 boolMet2 = numpy.vstack((boolMet2,numpy.full((1,nHeights), True, dtype=bool)))
2517 2516 # #Final mask
2518 2517 # boolMetFin = boolMet2
2519 2518 boolMetFin = boolMet1&boolMet2
2520 2519 # boolMetFin = ndimage.morphology.binary_dilation(boolMetFin)
2521 2520 #Creating data_param
2522 2521 coordMet = numpy.where(boolMetFin)
2523 2522
2524 2523 tmet = coordMet[0]
2525 2524 hmet = coordMet[1]
2526 2525
2527 2526 data_param = numpy.zeros((tmet.size, 6 + nPairs))
2528 2527 data_param[:,0] = utctime
2529 2528 data_param[:,1] = tmet
2530 2529 data_param[:,2] = hmet
2531 2530 data_param[:,3] = SNRm[tmet,hmet]
2532 2531 data_param[:,4] = velRad[tmet,hmet]
2533 2532 data_param[:,5] = coh[tmet,hmet]
2534 2533 data_param[:,6:] = phase[:,tmet,hmet].T
2535 2534
2536 2535 elif mode == 'DBS':
2537 2536 dataOut.groupList = numpy.arange(nChannels)
2538 2537
2539 2538 #Radial Velocities
2540 2539 phase = numpy.angle(data_acf[:,1,:,:])
2541 2540 # phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1))
2542 2541 velRad = phase*lamb/(4*numpy.pi*tSamp)
2543 2542
2544 2543 #Spectral width
2545 2544 # acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1))
2546 2545 # acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1))
2547 2546 acf1 = data_acf[:,1,:,:]
2548 2547 acf2 = data_acf[:,2,:,:]
2549 2548
2550 2549 spcWidth = (lamb/(2*numpy.sqrt(6)*numpy.pi*tSamp))*numpy.sqrt(numpy.log(acf1/acf2))
2551 2550 # velRad = ndimage.median_filter(velRad, size = (1,5,1))
2552 2551 if allData:
2553 2552 boolMetFin = ~numpy.isnan(SNRdB)
2554 2553 else:
2555 2554 #SNR
2556 2555 boolMet1 = (SNRdB>SNRthresh) #SNR mask
2557 2556 boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5))
2558 2557
2559 2558 #Radial velocity
2560 2559 boolMet2 = numpy.abs(velRad) < 20
2561 2560 boolMet2 = ndimage.median_filter(boolMet2, (1,5,5))
2562 2561
2563 2562 #Spectral Width
2564 2563 boolMet3 = spcWidth < 30
2565 2564 boolMet3 = ndimage.median_filter(boolMet3, (1,5,5))
2566 2565 # boolMetFin = self.__erase_small(boolMet1, 10,5)
2567 2566 boolMetFin = boolMet1&boolMet2&boolMet3
2568 2567
2569 2568 #Creating data_param
2570 2569 coordMet = numpy.where(boolMetFin)
2571 2570
2572 2571 cmet = coordMet[0]
2573 2572 tmet = coordMet[1]
2574 2573 hmet = coordMet[2]
2575 2574
2576 2575 data_param = numpy.zeros((tmet.size, 7))
2577 2576 data_param[:,0] = utctime
2578 2577 data_param[:,1] = cmet
2579 2578 data_param[:,2] = tmet
2580 2579 data_param[:,3] = hmet
2581 2580 data_param[:,4] = SNR[cmet,tmet,hmet].T
2582 2581 data_param[:,5] = velRad[cmet,tmet,hmet].T
2583 2582 data_param[:,6] = spcWidth[cmet,tmet,hmet].T
2584 2583
2585 2584 # self.dataOut.data_param = data_int
2586 2585 if len(data_param) == 0:
2587 2586 dataOut.flagNoData = True
2588 2587 else:
2589 2588 dataOut.data_param = data_param
2590 2589
2591 2590 def __erase_small(self, binArray, threshX, threshY):
2592 2591 labarray, numfeat = ndimage.measurements.label(binArray)
2593 2592 binArray1 = numpy.copy(binArray)
2594 2593
2595 2594 for i in range(1,numfeat + 1):
2596 2595 auxBin = (labarray==i)
2597 2596 auxSize = auxBin.sum()
2598 2597
2599 2598 x,y = numpy.where(auxBin)
2600 2599 widthX = x.max() - x.min()
2601 2600 widthY = y.max() - y.min()
2602 2601
2603 2602 #width X: 3 seg -> 12.5*3
2604 2603 #width Y:
2605 2604
2606 2605 if (auxSize < 50) or (widthX < threshX) or (widthY < threshY):
2607 2606 binArray1[auxBin] = False
2608 2607
2609 2608 return binArray1
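# Illustrative sketch (standalone): __erase_small above labels connected regions of the
# boolean mask and drops those that are too small or too narrow. The vectorized size test
# below is a hypothetical, loop-free equivalent of the size criterion only.
import numpy
from scipy import ndimage
mask = numpy.zeros((50, 20), dtype=bool)
mask[10:40, 5:13] = True                            # one large feature (kept)
mask[2:4, 15] = True                                # one tiny feature (removed)
lab, nfeat = ndimage.label(mask)
sizes = ndimage.sum(mask, lab, range(1, nfeat + 1)) # pixels per labeled feature
small = numpy.isin(lab, numpy.where(sizes < 50)[0] + 1)
mask[small] = False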
2610 2609
2611 2610 #--------------- Specular Meteor ----------------
2612 2611
2613 2612 class SMDetection(Operation):
2614 2613 '''
2615 2614 Function DetectMeteors()
2616 2615 Method implemented following the paper:
2617 2616 HOLDSWORTH ET AL. 2004
2618 2617
2619 2618 Input:
2620 2619 self.dataOut.data_pre
2621 2620
2622 2621 centerReceiverIndex: Index of the center receiver among the channels
2623 2622
2624 2623 hei_ref: Height reference for the Beacon signal extraction
2625 2624 tauindex:
2626 2625 predefinedPhaseShifts: Predefined phase offsets for the voltage signals
2627 2626
2628 2627 cohDetection: Whether to use coherent detection or not
2629 2628 cohDet_timeStep: Coherent detection calculation time step
2630 2629 cohDet_thresh: Coherent detection phase threshold to correct phases
2631 2630
2632 2631 noise_timeStep: Noise calculation time step
2633 2632 noise_multiple: Noise multiple used to define the signal threshold
2634 2633
2635 2634 multDet_timeLimit: Multiple detection removal time limit in seconds
2636 2635 multDet_rangeLimit: Multiple detection removal range limit in km
2637 2636
2638 2637 phaseThresh: Maximum phase difference between receivers for an echo to be considered a meteor
2639 2638 SNRThresh: Minimum SNR of the meteor signal for an echo to be considered a meteor
2640 2639
2641 2640 hmin: Minimum height of the meteor to use it in further wind estimations
2642 2641 hmax: Maximum height of the meteor to use it in further wind estimations
2643 2642 azimuth: Azimuth angle correction
2644 2643
2645 2644 Affected:
2646 2645 self.dataOut.data_param
2647 2646
2648 2647 Rejection Criteria (Errors):
2649 2648 0: No error; analysis OK
2650 2649 1: SNR < SNR threshold
2651 2650 2: angle of arrival (AOA) ambiguously determined
2652 2651 3: AOA estimate not feasible
2653 2652 4: Large difference in AOAs obtained from different antenna baselines
2654 2653 5: echo at start or end of time series
2655 2654 6: echo less than 5 samples long; too short for analysis
2656 2655 7: echo rise exceeds 0.3s
2657 2656 8: echo decay time less than twice rise time
2658 2657 9: large power level before echo
2659 2658 10: large power level after echo
2660 2659 11: poor fit to amplitude for estimation of decay time
2661 2660 12: poor fit to CCF phase variation for estimation of radial drift velocity
2662 2661 13: height unresolvable echo: not valid height within 70 to 110 km
2663 2662 14: height ambiguous echo: more than one possible height within 70 to 110 km
2664 2663 15: radial drift velocity or projected horizontal velocity exceeds 200 m/s
2665 2664 16: oscillatory echo, indicating event most likely not an underdense echo
2666 2665
2667 2666 17: phase difference in meteor Reestimation
2668 2667
2669 2668 Data Storage:
2670 2669 Meteors for Wind Estimation (8):
2671 2670 Utc Time | Range Height
2672 2671 Azimuth Zenith errorCosDir
2673 2672 VelRad errorVelRad
2674 2673 Phase0 Phase1 Phase2 Phase3
2675 2674 TypeError
2676 2675
2677 2676 '''
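# Illustrative sketch (standalone, hypothetical array): SMDetection leaves one row per
# detected meteor in dataOut.data_param, with the rejection/type code from the table above
# in the last column; downstream wind estimation keeps only rows whose code is 0.
import numpy
arrayParameters = numpy.zeros((5, 13))              # hypothetical output with 5 detections
arrayParameters[:, -1] = [0, 1, 0, 15, 0]           # hypothetical error codes
goodMeteors = arrayParameters[arrayParameters[:, -1] == 0]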
2678 2677
2679 2678 def run(self, dataOut, hei_ref = None, tauindex = 0,
2680 2679 phaseOffsets = None,
2681 2680 cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
2682 2681 noise_timeStep = 4, noise_multiple = 4,
2683 2682 multDet_timeLimit = 1, multDet_rangeLimit = 3,
2684 2683 phaseThresh = 20, SNRThresh = 5,
2685 2684 hmin = 50, hmax=150, azimuth = 0,
2686 2685 channelPositions = None) :
2687 2686
2688 2687
2689 2688 #Getting Pairslist
2690 2689 if channelPositions is None:
2691 2690 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
2692 2691 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
2693 2692 meteorOps = SMOperations()
2694 2693 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
2695 2694 heiRang = dataOut.heightList
2696 2695 #Get Beacon signal - No Beacon signal anymore
2697 2696 # newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
2698 2697 #
2699 2698 # if hei_ref != None:
2700 2699 # newheis = numpy.where(self.dataOut.heightList>hei_ref)
2701 2700 #
2702 2701
2703 2702
2704 2703 #****************REMOVING HARDWARE PHASE DIFFERENCES***************
2705 2704 # see if the user put in pre defined phase shifts
2706 2705 voltsPShift = dataOut.data_pre.copy()
2707 2706
2708 2707 # if predefinedPhaseShifts != None:
2709 2708 # hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180
2710 2709 #
2711 2710 # # elif beaconPhaseShifts:
2712 2711 # # #get hardware phase shifts using beacon signal
2713 2712 # # hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10)
2714 2713 # # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0)
2715 2714 #
2716 2715 # else:
2717 2716 # hardwarePhaseShifts = numpy.zeros(5)
2718 2717 #
2719 2718 # voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex')
2720 2719 # for i in range(self.dataOut.data_pre.shape[0]):
2721 2720 # voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i])
2722 2721
2723 2722 #******************END OF REMOVING HARDWARE PHASE DIFFERENCES*********
2724 2723
2725 2724 #Remove DC
2726 2725 voltsDC = numpy.mean(voltsPShift,1)
2727 2726 voltsDC = numpy.mean(voltsDC,1)
2728 2727 for i in range(voltsDC.shape[0]):
2729 2728 voltsPShift[i] = voltsPShift[i] - voltsDC[i]
2730 2729
2731 2730 #Don't consider the last heights; they're used to calculate the hardware phase shift
2732 2731 # voltsPShift = voltsPShift[:,:,:newheis[0][0]]
2733 2732
2734 2733 #************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) **********
2735 2734 #Coherent Detection
2736 2735 if cohDetection:
2737 2736 #use coherent detection to get the net power
2738 2737 cohDet_thresh = cohDet_thresh*numpy.pi/180
2739 2738 voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh)
2740 2739
2741 2740 #Non-coherent detection!
2742 2741 powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0)
2743 2742 #********** END OF COH/NON-COH POWER CALCULATION**********************
2744 2743
2745 2744 #********** FIND THE NOISE LEVEL AND POSSIBLE METEORS ****************
2746 2745 #Get noise
2747 2746 noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval)
2748 2747 # noise = self.getNoise1(powerNet, noise_timeStep, self.dataOut.timeInterval)
2749 2748 #Get signal threshold
2750 2749 signalThresh = noise_multiple*noise
2751 2750 #Meteor echoes detection
2752 2751 listMeteors = self.__findMeteors(powerNet, signalThresh)
2753 2752 #******* END OF NOISE LEVEL AND POSSIBLE METEORS CALCULATION **********
2754 2753
2755 2754 #************** REMOVE MULTIPLE DETECTIONS (3.5) ***************************
2756 2755 #Parameters
2757 2756 heiRange = dataOut.heightList
2758 2757 rangeInterval = heiRange[1] - heiRange[0]
2759 2758 rangeLimit = multDet_rangeLimit/rangeInterval
2760 2759 timeLimit = multDet_timeLimit/dataOut.timeInterval
2761 2760 #Multiple detection removals
2762 2761 listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit)
2763 2762 #************ END OF REMOVE MULTIPLE DETECTIONS **********************
2764 2763
2765 2764 #********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ********************
2766 2765 #Parameters
2767 2766 phaseThresh = phaseThresh*numpy.pi/180
2768 2767 thresh = [phaseThresh, noise_multiple, SNRThresh]
2769 2768 #Meteor reestimation (Errors N 1, 6, 12, 17)
2770 2769 listMeteors2, listMeteorsPower, listMeteorsVolts = self.__meteorReestimation(listMeteors1, voltsPShift, pairslist0, thresh, noise, dataOut.timeInterval, dataOut.frequency)
2771 2770 # listMeteors2, listMeteorsPower, listMeteorsVolts = self.meteorReestimation3(listMeteors2, listMeteorsPower, listMeteorsVolts, voltsPShift, pairslist, thresh, noise)
2772 2771 #Estimation of decay times (Errors N 7, 8, 11)
2773 2772 listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency)
2774 2773 #******************* END OF METEOR REESTIMATION *******************
2775 2774
2776 2775 #********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) **************************
2777 2776 #Calculating Radial Velocity (Error N 15)
2778 2777 radialStdThresh = 10
2779 2778 listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
2780 2779
2781 2780 if len(listMeteors4) > 0:
2782 2781 #Setting New Array
2783 2782 date = dataOut.utctime
2784 2783 arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang)
2785 2784
2786 2785 #Correcting phase offset
2787 2786 if phaseOffsets is not None:
2788 2787 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
2789 2788 arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
2790 2789
2791 2790 #Second Pairslist
2792 2791 pairsList = []
2793 2792 pairx = (0,1)
2794 2793 pairy = (2,3)
2795 2794 pairsList.append(pairx)
2796 2795 pairsList.append(pairy)
2797 2796
2798 2797 jph = numpy.array([0,0,0,0])
2799 2798 h = (hmin,hmax)
2800 2799 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
2801 2800
2802 2801 # #Calculate AOA (Error N 3, 4)
2803 2802 # #JONES ET AL. 1998
2804 2803 # error = arrayParameters[:,-1]
2805 2804 # AOAthresh = numpy.pi/8
2806 2805 # phases = -arrayParameters[:,9:13]
2807 2806 # arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth)
2808 2807 #
2809 2808 # #Calculate Heights (Error N 13 and 14)
2810 2809 # error = arrayParameters[:,-1]
2811 2810 # Ranges = arrayParameters[:,2]
2812 2811 # zenith = arrayParameters[:,5]
2813 2812 # arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax)
2814 2813 # error = arrayParameters[:,-1]
2815 2814 #********************* END OF PARAMETERS CALCULATION **************************
2816 2815
2817 2816 #***************************+ PASS DATA TO NEXT STEP **********************
2818 2817 # arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1]))
2819 2818 dataOut.data_param = arrayParameters
2820 2819
2821 2820 if arrayParameters is None:
2822 2821 dataOut.flagNoData = True
2823 2822 else:
2824 2823 dataOut.flagNoData = False
2825 2824
2826 2825 return
2827 2826
2828 2827 def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n):
2829 2828
2830 2829 minIndex = min(newheis[0])
2831 2830 maxIndex = max(newheis[0])
2832 2831
2833 2832 voltage = voltage0[:,:,minIndex:maxIndex+1]
2834 2833 nLength = voltage.shape[1]/n
2835 2834 nMin = 0
2836 2835 nMax = 0
2837 2836 phaseOffset = numpy.zeros((len(pairslist),n))
2838 2837
2839 2838 for i in range(n):
2840 2839 nMax += nLength
2841 2840 phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:,nMin:nMax,:], pairslist, [0]))
2842 2841 phaseCCF = numpy.mean(phaseCCF, axis = 2)
2843 2842 phaseOffset[:,i] = phaseCCF.transpose()
2844 2843 nMin = nMax
2845 2844 # phaseDiff, phaseArrival = self.estimatePhaseDifference(voltage, pairslist)
2846 2845
2847 2846 #Remove Outliers
2848 2847 factor = 2
2849 2848 wt = phaseOffset - signal.medfilt(phaseOffset,(1,5))
2850 2849 dw = numpy.std(wt,axis = 1)
2851 2850 dw = dw.reshape((dw.size,1))
2852 2851 ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
2853 2852 phaseOffset[ind] = numpy.nan
2854 2853 phaseOffset = stats.nanmean(phaseOffset, axis=1)
2855 2854
2856 2855 return phaseOffset
2857 2856
2858 2857 def __shiftPhase(self, data, phaseShift):
2859 2858 #this will shift the phase of a complex number
2860 2859 dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
2861 2860 return dataShifted
2862 2861
2863 2862 def __estimatePhaseDifference(self, array, pairslist):
2864 2863 nChannel = array.shape[0]
2865 2864 nHeights = array.shape[2]
2866 2865 numPairs = len(pairslist)
2867 2866 # phaseCCF = numpy.zeros((nChannel, 5, nHeights))
2868 2867 phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2]))
2869 2868
2870 2869 #Correct phases
2871 2870 derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:]
2872 2871 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
2873 2872
2874 2873 if indDer[0].shape[0] > 0:
2875 2874 for i in range(indDer[0].shape[0]):
2876 2875 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]])
2877 2876 phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi
2878 2877
2879 2878 # for j in range(numSides):
2880 2879 # phaseCCFAux = self.calculateCCF(arrayCenter, arraySides[j,:,:], [-2,1,0,1,2])
2881 2880 # phaseCCF[j,:,:] = numpy.angle(phaseCCFAux)
2882 2881 #
2883 2882 #Linear
2884 2883 phaseInt = numpy.zeros((numPairs,1))
2885 2884 angAllCCF = phaseCCF[:,[0,1,3,4],0]
2886 2885 for j in range(numPairs):
2887 2886 fit = stats.linregress([-2,-1,1,2],angAllCCF[j,:])
2888 2887 phaseInt[j] = fit[1]
2889 2888 #Phase Differences
2890 2889 phaseDiff = phaseInt - phaseCCF[:,2,:]
2891 2890 phaseArrival = phaseInt.reshape(phaseInt.size)
2892 2891
2893 2892 #Dealias
2894 2893 phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival))
2895 2894 # indAlias = numpy.where(phaseArrival > numpy.pi)
2896 2895 # phaseArrival[indAlias] -= 2*numpy.pi
2897 2896 # indAlias = numpy.where(phaseArrival < -numpy.pi)
2898 2897 # phaseArrival[indAlias] += 2*numpy.pi
2899 2898
2900 2899 return phaseDiff, phaseArrival
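# Illustrative sketch (standalone): the dealiasing used above maps any phase back into
# (-pi, pi] through the complex exponential. The value below is hypothetical.
import numpy
phi = 3*numpy.pi/2
phi_wrapped = numpy.angle(numpy.exp(1j*phi))        # approximately -pi/2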
2901 2900
2902 2901 def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh):
2903 2902 #this function will run the coherent detection used in Holdworth et al. 2004 and return the net power
2904 2903 #find the phase shifts of each channel over 1 second intervals
2905 2904 #only look at ranges below the beacon signal
2906 2905 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2907 2906 numBlocks = int(volts.shape[1]/numProfPerBlock)
2908 2907 numHeights = volts.shape[2]
2909 2908 nChannel = volts.shape[0]
2910 2909 voltsCohDet = volts.copy()
2911 2910
2912 2911 pairsarray = numpy.array(pairslist)
2913 2912 indSides = pairsarray[:,1]
2914 2913 # indSides = numpy.array(range(nChannel))
2915 2914 # indSides = numpy.delete(indSides, indCenter)
2916 2915 #
2917 2916 # listCenter = numpy.array_split(volts[indCenter,:,:], numBlocks, 0)
2918 2917 listBlocks = numpy.array_split(volts, numBlocks, 1)
2919 2918
2920 2919 startInd = 0
2921 2920 endInd = 0
2922 2921
2923 2922 for i in range(numBlocks):
2924 2923 startInd = endInd
2925 2924 endInd = endInd + listBlocks[i].shape[1]
2926 2925
2927 2926 arrayBlock = listBlocks[i]
2928 2927 # arrayBlockCenter = listCenter[i]
2929 2928
2930 2929 #Estimate the Phase Difference
2931 2930 phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist)
2932 2931 #Phase Difference RMS
2933 2932 arrayPhaseRMS = numpy.abs(phaseDiff)
2934 2933 phaseRMSaux = numpy.sum(arrayPhaseRMS < thresh,0)
2935 2934 indPhase = numpy.where(phaseRMSaux==4)
2936 2935 #Shifting
2937 2936 if indPhase[0].shape[0] > 0:
2938 2937 for j in range(indSides.size):
2939 2938 arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose())
2940 2939 voltsCohDet[:,startInd:endInd,:] = arrayBlock
2941 2940
2942 2941 return voltsCohDet
2943 2942
2944 2943 def __calculateCCF(self, volts, pairslist ,laglist):
2945 2944
2946 2945 nHeights = volts.shape[2]
2947 2946 nPoints = volts.shape[1]
2948 2947 voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights),dtype = 'complex')
2949 2948
2950 2949 for i in range(len(pairslist)):
2951 2950 volts1 = volts[pairslist[i][0]]
2952 2951 volts2 = volts[pairslist[i][1]]
2953 2952
2954 2953 for t in range(len(laglist)):
2955 2954 idxT = laglist[t]
2956 2955 if idxT >= 0:
2957 2956 vStacked = numpy.vstack((volts2[idxT:,:],
2958 2957 numpy.zeros((idxT, nHeights),dtype='complex')))
2959 2958 else:
2960 2959 vStacked = numpy.vstack((numpy.zeros((-idxT, nHeights),dtype='complex'),
2961 2960 volts2[:(nPoints + idxT),:]))
2962 2961 voltsCCF[i,t,:] = numpy.sum((numpy.conjugate(volts1)*vStacked),axis=0)
2963 2962
2964 2963 vStacked = None
2965 2964 return voltsCCF
2966 2965
2967 2966 def __getNoise(self, power, timeSegment, timeInterval):
2968 2967 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2969 2968 numBlocks = int(power.shape[0]/numProfPerBlock)
2970 2969 numHeights = power.shape[1]
2971 2970
2972 2971 listPower = numpy.array_split(power, numBlocks, 0)
2973 2972 noise = numpy.zeros((power.shape[0], power.shape[1]))
2974 2973 noise1 = numpy.zeros((power.shape[0], power.shape[1]))
2975 2974
2976 2975 startInd = 0
2977 2976 endInd = 0
2978 2977
2979 2978 for i in range(numBlocks): #split per channel
2980 2979 startInd = endInd
2981 2980 endInd = endInd + listPower[i].shape[0]
2982 2981
2983 2982 arrayBlock = listPower[i]
2984 2983 noiseAux = numpy.mean(arrayBlock, 0)
2985 2984 # noiseAux = numpy.median(noiseAux)
2986 2985 # noiseAux = numpy.mean(arrayBlock)
2987 2986 noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
2988 2987
2989 2988 noiseAux1 = numpy.mean(arrayBlock)
2990 2989 noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
2991 2990
2992 2991 return noise, noise1
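# Illustrative sketch (standalone): __getNoise above splits the power series into time blocks
# and uses each block's mean as the local noise level. The array below is hypothetical.
import numpy
power = numpy.abs(numpy.random.randn(100, 8))**2    # (profiles, heights), hypothetical
listPower = numpy.array_split(power, 4, 0)
noisePerBlock = [numpy.mean(block, 0) for block in listPower]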
2993 2992
2994 2993 def __findMeteors(self, power, thresh):
2995 2994 nProf = power.shape[0]
2996 2995 nHeights = power.shape[1]
2997 2996 listMeteors = []
2998 2997
2999 2998 for i in range(nHeights):
3000 2999 powerAux = power[:,i]
3001 3000 threshAux = thresh[:,i]
3002 3001
3003 3002 indUPthresh = numpy.where(powerAux > threshAux)[0]
3004 3003 indDNthresh = numpy.where(powerAux <= threshAux)[0]
3005 3004
3006 3005 j = 0
3007 3006
3008 3007 while (j < indUPthresh.size - 2):
3009 3008 if (indUPthresh[j + 2] == indUPthresh[j] + 2):
3010 3009 indDNAux = numpy.where(indDNthresh > indUPthresh[j])
3011 3010 indDNthresh = indDNthresh[indDNAux]
3012 3011
3013 3012 if (indDNthresh.size > 0):
3014 3013 indEnd = indDNthresh[0] - 1
3015 3014 indInit = indUPthresh[j]
3016 3015
3017 3016 meteor = powerAux[indInit:indEnd + 1]
3018 3017 indPeak = meteor.argmax() + indInit
3019 3018 FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0)))
3020 3019
3021 3020 listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #CHECK THIS!!!!!
3022 3021 j = numpy.where(indUPthresh == indEnd)[0] + 1
3023 3022 else: j+=1
3024 3023 else: j+=1
3025 3024
3026 3025 return listMeteors
3027 3026
3028 3027 def __removeMultipleDetections(self,listMeteors, rangeLimit, timeLimit):
3029 3028
3030 3029 arrayMeteors = numpy.asarray(listMeteors)
3031 3030 listMeteors1 = []
3032 3031
3033 3032 while arrayMeteors.shape[0] > 0:
3034 3033 FLAs = arrayMeteors[:,4]
3035 3034 maxFLA = FLAs.argmax()
3036 3035 listMeteors1.append(arrayMeteors[maxFLA,:])
3037 3036
3038 3037 MeteorInitTime = arrayMeteors[maxFLA,1]
3039 3038 MeteorEndTime = arrayMeteors[maxFLA,3]
3040 3039 MeteorHeight = arrayMeteors[maxFLA,0]
3041 3040
3042 3041 #Check neighborhood
3043 3042 maxHeightIndex = MeteorHeight + rangeLimit
3044 3043 minHeightIndex = MeteorHeight - rangeLimit
3045 3044 minTimeIndex = MeteorInitTime - timeLimit
3046 3045 maxTimeIndex = MeteorEndTime + timeLimit
3047 3046
3048 3047 #Check Heights
3049 3048 indHeight = numpy.logical_and(arrayMeteors[:,0] >= minHeightIndex, arrayMeteors[:,0] <= maxHeightIndex)
3050 3049 indTime = numpy.logical_and(arrayMeteors[:,3] >= minTimeIndex, arrayMeteors[:,1] <= maxTimeIndex)
3051 3050 indBoth = numpy.where(numpy.logical_and(indTime,indHeight))
3052 3051
3053 3052 arrayMeteors = numpy.delete(arrayMeteors, indBoth, axis = 0)
3054 3053
3055 3054 return listMeteors1
3056 3055
3057 3056 def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval,frequency):
3058 3057 numHeights = volts.shape[2]
3059 3058 nChannel = volts.shape[0]
3060 3059
3061 3060 thresholdPhase = thresh[0]
3062 3061 thresholdNoise = thresh[1]
3063 3062 thresholdDB = float(thresh[2])
3064 3063
3065 3064 thresholdDB1 = 10**(thresholdDB/10)
3066 3065 pairsarray = numpy.array(pairslist)
3067 3066 indSides = pairsarray[:,1]
3068 3067
3069 3068 pairslist1 = list(pairslist)
3070 3069 pairslist1.append((0,1))
3071 3070 pairslist1.append((3,4))
3072 3071
3073 3072 listMeteors1 = []
3074 3073 listPowerSeries = []
3075 3074 listVoltageSeries = []
3076 3075 #volts has the raw data
3077 3076
3078 3077 if frequency == 30e6:
3079 3078 timeLag = 45*10**-3
3080 3079 else:
3081 3080 timeLag = 15*10**-3
3082 3081 lag = numpy.ceil(timeLag/timeInterval)
3083 3082
3084 3083 for i in range(len(listMeteors)):
3085 3084
3086 3085 ###################### 3.6 - 3.7 PARAMETERS REESTIMATION #########################
3087 3086 meteorAux = numpy.zeros(16)
3088 3087
3089 3088 #Loading meteor Data (mHeight, mStart, mPeak, mEnd)
3090 3089 mHeight = listMeteors[i][0]
3091 3090 mStart = listMeteors[i][1]
3092 3091 mPeak = listMeteors[i][2]
3093 3092 mEnd = listMeteors[i][3]
3094 3093
3095 3094 #get the volt data between the start and end times of the meteor
3096 3095 meteorVolts = volts[:,mStart:mEnd+1,mHeight]
3097 3096 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3098 3097
3099 3098 #3.6. Phase Difference estimation
3100 3099 phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist)
3101 3100
3102 3101 #3.7. Phase difference removal & meteor start, peak and end times reestimated
3103 3102 #meteorVolts0.- all Channels, all Profiles
3104 3103 meteorVolts0 = volts[:,:,mHeight]
3105 3104 meteorThresh = noise[:,mHeight]*thresholdNoise
3106 3105 meteorNoise = noise[:,mHeight]
3107 3106 meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting
3108 3107 powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power
3109 3108
3110 3109 #Times reestimation
3111 3110 mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0]
3112 3111 if mStart1.size > 0:
3113 3112 mStart1 = mStart1[-1] + 1
3114 3113
3115 3114 else:
3116 3115 mStart1 = mPeak
3117 3116
3118 3117 mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1
3119 3118 mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0]
3120 3119 if mEndDecayTime1.size == 0:
3121 3120 mEndDecayTime1 = powerNet0.size
3122 3121 else:
3123 3122 mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1
3124 3123 # mPeak1 = meteorVolts0[mStart1:mEnd1 + 1].argmax()
3125 3124
3126 3125 #meteorVolts1.- all Channels, from start to end
3127 3126 meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1]
3128 3127 meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1]
3129 3128 if meteorVolts2.shape[1] == 0:
3130 3129 meteorVolts2 = meteorVolts0[:,mPeak:mEnd1 + 1]
3131 3130 meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1)
3132 3131 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1)
3133 3132 ##################### END PARAMETERS REESTIMATION #########################
3134 3133
3135 3134 ##################### 3.8 PHASE DIFFERENCE REESTIMATION ########################
3136 3135 # if mEnd1 - mStart1 > 4: #Error Number 6: echo less than 5 samples long; too short for analysis
3137 3136 if meteorVolts2.shape[1] > 0:
3138 3137 #Phase Difference re-estimation
3139 3138 phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation
3140 3139 # phaseDiff1, phaseDiffint = self.estimatePhaseDifference(meteorVolts2, pairslist)
3141 3140 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1])
3142 3141 phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1))
3143 3142 meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting
3144 3143
3145 3144 #Phase Difference RMS
3146 3145 phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1)))
3147 3146 powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0)
3148 3147 #Data from Meteor
3149 3148 mPeak1 = powerNet1.argmax() + mStart1
3150 3149 mPeakPower1 = powerNet1.max()
3151 3150 noiseAux = sum(noise[mStart1:mEnd1 + 1,mHeight])
3152 3151 mSNR1 = (sum(powerNet1)-noiseAux)/noiseAux
3153 3152 Meteor1 = numpy.array([mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1])
3154 3153 Meteor1 = numpy.hstack((Meteor1,phaseDiffint))
3155 3154 PowerSeries = powerNet0[mStart1:mEndDecayTime1 + 1]
3156 3155 #Vectorize
3157 3156 meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1]
3158 3157 meteorAux[7:11] = phaseDiffint[0:4]
3159 3158
3160 3159 #Rejection Criterions
3161 3160 if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation
3162 3161 meteorAux[-1] = 17
3163 3162 elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB
3164 3163 meteorAux[-1] = 1
3165 3164
3166 3165
3167 3166 else:
3168 3167 meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd]
3169 3168 meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis
3170 3169 PowerSeries = 0
3171 3170
3172 3171 listMeteors1.append(meteorAux)
3173 3172 listPowerSeries.append(PowerSeries)
3174 3173 listVoltageSeries.append(meteorVolts1)
3175 3174
3176 3175 return listMeteors1, listPowerSeries, listVoltageSeries
3177 3176
3178 3177 def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency):
3179 3178
3180 3179 threshError = 10
3181 3180 #Depending if it is 30 or 50 MHz
3182 3181 if frequency == 30e6:
3183 3182 timeLag = 45*10**-3
3184 3183 else:
3185 3184 timeLag = 15*10**-3
3186 3185 lag = numpy.ceil(timeLag/timeInterval)
3187 3186
3188 3187 listMeteors1 = []
3189 3188
3190 3189 for i in range(len(listMeteors)):
3191 3190 meteorPower = listPower[i]
3192 3191 meteorAux = listMeteors[i]
3193 3192
3194 3193 if meteorAux[-1] == 0:
3195 3194
3196 3195 try:
3197 3196 indmax = meteorPower.argmax()
3198 3197 indlag = indmax + lag
3199 3198
3200 3199 y = meteorPower[indlag:]
3201 3200 x = numpy.arange(0, y.size)*timeLag
3202 3201
3203 3202 #first guess
3204 3203 a = y[0]
3205 3204 tau = timeLag
3206 3205 #exponential fit
3207 3206 popt, pcov = optimize.curve_fit(self.__exponential_function, x, y, p0 = [a, tau])
3208 3207 y1 = self.__exponential_function(x, *popt)
3209 3208 #error estimation
3210 3209 error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size))
3211 3210
3212 3211 decayTime = popt[1]
3213 3212 riseTime = indmax*timeInterval
3214 3213 meteorAux[11:13] = [decayTime, error]
3215 3214
3216 3215 #Table items 7, 8 and 11
3217 3216 if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s
3218 3217 meteorAux[-1] = 7
3219 3218 elif (decayTime < 2*riseTime) : #Number 8: Echo decay time less than twice rise time
3220 3219 meteorAux[-1] = 8
3221 3220 if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time
3222 3221 meteorAux[-1] = 11
3223 3222
3224 3223
3225 3224 except:
3226 3225 meteorAux[-1] = 11
3227 3226
3228 3227
3229 3228 listMeteors1.append(meteorAux)
3230 3229
3231 3230 return listMeteors1
3232 3231
3233 3232 #Exponential Function
3234 3233
3235 3234 def __exponential_function(self, x, a, tau):
3236 3235 y = a*numpy.exp(-x/tau)
3237 3236 return y
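# Illustrative sketch (standalone): __estimateDecayTime above fits a*exp(-x/tau) to the
# post-peak power and takes tau as the decay time. The synthetic echo below is hypothetical.
import numpy
from scipy import optimize
expf = lambda x, a, tau: a*numpy.exp(-x/tau)
x = numpy.arange(30)*0.015                          # hypothetical timeInterval of 15 ms
y = expf(x, 100.0, 0.12) + 0.5*numpy.random.rand(x.size)
popt, pcov = optimize.curve_fit(expf, x, y, p0=[y[0], 0.045])
decayTime = popt[1]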
3238 3237
3239 3238 def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval):
3240 3239
3241 3240 pairslist1 = list(pairslist)
3242 3241 pairslist1.append((0,1))
3243 3242 pairslist1.append((3,4))
3244 3243 numPairs = len(pairslist1)
3245 3244 #Time Lag
3246 3245 timeLag = 45*10**-3
3247 3246 c = 3e8
3248 3247 lag = numpy.ceil(timeLag/timeInterval)
3249 3248 freq = 30e6
3250 3249
3251 3250 listMeteors1 = []
3252 3251
3253 3252 for i in range(len(listMeteors)):
3254 3253 meteorAux = listMeteors[i]
3255 3254 if meteorAux[-1] == 0:
3256 3255 mStart = listMeteors[i][1]
3257 3256 mPeak = listMeteors[i][2]
3258 3257 mLag = mPeak - mStart + lag
3259 3258
3260 3259 #get the volt data between the start and end times of the meteor
3261 3260 meteorVolts = listVolts[i]
3262 3261 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3263 3262
3264 3263 #Get CCF
3265 3264 allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2])
3266 3265
3267 3266 #Method 2
3268 3267 slopes = numpy.zeros(numPairs)
3269 3268 time = numpy.array([-2,-1,1,2])*timeInterval
3270 3269 angAllCCF = numpy.angle(allCCFs[:,[0,1,3,4],0])
3271 3270
3272 3271 #Correct phases
3273 3272 derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1]
3274 3273 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
3275 3274
3276 3275 if indDer[0].shape[0] > 0:
3277 3276 for i in range(indDer[0].shape[0]):
3278 3277 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i]])
3279 3278 angAllCCF[indDer[0][i],indDer[1][i]+1:] += signo*2*numpy.pi
3280 3279
3281 3280 # fit = scipy.stats.linregress(numpy.array([-2,-1,1,2])*timeInterval, numpy.array([phaseLagN2s[i],phaseLagN1s[i],phaseLag1s[i],phaseLag2s[i]]))
3282 3281 for j in range(numPairs):
3283 3282 fit = stats.linregress(time, angAllCCF[j,:])
3284 3283 slopes[j] = fit[0]
3285 3284
3286 3285 #Remove Outlier
3287 3286 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3288 3287 # slopes = numpy.delete(slopes,indOut)
3289 3288 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3290 3289 # slopes = numpy.delete(slopes,indOut)
3291 3290
3292 3291 radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq)
3293 3292 radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq)
3294 3293 meteorAux[-2] = radialError
3295 3294 meteorAux[-3] = radialVelocity
3296 3295
3297 3296 #Setting Error
3298 3297 #Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s
3299 3298 if numpy.abs(radialVelocity) > 200:
3300 3299 meteorAux[-1] = 15
3301 3300 #Number 12: Poor fit to CCF variation for estimation of radial drift velocity
3302 3301 elif radialError > radialStdThresh:
3303 3302 meteorAux[-1] = 12
3304 3303
3305 3304 listMeteors1.append(meteorAux)
3306 3305 return listMeteors1
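# Worked example (standalone, hypothetical slope): for the 30 MHz case above, lambda = c/freq
# = 10 m, so a mean CCF phase slope of 20 rad/s maps to a radial velocity of about -15.9 m/s.
import numpy
c, freq = 3e8, 30e6
slope = 20.0
radialVelocity = -slope*(0.25/numpy.pi)*(c/freq)    # = -slope*lambda/(4*pi)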
3307 3306
3308 3307 def __setNewArrays(self, listMeteors, date, heiRang):
3309 3308
3310 3309 #New arrays
3311 3310 arrayMeteors = numpy.array(listMeteors)
3312 3311 arrayParameters = numpy.zeros((len(listMeteors), 13))
3313 3312
3314 3313 #Date inclusion
3315 3314 # date = re.findall(r'\((.*?)\)', date)
3316 3315 # date = date[0].split(',')
3317 3316 # date = map(int, date)
3318 3317 #
3319 3318 # if len(date)<6:
3320 3319 # date.append(0)
3321 3320 #
3322 3321 # date = [date[0]*10000 + date[1]*100 + date[2], date[3]*10000 + date[4]*100 + date[5]]
3323 3322 # arrayDate = numpy.tile(date, (len(listMeteors), 1))
3324 3323 arrayDate = numpy.tile(date, (len(listMeteors)))
3325 3324
3326 3325 #Meteor array
3327 3326 # arrayMeteors[:,0] = heiRang[arrayMeteors[:,0].astype(int)]
3328 3327 # arrayMeteors = numpy.hstack((arrayDate, arrayMeteors))
3329 3328
3330 3329 #Parameters Array
3331 3330 arrayParameters[:,0] = arrayDate #Date
3332 3331 arrayParameters[:,1] = heiRang[arrayMeteors[:,0].astype(int)] #Range
3333 3332 arrayParameters[:,6:8] = arrayMeteors[:,-3:-1] #Radial velocity and its error
3334 3333 arrayParameters[:,8:12] = arrayMeteors[:,7:11] #Phases
3335 3334 arrayParameters[:,-1] = arrayMeteors[:,-1] #Error
3336 3335
3337 3336
3338 3337 return arrayParameters
3339 3338
3340 3339 class CorrectSMPhases(Operation):
3341 3340
3342 3341 def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None):
3343 3342
3344 3343 arrayParameters = dataOut.data_param
3345 3344 pairsList = []
3346 3345 pairx = (0,1)
3347 3346 pairy = (2,3)
3348 3347 pairsList.append(pairx)
3349 3348 pairsList.append(pairy)
3350 3349 jph = numpy.zeros(4)
3351 3350
3352 3351 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
3353 3352 # arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
3354 3353 arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + phaseOffsets)))
3355 3354
3356 3355 meteorOps = SMOperations()
3357 3356 if channelPositions is None:
3358 3357 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3359 3358 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3360 3359
3361 3360 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3362 3361 h = (hmin,hmax)
3363 3362
3364 3363 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
3365 3364
3366 3365 dataOut.data_param = arrayParameters
3367 3366 return
3368 3367
3369 3368 class SMPhaseCalibration(Operation):
3370 3369
3371 3370 __buffer = None
3372 3371
3373 3372 __initime = None
3374 3373
3375 3374 __dataReady = False
3376 3375
3377 3376 __isConfig = False
3378 3377
3379 3378 def __checkTime(self, currentTime, initTime, paramInterval, outputInterval):
3380 3379
3381 3380 dataTime = currentTime + paramInterval
3382 3381 deltaTime = dataTime - initTime
3383 3382
3384 3383 if deltaTime >= outputInterval or deltaTime < 0:
3385 3384 return True
3386 3385
3387 3386 return False
3388 3387
3389 3388 def __getGammas(self, pairs, d, phases):
3390 3389 gammas = numpy.zeros(2)
3391 3390
3392 3391 for i in range(len(pairs)):
3393 3392
3394 3393 pairi = pairs[i]
3395 3394
3396 3395 phip3 = phases[:,pairi[0]]
3397 3396 d3 = d[pairi[0]]
3398 3397 phip2 = phases[:,pairi[1]]
3399 3398 d2 = d[pairi[1]]
3400 3399 #Calculating gamma
3401 3400 # jdcos = alp1/(k*d1)
3402 3401 # jgamma = numpy.angle(numpy.exp(1j*(d0*alp1/d1 - alp0)))
3403 3402 jgamma = -phip2*d3/d2 - phip3
3404 3403 jgamma = numpy.angle(numpy.exp(1j*jgamma))
3405 3404 # jgamma[jgamma>numpy.pi] -= 2*numpy.pi
3406 3405 # jgamma[jgamma<-numpy.pi] += 2*numpy.pi
3407 3406
3408 3407 #Revised distribution
3409 3408 jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi))
3410 3409
3411 3410 #Histogram
3412 3411 nBins = 64
3413 3412 rmin = -0.5*numpy.pi
3414 3413 rmax = 0.5*numpy.pi
3415 3414 phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax))
3416 3415
3417 3416 meteorsY = phaseHisto[0]
3418 3417 phasesX = phaseHisto[1][:-1]
3419 3418 width = phasesX[1] - phasesX[0]
3420 3419 phasesX += width/2
3421 3420
3422 3421 #Gaussian approximation
3423 3422 bpeak = meteorsY.argmax()
3424 3423 peak = meteorsY.max()
3425 3424 jmin = bpeak - 5
3426 3425 jmax = bpeak + 5 + 1
3427 3426
3428 3427 if jmin<0:
3429 3428 jmin = 0
3430 3429 jmax = 6
3431 3430 elif jmax > meteorsY.size:
3432 3431 jmin = meteorsY.size - 6
3433 3432 jmax = meteorsY.size
3434 3433
3435 3434 x0 = numpy.array([peak,bpeak,50])
3436 3435 coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax]))
3437 3436
3438 3437 #Gammas
3439 3438 gammas[i] = coeff[0][1]
3440 3439
3441 3440 return gammas
3442 3441
3443 3442 def __residualFunction(self, coeffs, y, t):
3444 3443
3445 3444 return y - self.__gauss_function(t, coeffs)
3446 3445
3447 3446 def __gauss_function(self, t, coeffs):
3448 3447
3449 3448 return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2)
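# Illustrative sketch (standalone): __getGammas above locates the peak of the phase histogram
# by least-squares fitting the Gaussian defined in __gauss_function. The data below are
# hypothetical.
import numpy
from scipy import optimize
gauss = lambda t, c: c[0]*numpy.exp(-0.5*((t - c[1])/c[2])**2)
t = numpy.linspace(-0.5*numpy.pi, 0.5*numpy.pi, 64)
y = gauss(t, [40.0, 0.2, 0.15]) + numpy.random.rand(64)
x0 = numpy.array([y.max(), t[y.argmax()], 0.3])
coeff = optimize.leastsq(lambda c, y, t: y - gauss(t, c), x0, args=(y, t))
gamma = coeff[0][1]                                 # fitted peak position (the gamma estimate)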
3450 3449
3451 3450 def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray):
3452 3451 meteorOps = SMOperations()
3453 3452 nchan = 4
3454 3453 pairx = pairsList[0] #x is 0
3455 3454 pairy = pairsList[1] #y is 1
3456 3455 center_xangle = 0
3457 3456 center_yangle = 0
3458 3457 range_angle = numpy.array([10*numpy.pi,numpy.pi,numpy.pi/2,numpy.pi/4])
3459 3458 ntimes = len(range_angle)
3460 3459
3461 3460 nstepsx = 20
3462 3461 nstepsy = 20
3463 3462
3464 3463 for iz in range(ntimes):
3465 3464 min_xangle = -range_angle[iz]/2 + center_xangle
3466 3465 max_xangle = range_angle[iz]/2 + center_xangle
3467 3466 min_yangle = -range_angle[iz]/2 + center_yangle
3468 3467 max_yangle = range_angle[iz]/2 + center_yangle
3469 3468
3470 3469 inc_x = (max_xangle-min_xangle)/nstepsx
3471 3470 inc_y = (max_yangle-min_yangle)/nstepsy
3472 3471
3473 3472 alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle
3474 3473 alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle
3475 3474 penalty = numpy.zeros((nstepsx,nstepsy))
3476 3475 jph_array = numpy.zeros((nchan,nstepsx,nstepsy))
3477 3476 jph = numpy.zeros(nchan)
3478 3477
3479 3478 # Iterations looking for the offset
3480 3479 for iy in range(int(nstepsy)):
3481 3480 for ix in range(int(nstepsx)):
3482 3481 d3 = d[pairsList[1][0]]
3483 3482 d2 = d[pairsList[1][1]]
3484 3483 d5 = d[pairsList[0][0]]
3485 3484 d4 = d[pairsList[0][1]]
3486 3485
3487 3486 alp2 = alpha_y[iy] #gamma 1
3488 3487 alp4 = alpha_x[ix] #gamma 0
3489 3488
3490 3489 alp3 = -alp2*d3/d2 - gammas[1]
3491 3490 alp5 = -alp4*d5/d4 - gammas[0]
3492 3491 # jph[pairy[1]] = alpha_y[iy]
3493 3492 # jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
3494 3493
3495 3494 # jph[pairx[1]] = alpha_x[ix]
3496 3495 # jph[pairx[0]] = -gammas[0] - alpha_x[ix]*d[pairx[1]]/d[pairx[0]]
3497 3496 jph[pairsList[0][1]] = alp4
3498 3497 jph[pairsList[0][0]] = alp5
3499 3498 jph[pairsList[1][0]] = alp3
3500 3499 jph[pairsList[1][1]] = alp2
3501 3500 jph_array[:,ix,iy] = jph
3502 3501 # d = [2.0,2.5,2.5,2.0]
3503 3502 # TODO: check that the meteors are read correctly
3504 3503 meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph)
3505 3504 error = meteorsArray1[:,-1]
3506 3505 ind1 = numpy.where(error==0)[0]
3507 3506 penalty[ix,iy] = ind1.size
3508 3507
3509 3508 i,j = numpy.unravel_index(penalty.argmax(), penalty.shape)
3510 3509 phOffset = jph_array[:,i,j]
3511 3510
3512 3511 center_xangle = phOffset[pairx[1]]
3513 3512 center_yangle = phOffset[pairy[1]]
3514 3513
3515 3514 phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j]))
3516 3515 phOffset = phOffset*180/numpy.pi
3517 3516 return phOffset
3518 3517
3519 3518
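# For reference: __getPhases is a coarse-to-fine grid search. On each of the four passes the
# search window (range_angle = 10*pi, pi, pi/2, pi/4) shrinks and is re-centred on the best
# offsets found in the previous pass; "best" is the 20x20 grid point whose trial offsets (jph)
# leave the largest number of meteors with error == 0 after getMeteorParams. The winning offsets
# are wrapped to (-pi, pi] and returned in degrees.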
3520 3519 def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1):
3521 3520
3522 3521 dataOut.flagNoData = True
3523 3522 self.__dataReady = False
3524 3523 dataOut.outputInterval = nHours*3600
3525 3524
3526 3525 if self.__isConfig == False:
3527 3526 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
3528 3527 #Get Initial LTC time
3529 3528 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
3530 3529 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
3531 3530
3532 3531 self.__isConfig = True
3533 3532
3534 3533 if self.__buffer is None:
3535 3534 self.__buffer = dataOut.data_param.copy()
3536 3535
3537 3536 else:
3538 3537 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
3539 3538
3540 3539 self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
3541 3540
3542 3541 if self.__dataReady:
3543 3542 dataOut.utctimeInit = self.__initime
3544 3543 self.__initime += dataOut.outputInterval #to erase time offset
3545 3544
3546 3545 freq = dataOut.frequency
3547 3546 c = dataOut.C #m/s
3548 3547 lamb = c/freq
3549 3548 k = 2*numpy.pi/lamb
3550 3549 azimuth = 0
3551 3550 h = (hmin, hmax)
3552 3551 # pairs = ((0,1),(2,3)) #Estrella
3553 3552 # pairs = ((1,0),(2,3)) #T
3554 3553
3555 3554 if channelPositions is None:
3556 3555 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3557 3556 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3558 3557 meteorOps = SMOperations()
3559 3558 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3560 3559
3561 3560 #Checking correct order of pairs
3562 3561 pairs = []
3563 3562 if distances[1] > distances[0]:
3564 3563 pairs.append((1,0))
3565 3564 else:
3566 3565 pairs.append((0,1))
3567 3566
3568 3567 if distances[3] > distances[2]:
3569 3568 pairs.append((3,2))
3570 3569 else:
3571 3570 pairs.append((2,3))
3572 3571 # distances1 = [-distances[0]*lamb, distances[1]*lamb, -distances[2]*lamb, distances[3]*lamb]
3573 3572
3574 3573 meteorsArray = self.__buffer
3575 3574 error = meteorsArray[:,-1]
3576 3575 boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14)
3577 3576 ind1 = numpy.where(boolError)[0]
3578 3577 meteorsArray = meteorsArray[ind1,:]
3579 3578 meteorsArray[:,-1] = 0
3580 3579 phases = meteorsArray[:,8:12]
3581 3580
3582 3581 #Calculate Gammas
3583 3582 gammas = self.__getGammas(pairs, distances, phases)
3584 3583 # gammas = numpy.array([-21.70409463,45.76935864])*numpy.pi/180
3585 3584 #Calculate Phases
3586 3585 phasesOff = self.__getPhases(azimuth, h, pairs, distances, gammas, meteorsArray)
3587 3586 phasesOff = phasesOff.reshape((1,phasesOff.size))
3588 3587 dataOut.data_output = -phasesOff
3589 3588 dataOut.flagNoData = False
3590 3589 self.__buffer = None
3591 3590
3592 3591
3593 3592 return
3594 3593
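# Summary of SMPhaseCalibration.run, for reference: meteor parameters are buffered for nHours
# (outputInterval = nHours*3600 s); once the interval is complete, detections with error codes
# in {0, 3, 4, 13, 14} are kept, the baseline pairs are ordered so that the channel with the
# larger spacing comes first, gammas are estimated from the folded phase histograms, and the
# per-channel phase offsets (in degrees, sign inverted) are written to dataOut.data_output.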
3595 3594 class SMOperations():
3596 3595
3597 3596 def __init__(self):
3598 3597
3599 3598 return
3600 3599
3601 3600 def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph):
3602 3601
3603 3602 arrayParameters = arrayParameters0.copy()
3604 3603 hmin = h[0]
3605 3604 hmax = h[1]
3606 3605
3607 3606 #Calculate AOA (Error N 3, 4)
3608 3607 #JONES ET AL. 1998
3609 3608 AOAthresh = numpy.pi/8
3610 3609 error = arrayParameters[:,-1]
3611 3610 phases = -arrayParameters[:,8:12] + jph
3612 3611 # phases = numpy.unwrap(phases)
3613 3612 arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth)
3614 3613
3615 3614 #Calculate Heights (Error N 13 and 14)
3616 3615 error = arrayParameters[:,-1]
3617 3616 Ranges = arrayParameters[:,1]
3618 3617 zenith = arrayParameters[:,4]
3619 3618 arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax)
3620 3619
3621 3620 #----------------------- Get Final data ------------------------------------
3622 3621 # error = arrayParameters[:,-1]
3623 3622 # ind1 = numpy.where(error==0)[0]
3624 3623 # arrayParameters = arrayParameters[ind1,:]
3625 3624
3626 3625 return arrayParameters
3627 3626
3628 3627 def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth):
3629 3628
3630 3629 arrayAOA = numpy.zeros((phases.shape[0],3))
3631 3630 cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList,directions)
3632 3631
3633 3632 arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3634 3633 cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3635 3634 arrayAOA[:,2] = cosDirError
3636 3635
3637 3636 azimuthAngle = arrayAOA[:,0]
3638 3637 zenithAngle = arrayAOA[:,1]
3639 3638
3640 3639 #Setting Error
3641 3640 indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0]
3642 3641 error[indError] = 0
3643 3642 #Number 3: AOA not feasible
3644 3643 indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3645 3644 error[indInvalid] = 3
3646 3645 #Number 4: Large difference in AOAs obtained from different antenna baselines
3647 3646 indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3648 3647 error[indInvalid] = 4
3649 3648 return arrayAOA, error
3650 3649
3651 3650 def __getDirectionCosines(self, arrayPhase, pairsList, distances):
3652 3651
3653 3652 #Initializing some variables
3654 3653 ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3655 3654 ang_aux = ang_aux.reshape(1,ang_aux.size)
3656 3655
3657 3656 cosdir = numpy.zeros((arrayPhase.shape[0],2))
3658 3657 cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3659 3658
3660 3659
3661 3660 for i in range(2):
3662 3661 ph0 = arrayPhase[:,pairsList[i][0]]
3663 3662 ph1 = arrayPhase[:,pairsList[i][1]]
3664 3663 d0 = distances[pairsList[i][0]]
3665 3664 d1 = distances[pairsList[i][1]]
3666 3665
3667 3666 ph0_aux = ph0 + ph1
3668 3667 ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux))
3669 3668 # ph0_aux[ph0_aux > numpy.pi] -= 2*numpy.pi
3670 3669 # ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
3671 3670 #First Estimation
3672 3671 cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1))
3673 3672
3674 3673 #Most-Accurate Second Estimation
3675 3674 phi1_aux = ph0 - ph1
3676 3675 phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3677 3676 #Direction Cosine 1
3678 3677 cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1))
3679 3678
3680 3679 #Searching the correct Direction Cosine
3681 3680 cosdir0_aux = cosdir0[:,i]
3682 3681 cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3683 3682 #Minimum Distance
3684 3683 cosDiff = (cosdir1 - cosdir0_aux)**2
3685 3684 indcos = cosDiff.argmin(axis = 1)
3686 3685 #Saving Value obtained
3687 3686 cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3688 3687
3689 3688 return cosdir0, cosdir
3690 3689
3691 3690 def __calculateAOA(self, cosdir, azimuth):
3692 3691 cosdirX = cosdir[:,0]
3693 3692 cosdirY = cosdir[:,1]
3694 3693
3695 3694 zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3696 3695 azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth#0 deg north, 90 deg east
3697 3696 angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3698 3697
3699 3698 return angles
3700 3699
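# Quick numeric check of __calculateAOA: for cosdirX = 0.5, cosdirY = 0.0 and azimuth = 0,
# zenithAngle = arccos(sqrt(1 - 0.25)) ~ 30 deg and azimuthAngle = arctan2(0.5, 0.0)*180/pi = 90 deg,
# i.e. due east, with 0 deg pointing north as noted in the comment above.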
3701 3700 def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3702 3701
3703 3702 Ramb = 375 #Ramb = c/(2*PRF)
3704 3703 Re = 6371 #Earth Radius
3705 3704 heights = numpy.zeros(Ranges.shape)
3706 3705
3707 3706 R_aux = numpy.array([0,1,2])*Ramb
3708 3707 R_aux = R_aux.reshape(1,R_aux.size)
3709 3708
3710 3709 Ranges = Ranges.reshape(Ranges.size,1)
3711 3710
3712 3711 Ri = Ranges + R_aux
3713 3712 hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3714 3713
3715 3714 #Check if there is a height between 70 and 110 km
3716 3715 h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3717 3716 ind_h = numpy.where(h_bool == 1)[0]
3718 3717
3719 3718 hCorr = hi[ind_h, :]
3720 3719 ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3721 3720
3722 3721 hCorr = hi[ind_hCorr][:len(ind_h)]
3723 3722 heights[ind_h] = hCorr
3724 3723
3725 3724 #Setting Error
3726 3725 #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
3727 3726 #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3728 3727 indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0]
3729 3728 error[indError] = 0
3730 3729 indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3731 3730 error[indInvalid2] = 14
3732 3731 indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3733 3732 error[indInvalid1] = 13
3734 3733
3735 3734 return heights, error
3736 3735
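# For reference: Ramb = 375 km is c/(2*PRF) for a PRF of 400 Hz, so each measured range R has
# aliased candidates Ri = R, R + 375, R + 750 km. Rough example: R = 120 km at zenith = 40 deg
# gives candidate heights of roughly 92 km, 390 km and 690 km, so only the first lies inside
# (minHeight, maxHeight) = (70, 110) km and the echo is unambiguous (h_bool == 1); zero candidates
# in range flags error 13 and more than one flags error 14.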
3737 3736 def getPhasePairs(self, channelPositions):
3738 3737 chanPos = numpy.array(channelPositions)
3739 3738 listOper = list(itertools.combinations(list(range(5)),2))
3740 3739
3741 3740 distances = numpy.zeros(4)
3742 3741 axisX = []
3743 3742 axisY = []
3744 3743 distX = numpy.zeros(3)
3745 3744 distY = numpy.zeros(3)
3746 3745 ix = 0
3747 3746 iy = 0
3748 3747
3749 3748 pairX = numpy.zeros((2,2))
3750 3749 pairY = numpy.zeros((2,2))
3751 3750
3752 3751 for i in range(len(listOper)):
3753 3752 pairi = listOper[i]
3754 3753
3755 3754 posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:])
3756 3755
3757 3756 if posDif[0] == 0:
3758 3757 axisY.append(pairi)
3759 3758 distY[iy] = posDif[1]
3760 3759 iy += 1
3761 3760 elif posDif[1] == 0:
3762 3761 axisX.append(pairi)
3763 3762 distX[ix] = posDif[0]
3764 3763 ix += 1
3765 3764
3766 3765 for i in range(2):
3767 3766 if i==0:
3768 3767 dist0 = distX
3769 3768 axis0 = axisX
3770 3769 else:
3771 3770 dist0 = distY
3772 3771 axis0 = axisY
3773 3772
3774 3773 side = numpy.argsort(dist0)[:-1]
3775 3774 axis0 = numpy.array(axis0)[side,:]
3776 3775 chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0])
3777 3776 axis1 = numpy.unique(numpy.reshape(axis0,4))
3778 3777 side = axis1[axis1 != chanC]
3779 3778 diff1 = chanPos[chanC,i] - chanPos[side[0],i]
3780 3779 diff2 = chanPos[chanC,i] - chanPos[side[1],i]
3781 3780 if diff1<0:
3782 3781 chan2 = side[0]
3783 3782 d2 = numpy.abs(diff1)
3784 3783 chan1 = side[1]
3785 3784 d1 = numpy.abs(diff2)
3786 3785 else:
3787 3786 chan2 = side[1]
3788 3787 d2 = numpy.abs(diff2)
3789 3788 chan1 = side[0]
3790 3789 d1 = numpy.abs(diff1)
3791 3790
3792 3791 if i==0:
3793 3792 chanCX = chanC
3794 3793 chan1X = chan1
3795 3794 chan2X = chan2
3796 3795 distances[0:2] = numpy.array([d1,d2])
3797 3796 else:
3798 3797 chanCY = chanC
3799 3798 chan1Y = chan1
3800 3799 chan2Y = chan2
3801 3800 distances[2:4] = numpy.array([d1,d2])
3802 3801 # axisXsides = numpy.reshape(axisX[ix,:],4)
3803 3802 #
3804 3803 # channelCentX = int(numpy.intersect1d(pairX[0,:], pairX[1,:])[0])
3805 3804 # channelCentY = int(numpy.intersect1d(pairY[0,:], pairY[1,:])[0])
3806 3805 #
3807 3806 # ind25X = numpy.where(pairX[0,:] != channelCentX)[0][0]
3808 3807 # ind20X = numpy.where(pairX[1,:] != channelCentX)[0][0]
3809 3808 # channel25X = int(pairX[0,ind25X])
3810 3809 # channel20X = int(pairX[1,ind20X])
3811 3810 # ind25Y = numpy.where(pairY[0,:] != channelCentY)[0][0]
3812 3811 # ind20Y = numpy.where(pairY[1,:] != channelCentY)[0][0]
3813 3812 # channel25Y = int(pairY[0,ind25Y])
3814 3813 # channel20Y = int(pairY[1,ind20Y])
3815 3814
3816 3815 # pairslist = [(channelCentX, channel25X),(channelCentX, channel20X),(channelCentY,channel25Y),(channelCentY, channel20Y)]
3817 3816 pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
3818 3817
3819 3818 return pairslist, distances
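# Worked example (done by hand, so treat as approximate): for the 'Estrella' geometry
# channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)], channel 2 is the common centre of
# both axes and the two shortest baselines per axis are kept, giving
# pairslist = [(2,4), (2,0), (2,3), (2,1)] and distances = [2.0, 2.5, 2.0, 2.5]
# (the positions appear to be expressed in wavelengths).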
3820 3819 # def __getAOA(self, phases, pairsList, error, AOAthresh, azimuth):
3821 3820 #
3822 3821 # arrayAOA = numpy.zeros((phases.shape[0],3))
3823 3822 # cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList)
3824 3823 #
3825 3824 # arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3826 3825 # cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3827 3826 # arrayAOA[:,2] = cosDirError
3828 3827 #
3829 3828 # azimuthAngle = arrayAOA[:,0]
3830 3829 # zenithAngle = arrayAOA[:,1]
3831 3830 #
3832 3831 # #Setting Error
3833 3832 # #Number 3: AOA not feasible
3834 3833 # indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3835 3834 # error[indInvalid] = 3
3836 3835 # #Number 4: Large difference in AOAs obtained from different antenna baselines
3837 3836 # indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3838 3837 # error[indInvalid] = 4
3839 3838 # return arrayAOA, error
3840 3839 #
3841 3840 # def __getDirectionCosines(self, arrayPhase, pairsList):
3842 3841 #
3843 3842 # #Initializing some variables
3844 3843 # ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3845 3844 # ang_aux = ang_aux.reshape(1,ang_aux.size)
3846 3845 #
3847 3846 # cosdir = numpy.zeros((arrayPhase.shape[0],2))
3848 3847 # cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3849 3848 #
3850 3849 #
3851 3850 # for i in range(2):
3852 3851 # #First Estimation
3853 3852 # phi0_aux = arrayPhase[:,pairsList[i][0]] + arrayPhase[:,pairsList[i][1]]
3854 3853 # #Dealias
3855 3854 # indcsi = numpy.where(phi0_aux > numpy.pi)
3856 3855 # phi0_aux[indcsi] -= 2*numpy.pi
3857 3856 # indcsi = numpy.where(phi0_aux < -numpy.pi)
3858 3857 # phi0_aux[indcsi] += 2*numpy.pi
3859 3858 # #Direction Cosine 0
3860 3859 # cosdir0[:,i] = -(phi0_aux)/(2*numpy.pi*0.5)
3861 3860 #
3862 3861 # #Most-Accurate Second Estimation
3863 3862 # phi1_aux = arrayPhase[:,pairsList[i][0]] - arrayPhase[:,pairsList[i][1]]
3864 3863 # phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3865 3864 # #Direction Cosine 1
3866 3865 # cosdir1 = -(phi1_aux + ang_aux)/(2*numpy.pi*4.5)
3867 3866 #
3868 3867 # #Searching the correct Direction Cosine
3869 3868 # cosdir0_aux = cosdir0[:,i]
3870 3869 # cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3871 3870 # #Minimum Distance
3872 3871 # cosDiff = (cosdir1 - cosdir0_aux)**2
3873 3872 # indcos = cosDiff.argmin(axis = 1)
3874 3873 # #Saving Value obtained
3875 3874 # cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3876 3875 #
3877 3876 # return cosdir0, cosdir
3878 3877 #
3879 3878 # def __calculateAOA(self, cosdir, azimuth):
3880 3879 # cosdirX = cosdir[:,0]
3881 3880 # cosdirY = cosdir[:,1]
3882 3881 #
3883 3882 # zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3884 3883 # azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east
3885 3884 # angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3886 3885 #
3887 3886 # return angles
3888 3887 #
3889 3888 # def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3890 3889 #
3891 3890 # Ramb = 375 #Ramb = c/(2*PRF)
3892 3891 # Re = 6371 #Earth Radius
3893 3892 # heights = numpy.zeros(Ranges.shape)
3894 3893 #
3895 3894 # R_aux = numpy.array([0,1,2])*Ramb
3896 3895 # R_aux = R_aux.reshape(1,R_aux.size)
3897 3896 #
3898 3897 # Ranges = Ranges.reshape(Ranges.size,1)
3899 3898 #
3900 3899 # Ri = Ranges + R_aux
3901 3900 # hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3902 3901 #
3903 3902 # #Check if there is a height between 70 and 110 km
3904 3903 # h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3905 3904 # ind_h = numpy.where(h_bool == 1)[0]
3906 3905 #
3907 3906 # hCorr = hi[ind_h, :]
3908 3907 # ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3909 3908 #
3910 3909 # hCorr = hi[ind_hCorr]
3911 3910 # heights[ind_h] = hCorr
3912 3911 #
3913 3912 # #Setting Error
3914 3913 # #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
3915 3914 # #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3916 3915 #
3917 3916 # indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3918 3917 # error[indInvalid2] = 14
3919 3918 # indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3920 3919 # error[indInvalid1] = 13
3921 3920 #
3922 3921 # return heights, error
3923 3922
3924 3923
3925 3924 class WeatherRadar(Operation):
3926 3925 '''
3927 3926 Function that implements weather radar operations.
3928 3927 Input:
3929 3928 Output:
3930 3929 Parameters affected:
3931 3930
3932 3931 Conversion to Watts
3933 3932 Reference:
3934 3933 https://www.tek.com/en/blog/calculating-rf-power-iq-samples
3935 3934
3936 3935 data_param = (nCh, 8, nHeis)
3937 3936 S, V, W, SNR, Z, D, P, R
3938 3937 Power, Velocity, Spectral width, SNR, Reflectivity, Differential reflectivity, PHI DP, RHO HV
3939 3938 '''
3940 3939 isConfig = False
3941 3940 variableList = None
3942 3941
3943 3942 def __init__(self):
3944 3943 Operation.__init__(self)
3945 3944
3946 3945 def setup(self,dataOut,variableList= None,Pt=0,Gt=0,Gr=0,Glna=0,lambda_=0, aL=0,
3947 3946 tauW= 0,thetaT=0,thetaR=0,Km =0,CR_Flag=False,min_index=0,sesgoZD=0):
3948 3947
3949 3948 self.nCh = dataOut.nChannels
3950 3949 self.nHeis = dataOut.nHeights
3951 3950 self.min_index= min_index
3952 3951 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
3953 3952 #self.Range = numpy.arange(dataOut.nHeights)*deltaHeight + dataOut.heightList[0]+min_index*deltaHeight
3954 3953 self.Range = dataOut.heightList
3955 3954 self.Range = self.Range.reshape(1,self.nHeis)
3956 3955 self.Range = numpy.tile(self.Range,[self.nCh,1])
3957 3956 '''-----------1 Radar Constant----------'''
3958 3957 self.Pt = Pt # Pmax = 200 W x DC = (0.2 us / 400 us)
3959 3958 self.Gt = Gt # 38 dB
3960 3959 self.Gr = Gr # 38 dB
3961 3960 self.Glna = Glna # 60 dB
3962 3961 self.lambda_ = lambda_ # 3.2 cm = 0.032 m
3963 3962 self.aL = aL # Losses
3964 3963 self.tauW = tauW # pulse width, 0.2 us (short pulse)
3965 3964 self.thetaT = thetaT # 1.8 deg -- 0.0314 rad
3966 3965 self.thetaR = thetaR # 1.8 deg -- 0.0314 rad
3967 3966 self.Km = Km
3968 3967 self.CR_Flag = CR_Flag
3969 3968 self.sesgoZD = sesgoZD
3970 3969 Numerator = ((4*numpy.pi)**3 * aL**2 * 16 *numpy.log(2)*(10**18))
3971 3970 Denominator = (Pt *(10**(Gt/10.0))*(10**(Gr/10.0))*(10**(Glna/10.0))* lambda_**2 * SPEED_OF_LIGHT * tauW * numpy.pi*thetaT*thetaR)
3972 3971 self.RadarConstant = Numerator/Denominator
3973 3972 self.variableList = variableList
3974 3973 if self.variableList== None:
3975 3974 self.variableList= ['Z','D','R','P']
3976 3975
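# For reference, the constant assembled in setup() has the form of the standard weather-radar
# equation constant:
#   RadarConstant = (4*pi)**3 * aL**2 * 16*ln(2) * 1e18 /
#                   (Pt * 10**(Gt/10) * 10**(Gr/10) * 10**(Glna/10) * lambda_**2 * c * tauW
#                    * pi * thetaT * thetaR)
# so that Ze ~ RadarConstant * Pr * r**2 in getReflectividad_D below; the 1e18 factor presumably
# rescales the reflectivity factor to mm**6/m**3.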
3977 3976 def setMoments(self, dataOut):
3978 3977 # S, V, W, SNR, Z, D, P, R
3979 3978 type = dataOut.inputUnit
3980 3979 nCh = dataOut.nChannels
3981 3980 nHeis = dataOut.nHeights
3982 3981 data_param = numpy.zeros((nCh, 8, nHeis))
3983 3982 if type == "Voltage":
3984 3983 factor = 1
3985 3984 data_param[:,0,:] = dataOut.dataPP_POW/(factor)#dataOut.dataPP_POWER/(factor)
3986 3985 data_param[:,1,:] = dataOut.dataPP_DOP
3987 3986 data_param[:,2,:] = dataOut.dataPP_WIDTH
3988 3987 data_param[:,3,:] = dataOut.dataPP_SNR
3988
3989 3989 if type == "Spectra":
3990 3990 factor = dataOut.normFactor
3991 3991 data_param[:,0,:] = dataOut.data_pow/(factor)
3992 3992 data_param[:,1,:] = dataOut.data_dop
3993 3993 data_param[:,2,:] = dataOut.data_width
3994 3994 data_param[:,3,:] = dataOut.data_snr
3995
3995 3996 return data_param
3996 3997
3997 3998 def getCoeficienteCorrelacionROhv_R(self,dataOut):
3998 3999 type = dataOut.inputUnit
3999 4000 nHeis = dataOut.nHeights
4000 4001 data_RhoHV_R = numpy.zeros((nHeis))
4001 4002 if type == "Voltage":
4002 4003 avgcoherenceComplex= dataOut.dataPP_CCF
4003 4004 data_RhoHV_R = numpy.abs(avgcoherenceComplex)
4004 4005 if type == "Spectra":
4005 4006 data_RhoHV_R = dataOut.getCoherence()
4006 4007
4007 4008 return data_RhoHV_R
4008 4009
4009 4010 def getFasediferencialPhiD_P(self,dataOut,phase= True):
4010 4011 type = dataOut.inputUnit
4011 4012 nHeis = dataOut.nHeights
4012 4013 data_PhiD_P = numpy.zeros((nHeis))
4013 4014 if type == "Voltage":
4014 4015 avgcoherenceComplex= dataOut.dataPP_CCF
4015 4016 if phase:
4016 4017 data_PhiD_P = numpy.arctan2(avgcoherenceComplex.imag,
4017 4018 avgcoherenceComplex.real) * 180 / numpy.pi
4018 4019 if type == "Spectra":
4019 4020 data_PhiD_P = dataOut.getCoherence(phase = phase)
4020 4021
4021 4022 return data_PhiD_P
4022 4023
4023 4024 def getReflectividad_D(self,dataOut,type):
4024 4025 '''----------------------------- Radar Power - Signal S -----------------------------'''
4025 4026
4026 4027 Pr = dataOut.data_param[:,0,:]
4028
4027 4029 '''---------------------------- Noise and threshold computation for Reflectivity ---------'''
4028 4030
4029 4031 Pr = Pr/100.0 # Conversion to Watts
4030 4032 '''-----------2 Radar Reflectivity and Reflectivity Factor------'''
4031 4033 if not self.CR_Flag:
4032 4034 self.n_radar = numpy.zeros((self.nCh,self.nHeis))
4033 4035 self.Z_radar = numpy.zeros((self.nCh,self.nHeis))
4034 4036 for R in range(self.nHeis):
4035 4037 self.n_radar[:,R] = self.RadarConstant*Pr[:,R]* (self.Range[:,R]*(10**3))**2
4036 4038
4037 4039 self.Z_radar[:,R] = self.n_radar[:,R]* self.lambda_**4/( numpy.pi**5 * self.Km**2)
4038 4040
4039 4041 '''----------- Equivalent Reflectivity Factor, lambda_ < 10 cm (lambda_ = 3.2 cm) -------'''
4040 4042 Zeh = self.Z_radar
4041 4043
4042 4044 if self.Pt<0.3:
4043 4045 factor=1
4044 4046 else:
4045 4047 factor=28#23.072
4046 4048
4047 4049 dBZeh = 10*numpy.log10(Zeh) + factor
4048 4050 else:
4049 4051 self.Z_radar = numpy.zeros((self.nCh,self.nHeis))
4050 4052
4051 4053 for R in range(self.nHeis):
4052 4054 self.Z_radar[0,R]= 10*numpy.log10(Pr[0,R])+20*numpy.log10(self.Range[0,R]*10**3)+67.41-10*numpy.log10(self.Pt)-59-10*numpy.log10(self.tauW)#63.58,65.26,68.91
4053 4055 #self.Z_radar[1,R]= 10*numpy.log10(Pr[1,R])+20*numpy.log10(self.Range[1,R]*10**3)+67.17-10*numpy.log10(self.Pt)-59-10*numpy.log10(self.tauW)#64.26,65.79,62.33
4054 4056 dBZeh= self.Z_radar
4055 4057
4056 4058 if type=='N':
4057 4059 return dBZeh
4058 4060 elif type=='D':
4059 4061 Zdb_D = dBZeh[0] - dBZeh[1]- self.sesgoZD
4060 4062 return Zdb_D
4061 4063
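# For reference, the non-CR branch above computes eta = RadarConstant * Pr * r**2 (r in metres),
# Ze = eta * lambda_**4 / (pi**5 * Km**2) and dBZ = 10*log10(Ze) + factor, where 'factor' is an
# empirical offset chosen from the transmit power; ZDR is then dBZ_H - dBZ_V - sesgoZD, with
# sesgoZD a user-supplied ZDR bias. The CR branch computes dBZ directly from a calibration
# expression in dB.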
4062 4064 def getRadialVelocity_V(self,dataOut):
4063 4065 velRadial_V = dataOut.data_param[:,1,:]
4064 4066 return velRadial_V
4065 4067
4066 4068 def getAnchoEspectral_W(self,dataOut):
4067 4069 Sigmav_W = dataOut.data_param[:,2,:]
4068 4070 return Sigmav_W
4069 4071
4070 4072
4071 4073 def run(self,dataOut,variableList=None,Pt=1.58,Gt=38.5,Gr=38.5,Glna=59.0,lambda_=0.032, aL=1,
4072 4074 tauW= 0.2,thetaT=0.0314,thetaR=0.0314,Km =0.93,CR_Flag=0,min_index=0,sesgoZD=0):
4073 4075 if not self.isConfig:
4074 4076 self.setup(dataOut= dataOut, variableList=variableList,Pt=Pt,Gt=Gt,Gr=Gr,Glna=Glna,lambda_=lambda_, aL=aL,
4075 4077 tauW= tauW,thetaT=thetaT,thetaR=thetaR,Km =Km,CR_Flag=CR_Flag,min_index=min_index,sesgoZD=sesgoZD)
4076 4078 self.isConfig = True
4077 4079
4078 4080 dataOut.data_param = self.setMoments(dataOut)
4079 4081
4080 4082 for i in range(len(self.variableList)):
4081 4083 if self.variableList[i] == 'Z':
4082 4084 dataOut.data_param[:,4,:] = self.getReflectividad_D(dataOut=dataOut,type='N')
4083 4085 if self.variableList[i] == 'D' and dataOut.nChannels>1:
4084 4086 dataOut.data_param[:,5,:] = self.getReflectividad_D(dataOut=dataOut,type='D')
4085 4087 if self.variableList[i] == 'P' and dataOut.nChannels>1:
4086 4088 dataOut.data_param[:,6,:] = self.getFasediferencialPhiD_P(dataOut=dataOut, phase=True)
4087 4089 if self.variableList[i] == 'R' and dataOut.nChannels>1:
4088 4090 dataOut.data_param[:,7,:] = self.getCoeficienteCorrelacionROhv_R(dataOut)
4089 4091
4090 4092 return dataOut
4091 4093
4092 4094 class PedestalInformation(Operation):
4093 4095
4094 4096 def __init__(self):
4095 4097 Operation.__init__(self)
4096 4098 self.filename = False
4097 4099 self.delay = 32
4098 4100 self.nTries = 3
4099 4101 self.nFiles = 5
4100 4102 self.flagAskMode = False
4101 4103
4102 4104 def find_file(self, timestamp):
4103 4105
4104 4106 dt = datetime.datetime.utcfromtimestamp(timestamp)
4105 4107 path = os.path.join(self.path, dt.strftime('%Y-%m-%dT%H-00-00'))
4106 4108
4107 4109 if not os.path.exists(path):
4108 4110 return False
4109 4111 fileList = glob.glob(os.path.join(path, '*.h5'))
4110 4112 fileList.sort()
4111 4113 return fileList
4112 4114
4113 4115 def find_next_file(self):
4114 4116
4115 4117 while True:
4116 4118 if self.utctime < self.utcfile:
4117 4119 self.flagNoData = True
4118 4120 break
4119 4121 self.flagNoData = False
4120 4122 file_size = len(self.fp['Data']['utc'])
4121 4123 if self.utctime < self.utcfile+file_size*self.interval:
4122 4124 break
4123 4125 dt = datetime.datetime.utcfromtimestamp(self.utcfile)
4124 4126 if dt.second > 0:
4125 4127 self.utcfile -= dt.second
4126 4128 self.utcfile += self.samples*self.interval
4127 4129 dt = datetime.datetime.utcfromtimestamp(self.utcfile)
4128 4130 path = os.path.join(self.path, dt.strftime('%Y-%m-%dT%H-00-00'))
4129 4131 self.filename = os.path.join(path, 'pos@{}.000.h5'.format(int(self.utcfile)))
4130 4132
4131 4133 for i in range(self.nFiles):
4132 4134 ok = False
4133 4135 for j in range(self.nTries):
4134 4136 ok = False
4135 4137 try:
4136 4138 if not os.path.exists(self.filename):
4137 4139 log.warning('Waiting {}s for position files...'.format(self.delay), self.name)
4138 4140 time.sleep(1)
4139 4141 continue
4140 4142 self.fp.close()
4141 4143 self.fp = h5py.File(self.filename, 'r')
4142 4144 self.ele = self.fp['Data']['ele_pos'][:]
4143 4145 self.azi = self.fp['Data']['azi_pos'][:] + 26.27 #+ self.heading
4144 4146 self.azi[self.azi>360] = self.azi[self.azi>360] - 360
4145 4147 log.log('Opening file: {}'.format(self.filename), self.name)
4146 4148 ok = True
4147 4149 break
4148 4150 except Exception as e:
4149 4151 log.warning('Waiting {}s for position file to be ready...'.format(self.delay), self.name)
4150 4152 time.sleep(self.delay)
4151 4153 continue
4152 4154 if ok:
4153 4155 break
4154 4156 log.warning('Trying next file...', self.name)
4155 4157 self.utcfile += self.samples*self.interval
4156 4158 dt = datetime.datetime.utcfromtimestamp(self.utcfile)
4157 4159 path = os.path.join(self.path, dt.strftime('%Y-%m-%dT%H-00-00'))
4158 4160 self.filename = os.path.join(path, 'pos@{}.000.h5'.format(int(self.utcfile)))
4159 4161 if not ok:
4160 4162 log.error('No new position files found in {}'.format(path))
4161 4163 raise IOError('No new position files found in {}'.format(path))
4162 4164
4163 4165 def get_values(self):
4164 4166
4165 4167 if self.flagNoData:
4166 4168 return numpy.nan, numpy.nan, numpy.nan #Should be self.mode?
4167 4169 else:
4168 4170 index = int((self.utctime-self.utcfile)/self.interval)
4169 4171 try:
4170 4172 return self.azi[index], self.ele[index], None
4171 4173 except:
4172 4174 return numpy.nan, numpy.nan, numpy.nan
4173 4175
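# For reference: position samples are indexed as (utctime - utcfile)/interval, and with the
# defaults used in run() (samples=1500, interval=0.04 s) each position file spans
# 1500 * 0.04 = 60 s, which is why find_next_file advances utcfile by samples*interval.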
4174 4176 def setup(self, dataOut, path, conf, samples, interval, mode, heading):
4175 4177
4176 4178 self.path = path
4177 4179 self.conf = conf
4178 4180 self.samples = samples
4179 4181 self.interval = interval
4180 4182 self.mode = mode
4181 4183 self.heading = heading
4182 4184 if mode is None:
4183 4185 self.flagAskMode = True
4184 4186 N = 0
4185 4187 while True:
4186 4188 if N == self.nTries+1:
4187 4189 log.error('No position files found in {}'.format(path), self.name)
4188 4190 raise IOError('No position files found in {}'.format(path))
4189 4191 filelist = self.find_file(dataOut.utctime)
4190 4192
4191 4193 if not filelist: # covers both False (no path yet) and an empty file list
4192 4194 N += 1
4193 4195 log.warning('Waiting {}s for position files...'.format(self.delay), self.name)
4194 4196 time.sleep(self.delay)
4195 4197 continue
4196 4198 self.filename = filelist[0]
4197 4199 try:
4198 4200 self.fp = h5py.File(self.filename, 'r')
4199 4201 self.utcfile = int(self.filename.split('/')[-1][4:14])
4200 4202
4201 4203 self.ele = self.fp['Data']['ele_pos'][:]
4202 4204 self.azi = self.fp['Data']['azi_pos'][:] + 26.27 + self.heading
4203 4205 self.azi[self.azi>360] = self.azi[self.azi>360] - 360
4204 4206 break
4205 4207 except:
4206 4208 log.warning('Waiting {}s for position file to be ready...'.format(self.delay), self.name)
4207 4209 time.sleep(self.delay)
4208 4210
4209 4211 def run(self, dataOut, path, conf=None, samples=1500, interval=0.04, time_offset=0, mode=None, heading=0):
4210 4212
4211 4213 if not self.isConfig:
4212 4214 self.setup(dataOut, path, conf, samples, interval, mode, heading)
4213 4215 self.isConfig = True
4214 4216
4215 4217 self.utctime = dataOut.utctime + time_offset
4216 4218
4217 4219 self.find_next_file()
4218 4220
4219 4221 az, el, scan = self.get_values()
4220 4222
4221 4223 dataOut.flagNoData = False
4222 4224 if numpy.isnan(az) or numpy.isnan(el) :
4223 4225 dataOut.flagNoData = True
4224 4226 return dataOut
4225 4227
4226 4228 dataOut.azimuth = round(az, 2)
4227 4229 dataOut.elevation = round(el, 2)
4228 4230 dataOut.mode_op = scan
4229 4231
4230 4232 return dataOut
4231 4233
4232 4234 class Block360(Operation):
4233 4235 '''
4234 4236 '''
4235 4237 isConfig = False
4236 4238 __profIndex = 0
4237 4239 __initime = None
4238 4240 __lastdatatime = None
4239 4241 __buffer = None
4240 4242 __dataReady = False
4241 4243 n = None
4242 4244 index = 0
4243 4245 mode = None
4244 4246
4245 4247 def __init__(self,**kwargs):
4246 4248 Operation.__init__(self,**kwargs)
4247 4249
4248 4250 def setup(self, dataOut, attr, angles,horario,heading,bottom):
4249 4251 '''
4250 4252 n = number of input PRFs
4251 4253 '''
4252 4254 self.__initime = None
4253 4255 self.__lastdatatime = 0
4254 4256 self.__dataReady = False
4255 4257 self.__buffer = 0
4256 4258 self.index = 0
4257 4259 self.attr = attr
4258 4260 self.__buffer = []
4259 4261 self.azi = []
4260 4262 self.ele = []
4261 4263 self.__noise = []
4262 4264 self.angles = angles
4263 4265 self.horario= horario
4264 4266 self.heading = heading
4265 4267 self.bottom = bottom
4266 4268
4267 4269 def putData(self, data, attr):
4268 4270 '''
4269 4271 Add a profile to the __buffer and increase the __profIndex by one
4270 4272 '''
4271 4273 tmp= getattr(data, attr)
4272 4274 self.__buffer.append(tmp)
4273 4275 self.azi.append(data.azimuth)
4274 4276 self.ele.append(data.elevation)
4275 self.__noise.append(data.dataPP_NOISE)
4277 try:
4278 #print("SHOW ------", type(data.dataPP_NOISE),data.dataPP_NOISE.shape,"value:",data.dataPP_NOISE)
4279 self.__noise.append(data.dataPP_NOISE)
4280 except:
4281 #print("SHOW ------", type(data.noise),data.noise.shape,"value:",data.noise)
4282 self.__noise.append(data.noise)
4276 4283 self.__profIndex += 1
4277 4284
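# The try/except above prefers the pulse-pair noise estimate (dataPP_NOISE) and appears to fall
# back to the generic data.noise attribute when the upstream chain does not provide dataPP_NOISE
# (e.g. when the input comes from the spectra path).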
4278 4285 def pushData(self, data, case_flag):
4279 4286 '''
4280 4287 '''
4281 4288
4282 4289 data_360 = numpy.array(self.__buffer).transpose(1, 2, 0, 3)
4283 4290 data_p = numpy.array(self.azi)
4284 4291 data_e = numpy.array(self.ele)
4285 4292 data_n = numpy.array(self.__noise)
4286 4293 n = self.__profIndex
4287 4294
4288 4295 self.__buffer = []
4289 4296 self.azi = []
4290 4297 self.ele = []
4291 4298 self.__noise = []
4292 4299 self.__profIndex = 0
4293 4300
4294 4301 if case_flag in (0, 1, -1):
4295 4302 self.putData(data=data, attr = self.attr)
4296 4303
4297 4304 return data_360, n, data_p, data_e, data_n
4298 4305
4299 4306 def byProfiles(self, dataOut):
4300 4307
4301 4308 self.__dataReady = False
4302 4309 data_360 = []
4303 4310 data_p = None
4304 4311 data_e = None
4305 4312 data_n = None
4306 4313
4307 4314 self.putData(data=dataOut, attr = self.attr)
4308 4315
4309 4316 if self.__profIndex > 5:
4310 4317 case_flag = self.checkcase()
4311 4318
4312 4319 if self.flagMode == 1: #'AZI':
4313 4320 if case_flag == 0: # completed a full rotation
4314 4321 self.__buffer.pop() #Erase last data
4315 4322 self.azi.pop()
4316 4323 self.ele.pop()
4317 4324 data_360 ,n,data_p,data_e,data_n = self.pushData(dataOut, case_flag)
4318 4325 if len(data_p)>350:
4319 4326 self.__dataReady = True
4320 4327 elif self.flagMode == 0: #'ELE'
4321 4328 if case_flag == 1: # descent
4322 4329 self.__buffer.pop() #Erase last data
4323 4330 self.azi.pop()
4324 4331 self.ele.pop()
4325 4332 data_360, n, data_p, data_e, data_n = self.pushData(dataOut, case_flag)
4326 4333 self.__dataReady = True
4327 4334 if case_flag == -1: # ascent
4328 4335 self.__buffer.pop() #Erase last data
4329 4336 self.azi.pop()
4330 4337 self.ele.pop()
4331 4338 data_360, n, data_p, data_e, data_n = self.pushData(dataOut, case_flag)
4332 4339 #self.__dataReady = True
4333 4340
4334 4341 return data_360, data_p, data_e, data_n
4335 4342
4336 4343
4337 4344 def blockOp(self, dataOut, datatime= None):
4338 4345 if self.__initime == None:
4339 4346 self.__initime = datatime
4340 4347 data_360, data_p, data_e, data_n = self.byProfiles(dataOut)
4341 4348 self.__lastdatatime = datatime
4342 4349
4343 4350 avgdatatime = self.__initime
4344 4351 if self.n==1:
4345 4352 avgdatatime = datatime
4346 4353
4347 4354 self.__initime = datatime
4348 4355 return data_360, avgdatatime, data_p, data_e, data_n
4349 4356
4350 4357 def checkcase(self):
4351 4358
4352 4359 sigma_ele = numpy.nanstd(self.ele[-5:])
4353 4360 sigma_azi = numpy.nanstd(self.azi[-5:])
4354 4361
4355 4362 if sigma_ele<.5 and sigma_azi<.5:
4356 4363 if sigma_ele<sigma_azi:
4357 4364 self.flagMode = 1
4358 4365 self.mode_op = 'PPI'
4359 4366 else:
4360 4367 self.flagMode = 0
4361 4368 self.mode_op = 'RHI'
4362 4369 elif sigma_ele < .5:
4363 4370 self.flagMode = 1
4364 4371 self.mode_op = 'PPI'
4365 4372 elif sigma_azi < .5:
4366 4373 self.flagMode = 0
4367 4374 self.mode_op = 'RHI'
4368 4375 else:
4369 4376 self.flagMode = None
4370 4377 self.mode_op = 'None'
4371 4378
4372 4379 if self.flagMode == 1: #'AZI'
4373 4380 start = self.azi[-2]
4374 4381 end = self.azi[-1]
4375 4382 diff_angle = (end-start)
4376 4383 if self.horario== True:
4377 4384 if diff_angle < 0: # wrapped around (completed a rotation)
4378 4385 return 0
4379 4386 else:
4380 4387 if diff_angle > 0: # wrapped around (completed a rotation)
4381 4388 return 0
4382 4389 elif self.flagMode == 0: #'ELE'
4383 4390
4384 4391 start = self.ele[-3]
4385 4392 middle = self.ele[-2]
4386 4393 end = self.ele[-1]
4387 4394
4388 4395 if end < self.bottom:
4389 4396 return 1
4390 4397 elif (middle>start and end<middle):
4391 4398 return -1
4392 4399
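# checkcase summary, for reference: the scan mode is inferred from the standard deviation of the
# last 5 pointing samples (near-constant elevation -> PPI / azimuth scan, near-constant azimuth
# -> RHI / elevation scan). It returns 0 when the azimuth wraps around (last increment opposite
# to the 'horario'/clockwise sense), 1 when the elevation drops below 'bottom' (end of a downward
# sweep), and -1 when the elevation starts descending after rising.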
4393 4400 def run(self, dataOut, attr_data='dataPP_POWER', runNextOp = False, angles=[],horario=True,heading=0,bottom=0,**kwargs):
4394 4401
4395 4402 dataOut.attr_data = attr_data
4396 4403 dataOut.runNextOp = runNextOp
4397 4404
4398 4405 if not self.isConfig:
4399 4406 self.setup(dataOut=dataOut, attr=attr_data, angles=angles,horario=horario, heading=heading,bottom=bottom,**kwargs)
4400 4407 self.isConfig = True
4401 4408
4402 4409 data_360, avgdatatime, data_p, data_e, data_n = self.blockOp(dataOut, dataOut.utctime)
4403 4410
4404 4411 dataOut.flagNoData = True
4405 4412 if self.__dataReady:
4406 4413 mean_az = numpy.mean(data_p[25:-25])
4407 4414 mean_el = numpy.mean(data_e[25:-25])
4408 4415 if round(mean_az,1) in angles or round(mean_el,1) in angles:
4409 4416 setattr(dataOut, attr_data, data_360 )
4410 4417 dataOut.data_azi = data_p+self.heading #dataOut.data_azi = data_p
4411 4418 dataOut.data_azi[dataOut.data_azi>360]=dataOut.data_azi[dataOut.data_azi>360]-360 #update new
4412 4419 dataOut.data_ele = data_e
4413 4420 dataOut.utctime = avgdatatime
4414 4421 dataOut.data_noise = data_n
4415 4422 dataOut.flagNoData = False
4416 4423 dataOut.flagMode = self.flagMode
4417 4424 dataOut.mode_op = self.mode_op
4418 4425 else:
4419 4426 log.warning('Skipping angle {} / {}'.format(round(mean_az,1), round(mean_el,1)))
4420 4427
4421 4428 return dataOut
4422 4429
4423 4430 class MergeProc(ProcessingUnit):
4424 4431
4425 4432 def __init__(self):
4426 4433 ProcessingUnit.__init__(self)
4427 4434
4428 4435 def run(self, attr_data, mode=0, index=0):
4429 4436
4430 4437 #exit(1)
4431 4438 self.dataOut = getattr(self, self.inputs[0])
4432 4439 data_inputs = [getattr(self, attr) for attr in self.inputs]
4433 4440 #print(data_inputs)
4434 4441 #print(numpy.shape([getattr(data, attr_data) for data in data_inputs][1]))
4435 4442 #exit(1)
4436 4443 if mode==0:
4437 4444 data = numpy.concatenate([getattr(data, attr_data) for data in data_inputs])
4438 4445 setattr(self.dataOut, attr_data, data)
4439 4446
4440 4447 if mode==1: #Hybrid
4441 4448 #data = numpy.concatenate([getattr(data, attr_data) for data in data_inputs],axis=1)
4442 4449 #setattr(self.dataOut, attr_data, data)
4443 4450 setattr(self.dataOut, 'dataLag_spc', [getattr(data, attr_data) for data in data_inputs][0])
4444 4451 setattr(self.dataOut, 'dataLag_spc_LP', [getattr(data, attr_data) for data in data_inputs][1])
4445 4452 setattr(self.dataOut, 'dataLag_cspc', [getattr(data, attr_data_2) for data in data_inputs][0])
4446 4453 setattr(self.dataOut, 'dataLag_cspc_LP', [getattr(data, attr_data_2) for data in data_inputs][1])
4447 4454 #setattr(self.dataOut, 'nIncohInt', [getattr(data, attr_data_3) for data in data_inputs][0])
4448 4455 #setattr(self.dataOut, 'nIncohInt_LP', [getattr(data, attr_data_3) for data in data_inputs][1])
4449 4456 '''
4450 4457 print(self.dataOut.dataLag_spc_LP.shape)
4451 4458 print(self.dataOut.dataLag_cspc_LP.shape)
4452 4459 exit(1)
4453 4460 '''
4454 4461
4455 4462 #self.dataOut.dataLag_spc_LP = numpy.transpose(self.dataOut.dataLag_spc_LP[0],(2,0,1))
4456 4463 #self.dataOut.dataLag_cspc_LP = numpy.transpose(self.dataOut.dataLag_cspc_LP,(3,1,2,0))
4457 4464 '''
4458 4465 print("Merge")
4459 4466 print(numpy.shape(self.dataOut.dataLag_spc))
4460 4467 print(numpy.shape(self.dataOut.dataLag_spc_LP))
4461 4468 print(numpy.shape(self.dataOut.dataLag_cspc))
4462 4469 print(numpy.shape(self.dataOut.dataLag_cspc_LP))
4463 4470 exit(1)
4464 4471 '''
4465 4472 #print(numpy.sum(self.dataOut.dataLag_spc_LP[2,:,164])/128)
4466 4473 #print(numpy.sum(self.dataOut.dataLag_cspc_LP[0,:,30,1])/128)
4467 4474 #exit(1)
4468 4475 #print(self.dataOut.NDP)
4469 4476 #print(self.dataOut.nNoiseProfiles)
4470 4477
4471 4478 #self.dataOut.nIncohInt_LP = 128
4472 4479 self.dataOut.nProfiles_LP = 128#self.dataOut.nIncohInt_LP
4473 4480 self.dataOut.nIncohInt_LP = self.dataOut.nIncohInt
4474 4481 self.dataOut.NLAG = 16
4475 4482 self.dataOut.NRANGE = 200
4476 4483 self.dataOut.NSCAN = 128
4477 4484 #print(numpy.shape(self.dataOut.data_spc))
4478 4485
4479 4486 if mode==2: #HAE 2022
4480 4487 data = numpy.sum([getattr(data, attr_data) for data in data_inputs],axis=0)
4481 4488 setattr(self.dataOut, attr_data, data)
4482 4489
4483 4490 self.dataOut.nIncohInt *= 2
4484 4491 self.dataOut.freqRange = self.dataOut.getFreqRange(1)/1000.
4485 4492
4486 4493 if mode==7: #RM
4487 4494
4488 4495 f = [getattr(data, attr_data) for data in data_inputs][0][:,:,:,0:index]
4489 4496 g = [getattr(data, attr_data) for data in data_inputs][1][:,:,:,index:]
4490 4497 data = numpy.concatenate((f,g), axis=3)
4491 4498 setattr(self.dataOut, attr_data, data)
4492 4499
4493 4500 # snr
4494 4501 # self.dataOut.data_snr = numpy.concatenate((data_inputs[0].data_snr, data_inputs[1].data_snr), axis=2)
4495 4502
4496 4503 # ranges
4497 4504 # dh = self.dataOut.heightList[1]-self.dataOut.heightList[0]
4498 4505 # heightList_2 = (self.dataOut.heightList[-1]+dh) + numpy.arange(g.shape[-1], dtype=numpy.float) * dh
4499 4506
4500 # self.dataOut.heightList = numpy.concatenate((self.dataOut.heightList,heightList_2)) No newline at end of file
4507 # self.dataOut.heightList = numpy.concatenate((self.dataOut.heightList,heightList_2))
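# MergeProc.run mode summary, for reference: mode 0 concatenates attr_data from all inputs along
# the first (channel) axis; mode 1 ('Hybrid') stores the per-input lag spectra/cross-spectra and
# sets the long-pulse bookkeeping attributes (nProfiles_LP, NLAG, NRANGE, NSCAN), although it
# also reads attr_data_2, which run() does not receive as a parameter; mode 2 sums attr_data
# across inputs and doubles nIncohInt; mode 7 splices the two inputs at 'index' along what looks
# like the height axis (axis 3).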