19 AUGUST 2021 RM
avaldez -
r1367:42e9a23049f5
@@ -0,0 +1,31
1 import os,sys
2 import datetime
3 import time
4 from schainpy.controller import Project
5 path='/DATA_RM/TEST_HDF5'
6 path_adq=path
7 path_ped='/DATA_RM/TEST_PEDESTAL/P2021200'
8 figpath = '/home/soporte/Pictures'
9 desc = "Simulator Test"
10
11 controllerObj = Project()
12 controllerObj.setup(id='10',name='Test Simulator',description=desc)
13 readUnitConfObj = controllerObj.addReadUnit(datatype='HDFReader',
14 path=path,
15 startDate="2021/01/01", #"2020/01/01",#today,
16 endDate= "2021/12/01", #"2020/12/30",#today,
17 startTime='00:00:00',
18 endTime='23:59:59',
19 t_Interval_p=0.01,
20 n_Muestras_p=100,
21 delay=5,
22 #set=0,
23 online=0,
24 walk=1)#1
25
26 procUnitConfObjA = controllerObj.addProcUnit(datatype='ParametersProc',inputId=readUnitConfObj.getId())
27
28
29 controllerObj.start()
30 #online 1 utc_adq 1617490240.48
31 #online 0 utc_adq 1617489815.4804
@@ -0,0 +1,92
1 import numpy
2 import sys
3 import zmq
4 import time
5 import h5py
6 import os
7
8 timetuple=time.localtime()
9 meta='P'
10 dir="%s%4.4d%3.3d"%(meta,timetuple.tm_year,timetuple.tm_yday)
11
12 path="/home/soporte/Downloads/PEDESTAL/"+dir
13
14 ext=".hdf5"
15
16 port ="5556"
17 if len(sys.argv)>1:
18 port = sys.argv[1]
19 int(port)
20
21 if len(sys.argv)>2:
22 port1 = sys.argv[2]
23 int(port1)
24
25 #Socket to talk to server
26 context = zmq.Context()
27 socket = context.socket(zmq.SUB)
28
29 print("Collecting updates from weather server...")
30 socket.connect("tcp://localhost:%s"%port)
31
32 if len(sys.argv)>2:
33 socket.connect("tcp://localhost:%s"%port1)
34
35 #Subscribe to zipcode, default is NYC,10001
36 topicfilter = "10001"
37 socket.setsockopt_string(zmq.SUBSCRIBE,topicfilter)
38 #Process 5 updates
39 total_value=0
40 count= -1
41 azi= []
42 elev=[]
43 time0=[]
44 #for update_nbr in range(250):
45 while(True):
46 string= socket.recv()
47 topic,ang_elev,ang_elev_dec,ang_azi,ang_azi_dec,seconds,seconds_dec= string.split()
48 ang_azi =float(ang_azi)+1e-3*float(ang_azi_dec)
49 ang_elev =float(ang_elev)+1e-3*float(ang_elev_dec)
50 seconds =float(seconds) +1e-6*float(seconds_dec)
51 azi.append(ang_azi)
52 elev.append(ang_elev)
53 time0.append(seconds)
54 count +=1
55 if count == 100:
56 timetuple=time.localtime()
57 epoc = time.mktime(timetuple)
58 #print(epoc)
59 fullpath = path + ("/" if path[-1]!="/" else "")
60
61 if not os.path.exists(fullpath):
62 os.mkdir(fullpath)
63
64 azi_array = numpy.array(azi)
65 elev_array = numpy.array(elev)
66 time0_array= numpy.array(time0)
67 pedestal_array=numpy.array([azi,elev,time0])
68 count=0
69 azi= []
70 elev=[]
71 time0=[]
72 #print(pedestal_array[0])
73 #print(pedestal_array[1])
74
75 meta='PE'
76 filex="%s%4.4d%3.3d%10.4d%s"%(meta,timetuple.tm_year,timetuple.tm_yday,epoc,ext)
77 filename = os.path.join(fullpath,filex)
78 fp = h5py.File(filename,'w')
79 #print("Escribiendo HDF5...",epoc)
80 #·················· Data ··················
81 grp = fp.create_group("Data")
82 dset = grp.create_dataset("azimuth" , data=pedestal_array[0])
83 dset = grp.create_dataset("elevacion", data=pedestal_array[1])
84 dset = grp.create_dataset("utc" , data=pedestal_array[2])
85 #·················· Metadata ··················
86 grp = fp.create_group("Metadata")
87 dset = grp.create_dataset("utctimeInit", data=pedestal_array[2][0])
88 timeInterval = pedestal_array[2][1]-pedestal_array[2][0]
89 dset = grp.create_dataset("timeInterval", data=timeInterval)
90 fp.close()
91
92 #print ("Average messagedata value for topic '%s' was %dF" % ( topicfilter,total_value / update_nbr))
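For reference, a minimal read-back sketch for the pedestal files written above (the Data/Metadata layout matches the datasets created in this script; the file name below is a hypothetical example of the PE<year><doy><epoch>.hdf5 pattern built here):

import h5py

# Hypothetical example file; real names follow the pattern built above.
fname = "/home/soporte/Downloads/PEDESTAL/P2021231/PE20212311629400000.hdf5"
with h5py.File(fname, "r") as fp:
    azimuth   = fp["Data/azimuth"][:]       # azimuth samples for one block (degrees)
    elevation = fp["Data/elevacion"][:]     # elevation samples (degrees)
    utc       = fp["Data/utc"][:]           # timestamps (seconds)
    t0 = fp["Metadata/utctimeInit"][()]     # first timestamp of the block
    dt = fp["Metadata/timeInterval"][()]    # spacing between the first two samples
print(azimuth.shape, t0, dt)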
@@ -0,0 +1,48
1 ###########################################################################
2 ############################### SERVER ###################################
3 ######################### PEDESTAL SIMULATOR ##############################
4 ###########################################################################
5 import time
6 import math
7 import numpy
8 import struct
9 from time import sleep
10 import zmq
11 import pickle
12 port="5556"
13 context = zmq.Context()
14 socket = context.socket(zmq.PUB)
15 socket.bind("tcp://*:%s"%port)
16 ###### INPUT PARAMETERS ####################################
17 print("PEDESTAL RESOLUTION 0.01")
18 print("MAXIMUM PEDESTAL SPEED")
19 ang_elev = 4.12
20 ang_azi = 30
21 velocidad= input ("Enter speed:")
22 velocidad= float(velocidad)
23 print (velocidad)
24 ############################################################
25 sleep(3)
26 print("Start program")
27 t1 = time.time()
28 count=0
29 while(True):
30 tmp_vuelta = int(360/velocidad)
31 t1=t1+tmp_vuelta*count
32 count= count+1
33 muestras_seg = 100
34 t2 = time.time()
35 for i in range(tmp_vuelta):
36 for j in range(muestras_seg):
37 tmp_variable = (i+j/100.0)
38 ang_azi = (tmp_variable)*float(velocidad)
39 seconds = t1+ tmp_variable
40 topic=10001
41 print ("AzimΒ°: ","%.4f"%ang_azi,"Time:" ,"%.5f"%seconds)
42 seconds_dec=(seconds-int(seconds))*1e6
43 ang_azi_dec= (ang_azi-int(ang_azi))*1e3
44 ang_elev_dec=(ang_elev-int(ang_elev))*1e3
45 sleep(0.0088)
46 socket.send_string("%d %d %d %d %d %d %d"%(topic,ang_elev,ang_elev_dec,ang_azi,ang_azi_dec,seconds,seconds_dec))
47 t3 = time.time()
48 print ("Total time for 1 vuelta in Seconds",t3-t2)
@@ -0,0 +1,275
1 #!python
2 '''
3 '''
4
5 import os, sys
6 import datetime
7 import time
8
9 #path = os.path.dirname(os.getcwd())
10 #path = os.path.dirname(path)
11 #sys.path.insert(0, path)
12
13 from schainpy.controller import Project
14
15 desc = "USRP_test"
16 filename = "USRP_processing.xml"
17 controllerObj = Project()
18 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
19
20 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
21
22 #######################################################################
23 ###### READ, WRITE, PLOT AND WEB-UPLOAD PATHS ########################
24 #######################################################################
25 #path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
26 #path = '/DATA_RM/TEST_INTEGRACION'
27 #path = '/DATA_RM/PRUEBA_USRP_RP'
28 path = '/DATA_RM/PRUEBA_USRP_RP'
29
30 figpath = '/home/soporte/Pictures/TEST_RP_0001'
31 figpath = '/home/soporte/Pictures/TEST_RP_6000'
32 figpath = '/home/soporte/Pictures/USRP'
33 #remotefolder = "/home/wmaster/graficos"
34 #######################################################################
35 ################# PLOT RANGE ##########################################
36 #######################################################################
37 dBmin = '-5'
38 dBmax = '20'
39 xmin = '0'
40 xmax ='24'
41 ymin = '0'
42 ymax = '600'
43 #######################################################################
44 ######################## DATE #########################################
45 #######################################################################
46 str = datetime.date.today()
47 today = str.strftime("%Y/%m/%d")
48 str2 = str - datetime.timedelta(days=1)
49 yesterday = str2.strftime("%Y/%m/%d")
50 #######################################################################
51 ######################## READ UNIT ####################################
52 #######################################################################
53 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
54 path=path,
55 startDate="2021/07/02",#today,
56 endDate="2021/07/02",#today,
57 startTime='14:50:00',# free start time
58 #startTime='00:00:00',
59 endTime='14:55:59',
60 delay=0,
61 #set=0,
62 online=0,
63 walk=1,
64 ippKm = 6000)
65
66 opObj11 = readUnitConfObj.addOperation(name='printInfo')
67 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
68 #######################################################################
69 ################ TIME-DOMAIN OPERATIONS ###############################
70 #######################################################################
71
72 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
73
74 opObj11 = procUnitConfObjA.addOperation(name='selectHeights')
75 opObj11.addParameter(name='minIndex', value='1', format='int')
76 # opObj11.addParameter(name='maxIndex', value='10000', format='int')
77 opObj11.addParameter(name='maxIndex', value='39980', format='int')
78
79 #
80 # codigo64='1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,'+\
81 # '1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1'
82
83 #opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
84 #opObj11.addParameter(name='frequency', value='49920000')
85
86 '''
87 opObj11 = procUnitConfObjA.addOperation(name='PulsePair', optype='other')
88 opObj11.addParameter(name='n', value='625', format='int')#10
89 opObj11.addParameter(name='removeDC', value=1, format='int')
90 '''
91
92 # TEST plots
93 '''
94 opObj11 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='other')
95 opObj11 = procUnitConfObjA.addOperation(name='PulsepairSignalPlot', optype='other')
96 opObj11 = procUnitConfObjA.addOperation(name='PulsepairVelocityPlot', optype='other')
97 #opObj11.addParameter(name='xmax', value=8)
98 opObj11 = procUnitConfObjA.addOperation(name='PulsepairSpecwidthPlot', optype='other')
99 '''
100 # NOTE: SCOPE
101 #opObj10 = procUnitConfObjA.addOperation(name='ScopePlot', optype='external')
102 #opObj10.addParameter(name='id', value='10', format='int')
103 ##opObj10.addParameter(name='xmin', value='0', format='int')
104 ##opObj10.addParameter(name='xmax', value='50', format='int')
105 #opObj10.addParameter(name='type', value='iq')
106 ##opObj10.addParameter(name='ymin', value='-5000', format='int')
107 ##opObj10.addParameter(name='ymax', value='8500', format='int')
108 #opObj11.addParameter(name='save', value=figpath, format='str')
109 #opObj11.addParameter(name='save_period', value=10, format='int')
110
111 #opObj10 = procUnitConfObjA.addOperation(name='setH0')
112 #opObj10.addParameter(name='h0', value='-5000', format='float')
113
114 #opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
115 #opObj11.addParameter(name='window', value='1', format='int')
116
117 #codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
118 #opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
119 #opObj11.addParameter(name='code', value=codigo, format='floatlist')
120 #opObj11.addParameter(name='nCode', value='1', format='int')
121 #opObj11.addParameter(name='nBaud', value='28', format='int')
122
123 #opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
124 #opObj11.addParameter(name='n', value='100', format='int')
125
126 #######################################################################
127 ########## ParametersProc OPERATIONS ##################################
128 #######################################################################
129 ###procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
130 '''
131
132 opObj11 = procUnitConfObjA.addOperation(name='PedestalInformation')
133 opObj11.addParameter(name='path_ped', value=path_ped)
134 opObj11.addParameter(name='path_adq', value=path_adq)
135 opObj11.addParameter(name='t_Interval_p', value='0.01', format='float')
136 opObj11.addParameter(name='n_Muestras_p', value='100', format='float')
137 opObj11.addParameter(name='blocksPerfile', value='100', format='int')
138 opObj11.addParameter(name='f_a_p', value='25', format='int')
139 opObj11.addParameter(name='online', value='0', format='int')
140
141 opObj11 = procUnitConfObjA.addOperation(name='Block360')
142 opObj11.addParameter(name='n', value='40', format='int')
143
144 opObj11= procUnitConfObjA.addOperation(name='WeatherPlot',optype='other')
145 opObj11.addParameter(name='save', value=figpath)
146 opObj11.addParameter(name='save_period', value=1)
147
148
149 '''
150
151 #######################################################################
152 ########## FREQUENCY-DOMAIN OPERATIONS ################################
153 #######################################################################
154
155 #procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
156 #procUnitConfObjB.addParameter(name='nFFTPoints', value='32', format='int')
157 #procUnitConfObjB.addParameter(name='nProfiles', value='32', format='int')
158
159 procUnitConfObjC = controllerObj.addProcUnit(datatype='SpectraHeisProc', inputId=procUnitConfObjA.getId())
160 #procUnitConfObjB.addParameter(name='nFFTPoints', value='64', format='int')
161 #procUnitConfObjB.addParameter(name='nProfiles', value='64', format='int')
162 opObj11 = procUnitConfObjC.addOperation(name='IncohInt4SpectraHeis', optype='other')
163 #opObj11.addParameter(name='timeInterval', value='4', format='int')
164 opObj11.addParameter(name='n', value='100', format='int')
165
166 #procUnitConfObjB.addParameter(name='pairsList', value='(0,0),(1,1),(0,1)', format='pairsList')
167
168 #opObj13 = procUnitConfObjB.addOperation(name='removeDC')
169 #opObj13.addParameter(name='mode', value='2', format='int')
170
171 #opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
172 #opObj11.addParameter(name='n', value='8', format='float')
173 #######################################################################
174 ########## FREQUENCY-DOMAIN PLOTS #####################################
175 #######################################################################
176 #----
177
178 opObj11 = procUnitConfObjC.addOperation(name='SpectraHeisPlot')
179 opObj11.addParameter(name='id', value='10', format='int')
180 opObj11.addParameter(name='wintitle', value='Spectra_Alturas', format='str')
181 #opObj11.addParameter(name='xmin', value=-100000, format='float')
182 #opObj11.addParameter(name='xmax', value=100000, format='float')
183 opObj11.addParameter(name='oneFigure', value=False,format='bool')
184 #opObj11.addParameter(name='zmin', value=-10, format='int')
185 #opObj11.addParameter(name='zmax', value=40, format='int')
186 opObj11.addParameter(name='ymin', value=10, format='int')
187 opObj11.addParameter(name='ymax', value=55, format='int')
188 opObj11.addParameter(name='grid', value=True, format='bool')
189 #opObj11.addParameter(name='showprofile', value='1', format='int')
190 opObj11.addParameter(name='save', value=figpath, format='str')
191 #opObj11.addParameter(name='save_period', value=10, format='int')
192
193 '''
194 opObj11 = procUnitConfObjC.addOperation(name='RTIHeisPlot')
195 opObj11.addParameter(name='id', value='10', format='int')
196 opObj11.addParameter(name='wintitle', value='RTI_Alturas', format='str')
197 opObj11.addParameter(name='xmin', value=11.0, format='float')
198 opObj11.addParameter(name='xmax', value=18.0, format='float')
199 opObj11.addParameter(name='zmin', value=10, format='int')
200 opObj11.addParameter(name='zmax', value=30, format='int')
201 opObj11.addParameter(name='ymin', value=5, format='int')
202 opObj11.addParameter(name='ymax', value=28, format='int')
203 opObj11.addParameter(name='showprofile', value='1', format='int')
204 opObj11.addParameter(name='save', value=figpath, format='str')
205 opObj11.addParameter(name='save_period', value=10, format='int')
206 '''
207 '''
208 #SpectraPlot
209
210 opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot', optype='external')
211 opObj11.addParameter(name='id', value='1', format='int')
212 opObj11.addParameter(name='wintitle', value='Spectra', format='str')
213 #opObj11.addParameter(name='xmin', value=-0.01, format='float')
214 #opObj11.addParameter(name='xmax', value=0.01, format='float')
215 opObj11.addParameter(name='zmin', value=dBmin, format='int')
216 opObj11.addParameter(name='zmax', value=dBmax, format='int')
217 #opObj11.addParameter(name='ymin', value=ymin, format='int')
218 #opObj11.addParameter(name='ymax', value=ymax, format='int')
219 opObj11.addParameter(name='showprofile', value='1', format='int')
220 opObj11.addParameter(name='save', value=figpath, format='str')
221 opObj11.addParameter(name='save_period', value=10, format='int')
222
223 #RTIPLOT
224
225 opObj11 = procUnitConfObjB.addOperation(name='RTIPlot', optype='external')
226 opObj11.addParameter(name='id', value='2', format='int')
227 opObj11.addParameter(name='wintitle', value='RTIPlot', format='str')
228 opObj11.addParameter(name='zmin', value=dBmin, format='int')
229 opObj11.addParameter(name='zmax', value=dBmax, format='int')
230 #opObj11.addParameter(name='ymin', value=ymin, format='int')
231 #opObj11.addParameter(name='ymax', value=ymax, format='int')
232 #opObj11.addParameter(name='xmin', value=15, format='int')
233 #opObj11.addParameter(name='xmax', value=16, format='int')
234
235 opObj11.addParameter(name='showprofile', value='1', format='int')
236 opObj11.addParameter(name='save', value=figpath, format='str')
237 opObj11.addParameter(name='save_period', value=10, format='int')
238
239 '''
240 # opObj11 = procUnitConfObjB.addOperation(name='CrossSpectraPlot', optype='other')
241 # opObj11.addParameter(name='id', value='3', format='int')
242 # opObj11.addParameter(name='wintitle', value='CrossSpectraPlot', format='str')
243 # opObj11.addParameter(name='ymin', value=ymin, format='int')
244 # opObj11.addParameter(name='ymax', value=ymax, format='int')
245 # opObj11.addParameter(name='phase_cmap', value='jet', format='str')
246 # opObj11.addParameter(name='zmin', value=dBmin, format='int')
247 # opObj11.addParameter(name='zmax', value=dBmax, format='int')
248 # opObj11.addParameter(name='figpath', value=figures_path, format='str')
249 # opObj11.addParameter(name='save', value=0, format='bool')
250 # opObj11.addParameter(name='pairsList', value='(0,1)', format='pairsList')
251 # #
252 # opObj11 = procUnitConfObjB.addOperation(name='CoherenceMap', optype='other')
253 # opObj11.addParameter(name='id', value='4', format='int')
254 # opObj11.addParameter(name='wintitle', value='Coherence', format='str')
255 # opObj11.addParameter(name='phase_cmap', value='jet', format='str')
256 # opObj11.addParameter(name='xmin', value=xmin, format='float')
257 # opObj11.addParameter(name='xmax', value=xmax, format='float')
258 # opObj11.addParameter(name='figpath', value=figures_path, format='str')
259 # opObj11.addParameter(name='save', value=0, format='bool')
260 # opObj11.addParameter(name='pairsList', value='(0,1)', format='pairsList')
261 #
262
263 '''
264 #######################################################################
265 ############### WRITE UNIT ############################################
266 #######################################################################
267 #opObj11 = procUnitConfObjB.addOperation(name='SpectraWriter', optype='other')
268 #opObj11.addParameter(name='path', value=wr_path)
269 #opObj11.addParameter(name='blocksPerFile', value='50', format='int')
270 print ("Escribiendo el archivo XML")
271 print ("Leyendo el archivo XML")
272 '''
273
274
275 controllerObj.start()
@@ -0,0 +1,126
1 #!python
2 '''
3 '''
4
5 import os, sys
6 import datetime
7 import time
8
9 #path = os.path.dirname(os.getcwd())
10 #path = os.path.dirname(path)
11 #sys.path.insert(0, path)
12
13 from schainpy.controller import Project
14
15 desc = "USRP_test"
16 filename = "USRP_processing.xml"
17 controllerObj = Project()
18 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
19
20 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
21
22 #######################################################################
23 ###### READ, WRITE, PLOT AND WEB-UPLOAD PATHS ########################
24 #######################################################################
25 #path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
26 #path = '/DATA_RM/TEST_INTEGRACION'
27 path = '/DATA_RM/TEST_ONLINE'
28 path_pp = '/DATA_RM/TEST_HDF5'
29
30 figpath = '/home/soporte/Pictures/TEST_INTEGRACION_IMG'
31 #remotefolder = "/home/wmaster/graficos"
32 #######################################################################
33 ################# PLOT RANGE ##########################################
34 #######################################################################
35 dBmin = '-5'
36 dBmax = '20'
37 xmin = '0'
38 xmax ='24'
39 ymin = '0'
40 ymax = '600'
41 #######################################################################
42 ######################## DATE #########################################
43 #######################################################################
44 str = datetime.date.today()
45 today = str.strftime("%Y/%m/%d")
46 str2 = str - datetime.timedelta(days=1)
47 yesterday = str2.strftime("%Y/%m/%d")
48 #######################################################################
49 ######################## READ UNIT ####################################
50 #######################################################################
51 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
52 path=path,
53 startDate="2021/01/01",#today,
54 endDate="2021/12/30",#today,
55 startTime='00:00:00',
56 endTime='23:59:59',
57 delay=0,
58 #set=0,
59 online=1,
60 walk=1,
61 ippKm = 60)
62
63 opObj11 = readUnitConfObj.addOperation(name='printInfo')
64 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
65 #######################################################################
66 ################ TIME-DOMAIN OPERATIONS ###############################
67 #######################################################################
68
69 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
70
71 #
72 # codigo64='1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,'+\
73 # '1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1'
74
75 #opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
76 #opObj11.addParameter(name='frequency', value='70312500')
77 opObj11 = procUnitConfObjA.addOperation(name='PulsePair', optype='other')
78 opObj11.addParameter(name='n', value='625', format='int')#10
79 opObj11.addParameter(name='removeDC', value=1, format='int')
80 # TEST plots
81 '''
82 opObj11 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='other')
83 opObj11 = procUnitConfObjA.addOperation(name='PulsepairSignalPlot', optype='other')
84 opObj11 = procUnitConfObjA.addOperation(name='PulsepairVelocityPlot', optype='other')
85 #opObj11.addParameter(name='xmax', value=8)
86 opObj11 = procUnitConfObjA.addOperation(name='PulsepairSpecwidthPlot', optype='other')
87 '''
88 # NOTE: SCOPE
89 #opObj10 = procUnitConfObjA.addOperation(name='ScopePlot', optype='external')
90 #opObj10.addParameter(name='id', value='10', format='int')
91 ##opObj10.addParameter(name='xmin', value='0', format='int')
92 ##opObj10.addParameter(name='xmax', value='50', format='int')
93 #opObj10.addParameter(name='type', value='iq')
94 ##opObj10.addParameter(name='ymin', value='-5000', format='int')
95 ##opObj10.addParameter(name='ymax', value='8500', format='int')
96 #opObj11.addParameter(name='save', value=figpath, format='str')
97 #opObj11.addParameter(name='save_period', value=10, format='int')
98
99 #opObj10 = procUnitConfObjA.addOperation(name='setH0')
100 #opObj10.addParameter(name='h0', value='-5000', format='float')
101
102 #opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
103 #opObj11.addParameter(name='window', value='1', format='int')
104
105 #codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
106 #opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
107 #opObj11.addParameter(name='code', value=codigo, format='floatlist')
108 #opObj11.addParameter(name='nCode', value='1', format='int')
109 #opObj11.addParameter(name='nBaud', value='28', format='int')
110
111 #opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
112 #opObj11.addParameter(name='n', value='100', format='int')
113
114 #######################################################################
115 ########## ParametersProc OPERATIONS ##################################
116 #######################################################################
117
118 procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
119 opObj10 = procUnitConfObjB.addOperation(name='HDFWriter')
120 opObj10.addParameter(name='path',value=path_pp)
121 #opObj10.addParameter(name='mode',value=0)
122 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
123 opObj10.addParameter(name='metadataList',value='utctimeInit,timeZone,paramInterval,profileIndex,channelList,heightList,flagDataAsBlock',format='list')
124 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,utctime',format='list')#,format='list'
125
126 controllerObj.start()
@@ -0,0 +1,126
1 #!python
2 '''
3 '''
4
5 import os, sys
6 import datetime
7 import time
8
9 #path = os.path.dirname(os.getcwd())
10 #path = os.path.dirname(path)
11 #sys.path.insert(0, path)
12
13 from schainpy.controller import Project
14
15 desc = "USRP_test"
16 filename = "USRP_processing.xml"
17 controllerObj = Project()
18 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
19
20 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
21
22 #######################################################################
23 ###### READ, WRITE, PLOT AND WEB-UPLOAD PATHS ########################
24 #######################################################################
25 #path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
26 #path = '/DATA_RM/TEST_INTEGRACION'
27 path = '/DATA_RM/TEST_ONLINE'
28 path_pp = '/DATA_RM/TEST_HDF5'
29
30 figpath = '/home/soporte/Pictures/TEST_INTEGRACION_IMG'
31 #remotefolder = "/home/wmaster/graficos"
32 #######################################################################
33 ################# PLOT RANGE ##########################################
34 #######################################################################
35 dBmin = '-5'
36 dBmax = '20'
37 xmin = '0'
38 xmax ='24'
39 ymin = '0'
40 ymax = '600'
41 #######################################################################
42 ######################## DATE #########################################
43 #######################################################################
44 str = datetime.date.today()
45 today = str.strftime("%Y/%m/%d")
46 str2 = str - datetime.timedelta(days=1)
47 yesterday = str2.strftime("%Y/%m/%d")
48 #######################################################################
49 ######################## READ UNIT ####################################
50 #######################################################################
51 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
52 path=path,
53 startDate="2021/01/01",#today,
54 endDate="2021/12/30",#today,
55 startTime='00:00:00',
56 endTime='23:59:59',
57 delay=0,
58 #set=0,
59 online=1,
60 walk=1,
61 ippKm = 60)
62
63 opObj11 = readUnitConfObj.addOperation(name='printInfo')
64 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
65 #######################################################################
66 ################ TIME-DOMAIN OPERATIONS ###############################
67 #######################################################################
68
69 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
70
71 #
72 # codigo64='1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,'+\
73 # '1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1'
74
75 #opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
76 #opObj11.addParameter(name='frequency', value='70312500')
77 opObj11 = procUnitConfObjA.addOperation(name='PulsePair', optype='other')
78 opObj11.addParameter(name='n', value='625', format='int')#10
79 opObj11.addParameter(name='removeDC', value=1, format='int')
80 # TEST plots
81 '''
82 opObj11 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='other')
83 opObj11 = procUnitConfObjA.addOperation(name='PulsepairSignalPlot', optype='other')
84 opObj11 = procUnitConfObjA.addOperation(name='PulsepairVelocityPlot', optype='other')
85 #opObj11.addParameter(name='xmax', value=8)
86 opObj11 = procUnitConfObjA.addOperation(name='PulsepairSpecwidthPlot', optype='other')
87 '''
88 # NOTE: SCOPE
89 #opObj10 = procUnitConfObjA.addOperation(name='ScopePlot', optype='external')
90 #opObj10.addParameter(name='id', value='10', format='int')
91 ##opObj10.addParameter(name='xmin', value='0', format='int')
92 ##opObj10.addParameter(name='xmax', value='50', format='int')
93 #opObj10.addParameter(name='type', value='iq')
94 ##opObj10.addParameter(name='ymin', value='-5000', format='int')
95 ##opObj10.addParameter(name='ymax', value='8500', format='int')
96 #opObj11.addParameter(name='save', value=figpath, format='str')
97 #opObj11.addParameter(name='save_period', value=10, format='int')
98
99 #opObj10 = procUnitConfObjA.addOperation(name='setH0')
100 #opObj10.addParameter(name='h0', value='-5000', format='float')
101
102 #opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
103 #opObj11.addParameter(name='window', value='1', format='int')
104
105 #codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
106 #opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
107 #opObj11.addParameter(name='code', value=codigo, format='floatlist')
108 #opObj11.addParameter(name='nCode', value='1', format='int')
109 #opObj11.addParameter(name='nBaud', value='28', format='int')
110
111 #opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
112 #opObj11.addParameter(name='n', value='100', format='int')
113
114 #######################################################################
115 ########## ParametersProc OPERATIONS ##################################
116 #######################################################################
117
118 procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
119 opObj10 = procUnitConfObjB.addOperation(name='HDFWriter')
120 opObj10.addParameter(name='path',value=path_pp)
121 #opObj10.addParameter(name='mode',value=0)
122 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
123 opObj10.addParameter(name='metadataList',value='utctimeInit,timeZone,paramInterval,profileIndex,channelList,heightList,flagDataAsBlock',format='list')
124 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,utctime',format='list')#,format='list'
125
126 controllerObj.start()
@@ -0,0 +1,52
1 import os,sys
2 import datetime
3 import time
4 from schainpy.controller import Project
5 #path='/DATA_RM/TEST_HDF5/d2021200'
6 #path='/DATA_RM/TEST_HDF5/d2021200'
7 path='/DATA_RM/TEST_HDF5/d2021203'
8
9 path_adq=path
10 #path_ped='/DATA_RM/TEST_PEDESTAL/P2021200'
11 path_ped='/DATA_RM/TEST_PEDESTAL/P2021203'
12
13 figpath = '/home/soporte/Pictures'
14 desc = "Simulator Test"
15
16 controllerObj = Project()
17 controllerObj.setup(id='10',name='Test Simulator',description=desc)
18 readUnitConfObj = controllerObj.addReadUnit(datatype='HDFReader',
19 path=path,
20 startDate="2021/01/01", #"2020/01/01",#today,
21 endDate= "2021/12/01", #"2020/12/30",#today,
22 startTime='00:00:00',
23 endTime='23:59:59',
24 t_Interval_p=0.01,
25 n_Muestras_p=100,
26 delay=5,
27 #set=0,
28 online=0,
29 walk=0)#1
30
31 procUnitConfObjA = controllerObj.addProcUnit(datatype='ParametersProc',inputId=readUnitConfObj.getId())
32
33 opObj11 = procUnitConfObjA.addOperation(name='PedestalInformation')
34 opObj11.addParameter(name='path_ped', value=path_ped)
35 opObj11.addParameter(name='path_adq', value=path_adq)
36 opObj11.addParameter(name='t_Interval_p', value='0.01', format='float')
37 opObj11.addParameter(name='n_Muestras_p', value='100', format='float')
38 opObj11.addParameter(name='blocksPerfile', value='100', format='int')
39 opObj11.addParameter(name='f_a_p', value='25', format='int')
40 opObj11.addParameter(name='online', value='0', format='int')
41
42
43 opObj11 = procUnitConfObjA.addOperation(name='Block360')
44 opObj11.addParameter(name='n', value='40', format='int')
45
46 opObj11= procUnitConfObjA.addOperation(name='WeatherPlot',optype='other')
47 opObj11.addParameter(name='save', value=figpath)
48 opObj11.addParameter(name='save_period', value=1)
49
50 controllerObj.start()
51 #online 1 utc_adq 1617490240.48
52 #online 0 utc_adq 1617489815.4804
@@ -0,0 +1,59
1 import os,sys
2 import datetime
3 import time
4 from schainpy.controller import Project
5 #path='/DATA_RM/TEST_HDF5/d2021200'
6 #path='/DATA_RM/TEST_HDF5/d2021200'
7 #path='/DATA_RM/TEST_HDF5/d2021214'
8 #path='/DATA_RM/TEST_HDF5/d2021229'
9
10 path='/DATA_RM/TEST_HDF5/d2021231'
11
12
13 path_adq=path
14 #path_ped='/DATA_RM/TEST_PEDESTAL/P2021200'
15 #path_ped='/DATA_RM/TEST_PEDESTAL/P2021214'
16 #path_ped='/DATA_RM/TEST_PEDESTAL/P2021230'
17 path_ped='/DATA_RM/TEST_PEDESTAL/P20210819'
18 figpath = '/home/soporte/Pictures'
19 desc = "Simulator Test"
20
21 controllerObj = Project()
22 controllerObj.setup(id='10',name='Test Simulator',description=desc)
23 readUnitConfObj = controllerObj.addReadUnit(datatype='HDFReader',
24 path=path,
25 startDate="2021/01/01", #"2020/01/01",#today,
26 endDate= "2021/12/01", #"2020/12/30",#today,
27 startTime='00:00:00',
28 endTime='23:59:59',
29 t_Interval_p=0.01,
30 n_Muestras_p=100,
31 delay=30,
32 #set=0,
33 online=1,
34 walk=0,
35 nTries=6)#1
36
37 procUnitConfObjA = controllerObj.addProcUnit(datatype='ParametersProc',inputId=readUnitConfObj.getId())
38
39 opObj11 = procUnitConfObjA.addOperation(name='PedestalInformation')
40 opObj11.addParameter(name='path_ped', value=path_ped)
41 opObj11.addParameter(name='path_adq', value=path_adq)
42 opObj11.addParameter(name='t_Interval_p', value='0.01', format='float')
43 opObj11.addParameter(name='n_Muestras_p', value='100', format='float')
44 opObj11.addParameter(name='blocksPerfile', value='100', format='int')
45 opObj11.addParameter(name='f_a_p', value='25', format='int')
46 opObj11.addParameter(name='online', value='1', format='int')# enable this here as well
47
48
49 opObj11 = procUnitConfObjA.addOperation(name='Block360')
50 opObj11.addParameter(name='n', value='40', format='int')
51 # this block works well with divisors of 360, don't forget 0 10 20 30 40 60 90 120 180 (see the quick check after this file)
52
53 opObj11= procUnitConfObjA.addOperation(name='WeatherPlot',optype='other')
54 opObj11.addParameter(name='save', value=figpath)
55 opObj11.addParameter(name='save_period', value=1)
56
57 controllerObj.start()
58 #online 1 utc_adq 1617490240.48
59 #online 0 utc_adq 1617489815.4804
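As the comment above notes, Block360 is used with a block size n that divides 360 evenly; a quick sanity-check sketch (n=40 is the value configured above):

# Sanity check: the Block360 block size should divide 360 evenly.
n = 40
assert 360 % n == 0, "n=%d is not a divisor of 360" % n
print(sorted(d for d in range(10, 181) if 360 % d == 0))  # 10, 12, 15, 18, 20, ..., 180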
@@ -0,0 +1,35
1 #*******************************************************************************
2 #*****************CONFIGURATION FILE - WEATHER RADAR***************************
3 #*******************************************************************************
4 # 1-Angular_resolution(degrees °)-F
5 1.0
6 # 2-Pedestal_speed Azimuth(°/s)-F
7 4.0
8 # 3-Pedestal_position Azimuth(°)-F
9 0.0
10 # 4-Pedestal_position Elevation(°)-F
11 30.0
12 # 5-IPP(usec)-F
13 400
14 # n-PulsePair-nFFTPoints-R
15 625
16 # 6-Samples_per_pedestal_file-F
17 100
18 # 7-Time_per_pedestal_sample-F
19 0.01
20 # Time_per_pedestal_file-R
21 1.0
22 # 8-Blocks_per_acquisition_file-F
23 100.0
24 # time_per_acquisition_file-R
25 25.0
26 # mode Time Domain(T , 1) or Frequency Domain(F , 0)
27 1
28 # path_p
29 /home/developer/Downloads/Pedestal/P2021093
30 # path_a
31 /home/developer/Downloads/HDF5_TESTPP2V3/d2021093
32 # online
33 0
34 # Final directory
35 /home/developer/Downloads/HDF5_WR/
@@ -0,0 +1,70
1 print("LECTURA DE ARCHIVOS DE CONFIGURACION")
2 class ReadfileWR():
3 def __init__(self,filename):
4 f = open(filename, "r")
5 i=0
6 self.dict={'path_ped':None,'path_adq':None,'path_res':None,'resolution':None,'vel_ped_azi':None,'pos_ped_azi':None,'pos_ped_ele':None,'ipp':None,'n':None,'len_ped':None,\
7 't_s_ped':None,'t_f_ped':None,'b_f_adq':None,'t_f_adq':None,'mode':None,'online':None}
8 while(True):
9 ##print(i)
10 linea = f.readline()
11 if i==4:
12 resolution=float(linea)
13 self.dict['resolution']=resolution
14 if i==6:
15 vel_pedestal_a=float(linea)
16 self.dict['vel_ped_azi']=vel_pedestal_a
17 if i==8:
18 pos_pedestal_a=float(linea)
19 self.dict['pos_ped_azi']=pos_pedestal_a
20 if i==10:
21 pos_pedestal_e=float(linea)
22 self.dict['pos_ped_ele']=pos_pedestal_e
23 if i==12:
24 ipp = float(linea)
25 self.dict['ipp']= round(ipp,5)
26 if i==14:
27 n = float(linea)
28 self.dict['n']= n
29 if i==16:
30 len_pedestal= float(linea)
31 self.dict['len_ped']= len_pedestal
32 if i==18:
33 time_x_sample_ped=float(linea)
34 self.dict['t_s_ped']= time_x_sample_ped
35 if i==20:
36 time_x_file_ped = float(linea)
37 self.dict['t_f_ped']= time_x_file_ped
38 if i==22:
39 bloques_x_file_adq= float(linea)
40 self.dict['b_f_adq']=bloques_x_file_adq
41 if i==24:
42 time_x_file_adq = float(linea)
43 self.dict['t_f_adq'] = time_x_file_adq
44 if i==26:
45 mode= int(linea)
46 self.dict['mode'] = mode
47 if i==28:
48 path_p= str(linea)
49 self.dict['path_ped'] = path_p
50 if i==30:
51 path_a= str(linea)
52 self.dict['path_adq'] = path_a
53 if i==32:
54 online= int(linea)
55 self.dict['online'] = online
56 if i==34:
57 path_r= str(linea)
58 self.dict['path_res'] = path_r
59 #print(linea)
60 if not linea:
61 break
62 i+=1
63 f.close()
64 def getDict(self):
65 return self.dict
66
67
68 #filename= "/home/developer/Downloads/config_WR.txt"
69 #dict= ReadfileWR(filename).getDict()
70 #print(dict)
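A minimal usage sketch for the reader above (it assumes the config_WR.txt layout shown earlier; the path is the example from the commented-out test at the end of this file):

from readFileconfig import ReadfileWR

cfg = ReadfileWR("/home/developer/Downloads/config_WR.txt").getDict()
print(cfg['resolution'])   # 1.0   (angular resolution, line 5 of config_WR.txt)
print(cfg['ipp'])          # 400.0 (IPP in usec)
print(cfg['n'])            # 625.0 (PulsePair / nFFTPoints)
print(cfg['path_ped'])     # pedestal path; note that str(linea) keeps the trailing newline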
@@ -0,0 +1,118
1 #!python
2 '''
3 '''
4
5 import os, sys
6 import datetime
7 import time
8
9
10 from schainpy.controller import Project
11
12 desc = "USRP_test"
13 filename = "USRP_processing.xml"
14 controllerObj = Project()
15 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
16
17 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
18
19 #######################################################################
20 ###### READ, WRITE, PLOT AND WEB-UPLOAD PATHS ########################
21 #######################################################################
22 # path IVAN
23 path = '/home/soporte/jarsjuliahigh/high'
24
25 figpath = '/home/soporte/Pictures/IVAN'
26 #remotefolder = "/home/wmaster/graficos"
27 #######################################################################
28 ################# PLOT RANGE ##########################################
29 #######################################################################
30 dBmin = '10'
31 dBmax = '55'
32 xmin = '0'
33 xmax ='24'
34 ymin = '0'
35 ymax = '600'
36 #######################################################################
37 ######################## DATE #########################################
38 #######################################################################
39 str = datetime.date.today()
40 today = str.strftime("%Y/%m/%d")
41 str2 = str - datetime.timedelta(days=1)
42 yesterday = str2.strftime("%Y/%m/%d")
43 #######################################################################
44 ######################## READ UNIT ####################################
45 #######################################################################
46 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
47 path=path,
48 startDate="2021/07/02",#today,
49 endDate="2021/07/02",#today,
50 startTime='14:50:01',# free start time
51 endTime='14:55:59',
52 delay=0,
53 #set=0,
54 online=0,
55 walk=0)
56
57 opObj11 = readUnitConfObj.addOperation(name='printInfo')
58 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
59 #######################################################################
60 ################ TIME-DOMAIN OPERATIONS ###############################
61 #######################################################################
62
63 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
64
65 #opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
66 #opObj10.addParameter(name='channelList', value=[0])
67
68 '''
69 opObj10 = procUnitConfObjA.addOperation(name='ScopePlot', optype='external')
70 opObj10.addParameter(name='id', value='10', format='int')
71 #opObj10.addParameter(name='xmin', value='0', format='int')
72 ##opObj10.addParameter(name='xmax', value='50', format='int')
73 opObj10.addParameter(name='type', value='iq')
74 ##opObj10.addParameter(name='ymin', value='-5000', format='int')
75 ##opObj10.addParameter(name='ymax', value='8500', format='int')
76 #opObj11.addParameter(name='save', value=figpath, format='str')
77 #opObj11.addParameter(name='save_period', value=10, format='int')
78 '''
79 ###opObj11 = procUnitConfObjA.addOperation(name='selectHeights')
80 ###opObj11.addParameter(name='minIndex', value='1', format='int')
81 #### opObj11.addParameter(name='maxIndex', value='10000', format='int')
82 ####opObj11.addParameter(name='maxIndex', value='39980', format='int')
83
84 #######################################################################
85 ########## FREQUENCY-DOMAIN OPERATIONS ################################
86 #######################################################################
87
88 #procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
89 #procUnitConfObjB.addParameter(name='nFFTPoints', value='32', format='int')
90 #procUnitConfObjB.addParameter(name='nProfiles', value='32', format='int')
91
92 procUnitConfObjC = controllerObj.addProcUnit(datatype='SpectraHeisProc', inputId=procUnitConfObjA.getId())
93
94 opObj11 = procUnitConfObjC.addOperation(name='IncohInt4SpectraHeis', optype='other')
95 #opObj11.addParameter(name='timeInterval', value='4', format='int')
96 opObj11.addParameter(name='n', value='100', format='int')
97
98 #opObj11.addParameter(name='overlapping', value=True, format='bool')
99
100 opObj11 = procUnitConfObjC.addOperation(name='SpectraHeisPlot')
101 opObj11.addParameter(name='id', value='10', format='int')
102 opObj11.addParameter(name='wintitle', value='Spectra_Alturas', format='str')
103
104 #opObj11.addParameter(name='xmin', value=-100000, format='float')
105 #opObj11.addParameter(name='xmax', value=100000, format='float')
106 opObj11.addParameter(name='oneFigure', value=False,format='bool')
107 #opObj11.addParameter(name='zmin', value=-10, format='int')
108 #opObj11.addParameter(name='zmax', value=40, format='int')
109 opObj11.addParameter(name='ymin', value=dBmin, format='int')
110 opObj11.addParameter(name='ymax', value=dBmax, format='int')
111 opObj11.addParameter(name='grid', value=True, format='bool')
112 #opObj11.addParameter(name='showprofile', value='1', format='int')
113 opObj11.addParameter(name='save', value=figpath, format='str')
114 #opObj11.addParameter(name='save_period', value=10, format='int')
115
116
117
118 controllerObj.start()
@@ -0,0 +1,103
1 #!python
2 '''
3 '''
4
5 import os, sys
6 import datetime
7 import time
8
9
10 from schainpy.controller import Project
11
12 desc = "USRP_test"
13 filename = "USRP_processing.xml"
14 controllerObj = Project()
15 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
16
17 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
18
19 #######################################################################
20 ###### READ, WRITE, PLOT AND WEB-UPLOAD PATHS ########################
21 #######################################################################
22 # path JHON
23 path = '/home/soporte/jars2'
24
25 figpath = '/home/soporte/Pictures/JHON'
26 #remotefolder = "/home/wmaster/graficos"
27 #######################################################################
28 ################# PLOT RANGE ##########################################
29 #######################################################################
30 dBmin = '0'
31 dBmax = '50'
32 xmin = '0'
33 xmax ='24'
34 ymin = '0'
35 ymax = '600'
36 #######################################################################
37 ######################## DATE #########################################
38 #######################################################################
39 str = datetime.date.today()
40 today = str.strftime("%Y/%m/%d")
41 str2 = str - datetime.timedelta(days=1)
42 yesterday = str2.strftime("%Y/%m/%d")
43 #######################################################################
44 ######################## READ UNIT ####################################
45 #######################################################################
46 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
47 path=path,
48 startDate="2021/07/02",#today,
49 endDate="2021/07/02",#today,
50 startTime='19:45:00',# free start time
51 endTime='19:50:59',
52 delay=0,
53 #set=0,
54 online=0,
55 walk=0)
56
57 opObj11 = readUnitConfObj.addOperation(name='printInfo')
58 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
59 #######################################################################
60 ################ TIME-DOMAIN OPERATIONS ###############################
61 #######################################################################
62
63 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
64
65 opObj11 = procUnitConfObjA.addOperation(name='selectHeights')
66 opObj11.addParameter(name='minIndex', value='1', format='int')
67 # opObj11.addParameter(name='maxIndex', value='10000', format='int')
68 opObj11.addParameter(name='maxIndex', value='39980', format='int')
69
70
71 #######################################################################
72 ########## FREQUENCY-DOMAIN OPERATIONS ################################
73 #######################################################################
74
75 #procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
76 #procUnitConfObjB.addParameter(name='nFFTPoints', value='32', format='int')
77 #procUnitConfObjB.addParameter(name='nProfiles', value='32', format='int')
78
79 procUnitConfObjC = controllerObj.addProcUnit(datatype='SpectraHeisProc', inputId=procUnitConfObjA.getId())
80
81 #opObj11 = procUnitConfObjC.addOperation(name='IncohInt4SpectraHeis', optype='other')
82 #opObj11.addParameter(name='timeInterval', value='4', format='int')
83 opObj11 = procUnitConfObjC.addOperation(name='IncohInt4SpectraHeis', optype='other')
84 #opObj11.addParameter(name='timeInterval', value='4', format='int')
85 opObj11.addParameter(name='n', value='100', format='int')
86
87
88 opObj11 = procUnitConfObjC.addOperation(name='SpectraHeisPlot')
89 opObj11.addParameter(name='id', value='10', format='int')
90 opObj11.addParameter(name='wintitle', value='Spectra_Alturas', format='str')
91 #opObj11.addParameter(name='xmin', value=-100000, format='float')
92 #opObj11.addParameter(name='xmax', value=100000, format='float')
93 opObj11.addParameter(name='oneFigure', value=False,format='bool')
94 #opObj11.addParameter(name='zmin', value=-10, format='int')
95 #opObj11.addParameter(name='zmax', value=40, format='int')
96 opObj11.addParameter(name='ymin', value=dBmin, format='int')
97 opObj11.addParameter(name='ymax', value=dBmax, format='int')
98 opObj11.addParameter(name='grid', value=True, format='bool')
99 #opObj11.addParameter(name='showprofile', value='1', format='int')
100 opObj11.addParameter(name='save', value=figpath, format='str')
101 #opObj11.addParameter(name='save_period', value=10, format='int')
102
103 controllerObj.start()
@@ -0,0 +1,47
1 import os,sys
2 import datetime
3 import time
4 from schainpy.controller import Project
5 '''
6 NOTE:
7 This is a test script.
8 - Read unit 'HDFReader'.
9 - Processing unit VoltageProc
10 - Processing unit SpectraProc
11 - removeDC operation.
12 - Processing unit ParametersProc
13 - SpectralMoments operation
14 - SpectralMomentsPlot operation
15 - Write unit 'HDFWriter'.
16 '''
17 path='/home/developer/Downloads/HDF5_WR'
18 figpath = path
19 desc = "Simulator Test"
20
21 controllerObj = Project()
22
23 controllerObj.setup(id='10',name='Test Simulator',description=desc)
24
25 readUnitConfObj = controllerObj.addReadUnit(datatype='HDFReader',
26 path=path,
27 startDate="2021/01/01", #"2020/01/01",#today,
28 endDate= "2021/12/01", #"2020/12/30",#today,
29 startTime='00:00:00',
30 endTime='23:59:59',
31 delay=0,
32 #set=0,
33 online=0,
34 walk=0)#1
35
36 procUnitConfObjA = controllerObj.addProcUnit(datatype='ParametersProc',inputId=readUnitConfObj.getId())
37
38 opObj11 = procUnitConfObjA.addOperation(name='Block360')
39 opObj11.addParameter(name='n', value='40', format='int')
40
41 opObj11= procUnitConfObjA.addOperation(name='WeatherPlot',optype='other')
42 opObj11.addParameter(name='save', value=figpath)
43 opObj11.addParameter(name='save_period', value=1)
44 #opObj11 = procUnitConfObjA.addOperation(name='PowerPlot', optype='other')#PulsepairPowerPlot
45 #opObj11 = procUnitConfObjA.addOperation(name='PPSignalPlot', optype='other')
46
47 controllerObj.start()
@@ -0,0 +1,51
1 import os,sys,json
2 import datetime
3 import time
4 from schainpy.controller import Project
5 '''
6 NOTE:
7 This is a test script.
8 - Read unit 'HDFReader'.
9 - Processing unit ParametersProc
10 - SpectralMomentsPlot operation
11
12 '''
13 path = '/home/soporte/Downloads/RAWDATA_PP'
14 path='/DATA_RM/TEST_HDF5/d2021203'
15 figpath = '/home/soporte/Downloads/IMAGE'
16 desc = "Simulator Test"
17 desc_data = {
18 'Data': {
19 'dataPP_POW': 'Data/dataPP_POW/channel00',
20 'utctime':'Data/utctime'
21 },
22 'Metadata': {
23 'heightList' :'Metadata/heightList',
24 'flagDataAsBlock':'Metadata/flagDataAsBlock',
25 'profileIndex':'Metadata/profileIndex'
26 }
27 }
28
29 controllerObj = Project()
30
31 controllerObj.setup(id='10',name='Test Simulator',description=desc)
32
33 readUnitConfObj = controllerObj.addReadUnit(datatype='HDFReader',
34 path=path,
35 startDate="2021/01/01", #"2020/01/01",#today,
36 endDate= "2021/12/01", #"2020/12/30",#today,
37 startTime='00:00:00',
38 endTime='23:59:59',
39 delay=0,
40 #set=0,
41 online=0,
42 walk=0,
43 description= json.dumps(desc_data))#1
44
45 procUnitConfObjA = controllerObj.addProcUnit(datatype='ParametersProc',inputId=readUnitConfObj.getId())
46
47 #opObj11 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='other')#PulsepairPowerPlot
48 opObj11 = procUnitConfObjA.addOperation(name='PulsepairSignalPlot', optype='other')
49
50
51 controllerObj.start()
@@ -0,0 +1,56
1 import os,sys,json
2 import datetime
3 import time
4 from schainpy.controller import Project
5 '''
6 NOTE:
7 This is a test script.
8 - Read unit 'HDFReader'.
9 - Processing unit ParametersProc
10 - SpectralMomentsPlot operation
11
12 '''
13 path = '/home/soporte/Downloads/RAWDATA'
14 figpath = '/home/soporte/Downloads/IMAGE'
15 desc = "Simulator Test"
16 desc_data = {
17 'Data': {
18 'data_pow': 'Data/data_pow/channel00',
19 'data_dop': 'Data/data_dop/channel00',
20 'utctime':'Data/utctime'
21 },
22 'Metadata': {
23 'heightList':'Metadata/heightList',
24 'nIncohInt' :'Metadata/nIncohInt',
25 'nCohInt' :'Metadata/nCohInt',
26 'nProfiles' :'Metadata/nProfiles',
27 'channelList' :'Metadata/channelList'
28 }
29 }
30
31 controllerObj = Project()
32
33 controllerObj.setup(id='10',name='Test Simulator',description=desc)
34
35 readUnitConfObj = controllerObj.addReadUnit(datatype='HDFReader',
36 path=path,
37 startDate="2021/01/01", #"2020/01/01",#today,
38 endDate= "2021/12/01", #"2020/12/30",#today,
39 startTime='00:00:00',
40 endTime='23:59:59',
41 delay=0,
42 #set=0,
43 online=0,
44 walk=1,
45 description= json.dumps(desc_data))#1
46
47 procUnitConfObjA = controllerObj.addProcUnit(datatype='ParametersProc',inputId=readUnitConfObj.getId())
48 '''
49 opObj11 = procUnitConfObjA.addOperation(name='DopplerPlot',optype='external')
50 #opObj11.addParameter(name='xmin', value=0)
51 #opObj11.addParameter(name='xmax', value=23)
52 opObj11.addParameter(name='save', value=figpath)
53 opObj11.addParameter(name='showprofile', value=0)
54 opObj11.addParameter(name='save_period', value=10)
55 '''
56 controllerObj.start()
@@ -0,0 +1,55
1 import os,sys
2 import datetime
3 import time
4 from schainpy.controller import Project
5
6 #*************************************************************************
7 #**************************READING config_WR.txt**************************
8 #*************************************************************************
9 from readFileconfig import ReadfileWR
10 filename= "/home/soporte/schainv3/schain/schainpy/scripts/config_WR.txt"
11 dict= ReadfileWR(filename).getDict()
12
13 FixRCP_IPP = dict['ipp']*0.15 # conversion: IPP in usec times 0.15 gives km
14 dataBlocksPerFile= dict['b_f_adq']
15 profilesPerBlock= int(dict['n'])
16 pulsepair = int(dict['n'])
17 #*************************************************************************
18 path = '/home/soporte/Downloads/RAWDATA_PP_C'
19 figpath = path
20 desc = "Simulator Test"
21 controllerObj = Project()
22 controllerObj.setup(id='10',name='Test Simulator',description=desc)
23 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
24 frequency=9.345e9,
25 FixRCP_IPP= FixRCP_IPP,
26 Tau_0 = 30,
27 AcqH0_0=0,
28 samples=330,
29 AcqDH_0=0.15,
30 FixRCP_TXA=0.15,
31 FixRCP_TXB=0.15,
32 Fdoppler=600.0,
33 Hdoppler=36,
34 Adoppler=300,#300
35 delay=0,
36 online=0,
37 walk=0,
38 profilesPerBlock=profilesPerBlock,
39 dataBlocksPerFile=dataBlocksPerFile)#,#nTotalReadFiles=2)
40 #opObj11 = readUnitConfObj.addOperation(name='printInfo')
41 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
42
43 opObj11 = procUnitConfObjA.addOperation(name='PulsePair')
44 opObj11.addParameter(name='n', value=pulsepair, format='int')#10
45
46 procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
47
48 opObj10 = procUnitConfObjB.addOperation(name='HDFWriter')
49 opObj10.addParameter(name='path',value=figpath)
50 #opObj10.addParameter(name='mode',value=2)
51 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
52 opObj10.addParameter(name='metadataList',value='utctimeInit,paramInterval,heightList,profileIndex,flagDataAsBlock',format='list')
53 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,utctime',format='list')#,format='list'
54
55 controllerObj.start()
@@ -0,0 +1,50
1 import os,sys
2 import datetime
3 import time
4 from schainpy.controller import Project
5 path = '/home/soporte/Downloads/RAWDATA_PP_Z'
6 figpath = path
7 desc = "Simulator Test"
8
9 controllerObj = Project()
10
11 controllerObj.setup(id='10',name='Test Simulator',description=desc)
12
13 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
14 frequency=9.345e9,
15 FixRCP_IPP= 60,
16 Tau_0 = 30,
17 AcqH0_0=0,
18 samples=330,
19 AcqDH_0=0.15,
20 FixRCP_TXA=0.15,
21 FixRCP_TXB=0.15,
22 Fdoppler=600.0,
23 Hdoppler=36,
24 Adoppler=300,#300
25 delay=0,
26 online=0,
27 walk=0,
28 profilesPerBlock=625,
29 dataBlocksPerFile=360)#,#nTotalReadFiles=2)
30
31 ### opObj11 = readUnitConfObj.addOperation(name='printInfo')
32
33 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
34
35 opObj11 = procUnitConfObjA.addOperation(name='PulsePair', optype='other')
36 opObj11.addParameter(name='n', value='625', format='int')#10
37 opObj11.addParameter(name='removeDC', value=1, format='int')
38
39 procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
40
41 opObj10 = procUnitConfObjB.addOperation(name="WeatherRadar")
42
43 opObj10 = procUnitConfObjB.addOperation(name='HDFWriter')
44 opObj10.addParameter(name='path',value=figpath)
45 #opObj10.addParameter(name='mode',value=0)
46 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
47 opObj10.addParameter(name='metadataList',value='utctimeInit,timeInterval',format='list')
48 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,dataPP_SNR,dataPP_WIDTH,factor_Zeh,utctime')#,format='list'
49
50 controllerObj.start()
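A hedged read-back sketch for the files written by the HDFWriter above: the dataset names come from the dataList/metadataList parameters, while the Data/<name>/channel00 group layout and the file name are assumptions borrowed from the HDFReader description used in an earlier script.

import h5py

# Hypothetical output file name; HDFWriter chooses its own file naming.
fname = "/home/soporte/Downloads/RAWDATA_PP_Z/d2021231/pp000000.hdf5"
with h5py.File(fname, "r") as fp:
    power   = fp["Data/dataPP_POW/channel00"][:]   # pulse-pair power
    doppler = fp["Data/dataPP_DOP/channel00"][:]   # pulse-pair Doppler
    utctime = fp["Data/utctime"][:]
    t0 = fp["Metadata/utctimeInit"][()]            # from metadataList above
    dt = fp["Metadata/timeInterval"][()]
print(power.shape, t0, dt)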
@@ -1,1069 +1,1069
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Definition of diferent Data objects for different types of data
6 6
7 7 Here you will find the different data objects for the different types
8 8 of data; these data objects must be used as dataIn or dataOut objects in
9 9 processing units and operations. Currently the supported data objects are:
10 10 Voltage, Spectra, SpectraHeis, Fits, Correlation and Parameters
11 11 """
12 12
13 13 import copy
14 14 import numpy
15 15 import datetime
16 16 import json
17 17
18 18 import schainpy.admin
19 19 from schainpy.utils import log
20 20 from .jroheaderIO import SystemHeader, RadarControllerHeader
21 21 from schainpy.model.data import _noise
22 22
23 23
24 24 def getNumpyDtype(dataTypeCode):
25 25
26 26 if dataTypeCode == 0:
27 27 numpyDtype = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
28 28 elif dataTypeCode == 1:
29 29 numpyDtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
30 30 elif dataTypeCode == 2:
31 31 numpyDtype = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
32 32 elif dataTypeCode == 3:
33 33 numpyDtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
34 34 elif dataTypeCode == 4:
35 35 numpyDtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
36 36 elif dataTypeCode == 5:
37 37 numpyDtype = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
38 38 else:
39 39 raise ValueError('dataTypeCode was not defined')
40 40
41 41 return numpyDtype
42 42
43 43
44 44 def getDataTypeCode(numpyDtype):
45 45
46 46 if numpyDtype == numpy.dtype([('real', '<i1'), ('imag', '<i1')]):
47 47 datatype = 0
48 48 elif numpyDtype == numpy.dtype([('real', '<i2'), ('imag', '<i2')]):
49 49 datatype = 1
50 50 elif numpyDtype == numpy.dtype([('real', '<i4'), ('imag', '<i4')]):
51 51 datatype = 2
52 52 elif numpyDtype == numpy.dtype([('real', '<i8'), ('imag', '<i8')]):
53 53 datatype = 3
54 54 elif numpyDtype == numpy.dtype([('real', '<f4'), ('imag', '<f4')]):
55 55 datatype = 4
56 56 elif numpyDtype == numpy.dtype([('real', '<f8'), ('imag', '<f8')]):
57 57 datatype = 5
58 58 else:
59 59 datatype = None
60 60
61 61 return datatype
62 62
63 63
64 64 def hildebrand_sekhon(data, navg):
65 65 """
66 66 This method objectively determines the noise level in Doppler spectra. The
67 67 technique relies on the fact that, for white Gaussian noise, the standard
68 68 deviation of the spectral densities equals the mean spectral density.
69 69
70 70 Inputs:
71 71 data : array of spectral densities (flattened and sorted internally)
72 72 navg : number of averages
73 73
74 74 Return:
75 75 mean : estimated noise level
76 76 """
77 77
78 78 sortdata = numpy.sort(data, axis=None)
79 79 '''
80 80 lenOfData = len(sortdata)
81 81 nums_min = lenOfData*0.2
82 82
83 83 if nums_min <= 5:
84 84
85 85 nums_min = 5
86 86
87 87 sump = 0.
88 88 sumq = 0.
89 89
90 90 j = 0
91 91 cont = 1
92 92
93 93 while((cont == 1)and(j < lenOfData)):
94 94
95 95 sump += sortdata[j]
96 96 sumq += sortdata[j]**2
97 97
98 98 if j > nums_min:
99 99 rtest = float(j)/(j-1) + 1.0/navg
100 100 if ((sumq*j) > (rtest*sump**2)):
101 101 j = j - 1
102 102 sump = sump - sortdata[j]
103 103 sumq = sumq - sortdata[j]**2
104 104 cont = 0
105 105
106 106 j += 1
107 107
108 108 lnoise = sump / j
109 109 '''
110 110 return _noise.hildebrand_sekhon(sortdata, navg)
111 111
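For reference, the commented-out block above is the pure-Python form of the Hildebrand-Sekhon criterion that the compiled _noise.hildebrand_sekhon call replaces. A simplified, self-contained NumPy sketch of the same idea (an illustration, not a byte-for-byte port of the production code):

import numpy

def hildebrand_sekhon_py(data, navg):
    # Sort the spectral densities in ascending order and keep accumulating them
    # while they still behave like white noise; stop when the variance test fails.
    sortdata = numpy.sort(data, axis=None)
    nums_min = max(int(len(sortdata) * 0.2), 5)
    sump = 0.0           # running sum of the samples accepted as noise
    sumq = 0.0           # running sum of their squares
    noise_samples = 0
    for j, value in enumerate(sortdata):
        sump += value
        sumq += value ** 2
        noise_samples = j + 1
        if j > nums_min:
            rtest = float(j) / (j - 1) + 1.0 / navg
            if sumq * j > rtest * sump ** 2:
                # this sample no longer looks like noise: drop it and stop
                sump -= value
                sumq -= value ** 2
                noise_samples -= 1
                break
    return sump / noise_samples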
112 112
113 113 class Beam:
114 114
115 115 def __init__(self):
116 116 self.codeList = []
117 117 self.azimuthList = []
118 118 self.zenithList = []
119 119
120 120
121 121 class GenericData(object):
122 122
123 123 flagNoData = True
124 124
125 125 def copy(self, inputObj=None):
126 126
127 127 if inputObj == None:
128 128 return copy.deepcopy(self)
129 129
130 130 for key in list(inputObj.__dict__.keys()):
131 131
132 132 attribute = inputObj.__dict__[key]
133 133
134 134 # If this attribute is a tuple or list
135 135 if type(inputObj.__dict__[key]) in (tuple, list):
136 136 self.__dict__[key] = attribute[:]
137 137 continue
138 138
139 139 # If this attribute is another object or instance
140 140 if hasattr(attribute, '__dict__'):
141 141 self.__dict__[key] = attribute.copy()
142 142 continue
143 143
144 144 self.__dict__[key] = inputObj.__dict__[key]
145 145
146 146 def deepcopy(self):
147 147
148 148 return copy.deepcopy(self)
149 149
150 150 def isEmpty(self):
151 151
152 152 return self.flagNoData
153 153
154 154 def isReady(self):
155 155
156 156 return not self.flagNoData
157 157
158 158
159 159 class JROData(GenericData):
160 160
161 161 systemHeaderObj = SystemHeader()
162 162 radarControllerHeaderObj = RadarControllerHeader()
163 163 type = None
164 164 datatype = None # dtype but in string
165 165 nProfiles = None
166 166 heightList = None
167 167 channelList = None
168 168 flagDiscontinuousBlock = False
169 169 useLocalTime = False
170 170 utctime = None
171 171 timeZone = None
172 172 dstFlag = None
173 173 errorCount = None
174 174 blocksize = None
175 175 flagDecodeData = False # assume the data is not decoded
176 176 flagDeflipData = False # assume the data has not been deflipped
177 177 flagShiftFFT = False
178 178 nCohInt = None
179 179 windowOfFilter = 1
180 180 C = 3e8
181 181 frequency = 49.92e6
182 182 realtime = False
183 183 beacon_heiIndexList = None
184 184 last_block = None
185 185 blocknow = None
186 186 azimuth = None
187 187 zenith = None
188 188 beam = Beam()
189 189 profileIndex = None
190 190 error = None
191 191 data = None
192 192 nmodes = None
193 193 metadata_list = ['heightList', 'timeZone', 'type']
194 194
195 195 def __str__(self):
196 196
197 197 return '{} - {}'.format(self.type, self.datatime)
198 198
199 199 def getNoise(self):
200 200
201 201 raise NotImplementedError
202 202
203 203 @property
204 204 def nChannels(self):
205 205
206 206 return len(self.channelList)
207 207
208 208 @property
209 209 def channelIndexList(self):
210 210
211 211 return list(range(self.nChannels))
212 212
213 213 @property
214 214 def nHeights(self):
215 215
216 216 return len(self.heightList)
217 217
218 218 def getDeltaH(self):
219 219
220 220 return self.heightList[1] - self.heightList[0]
221 221
222 222 @property
223 223 def ltctime(self):
224 224
225 225 if self.useLocalTime:
226 226 return self.utctime - self.timeZone * 60
227 227
228 228 return self.utctime
229 229
230 230 @property
231 231 def datatime(self):
232 232
233 233 datatimeValue = datetime.datetime.utcfromtimestamp(self.ltctime)
234 234 return datatimeValue
235 235
236 236 def getTimeRange(self):
237 237
238 238 datatime = []
239 239
240 240 datatime.append(self.ltctime)
241 241 datatime.append(self.ltctime + self.timeInterval + 1)
242 242
243 243 datatime = numpy.array(datatime)
244 244
245 245 return datatime
246 246
247 247 def getFmaxTimeResponse(self):
248 248
249 249 period = (10**-6) * self.getDeltaH() / (0.15)
250 250
251 251 PRF = 1. / (period * self.nCohInt)
252 252
253 253 fmax = PRF
254 254
255 255 return fmax
256 256
257 257 def getFmax(self):
258 258 PRF = 1. / (self.ippSeconds * self.nCohInt)
259 259
260 260 fmax = PRF
261 261 return fmax
262 262
263 263 def getVmax(self):
264 264
265 265 _lambda = self.C / self.frequency
266 266
267 267 vmax = self.getFmax() * _lambda / 2
268 268
269 269 return vmax
270 270
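getFmax and getVmax above implement the usual pulsed-radar aliasing limits: fmax = 1 / (ippSeconds * nCohInt) and vmax = fmax * lambda / 2 with lambda = C / frequency. A quick numeric check with illustrative values (the IPP and integration numbers are assumptions, not taken from any particular header):

C = 3e8                # speed of light, m/s (same constant used by JROData)
frequency = 9.345e9    # Hz, e.g. an X-band transmitter
ippSeconds = 400e-6    # illustrative inter-pulse period, s
nCohInt = 1            # no coherent integration

fmax = 1.0 / (ippSeconds * nCohInt)     # 2500 Hz maximum unambiguous frequency
vmax = fmax * (C / frequency) / 2       # ~40.1 m/s maximum unambiguous velocity
print(fmax, vmax)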
271 271 @property
272 272 def ippSeconds(self):
273 273 '''
274 274 '''
275 275 return self.radarControllerHeaderObj.ippSeconds
276
276
277 277 @ippSeconds.setter
278 278 def ippSeconds(self, ippSeconds):
279 279 '''
280 280 '''
281 281 self.radarControllerHeaderObj.ippSeconds = ippSeconds
282
282
283 283 @property
284 284 def code(self):
285 285 '''
286 286 '''
287 287 return self.radarControllerHeaderObj.code
288 288
289 289 @code.setter
290 290 def code(self, code):
291 291 '''
292 292 '''
293 293 self.radarControllerHeaderObj.code = code
294 294
295 295 @property
296 296 def nCode(self):
297 297 '''
298 298 '''
299 299 return self.radarControllerHeaderObj.nCode
300 300
301 301 @nCode.setter
302 302 def nCode(self, ncode):
303 303 '''
304 304 '''
305 305 self.radarControllerHeaderObj.nCode = ncode
306 306
307 307 @property
308 308 def nBaud(self):
309 309 '''
310 310 '''
311 311 return self.radarControllerHeaderObj.nBaud
312 312
313 313 @nBaud.setter
314 314 def nBaud(self, nbaud):
315 315 '''
316 316 '''
317 317 self.radarControllerHeaderObj.nBaud = nbaud
318 318
319 319 @property
320 320 def ipp(self):
321 321 '''
322 322 '''
323 323 return self.radarControllerHeaderObj.ipp
324 324
325 325 @ipp.setter
326 326 def ipp(self, ipp):
327 327 '''
328 328 '''
329 329 self.radarControllerHeaderObj.ipp = ipp
330 330
331 331 @property
332 332 def metadata(self):
333 333 '''
334 334 '''
335 335
336 336 return {attr: getattr(self, attr) for attr in self.metadata_list}
337 337
338 338
339 339 class Voltage(JROData):
340 340
341 341 dataPP_POW = None
342 342 dataPP_DOP = None
343 343 dataPP_WIDTH = None
344 344 dataPP_SNR = None
345 345
346 346 def __init__(self):
347 347 '''
348 348 Constructor
349 349 '''
350 350
351 351 self.useLocalTime = True
352 352 self.radarControllerHeaderObj = RadarControllerHeader()
353 353 self.systemHeaderObj = SystemHeader()
354 354 self.type = "Voltage"
355 355 self.data = None
356 356 self.nProfiles = None
357 357 self.heightList = None
358 358 self.channelList = None
359 359 self.flagNoData = True
360 360 self.flagDiscontinuousBlock = False
361 361 self.utctime = None
362 362 self.timeZone = 0
363 363 self.dstFlag = None
364 364 self.errorCount = None
365 365 self.nCohInt = None
366 366 self.blocksize = None
367 367 self.flagCohInt = False
368 368 self.flagDecodeData = False # assume the data is not decoded
369 369 self.flagDeflipData = False # assume the data has not been deflipped
370 370 self.flagShiftFFT = False
371 371 self.flagDataAsBlock = False # assume the data is read profile by profile
372 372 self.profileIndex = 0
373 self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
373 self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
374 374 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp']
375 375
376 376 def getNoisebyHildebrand(self, channel=None):
377 377 """
378 378 Determine the noise level using the Hildebrand-Sekhon method
379 379
380 380 Return:
381 381 noiselevel
382 382 """
383 383
384 384 if channel != None:
385 385 data = self.data[channel]
386 386 nChannels = 1
387 387 else:
388 388 data = self.data
389 389 nChannels = self.nChannels
390 390
391 391 noise = numpy.zeros(nChannels)
392 392 power = data * numpy.conjugate(data)
393 393
394 394 for thisChannel in range(nChannels):
395 395 if nChannels == 1:
396 396 daux = power[:].real
397 397 else:
398 398 daux = power[thisChannel, :].real
399 399 noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt)
400 400
401 401 return noise
402 402
403 403 def getNoise(self, type=1, channel=None):
404 404
405 405 if type == 1:
406 406 noise = self.getNoisebyHildebrand(channel)
407 407
408 408 return noise
409 409
410 410 def getPower(self, channel=None):
411 411
412 412 if channel != None:
413 413 data = self.data[channel]
414 414 else:
415 415 data = self.data
416 416
417 417 power = data * numpy.conjugate(data)
418 418 powerdB = 10 * numpy.log10(power.real)
419 419 powerdB = numpy.squeeze(powerdB)
420 420
421 421 return powerdB
422 422
423 423 @property
424 424 def timeInterval(self):
425 425
426 426 return self.ippSeconds * self.nCohInt
427 427
428 428 noise = property(getNoise, doc="I'm the 'noise' property.")
429 429
430 430
431 431 class Spectra(JROData):
432 432
433 433 def __init__(self):
434 434 '''
435 435 Constructor
436 436 '''
437 437
438 438 self.data_dc = None
439 439 self.data_spc = None
440 440 self.data_cspc = None
441 441 self.useLocalTime = True
442 442 self.radarControllerHeaderObj = RadarControllerHeader()
443 443 self.systemHeaderObj = SystemHeader()
444 444 self.type = "Spectra"
445 445 self.timeZone = 0
446 446 self.nProfiles = None
447 447 self.heightList = None
448 448 self.channelList = None
449 449 self.pairsList = None
450 450 self.flagNoData = True
451 451 self.flagDiscontinuousBlock = False
452 452 self.utctime = None
453 453 self.nCohInt = None
454 454 self.nIncohInt = None
455 455 self.blocksize = None
456 456 self.nFFTPoints = None
457 457 self.wavelength = None
458 458 self.flagDecodeData = False # assume the data is not decoded
459 459 self.flagDeflipData = False # assume the data has not been deflipped
460 460 self.flagShiftFFT = False
461 461 self.ippFactor = 1
462 462 self.beacon_heiIndexList = []
463 463 self.noise_estimation = None
464 self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
464 self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
465 465 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp','nIncohInt', 'nFFTPoints', 'nProfiles']
466 466
467 467 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
468 468 """
469 469 Determine the noise level using the Hildebrand-Sekhon method
470 470
471 471 Return:
472 472 noiselevel
473 473 """
474 474
475 475 noise = numpy.zeros(self.nChannels)
476 476
477 477 for channel in range(self.nChannels):
478 478 daux = self.data_spc[channel,
479 479 xmin_index:xmax_index, ymin_index:ymax_index]
480 480 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
481 481
482 482 return noise
483 483
484 484 def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
485 485
486 486 if self.noise_estimation is not None:
487 487 # this was estimated by getNoise Operation defined in jroproc_spectra.py
488 488 return self.noise_estimation
489 489 else:
490 490 noise = self.getNoisebyHildebrand(
491 491 xmin_index, xmax_index, ymin_index, ymax_index)
492 492 return noise
493 493
494 494 def getFreqRangeTimeResponse(self, extrapoints=0):
495 495
496 496 deltafreq = self.getFmaxTimeResponse() / (self.nFFTPoints * self.ippFactor)
497 497 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - deltafreq / 2
498 498
499 499 return freqrange
500 500
501 501 def getAcfRange(self, extrapoints=0):
502 502
503 503 deltafreq = 10. / (self.getFmax() / (self.nFFTPoints * self.ippFactor))
504 504 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
505 505
506 506 return freqrange
507 507
508 508 def getFreqRange(self, extrapoints=0):
509 509
510 510 deltafreq = self.getFmax() / (self.nFFTPoints * self.ippFactor)
511 511 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
512 512
513 513 return freqrange
514 514
515 515 def getVelRange(self, extrapoints=0):
516 516
517 517 deltav = self.getVmax() / (self.nFFTPoints * self.ippFactor)
518 518 velrange = deltav * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.)
519 519
520 520 if self.nmodes:
521 521 return velrange/self.nmodes
522 522 else:
523 523 return velrange
524 524
525 525 @property
526 526 def nPairs(self):
527 527
528 528 return len(self.pairsList)
529 529
530 530 @property
531 531 def pairsIndexList(self):
532 532
533 533 return list(range(self.nPairs))
534 534
535 535 @property
536 536 def normFactor(self):
537 537
538 538 pwcode = 1
539 539
540 540 if self.flagDecodeData:
541 541 pwcode = numpy.sum(self.code[0]**2)
542 542 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
543 543 normFactor = self.nProfiles * self.nIncohInt * self.nCohInt * pwcode * self.windowOfFilter
544 544
545 545 return normFactor
546 546
547 547 @property
548 548 def flag_cspc(self):
549 549
550 550 if self.data_cspc is None:
551 551 return True
552 552
553 553 return False
554 554
555 555 @property
556 556 def flag_dc(self):
557 557
558 558 if self.data_dc is None:
559 559 return True
560 560
561 561 return False
562 562
563 563 @property
564 564 def timeInterval(self):
565 565
566 566 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles * self.ippFactor
567 567 if self.nmodes:
568 568 return self.nmodes*timeInterval
569 569 else:
570 570 return timeInterval
571 571
572 572 def getPower(self):
573 573
574 574 factor = self.normFactor
575 575 z = self.data_spc / factor
576 576 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
577 577 avg = numpy.average(z, axis=1)
578 578
579 579 return 10 * numpy.log10(avg)
580 580
581 581 def getCoherence(self, pairsList=None, phase=False):
582 582
583 583 z = []
584 584 if pairsList is None:
585 585 pairsIndexList = self.pairsIndexList
586 586 else:
587 587 pairsIndexList = []
588 588 for pair in pairsList:
589 589 if pair not in self.pairsList:
590 590 raise ValueError("Pair %s is not in dataOut.pairsList" % (
591 591 pair))
592 592 pairsIndexList.append(self.pairsList.index(pair))
593 593 for i in range(len(pairsIndexList)):
594 594 pair = self.pairsList[pairsIndexList[i]]
595 595 ccf = numpy.average(self.data_cspc[pairsIndexList[i], :, :], axis=0)
596 596 powa = numpy.average(self.data_spc[pair[0], :, :], axis=0)
597 597 powb = numpy.average(self.data_spc[pair[1], :, :], axis=0)
598 598 avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
599 599 if phase:
600 600 data = numpy.arctan2(avgcoherenceComplex.imag,
601 601 avgcoherenceComplex.real) * 180 / numpy.pi
602 602 else:
603 603 data = numpy.abs(avgcoherenceComplex)
604 604
605 605 z.append(data)
606 606
607 607 return numpy.array(z)
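getCoherence above reduces the averaged cross-spectrum and the two averaged auto-spectra to |<S_ab>| / sqrt(<S_aa> <S_bb>) per height. A standalone toy check of that normalization on synthetic data (no schainpy objects involved; signals and noise levels are made up):

import numpy

rng = numpy.random.default_rng(0)
nfft, nheights = 64, 10
shape = (nfft, nheights)
common = rng.standard_normal(shape) + 1j * rng.standard_normal(shape)
a = common + 0.5 * (rng.standard_normal(shape) + 1j * rng.standard_normal(shape))
b = common + 0.5 * (rng.standard_normal(shape) + 1j * rng.standard_normal(shape))

ccf = numpy.average(a * numpy.conjugate(b), axis=0)    # <S_ab> per height
powa = numpy.average(numpy.abs(a) ** 2, axis=0)        # <S_aa>
powb = numpy.average(numpy.abs(b) ** 2, axis=0)        # <S_bb>
coherence = numpy.abs(ccf / numpy.sqrt(powa * powb))   # stays in [0, 1], close to 1 here
print(coherence)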
608 608
609 609 def setValue(self, value):
610 610
611 611 print("This property should not be initialized")
612 612
613 613 return
614
614
615 615 noise = property(getNoise, setValue, doc="I'm the 'noise' property.")
616 616
617 617
618 618 class SpectraHeis(Spectra):
619 619
620 620 def __init__(self):
621 621
622 622 self.radarControllerHeaderObj = RadarControllerHeader()
623 623 self.systemHeaderObj = SystemHeader()
624 624 self.type = "SpectraHeis"
625 625 self.nProfiles = None
626 626 self.heightList = None
627 627 self.channelList = None
628 628 self.flagNoData = True
629 629 self.flagDiscontinuousBlock = False
630 630 self.utctime = None
631 631 self.blocksize = None
632 632 self.profileIndex = 0
633 633 self.nCohInt = 1
634 634 self.nIncohInt = 1
635 635
636 636 @property
637 637 def normFactor(self):
638 638 pwcode = 1
639 639 if self.flagDecodeData:
640 640 pwcode = numpy.sum(self.code[0]**2)
641 641
642 642 normFactor = self.nIncohInt * self.nCohInt * pwcode
643 643
644 644 return normFactor
645 645
646 646 @property
647 647 def timeInterval(self):
648 648
649 649 return self.ippSeconds * self.nCohInt * self.nIncohInt
650 650
651 651
652 652 class Fits(JROData):
653 653
654 654 def __init__(self):
655 655
656 656 self.type = "Fits"
657 657 self.nProfiles = None
658 658 self.heightList = None
659 659 self.channelList = None
660 660 self.flagNoData = True
661 661 self.utctime = None
662 662 self.nCohInt = 1
663 663 self.nIncohInt = 1
664 664 self.useLocalTime = True
665 665 self.profileIndex = 0
666 666 self.timeZone = 0
667 667
668 668 def getTimeRange(self):
669 669
670 670 datatime = []
671 671
672 672 datatime.append(self.ltctime)
673 673 datatime.append(self.ltctime + self.timeInterval)
674 674
675 675 datatime = numpy.array(datatime)
676 676
677 677 return datatime
678 678
679 679 def getChannelIndexList(self):
680 680
681 681 return list(range(self.nChannels))
682 682
683 683 def getNoise(self, type=1):
684 684
685 685
686 686 if type == 1:
687 687 noise = self.getNoisebyHildebrand()
688 688
689 689 if type == 2:
690 690 noise = self.getNoisebySort()
691 691
692 692 if type == 3:
693 693 noise = self.getNoisebyWindow()
694 694
695 695 return noise
696 696
697 697 @property
698 698 def timeInterval(self):
699 699
700 700 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
701 701
702 702 return timeInterval
703 703
704 704 @property
705 705 def ippSeconds(self):
706 706 '''
707 707 '''
708 708 return self.ipp_sec
709 709
710 710 noise = property(getNoise, doc="I'm the 'noise' property.")
711
711
712 712
713 713 class Correlation(JROData):
714 714
715 715 def __init__(self):
716 716 '''
717 717 Constructor
718 718 '''
719 719 self.radarControllerHeaderObj = RadarControllerHeader()
720 720 self.systemHeaderObj = SystemHeader()
721 721 self.type = "Correlation"
722 722 self.data = None
723 723 self.dtype = None
724 724 self.nProfiles = None
725 725 self.heightList = None
726 726 self.channelList = None
727 727 self.flagNoData = True
728 728 self.flagDiscontinuousBlock = False
729 729 self.utctime = None
730 730 self.timeZone = 0
731 731 self.dstFlag = None
732 732 self.errorCount = None
733 733 self.blocksize = None
734 734 self.flagDecodeData = False # assume the data is not decoded
735 735 self.flagDeflipData = False # assume the data has not been deflipped
736 736 self.pairsList = None
737 737 self.nPoints = None
738 738
739 739 def getPairsList(self):
740 740
741 741 return self.pairsList
742 742
743 743 def getNoise(self, mode=2):
744 744
745 745 indR = numpy.where(self.lagR == 0)[0][0]
746 746 indT = numpy.where(self.lagT == 0)[0][0]
747 747
748 748 jspectra0 = self.data_corr[:, :, indR, :]
749 749 jspectra = copy.copy(jspectra0)
750 750
751 751 num_chan = jspectra.shape[0]
752 752 num_hei = jspectra.shape[2]
753 753
754 754 freq_dc = jspectra.shape[1] / 2
755 755 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
756 756
757 757 if ind_vel[0] < 0:
758 758 ind_vel[list(range(0, 1))] = ind_vel[list(
759 759 range(0, 1))] + self.num_prof
760 760
761 761 if mode == 1:
762 762 jspectra[:, freq_dc, :] = (
763 763 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
764 764
765 765 if mode == 2:
766 766
767 767 vel = numpy.array([-2, -1, 1, 2])
768 768 xx = numpy.zeros([4, 4])
769 769
770 770 for fil in range(4):
771 771 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
772 772
773 773 xx_inv = numpy.linalg.inv(xx)
774 774 xx_aux = xx_inv[0, :]
775 775
776 776 for ich in range(num_chan):
777 777 yy = jspectra[ich, ind_vel, :]
778 778 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
779 779
780 780 junkid = jspectra[ich, freq_dc, :] <= 0
781 781 cjunkid = sum(junkid)
782 782
783 783 if cjunkid.any():
784 784 jspectra[ich, freq_dc, junkid.nonzero()] = (
785 785 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
786 786
787 787 noise = jspectra0[:, freq_dc, :] - jspectra[:, freq_dc, :]
788 788
789 789 return noise
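Mode 2 above rebuilds the DC (zero-velocity) bin by fitting a cubic through the bins at velocities -2, -1, 1 and 2: the first row of the inverse 4x4 Vandermonde matrix gives the polynomial's constant term, i.e. its value at v = 0. A small standalone check of that trick with a made-up cubic:

import numpy

vel = numpy.array([-2.0, -1.0, 1.0, 2.0])
xx = numpy.array([v ** numpy.arange(4) for v in vel])  # rows [1, v, v**2, v**3]
xx_aux = numpy.linalg.inv(xx)[0, :]                    # picks out the constant term

poly = lambda v: 3.0 + 2.0 * v - 0.5 * v ** 2 + 0.1 * v ** 3
yy = poly(vel)                                          # samples at v = -2, -1, 1, 2
print(numpy.dot(xx_aux, yy), poly(0.0))                 # both ~= 3.0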
790 790
791 791 @property
792 792 def timeInterval(self):
793 793
794 794 return self.ippSeconds * self.nCohInt * self.nProfiles
795 795
796 796 def splitFunctions(self):
797 797
798 798 pairsList = self.pairsList
799 799 ccf_pairs = []
800 800 acf_pairs = []
801 801 ccf_ind = []
802 802 acf_ind = []
803 803 for l in range(len(pairsList)):
804 804 chan0 = pairsList[l][0]
805 805 chan1 = pairsList[l][1]
806 806
807 807 # Collect the autocorrelation pairs
808 808 if chan0 == chan1:
809 809 acf_pairs.append(chan0)
810 810 acf_ind.append(l)
811 811 else:
812 812 ccf_pairs.append(pairsList[l])
813 813 ccf_ind.append(l)
814 814
815 815 data_acf = self.data_cf[acf_ind]
816 816 data_ccf = self.data_cf[ccf_ind]
817 817
818 818 return acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf
819 819
820 820 @property
821 821 def normFactor(self):
822 822 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.splitFunctions()
823 823 acf_pairs = numpy.array(acf_pairs)
824 824 normFactor = numpy.zeros((self.nPairs, self.nHeights))
825 825
826 826 for p in range(self.nPairs):
827 827 pair = self.pairsList[p]
828 828
829 829 ch0 = pair[0]
830 830 ch1 = pair[1]
831 831
832 832 ch0_max = numpy.max(data_acf[acf_pairs == ch0, :, :], axis=1)
833 833 ch1_max = numpy.max(data_acf[acf_pairs == ch1, :, :], axis=1)
834 834 normFactor[p, :] = numpy.sqrt(ch0_max * ch1_max)
835 835
836 836 return normFactor
837 837
838 838
839 839 class Parameters(Spectra):
840 840
841 841 groupList = None # List of Pairs, Groups, etc
842 842 data_param = None # Parameters obtained
843 843 data_pre = None # Data Pre Parametrization
844 844 data_SNR = None # Signal to Noise Ratio
845 845 abscissaList = None # Abscissa, can be velocities, lags or time
846 846 utctimeInit = None # Initial UTC time
847 847 paramInterval = None # Time interval to calculate Parameters in seconds
848 848 useLocalTime = True
849 849 # Fitting
850 850 data_error = None # Error of the estimation
851 851 constants = None
852 852 library = None
853 853 # Output signal
854 854 outputInterval = None # Time interval to calculate output signal in seconds
855 855 data_output = None # Out signal
856 856 nAvg = None
857 857 noise_estimation = None
858 858 GauSPC = None # Fit gaussian SPC
859 859
860 860 def __init__(self):
861 861 '''
862 862 Constructor
863 863 '''
864 864 self.radarControllerHeaderObj = RadarControllerHeader()
865 865 self.systemHeaderObj = SystemHeader()
866 866 self.type = "Parameters"
867 867 self.timeZone = 0
868 868
869 869 def getTimeRange1(self, interval):
870 870
871 871 datatime = []
872 872
873 873 if self.useLocalTime:
874 874 time1 = self.utctimeInit - self.timeZone * 60
875 875 else:
876 876 time1 = self.utctimeInit
877 877
878 878 datatime.append(time1)
879 879 datatime.append(time1 + interval)
880 880 datatime = numpy.array(datatime)
881 881
882 882 return datatime
883 883
884 884 @property
885 885 def timeInterval(self):
886 886
887 887 if hasattr(self, 'timeInterval1'):
888 888 return self.timeInterval1
889 889 else:
890 890 return self.paramInterval
891 891
892 892 def setValue(self, value):
893 893
894 894 print("This property should not be initialized")
895 895
896 896 return
897 897
898 898 def getNoise(self):
899 899
900 900 return self.spc_noise
901 901
902 902 noise = property(getNoise, setValue, doc="I'm the 'Noise' property.")
903 903
904 904
905 905 class PlotterData(object):
906 906 '''
907 907 Object to hold data to be plotted
908 908 '''
909 909
910 910 MAXNUMX = 200
911 911 MAXNUMY = 200
912 912
913 913 def __init__(self, code, exp_code, localtime=True):
914 914
915 915 self.key = code
916 916 self.exp_code = exp_code
917 917 self.ready = False
918 918 self.flagNoData = False
919 919 self.localtime = localtime
920 920 self.data = {}
921 921 self.meta = {}
922 922 self.__heights = []
923 923
924 924 def __str__(self):
925 925 dum = ['{}{}'.format(key, self.shape(key)) for key in self.data]
926 926 return 'Data[{}][{}]'.format(';'.join(dum), len(self.times))
927 927
928 928 def __len__(self):
929 929 return len(self.data)
930 930
931 931 def __getitem__(self, key):
932 932 if isinstance(key, int):
933 933 return self.data[self.times[key]]
934 934 elif isinstance(key, str):
935 935 ret = numpy.array([self.data[x][key] for x in self.times])
936 936 if ret.ndim > 1:
937 937 ret = numpy.swapaxes(ret, 0, 1)
938 938 return ret
939 939
940 940 def __contains__(self, key):
941 941 return key in self.data[self.min_time]
942 942
943 943 def setup(self):
944 944 '''
945 945 Configure object
946 946 '''
947 947 self.type = ''
948 948 self.ready = False
949 949 del self.data
950 950 self.data = {}
951 951 self.__heights = []
952 952 self.__all_heights = set()
953 953
954 954 def shape(self, key):
955 955 '''
956 956 Get the shape of the data stored for a single time under the given key
957 957 '''
958 958
959 959 if len(self.data[self.min_time][key]):
960 960 return self.data[self.min_time][key].shape
961 961 return (0,)
962 962
963 963 def update(self, data, tm, meta={}):
964 964 '''
965 965 Update data object with new dataOut
966 966 '''
967 967
968 968 self.data[tm] = data
969
969
970 970 for key, value in meta.items():
971 971 setattr(self, key, value)
972 972
973 973 def normalize_heights(self):
974 974 '''
975 975 Ensure the data has the same dimensions across different heightList values
976 976 '''
977 977
978 978 H = numpy.array(list(self.__all_heights))
979 979 H.sort()
980 980 for key in self.data:
981 981 shape = self.shape(key)[:-1] + H.shape
982 982 for tm, obj in list(self.data[key].items()):
983 983 h = self.__heights[self.times.tolist().index(tm)]
984 984 if H.size == h.size:
985 985 continue
986 986 index = numpy.where(numpy.in1d(H, h))[0]
987 987 dummy = numpy.zeros(shape) + numpy.nan
988 988 if len(shape) == 2:
989 989 dummy[:, index] = obj
990 990 else:
991 991 dummy[index] = obj
992 992 self.data[key][tm] = dummy
993 993
994 994 self.__heights = [H for tm in self.times]
995 995
996 996 def jsonify(self, tm, plot_name, plot_type, decimate=False):
997 997 '''
998 998 Convert data to json
999 999 '''
1000 1000
1001 1001 meta = {}
1002 1002 meta['xrange'] = []
1003 1003 dy = int(len(self.yrange)/self.MAXNUMY) + 1
1004 1004 tmp = self.data[tm][self.key]
1005 1005 shape = tmp.shape
1006 1006 if len(shape) == 2:
1007 1007 data = self.roundFloats(self.data[tm][self.key][::, ::dy].tolist())
1008 1008 elif len(shape) == 3:
1009 1009 dx = int(self.data[tm][self.key].shape[1]/self.MAXNUMX) + 1
1010 1010 data = self.roundFloats(
1011 1011 self.data[tm][self.key][::, ::dx, ::dy].tolist())
1012 1012 meta['xrange'] = self.roundFloats(self.xrange[2][::dx].tolist())
1013 1013 else:
1014 1014 data = self.roundFloats(self.data[tm][self.key].tolist())
1015
1015
1016 1016 ret = {
1017 1017 'plot': plot_name,
1018 1018 'code': self.exp_code,
1019 1019 'time': float(tm),
1020 1020 'data': data,
1021 1021 }
1022 1022 meta['type'] = plot_type
1023 1023 meta['interval'] = float(self.interval)
1024 1024 meta['localtime'] = self.localtime
1025 1025 meta['yrange'] = self.roundFloats(self.yrange[::dy].tolist())
1026 1026 meta.update(self.meta)
1027 1027 ret['metadata'] = meta
1028 1028 return json.dumps(ret)
1029 1029
1030 1030 @property
1031 1031 def times(self):
1032 1032 '''
1033 1033 Return the list of times of the current data
1034 1034 '''
1035 1035
1036 1036 ret = [t for t in self.data]
1037 1037 ret.sort()
1038 1038 return numpy.array(ret)
1039 1039
1040 1040 @property
1041 1041 def min_time(self):
1042 1042 '''
1043 1043 Return the minimum time value
1044 1044 '''
1045 1045
1046 1046 return self.times[0]
1047 1047
1048 1048 @property
1049 1049 def max_time(self):
1050 1050 '''
1051 1051 Return the maximum time value
1052 1052 '''
1053 1053
1054 1054 return self.times[-1]
1055 1055
1056 1056 # @property
1057 1057 # def heights(self):
1058 1058 # '''
1059 1059 # Return the list of heights of the current data
1060 1060 # '''
1061 1061
1062 1062 # return numpy.array(self.__heights[-1])
1063 1063
1064 1064 @staticmethod
1065 1065 def roundFloats(obj):
1066 1066 if isinstance(obj, list):
1067 1067 return list(map(PlotterData.roundFloats, obj))
1068 1068 elif isinstance(obj, float):
1069 1069 return round(obj, 2)
@@ -1,693 +1,704
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Base class to create plot operations
6 6
7 7 """
8 8
9 9 import os
10 10 import sys
11 11 import zmq
12 12 import time
13 13 import numpy
14 14 import datetime
15 15 from collections import deque
16 16 from functools import wraps
17 17 from threading import Thread
18 18 import matplotlib
19 19
20 20 if 'BACKEND' in os.environ:
21 21 matplotlib.use(os.environ['BACKEND'])
22 22 elif 'linux' in sys.platform:
23 23 matplotlib.use("TkAgg")
24 24 elif 'darwin' in sys.platform:
25 25 matplotlib.use('MacOSX')
26 26 else:
27 27 from schainpy.utils import log
28 28 log.warning('Using default Backend="Agg"', 'INFO')
29 29 matplotlib.use('Agg')
30 30
31 31 import matplotlib.pyplot as plt
32 32 from matplotlib.patches import Polygon
33 33 from mpl_toolkits.axes_grid1 import make_axes_locatable
34 34 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
35 35
36 36 from schainpy.model.data.jrodata import PlotterData
37 37 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
38 38 from schainpy.utils import log
39 39
40 40 jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
41 41 blu_values = matplotlib.pyplot.get_cmap(
42 42 'seismic_r', 20)(numpy.arange(20))[10:15]
43 43 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
44 44 'jro', numpy.vstack((blu_values, jet_values)))
45 45 matplotlib.pyplot.register_cmap(cmap=ncmap)
46 46
47 47 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'viridis',
48 48 'plasma', 'inferno', 'Greys', 'seismic', 'bwr', 'coolwarm')]
49 49
50 50 EARTH_RADIUS = 6.3710e3
51 51
52 52 def ll2xy(lat1, lon1, lat2, lon2):
53 53
54 54 p = 0.017453292519943295
55 55 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
56 56 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
57 57 r = 12742 * numpy.arcsin(numpy.sqrt(a))
58 58 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
59 59 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
60 60 theta = -theta + numpy.pi/2
61 61 return r*numpy.cos(theta), r*numpy.sin(theta)
62 62
63 63
64 64 def km2deg(km):
65 65 '''
66 66 Convert distance in km to degrees
67 67 '''
68 68
69 69 return numpy.rad2deg(km/EARTH_RADIUS)
70 70
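ll2xy converts a target latitude/longitude into planar x/y offsets in km from a reference point (great-circle distance plus bearing), and km2deg maps a distance in km back to degrees of arc. A quick usage sketch with made-up coordinates:

# reference point and target point (coordinates are purely illustrative)
lat0, lon0 = -11.95, -76.87
lat1, lon1 = -11.90, -76.80

x_km, y_km = ll2xy(lat0, lon0, lat1, lon1)  # planar offsets in km from the reference
print(x_km, y_km)
print(km2deg(100.0))                        # ~0.90 degrees of arc for 100 km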
71 71
72 72 def figpause(interval):
73 73 backend = plt.rcParams['backend']
74 74 if backend in matplotlib.rcsetup.interactive_bk:
75 75 figManager = matplotlib._pylab_helpers.Gcf.get_active()
76 76 if figManager is not None:
77 77 canvas = figManager.canvas
78 78 if canvas.figure.stale:
79 79 canvas.draw()
80 80 try:
81 81 canvas.start_event_loop(interval)
82 82 except:
83 83 pass
84 84 return
85 85
86 86 def popup(message):
87 87 '''
88 88 '''
89 89
90 90 fig = plt.figure(figsize=(12, 8), facecolor='r')
91 91 text = '\n'.join([s.strip() for s in message.split(':')])
92 92 fig.text(0.01, 0.5, text, ha='left', va='center',
93 93 size='20', weight='heavy', color='w')
94 94 fig.show()
95 95 figpause(1000)
96 96
97 97
98 98 class Throttle(object):
99 99 '''
100 100 Decorator that prevents a function from being called more than once every
101 101 time period.
102 102 To create a function that cannot be called more than once a minute (extra
103 103 calls inside the period are silently skipped):
104 104 @Throttle(minutes=1)
105 105 def foo():
106 106 pass
107 107
108 108 for i in range(10):
109 109 foo()
110 110 print("This function has run %s times." % i)
111 111 '''
112 112
113 113 def __init__(self, seconds=0, minutes=0, hours=0):
114 114 self.throttle_period = datetime.timedelta(
115 115 seconds=seconds, minutes=minutes, hours=hours
116 116 )
117 117
118 118 self.time_of_last_call = datetime.datetime.min
119 119
120 120 def __call__(self, fn):
121 121 @wraps(fn)
122 122 def wrapper(*args, **kwargs):
123 123 coerce = kwargs.pop('coerce', None)
124 124 if coerce:
125 125 self.time_of_last_call = datetime.datetime.now()
126 126 return fn(*args, **kwargs)
127 127 else:
128 128 now = datetime.datetime.now()
129 129 time_since_last_call = now - self.time_of_last_call
130 130 time_left = self.throttle_period - time_since_last_call
131 131
132 132 if time_left > datetime.timedelta(seconds=0):
133 133 return
134 134
135 135 self.time_of_last_call = datetime.datetime.now()
136 136 return fn(*args, **kwargs)
137 137
138 138 return wrapper
139 139
140 140 def apply_throttle(value):
141 141
142 142 @Throttle(seconds=value)
143 143 def fnThrottled(fn):
144 144 fn()
145 145
146 146 return fnThrottled
147 147
148 148
149 149 @MPDecorator
150 150 class Plot(Operation):
151 151 """Base class for Schain plotting operations
152 152
153 153 This class should never be used directly; you must subclass it to create a
154 154 new operation. Child classes must be defined as follows:
155 155
156 156 class ExamplePlot(Plot):
157 157
158 158 CODE = 'code'
159 159 colormap = 'jet'
160 160 plot_type = 'pcolor' # options are ('pcolor', 'pcolorbuffer', 'scatter', 'scatterbuffer')
161 161
162 162 def setup(self):
163 163 pass
164 164
165 165 def plot(self):
166 166 pass
167 167
168 168 """
169 169
170 170 CODE = 'Figure'
171 171 colormap = 'jet'
172 172 bgcolor = 'white'
173 173 buffering = True
174 174 __missing = 1E30
175 175
176 176 __attrs__ = ['show', 'save', 'ymin', 'ymax', 'zmin', 'zmax', 'title',
177 177 'showprofile']
178 178
179 179 def __init__(self):
180 180
181 181 Operation.__init__(self)
182 182 self.isConfig = False
183 183 self.isPlotConfig = False
184 184 self.save_time = 0
185 185 self.sender_time = 0
186 186 self.data = None
187 187 self.firsttime = True
188 188 self.sender_queue = deque(maxlen=10)
189 189 self.plots_adjust = {'left': 0.125, 'right': 0.9, 'bottom': 0.15, 'top': 0.9, 'wspace': 0.2, 'hspace': 0.2}
190 190
191 191 def __fmtTime(self, x, pos):
192 192 '''
193 193 '''
194 194
195 195 return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
196 196
197 197 def __setup(self, **kwargs):
198 198 '''
199 199 Initialize variables
200 200 '''
201 201
202 202 self.figures = []
203 203 self.axes = []
204 204 self.cb_axes = []
205 205 self.localtime = kwargs.pop('localtime', True)
206 206 self.show = kwargs.get('show', True)
207 207 self.save = kwargs.get('save', False)
208 208 self.save_period = kwargs.get('save_period', 0)
209 209 self.colormap = kwargs.get('colormap', self.colormap)
210 210 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
211 211 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
212 212 self.colormaps = kwargs.get('colormaps', None)
213 213 self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
214 214 self.showprofile = kwargs.get('showprofile', False)
215 215 self.title = kwargs.get('wintitle', self.CODE.upper())
216 216 self.cb_label = kwargs.get('cb_label', None)
217 217 self.cb_labels = kwargs.get('cb_labels', None)
218 218 self.labels = kwargs.get('labels', None)
219 219 self.xaxis = kwargs.get('xaxis', 'frequency')
220 220 self.zmin = kwargs.get('zmin', None)
221 221 self.zmax = kwargs.get('zmax', None)
222 222 self.zlimits = kwargs.get('zlimits', None)
223 223 self.xmin = kwargs.get('xmin', None)
224 224 self.xmax = kwargs.get('xmax', None)
225 225 self.xrange = kwargs.get('xrange', 12)
226 226 self.xscale = kwargs.get('xscale', None)
227 227 self.ymin = kwargs.get('ymin', None)
228 228 self.ymax = kwargs.get('ymax', None)
229 229 self.yscale = kwargs.get('yscale', None)
230 230 self.xlabel = kwargs.get('xlabel', None)
231 231 self.attr_time = kwargs.get('attr_time', 'utctime')
232 232 self.attr_data = kwargs.get('attr_data', 'data_param')
233 233 self.decimation = kwargs.get('decimation', None)
234 234 self.oneFigure = kwargs.get('oneFigure', True)
235 235 self.width = kwargs.get('width', None)
236 236 self.height = kwargs.get('height', None)
237 237 self.colorbar = kwargs.get('colorbar', True)
238 238 self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
239 239 self.channels = kwargs.get('channels', None)
240 240 self.titles = kwargs.get('titles', [])
241 241 self.polar = False
242 242 self.type = kwargs.get('type', 'iq')
243 243 self.grid = kwargs.get('grid', False)
244 244 self.pause = kwargs.get('pause', False)
245 245 self.save_code = kwargs.get('save_code', self.CODE)
246 246 self.throttle = kwargs.get('throttle', 0)
247 247 self.exp_code = kwargs.get('exp_code', None)
248 248 self.server = kwargs.get('server', False)
249 249 self.sender_period = kwargs.get('sender_period', 60)
250 250 self.tag = kwargs.get('tag', '')
251 251 self.height_index = kwargs.get('height_index', None)
252 252 self.__throttle_plot = apply_throttle(self.throttle)
253 253 code = self.attr_data if self.attr_data else self.CODE
254 254 self.data = PlotterData(self.CODE, self.exp_code, self.localtime)
255
255
256 256 if self.server:
257 257 if not self.server.startswith('tcp://'):
258 258 self.server = 'tcp://{}'.format(self.server)
259 259 log.success(
260 260 'Sending to server: {}'.format(self.server),
261 261 self.name
262 262 )
263 263
264 264 if isinstance(self.attr_data, str):
265 265 self.attr_data = [self.attr_data]
266 266
267 267 def __setup_plot(self):
268 268 '''
269 269 Common setup for all figures, here figures and axes are created
270 270 '''
271 271
272 272 self.setup()
273 273
274 self.time_label = 'LT' if self.localtime else 'UTC'
274 self.time_label = 'LT' if self.localtime else 'UTC'
275 275
276 276 if self.width is None:
277 277 self.width = 8
278 278
279 279 self.figures = []
280 280 self.axes = []
281 281 self.cb_axes = []
282 282 self.pf_axes = []
283 283 self.cmaps = []
284 284
285 285 size = '15%' if self.ncols == 1 else '30%'
286 286 pad = '4%' if self.ncols == 1 else '8%'
287 287
288 288 if self.oneFigure:
289 289 if self.height is None:
290 290 self.height = 1.4 * self.nrows + 1
291 291 fig = plt.figure(figsize=(self.width, self.height),
292 292 edgecolor='k',
293 293 facecolor='w')
294 294 self.figures.append(fig)
295 295 for n in range(self.nplots):
296 296 ax = fig.add_subplot(self.nrows, self.ncols,
297 297 n + 1, polar=self.polar)
298 298 ax.tick_params(labelsize=8)
299 299 ax.firsttime = True
300 300 ax.index = 0
301 301 ax.press = None
302 302 self.axes.append(ax)
303 303 if self.showprofile:
304 304 cax = self.__add_axes(ax, size=size, pad=pad)
305 305 cax.tick_params(labelsize=8)
306 306 self.pf_axes.append(cax)
307 307 else:
308 308 if self.height is None:
309 309 self.height = 3
310 310 for n in range(self.nplots):
311 311 fig = plt.figure(figsize=(self.width, self.height),
312 312 edgecolor='k',
313 313 facecolor='w')
314 314 ax = fig.add_subplot(1, 1, 1, polar=self.polar)
315 315 ax.tick_params(labelsize=8)
316 316 ax.firsttime = True
317 317 ax.index = 0
318 318 ax.press = None
319 319 self.figures.append(fig)
320 320 self.axes.append(ax)
321 321 if self.showprofile:
322 322 cax = self.__add_axes(ax, size=size, pad=pad)
323 323 cax.tick_params(labelsize=8)
324 324 self.pf_axes.append(cax)
325 325
326 326 for n in range(self.nrows):
327 327 if self.colormaps is not None:
328 328 cmap = plt.get_cmap(self.colormaps[n])
329 329 else:
330 330 cmap = plt.get_cmap(self.colormap)
331 331 cmap.set_bad(self.bgcolor, 1.)
332 332 self.cmaps.append(cmap)
333 333
334 334 def __add_axes(self, ax, size='30%', pad='8%'):
335 335 '''
336 336 Add new axes to the given figure
337 337 '''
338 338 divider = make_axes_locatable(ax)
339 339 nax = divider.new_horizontal(size=size, pad=pad)
340 340 ax.figure.add_axes(nax)
341 341 return nax
342 342
343 343 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
344 344 '''
345 345 Create a masked array for missing data
346 346 '''
347 347 if x_buffer.shape[0] < 2:
348 348 return x_buffer, y_buffer, z_buffer
349 349
350 350 deltas = x_buffer[1:] - x_buffer[0:-1]
351 351 x_median = numpy.median(deltas)
352 352
353 353 index = numpy.where(deltas > 5 * x_median)
354 354
355 355 if len(index[0]) != 0:
356 356 z_buffer[::, index[0], ::] = self.__missing
357 357 z_buffer = numpy.ma.masked_inside(z_buffer,
358 358 0.99 * self.__missing,
359 359 1.01 * self.__missing)
360 360
361 361 return x_buffer, y_buffer, z_buffer
362 362
363 363 def decimate(self):
364 364
365 365 # dx = int(len(self.x)/self.__MAXNUMX) + 1
366 366 dy = int(len(self.y) / self.decimation) + 1
367 367
368 368 # x = self.x[::dx]
369 369 x = self.x
370 370 y = self.y[::dy]
371 371 z = self.z[::, ::, ::dy]
372 372
373 373 return x, y, z
374 374
375 375 def format(self):
376 376 '''
377 377 Set min and max values, labels, ticks and titles
378 378 '''
379
379
380 380 for n, ax in enumerate(self.axes):
381 381 if ax.firsttime:
382 382 if self.xaxis != 'time':
383 383 xmin = self.xmin
384 384 xmax = self.xmax
385 385 else:
386 386 xmin = self.tmin
387 387 xmax = self.tmin + self.xrange*60*60
388 388 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
389 389 ax.xaxis.set_major_locator(LinearLocator(9))
390 390 ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
391 391 ymax = self.ymax if self.ymax is not None else numpy.nanmax(self.y[numpy.isfinite(self.y)])
392 392 ax.set_facecolor(self.bgcolor)
393 393 if self.xscale:
394 394 ax.xaxis.set_major_formatter(FuncFormatter(
395 395 lambda x, pos: '{0:g}'.format(x*self.xscale)))
396 396 if self.yscale:
397 397 ax.yaxis.set_major_formatter(FuncFormatter(
398 398 lambda x, pos: '{0:g}'.format(x*self.yscale)))
399 399 if self.xlabel is not None:
400 400 ax.set_xlabel(self.xlabel)
401 401 if self.ylabel is not None:
402 402 ax.set_ylabel(self.ylabel)
403 403 if self.showprofile:
404 404 self.pf_axes[n].set_ylim(ymin, ymax)
405 405 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
406 406 self.pf_axes[n].set_xlabel('dB')
407 407 self.pf_axes[n].grid(b=True, axis='x')
408 408 [tick.set_visible(False)
409 409 for tick in self.pf_axes[n].get_yticklabels()]
410 410 if self.colorbar:
411 411 ax.cbar = plt.colorbar(
412 412 ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
413 413 ax.cbar.ax.tick_params(labelsize=8)
414 414 ax.cbar.ax.press = None
415 415 if self.cb_label:
416 416 ax.cbar.set_label(self.cb_label, size=8)
417 417 elif self.cb_labels:
418 418 ax.cbar.set_label(self.cb_labels[n], size=8)
419 419 else:
420 420 ax.cbar = None
421 421 ax.set_xlim(xmin, xmax)
422 422 ax.set_ylim(ymin, ymax)
423 423 ax.firsttime = False
424 424 if self.grid:
425 425 ax.grid(True)
426 426 if not self.polar:
427 427 ax.set_title('{} {} {}'.format(
428 428 self.titles[n],
429 429 self.getDateTime(self.data.max_time).strftime(
430 430 '%Y-%m-%d %H:%M:%S'),
431 431 self.time_label),
432 432 size=8)
433 433 else:
434 434 ax.set_title('{}'.format(self.titles[n]), size=8)
435 435 ax.set_ylim(0, 90)
436 436 ax.set_yticks(numpy.arange(0, 90, 20))
437 437 ax.yaxis.labelpad = 40
438 438
439 439 if self.firsttime:
440 440 for n, fig in enumerate(self.figures):
441 441 fig.subplots_adjust(**self.plots_adjust)
442 442 self.firsttime = False
443 443
444 444 def clear_figures(self):
445 445 '''
446 446 Reset axes for redraw plots
447 447 '''
448 448
449 449 for ax in self.axes+self.pf_axes+self.cb_axes:
450 450 ax.clear()
451 451 ax.firsttime = True
452 452 if hasattr(ax, 'cbar') and ax.cbar:
453 453 ax.cbar.remove()
454 454
455 455 def __plot(self):
456 456 '''
457 457 Main function to plot, format and save figures
458 458 '''
459 459
460 460 self.plot()
461 461 self.format()
462
462
463 463 for n, fig in enumerate(self.figures):
464 464 if self.nrows == 0 or self.nplots == 0:
465 465 log.warning('No data', self.name)
466 466 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
467 467 fig.canvas.manager.set_window_title(self.CODE)
468 468 continue
469
469
470 470 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
471 471 self.getDateTime(self.data.max_time).strftime('%Y/%m/%d')))
472 472 fig.canvas.draw()
473 473 if self.show:
474 474 fig.show()
475 475 figpause(0.01)
476 476
477 477 if self.save:
478 478 self.save_figure(n)
479
479
480 480 if self.server:
481 481 self.send_to_server()
482 482
483 483 def __update(self, dataOut, timestamp):
484 484 '''
485 485 '''
486 486
487 487 metadata = {
488 488 'yrange': dataOut.heightList,
489 489 'interval': dataOut.timeInterval,
490 490 'channels': dataOut.channelList
491 491 }
492
492
493 493 data, meta = self.update(dataOut)
494 494 metadata.update(meta)
495 495 self.data.update(data, timestamp, metadata)
496
496
497 497 def save_figure(self, n):
498 498 '''
499 499 '''
500
501 if (self.data.max_time - self.save_time) <= self.save_period:
502 return
500 if self.oneFigure:
501 if (self.data.max_time - self.save_time) <= self.save_period:
502 return
503 503
504 504 self.save_time = self.data.max_time
505 505
506 506 fig = self.figures[n]
507
508 507 if self.throttle == 0:
509 figname = os.path.join(
510 self.save,
511 self.save_code,
512 '{}_{}.png'.format(
508 if self.oneFigure:
509 figname = os.path.join(
510 self.save,
513 511 self.save_code,
514 self.getDateTime(self.data.max_time).strftime(
515 '%Y%m%d_%H%M%S'
516 ),
512 '{}_{}.png'.format(
513 self.save_code,
514 self.getDateTime(self.data.max_time).strftime(
515 '%Y%m%d_%H%M%S'
516 ),
517 )
518 )
519 else:
520 figname = os.path.join(
521 self.save,
522 self.save_code,
523 '{}_ch{}_{}.png'.format(
524 self.save_code,n,
525 self.getDateTime(self.data.max_time).strftime(
526 '%Y%m%d_%H%M%S'
527 ),
528 )
517 529 )
518 )
519 530 log.log('Saving figure: {}'.format(figname), self.name)
520 531 if not os.path.isdir(os.path.dirname(figname)):
521 532 os.makedirs(os.path.dirname(figname))
522 533 fig.savefig(figname)
523 534
524 535 figname = os.path.join(
525 536 self.save,
526 537 '{}_{}.png'.format(
527 538 self.save_code,
528 539 self.getDateTime(self.data.min_time).strftime(
529 540 '%Y%m%d'
530 541 ),
531 542 )
532 543 )
544
533 545 log.log('Saving figure: {}'.format(figname), self.name)
534 546 if not os.path.isdir(os.path.dirname(figname)):
535 547 os.makedirs(os.path.dirname(figname))
536 548 fig.savefig(figname)
537 549
538 550 def send_to_server(self):
539 551 '''
540 552 '''
541 553
542 554 if self.exp_code == None:
543 555 log.warning('Missing `exp_code`, skipping sending to server...')
544
556
545 557 last_time = self.data.max_time
546 558 interval = last_time - self.sender_time
547 559 if interval < self.sender_period:
548 560 return
549 561
550 562 self.sender_time = last_time
551
563
552 564 attrs = ['titles', 'zmin', 'zmax', 'tag', 'ymin', 'ymax']
553 565 for attr in attrs:
554 566 value = getattr(self, attr)
555 567 if value:
556 568 if isinstance(value, (numpy.float32, numpy.float64)):
557 569 value = round(float(value), 2)
558 570 self.data.meta[attr] = value
559 571 if self.colormap == 'jet':
560 572 self.data.meta['colormap'] = 'Jet'
561 573 elif 'RdBu' in self.colormap:
562 574 self.data.meta['colormap'] = 'RdBu'
563 575 else:
564 576 self.data.meta['colormap'] = 'Viridis'
565 577 self.data.meta['interval'] = int(interval)
566 578
567 579 self.sender_queue.append(last_time)
568
580
569 581 while True:
570 582 try:
571 583 tm = self.sender_queue.popleft()
572 584 except IndexError:
573 585 break
574 586 msg = self.data.jsonify(tm, self.save_code, self.plot_type)
575 587 self.socket.send_string(msg)
576 588 socks = dict(self.poll.poll(2000))
577 589 if socks.get(self.socket) == zmq.POLLIN:
578 590 reply = self.socket.recv_string()
579 591 if reply == 'ok':
580 592 log.log("Response from server ok", self.name)
581 593 time.sleep(0.1)
582 594 continue
583 595 else:
584 596 log.warning(
585 597 "Malformed reply from server: {}".format(reply), self.name)
586 598 else:
587 599 log.warning(
588 600 "No response from server, retrying...", self.name)
589 601 self.sender_queue.appendleft(tm)
590 602 self.socket.setsockopt(zmq.LINGER, 0)
591 603 self.socket.close()
592 604 self.poll.unregister(self.socket)
593 605 self.socket = self.context.socket(zmq.REQ)
594 606 self.socket.connect(self.server)
595 607 self.poll.register(self.socket, zmq.POLLIN)
596 608 break
597 609
598 610 def setup(self):
599 611 '''
600 612 This method should be implemented in the child class, the following
601 613 attributes should be set:
602 614
603 615 self.nrows: number of rows
604 616 self.ncols: number of cols
605 617 self.nplots: number of plots (channels or pairs)
606 618 self.ylabel: label for Y axes
607 self.titles: list of axes title
619 self.titles: list of axes title
608 620
609 621 '''
610 622 raise NotImplementedError
611 623
612 624 def plot(self):
613 625 '''
614 626 Must be defined in the child class, the actual plotting method
615 627 '''
616 628 raise NotImplementedError
617 629
618 630 def update(self, dataOut):
619 631 '''
620 632 Must be defined in the child class, update self.data with new data
621 633 '''
622
634
623 635 data = {
624 636 self.CODE: getattr(dataOut, 'data_{}'.format(self.CODE))
625 637 }
626 638 meta = {}
627 639
628 640 return data, meta
629
641
630 642 def run(self, dataOut, **kwargs):
631 643 '''
632 644 Main plotting routine
633 645 '''
634 646
635 647 if self.isConfig is False:
636 648 self.__setup(**kwargs)
637 649
638 650 if self.localtime:
639 651 self.getDateTime = datetime.datetime.fromtimestamp
640 652 else:
641 653 self.getDateTime = datetime.datetime.utcfromtimestamp
642 654
643 655 self.data.setup()
644 656 self.isConfig = True
645 657 if self.server:
646 658 self.context = zmq.Context()
647 659 self.socket = self.context.socket(zmq.REQ)
648 660 self.socket.connect(self.server)
649 661 self.poll = zmq.Poller()
650 662 self.poll.register(self.socket, zmq.POLLIN)
651 663
652 664 tm = getattr(dataOut, self.attr_time)
653
665
654 666 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
655 667 self.save_time = tm
656 668 self.__plot()
657 669 self.tmin += self.xrange*60*60
658 670 self.data.setup()
659 671 self.clear_figures()
660 672
661 673 self.__update(dataOut, tm)
662 674
663 675 if self.isPlotConfig is False:
664 676 self.__setup_plot()
665 677 self.isPlotConfig = True
666 678 if self.xaxis == 'time':
667 679 dt = self.getDateTime(tm)
668 680 if self.xmin is None:
669 681 self.tmin = tm
670 self.xmin = dt.hour
682 self.xmin = dt.hour
671 683 minutes = (self.xmin-int(self.xmin)) * 60
672 684 seconds = (minutes - int(minutes)) * 60
673 685 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
674 686 datetime.datetime(1970, 1, 1)).total_seconds()
675 687 if self.localtime:
676 688 self.tmin += time.timezone
677 689
678 690 if self.xmin is not None and self.xmax is not None:
679 691 self.xrange = self.xmax - self.xmin
680 692
681 693 if self.throttle == 0:
682 694 self.__plot()
683 695 else:
684 696 self.__throttle_plot(self.__plot)#, coerce=coerce)
685 697
686 698 def close(self):
687 699
688 700 if self.data and not self.data.flagNoData:
689 701 self.save_time = 0
690 702 self.__plot()
691 703 if self.data and not self.data.flagNoData and self.pause:
692 704 figpause(10)
693
@@ -1,101 +1,103
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Classes to plo Specra Heis data
6 6
7 7 """
8 8
9 9 import numpy
10 10
11 11 from schainpy.model.graphics.jroplot_base import Plot, plt
12 12
13 13
14 14 class SpectraHeisPlot(Plot):
15 15
16 16 CODE = 'spc_heis'
17 17
18 18 def setup(self):
19 19
20 20 self.nplots = len(self.data.channels)
21 21 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
22 22 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
23 23 self.height = 2.6 * self.nrows
24 24 self.width = 3.5 * self.ncols
25 25 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.95, 'bottom': 0.08})
26 26 self.ylabel = 'Intensity [dB]'
27 27 self.xlabel = 'Frequency [KHz]'
28 28 self.colorbar = False
29 29
30 30 def update(self, dataOut):
31 31
32 32 data = {}
33 33 meta = {}
34 34 spc = 10*numpy.log10(dataOut.data_spc / dataOut.normFactor)
35 35 data['spc_heis'] = spc
36
37 return data, meta
36
37 return data, meta
38 38
39 39 def plot(self):
40 40
41 41 c = 3E8
42 42 deltaHeight = self.data.yrange[1] - self.data.yrange[0]
43 43 x = numpy.arange(-1*len(self.data.yrange)/2., len(self.data.yrange)/2.)*(c/(2*deltaHeight*len(self.data.yrange)*1000))
44 #x = (1/1000.0)*numpy.arange(-1*len(self.data.yrange)/2., len(self.data.yrange)/2.)*(c/(2*deltaHeight*len(self.data.yrange)*1000))
45
44 46 self.y = self.data[-1]['spc_heis']
45 47 self.titles = []
46 48
47 49 for n, ax in enumerate(self.axes):
48 50 ychannel = self.y[n,:]
49 51 if ax.firsttime:
50 52 self.xmin = min(x) if self.xmin is None else self.xmin
51 53 self.xmax = max(x) if self.xmax is None else self.xmax
52 54 ax.plt = ax.plot(x, ychannel, lw=1, color='b')[0]
53 55 else:
54 56 ax.plt.set_data(x, ychannel)
55 57
56 58 self.titles.append("Channel {}: {:4.2f}dB".format(n, numpy.max(ychannel)))
57 59
58 60
59 61 class RTIHeisPlot(Plot):
60 62
61 63 CODE = 'rti_heis'
62 64
63 65 def setup(self):
64 66
65 67 self.xaxis = 'time'
66 68 self.ncols = 1
67 69 self.nrows = 1
68 70 self.nplots = 1
69 71 self.ylabel = 'Intensity [dB]'
70 72 self.xlabel = 'Time'
71 73 self.titles = ['RTI']
72 74 self.colorbar = False
73 75 self.height = 4
74 76 self.plots_adjust.update({'right': 0.85 })
75 77
76 78 def update(self, dataOut):
77 79
78 80 data = {}
79 81 meta = {}
80 82 spc = dataOut.data_spc / dataOut.normFactor
81 83 spc = 10*numpy.log10(numpy.average(spc, axis=1))
82 84 data['rti_heis'] = spc
83
84 return data, meta
85
86 return data, meta
85 87
86 88 def plot(self):
87 89
88 90 x = self.data.times
89 91 Y = self.data['rti_heis']
90 92
91 93 if self.axes[0].firsttime:
92 94 self.ymin = numpy.nanmin(Y) - 5 if self.ymin == None else self.ymin
93 95 self.ymax = numpy.nanmax(Y) + 5 if self.ymax == None else self.ymax
94 96 for ch in self.data.channels:
95 97 y = Y[ch]
96 98 self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
97 99 plt.legend(bbox_to_anchor=(1.18, 1.0))
98 100 else:
99 101 for ch in self.data.channels:
100 102 y = Y[ch]
101 103 self.axes[0].lines[ch].set_data(x, y)
@@ -1,370 +1,585
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot, SpectraCutPlot
7 7 from schainpy.utils import log
8 # wradlib library
9 import wradlib as wrl
8 10
9 11 EARTH_RADIUS = 6.3710e3
10 12
11 13
12 14 def ll2xy(lat1, lon1, lat2, lon2):
13 15
14 16 p = 0.017453292519943295
15 17 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
16 18 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
17 19 r = 12742 * numpy.arcsin(numpy.sqrt(a))
18 20 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
19 21 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
20 22 theta = -theta + numpy.pi/2
21 23 return r*numpy.cos(theta), r*numpy.sin(theta)
22 24
23 25
24 26 def km2deg(km):
25 27 '''
26 28 Convert distance in km to degrees
27 29 '''
28 30
29 31 return numpy.rad2deg(km/EARTH_RADIUS)
30 32
31 33
32 34
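# Editor's sketch (not part of the diff; the coordinates below are assumed values):
# ll2xy() returns the east/north offset in km between two lat/lon points and
# km2deg() converts a distance in km to degrees of arc on the Earth's surface.
def _example_ll2xy_km2deg():
    x_km, y_km = ll2xy(-11.95, -76.87, -11.90, -76.80)   # hypothetical site vs. target near Lima
    return x_km, y_km, km2deg(30.0)                      # km2deg(30) is roughly 0.27 deg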
33 35 class SpectralMomentsPlot(SpectraPlot):
34 36 '''
35 37 Plot for Spectral Moments
36 38 '''
37 39 CODE = 'spc_moments'
38 40 # colormap = 'jet'
39 41 # plot_type = 'pcolor'
40 42
41 43 class DobleGaussianPlot(SpectraPlot):
42 44 '''
43 45 Plot for Double Gaussian Plot
44 46 '''
45 47 CODE = 'gaussian_fit'
46 48 # colormap = 'jet'
47 49 # plot_type = 'pcolor'
48 50
49 51 class DoubleGaussianSpectraCutPlot(SpectraCutPlot):
50 52 '''
51 53 Plot SpectraCut with Double Gaussian Fit
52 54 '''
53 55 CODE = 'cut_gaussian_fit'
54 56
55 57 class SnrPlot(RTIPlot):
56 58 '''
57 59 Plot for SNR Data
58 60 '''
59 61
60 62 CODE = 'snr'
61 63 colormap = 'jet'
62 64
63 65 def update(self, dataOut):
64 66
65 67 data = {
66 'snr': 10*numpy.log10(dataOut.data_snr)
68 'snr': 10*numpy.log10(dataOut.data_snr)
67 69 }
68 70
69 71 return data, {}
70 72
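# Editor's note (assumed values, not part of the diff): SNR is displayed in dB,
# i.e. 10*log10 of the linear signal-to-noise ratio provided by the processing unit.
def _example_snr_db():
    snr_linear = numpy.array([1.0, 10.0, 100.0])
    return 10 * numpy.log10(snr_linear)   # -> [ 0., 10., 20.] dB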
71 73 class DopplerPlot(RTIPlot):
72 74 '''
73 75 Plot for DOPPLER Data (1st moment)
74 76 '''
75 77
76 78 CODE = 'dop'
77 79 colormap = 'jet'
78 80
79 81 def update(self, dataOut):
80 82
81 83 data = {
82 'dop': 10*numpy.log10(dataOut.data_dop)
84 'dop': 10*numpy.log10(dataOut.data_dop)
83 85 }
84 86
85 87 return data, {}
86 88
87 89 class PowerPlot(RTIPlot):
88 90 '''
89 91 Plot for Power Data (0 moment)
90 92 '''
91 93
92 94 CODE = 'pow'
93 95 colormap = 'jet'
94 96
95 97 def update(self, dataOut):
96 98
97 99 data = {
98 'pow': 10*numpy.log10(dataOut.data_pow/dataOut.normFactor)
100 'pow': 10*numpy.log10(dataOut.data_pow/dataOut.normFactor)
99 101 }
100 102
101 103 return data, {}
102 104
103 105 class SpectralWidthPlot(RTIPlot):
104 106 '''
105 107 Plot for Spectral Width Data (2nd moment)
106 108 '''
107 109
108 110 CODE = 'width'
109 111 colormap = 'jet'
110 112
111 113 def update(self, dataOut):
112 114
113 115 data = {
114 116 'width': dataOut.data_width
115 117 }
116 118
117 119 return data, {}
118 120
119 121 class SkyMapPlot(Plot):
120 122 '''
121 123 Plot for meteors detection data
122 124 '''
123 125
124 126 CODE = 'param'
125 127
126 128 def setup(self):
127 129
128 130 self.ncols = 1
129 131 self.nrows = 1
130 132 self.width = 7.2
131 133 self.height = 7.2
132 134 self.nplots = 1
133 135 self.xlabel = 'Zonal Zenith Angle (deg)'
134 136 self.ylabel = 'Meridional Zenith Angle (deg)'
135 137 self.polar = True
136 138 self.ymin = -180
137 139 self.ymax = 180
138 140 self.colorbar = False
139 141
140 142 def plot(self):
141 143
142 144 arrayParameters = numpy.concatenate(self.data['param'])
143 145 error = arrayParameters[:, -1]
144 146 indValid = numpy.where(error == 0)[0]
145 147 finalMeteor = arrayParameters[indValid, :]
146 148 finalAzimuth = finalMeteor[:, 3]
147 149 finalZenith = finalMeteor[:, 4]
148 150
149 151 x = finalAzimuth * numpy.pi / 180
150 152 y = finalZenith
151 153
152 154 ax = self.axes[0]
153 155
154 156 if ax.firsttime:
155 157 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
156 158 else:
157 159 ax.plot.set_data(x, y)
158 160
159 161 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
160 162 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
161 163 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
162 164 dt2,
163 165 len(x))
164 166 self.titles[0] = title
165 167
166 168
167 169 class GenericRTIPlot(Plot):
168 170 '''
169 171 Plot for data_xxxx object
170 172 '''
171 173
172 174 CODE = 'param'
173 175 colormap = 'viridis'
174 176 plot_type = 'pcolorbuffer'
175 177
176 178 def setup(self):
177 179 self.xaxis = 'time'
178 180 self.ncols = 1
179 181 self.nrows = self.data.shape('param')[0]
180 182 self.nplots = self.nrows
181 183 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
182
184
183 185 if not self.xlabel:
184 186 self.xlabel = 'Time'
185 187
186 188 self.ylabel = 'Range [km]'
187 189 if not self.titles:
188 190 self.titles = ['Param {}'.format(x) for x in range(self.nrows)]
189 191
190 192 def update(self, dataOut):
191 193
192 194 data = {
193 195 'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
194 196 }
195 197
196 198 meta = {}
197 199
198 200 return data, meta
199
201
200 202 def plot(self):
201 203 # self.data.normalize_heights()
202 204 self.x = self.data.times
203 205 self.y = self.data.yrange
204 206 self.z = self.data['param']
205 207
206 208 self.z = numpy.ma.masked_invalid(self.z)
207 209
208 210 if self.decimation is None:
209 211 x, y, z = self.fill_gaps(self.x, self.y, self.z)
210 212 else:
211 213 x, y, z = self.fill_gaps(*self.decimate())
212 214
213 215 for n, ax in enumerate(self.axes):
214 216
215 217 self.zmax = self.zmax if self.zmax is not None else numpy.max(
216 218 self.z[n])
217 219 self.zmin = self.zmin if self.zmin is not None else numpy.min(
218 220 self.z[n])
219 221
220 222 if ax.firsttime:
221 223 if self.zlimits is not None:
222 224 self.zmin, self.zmax = self.zlimits[n]
223 225
224 226 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
225 227 vmin=self.zmin,
226 228 vmax=self.zmax,
227 229 cmap=self.cmaps[n]
228 230 )
229 231 else:
230 232 if self.zlimits is not None:
231 233 self.zmin, self.zmax = self.zlimits[n]
232 234 ax.collections.remove(ax.collections[0])
233 235 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
234 236 vmin=self.zmin,
235 237 vmax=self.zmax,
236 238 cmap=self.cmaps[n]
237 239 )
238 240
239 241
240 242 class PolarMapPlot(Plot):
241 243 '''
242 244 Plot for weather radar
243 245 '''
244 246
245 247 CODE = 'param'
246 248 colormap = 'seismic'
247 249
248 250 def setup(self):
249 251 self.ncols = 1
250 252 self.nrows = 1
251 253 self.width = 9
252 254 self.height = 8
253 255 self.mode = self.data.meta['mode']
254 256 if self.channels is not None:
255 257 self.nplots = len(self.channels)
256 258 self.nrows = len(self.channels)
257 259 else:
258 260 self.nplots = self.data.shape(self.CODE)[0]
259 261 self.nrows = self.nplots
260 262 self.channels = list(range(self.nplots))
261 263 if self.mode == 'E':
262 264 self.xlabel = 'Longitude'
263 265 self.ylabel = 'Latitude'
264 266 else:
265 267 self.xlabel = 'Range (km)'
266 268 self.ylabel = 'Height (km)'
267 269 self.bgcolor = 'white'
268 270 self.cb_labels = self.data.meta['units']
269 271 self.lat = self.data.meta['latitude']
270 272 self.lon = self.data.meta['longitude']
271 273 self.xmin, self.xmax = float(
272 274 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
273 275 self.ymin, self.ymax = float(
274 276 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
275 277 # self.polar = True
276 278
277 279 def plot(self):
278 280
279 281 for n, ax in enumerate(self.axes):
280 282 data = self.data['param'][self.channels[n]]
281 283
282 284 zeniths = numpy.linspace(
283 285 0, self.data.meta['max_range'], data.shape[1])
284 286 if self.mode == 'E':
285 287 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
286 288 r, theta = numpy.meshgrid(zeniths, azimuths)
287 289 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
288 290 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
289 291 x = km2deg(x) + self.lon
290 292 y = km2deg(y) + self.lat
291 293 else:
292 294 azimuths = numpy.radians(self.data.yrange)
293 295 r, theta = numpy.meshgrid(zeniths, azimuths)
294 296 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
295 297 self.y = zeniths
296 298
297 299 if ax.firsttime:
298 300 if self.zlimits is not None:
299 301 self.zmin, self.zmax = self.zlimits[n]
300 302 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
301 303 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
302 304 vmin=self.zmin,
303 305 vmax=self.zmax,
304 306 cmap=self.cmaps[n])
305 307 else:
306 308 if self.zlimits is not None:
307 309 self.zmin, self.zmax = self.zlimits[n]
308 310 ax.collections.remove(ax.collections[0])
309 311 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
310 312 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
311 313 vmin=self.zmin,
312 314 vmax=self.zmax,
313 315 cmap=self.cmaps[n])
314 316
315 317 if self.mode == 'A':
316 318 continue
317 319
318 320 # plot district names
319 321 f = open('/data/workspace/schain_scripts/distrito.csv')
320 322 for line in f:
321 323 label, lon, lat = [s.strip() for s in line.split(',') if s]
322 324 lat = float(lat)
323 325 lon = float(lon)
324 326 # ax.plot(lon, lat, '.b', ms=2)
325 327 ax.text(lon, lat, label.decode('utf8'), ha='center',
326 328 va='bottom', size='8', color='black')
327 329
328 330 # plot limits
329 331 limites = []
330 332 tmp = []
331 333 for line in open('/data/workspace/schain_scripts/lima.csv'):
332 334 if '#' in line:
333 335 if tmp:
334 336 limites.append(tmp)
335 337 tmp = []
336 338 continue
337 339 values = line.strip().split(',')
338 340 tmp.append((float(values[0]), float(values[1])))
339 341 for points in limites:
340 342 ax.add_patch(
341 343 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
342 344
343 345 # plot river basins (cuencas)
344 346 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
345 347 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
346 348 values = [line.strip().split(',') for line in f]
347 349 points = [(float(s[0]), float(s[1])) for s in values]
348 350 ax.add_patch(Polygon(points, ec='b', fc='none'))
349 351
350 352 # plot grid
351 353 for r in (15, 30, 45, 60):
352 354 ax.add_artist(plt.Circle((self.lon, self.lat),
353 355 km2deg(r), color='0.6', fill=False, lw=0.2))
354 356 ax.text(
355 357 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
356 358 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
357 359 '{}km'.format(r),
358 360 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
359 361
360 362 if self.mode == 'E':
361 363 title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
362 364 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
363 365 else:
364 366 title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
365 367 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
366 368
367 369 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
368 370 self.titles = ['{} {}'.format(
369 371 self.data.parameters[x], title) for x in self.channels]
370 372
373 class WeatherPlot(Plot):
374 CODE = 'weather'
375 plot_name = 'weather'
376 plot_type = 'ppistyle'
377 buffering = False
378
379 def setup(self):
380 self.ncols = 1
381 self.nrows = 1
382 self.nplots= 1
383 self.ylabel= 'Range [Km]'
384 self.titles= ['Weather']
385 self.colorbar=False
386 self.width =8
387 self.height =8
388 self.ini =0
389 self.len_azi =0
390 self.buffer_ini = None
391 self.buffer_azi = None
392 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
393 self.flag =0
394 self.indicador= 0
395
396 def update(self, dataOut):
397
398 data = {}
399 meta = {}
400 data['weather'] = 10*numpy.log10(dataOut.data_360[0]/(650**2))
401 data['azi'] = dataOut.data_azi
402
403 return data, meta
404
405 def plot(self):
406 thisDatetime = datetime.datetime.utcfromtimestamp(self.data.times[-1])
407
408 data = self.data[-1]
409 tmp_h = (data['weather'].shape[1])/10.0
410 stoprange = float(tmp_h*1.5)#stoprange = float(33*1.5) for now 400
411 rangestep = float(0.15)
412 r = numpy.arange(0, stoprange, rangestep)
413 self.y = 2*r
414
415 tmp_v = data['weather']
416 print("tmp_v",tmp_v.shape)
417 tmp_z = data['azi']
418 #print("tmp_z",tmp_z.shape)
419 res = 1
420 step = (360/(res*tmp_v.shape[0]))
421 print("step",step)
422 mode = 1
423 if mode==0:
424 #print("self.ini",self.ini)
425 val = numpy.mean(tmp_v[:,0])
426 self.len_azi = len(tmp_z)
427 ones = numpy.ones([(360-tmp_v.shape[0]),tmp_v.shape[1]])*val
428 self.buffer_ini = numpy.vstack((tmp_v,ones))
429
430 n = ((360/res)-len(tmp_z))
431 start = tmp_z[-1]+res
432 end = tmp_z[0]-res
433 if start>end:
434 end = end+360
435 azi_zeros = numpy.linspace(start,end,int(n))
436 azi_zeros = numpy.where(azi_zeros>360,azi_zeros-360,azi_zeros)
437 self.buffer_ini_azi = numpy.hstack((tmp_z,azi_zeros))
438 self.ini = self.ini+1
439
440 if mode==1:
441 #print("self.ini",self.ini)
442 if self.ini==0:
443 res = 1
444 step = (360/(res*tmp_v.shape[0]))
445 val = numpy.mean(tmp_v[:,0])
446 self.len_azi = len(tmp_z)
447 self.buf_tmp = tmp_v
448 ones = numpy.ones([(360-tmp_v.shape[0]),tmp_v.shape[1]])*val
449 self.buffer_ini = numpy.vstack((tmp_v,ones))
450
451 n = ((360/res)-len(tmp_z))
452 start = tmp_z[-1]+res
453 end = tmp_z[0]-res
454 if start>end:
455 end =end+360
456 azi_zeros = numpy.linspace(start,end,int(n))
457 azi_zeros = numpy.where(azi_zeros>360,azi_zeros-360,azi_zeros)
458 self.buf_azi = tmp_z
459 self.buffer_ini_azi = numpy.hstack((tmp_z,azi_zeros))
460 self.ini = self.ini+1
461 elif 0<self.ini<step:
462 '''
463 if self.ini>31:
464 start= tmp_z[0]
465 end =tmp_z[-1]
466 print("start","end",start,end)
467 if self.ini==32:
468 tmp_v=tmp_v+20
469 if self.ini==33:
470 tmp_v=tmp_v+10
471 if self.ini==34:
472 tmp_v=tmp_v+20
473 if self.ini==35:
474 tmp_v=tmp_v+20
475 '''
476 self.buf_tmp= numpy.vstack((self.buf_tmp,tmp_v))
477 print("ERROR_INMINENTE",self.buf_tmp.shape)
478 if self.buf_tmp.shape[0]==360:
479 self.buffer_ini=self.buf_tmp
480 else:
481 val=30.0
482 ones = numpy.ones([(360-self.buf_tmp.shape[0]),self.buf_tmp.shape[1]])*val
483 self.buffer_ini = numpy.vstack((self.buf_tmp,ones))
484
485 self.buf_azi = numpy.hstack((self.buf_azi,tmp_z))
486 n = ((360/res)-len(self.buf_azi))
487 if n==0:
488 self.buffer_ini_azi = self.buf_azi
489 else:
490 start = self.buf_azi[-1]+res
491 end = self.buf_azi[0]-res
492 if start>end:
493 end =end+360
494 azi_zeros = numpy.linspace(start,end,int(n))
495 azi_zeros = numpy.where(azi_zeros>360,azi_zeros-360,azi_zeros)
496 if tmp_z[0]<self.buf_azi[0] <tmp_z[-1]:
497 self.indicador=1
498 if self.indicador==1:
499 azi_zeros = numpy.ones(360-len(self.buf_azi))*(tmp_z[-1]+res)
500 # self.indicador = True
501 #if self.indicador==True:
502 # azi_zeros = numpy.ones(360-len(self.buf_azi))*(tmp_z[-1]+res)
503
504 #self.buf_azi = tmp_z
505 self.buffer_ini_azi = numpy.hstack((self.buf_azi,azi_zeros))
506
507 if self.ini==step-1:
508 start= tmp_z[0]
509 end = tmp_z[-1]
510 #print("start","end",start,end)
511 ###print(self.buffer_ini_azi[:80])
512 self.ini = self.ini+1
513
514 else:
515 step = (360/(res*tmp_v.shape[0]))
516 tmp_v=tmp_v+5+(self.ini-step)*1
517
518 start= tmp_z[0]
519 end = tmp_z[-1]
520 #print("start","end",start,end)
521 ###print(self.buffer_ini_azi[:120])
522
523 if step>=2:
524 if self.flag<step-1:
525 limit_i=self.buf_azi[len(tmp_z)*(self.flag+1)]
526 limit_s=self.buf_azi[len(tmp_z)*(self.flag+2)-1]
527 print("flag",self.flag,limit_i,limit_s)
528 if limit_i< tmp_z[-1]< limit_s:
529 index_i=int(numpy.where(tmp_z<=self.buf_azi[len(tmp_z)*(self.flag+1)])[0][-1])
530 tmp_r =int(numpy.where(self.buf_azi[(self.flag+1)*len(tmp_z):(self.flag+2)*len(tmp_z)]>=tmp_z[-1])[0][0])
531 print("tmp_r",tmp_r)
532 index_f=(self.flag+1)*len(tmp_z)+tmp_r
533
534 if len(tmp_z[index_i:])>len(self.buf_azi[len(tmp_z)*(self.flag+1):index_f]):
535 final = len(self.buf_azi[len(tmp_z)*(self.flag+1):index_f])
536 else:
537 final= len(tmp_z[index_i:])
538 self.buf_azi[len(tmp_z)*(self.flag+1):index_f]=tmp_z[index_i:index_i+final]
539 self.buf_tmp[len(tmp_z)*(self.flag+1):index_f,:]=tmp_v[index_i:index_i+final,:]
540 if limit_i<tmp_z[0]<limit_s:
541 index_f =int(numpy.where(self.buf_azi>=tmp_z[-1])[0][0])
542 n_p =index_f-len(tmp_z)*(self.flag+1)
543 if n_p>0:
544 self.buf_azi[len(tmp_z)*(self.flag+1):index_f]=tmp_z[-1]*numpy.ones(n_p)
545 self.buf_tmp[len(tmp_z)*(self.flag+1):index_f,:]=tmp_v[-1,:]*numpy.ones([n_p,tmp_v.shape[1]])
546
547 '''
548 if self.buf_azi[len(tmp_z)]<tmp_z[-1]<self.buf_azi[2*len(tmp_z)-1]:
549 index_i= int(numpy.where(tmp_z <= self.buf_azi[len(tmp_z)])[0][-1])
550 index_f= int(numpy.where(self.buf_azi>=tmp_z[-1])[0][0])
551 #print("index",index_i,index_f)
552 if len(tmp_z[index_i:])>len(self.buf_azi[len(tmp_z):index_f]):
553 final = len(self.buf_azi[len(tmp_z):index_f])
554 else:
555 final = len(tmp_z[index_i:])
556 self.buf_azi[len(tmp_z):index_f]=tmp_z[index_i:index_i+final]
557 self.buf_tmp[len(tmp_z):index_f,:]=tmp_v[index_i:index_i+final,:]
558 '''
559 self.buf_tmp[len(tmp_z)*(self.flag):len(tmp_z)*(self.flag+1),:]=tmp_v
560 self.buf_azi[len(tmp_z)*(self.flag):len(tmp_z)*(self.flag+1)] = tmp_z
561 self.buffer_ini=self.buf_tmp
562 self.buffer_ini_azi = self.buf_azi
563 ##print("--------salida------------")
564 start= tmp_z[0]
565 end = tmp_z[-1]
566 ##print("start","end",start,end)
567 ##print(self.buffer_ini_azi[:120])
568 self.ini= self.ini+1
569 self.flag = self.flag +1
570 if self.flag==step:
571 self.flag=0
572
573 for i,ax in enumerate(self.axes):
574 if ax.firsttime:
575 plt.clf()
576 cgax, pm = wrl.vis.plot_ppi(self.buffer_ini,r=r,az=self.buffer_ini_azi,fig=self.figures[0], proj='cg', vmin=30, vmax=70)
577 else:
578 plt.clf()
579 cgax, pm = wrl.vis.plot_ppi(self.buffer_ini,r=r,az=self.buffer_ini_azi,fig=self.figures[0], proj='cg', vmin=30, vmax=70)
580 caax = cgax.parasites[0]
581 paax = cgax.parasites[1]
582 cbar = plt.gcf().colorbar(pm, pad=0.075)
583 caax.set_xlabel('x_range [km]')
584 caax.set_ylabel('y_range [km]')
585 plt.text(1.0, 1.05, 'azimuth '+str(thisDatetime), transform=caax.transAxes, va='bottom',ha='right')
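# Editor's sketch (synthetic data, not part of the diff): this mirrors the
# wrl.vis.plot_ppi() call used above, so the keyword arguments follow whatever
# wradlib version this code targets; the data shapes are assumptions.
def _example_plot_ppi():
    refl = numpy.random.uniform(30, 70, size=(360, 400))   # one dBZ value per (azimuth, range) cell
    r = numpy.arange(0, 60, 0.15)                          # 400 range gates [km]
    az = numpy.arange(360.0)                               # one ray per degree
    cgax, pm = wrl.vis.plot_ppi(refl, r=r, az=az, proj='cg', vmin=30, vmax=70)
    return cgax, pm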
@@ -1,743 +1,745
1 1 # Copyright (c) 2012-2021 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Classes to plot Spectra data
6 6
7 7 """
8 8
9 9 import os
10 10 import numpy
11 11
12 12 from schainpy.model.graphics.jroplot_base import Plot, plt, log
13 13
14 14
15 15 class SpectraPlot(Plot):
16 16 '''
17 17 Plot for Spectra data
18 18 '''
19 19
20 20 CODE = 'spc'
21 21 colormap = 'jet'
22 22 plot_type = 'pcolor'
23 23 buffering = False
24 24
25 25 def setup(self):
26 26 self.nplots = len(self.data.channels)
27 27 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
28 28 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
29 29 self.height = 2.6 * self.nrows
30 30 self.cb_label = 'dB'
31 31 if self.showprofile:
32 32 self.width = 4 * self.ncols
33 33 else:
34 34 self.width = 3.5 * self.ncols
35 35 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
36 36 self.ylabel = 'Range [km]'
37 37
38 38 def update(self, dataOut):
39 39
40 40 data = {}
41 41 meta = {}
42 42 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
43 43 data['spc'] = spc
44 44 data['rti'] = dataOut.getPower()
45 45 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
46 46 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
47
47
48 48 if self.CODE == 'spc_moments':
49 49 data['moments'] = dataOut.moments
50 50 # data['spc'] = 10*numpy.log10(dataOut.data_pre[0]/dataOut.normFactor)
51 51 if self.CODE == 'gaussian_fit':
52 52 # data['moments'] = dataOut.moments
53 53 data['gaussfit'] = dataOut.DGauFitParams
54 54 # data['spc'] = 10*numpy.log10(dataOut.data_pre[0]/dataOut.normFactor)
55 55
56 return data, meta
57
56 return data, meta
57
58 58 def plot(self):
59 59 if self.xaxis == "frequency":
60 60 x = self.data.xrange[0]
61 61 self.xlabel = "Frequency (kHz)"
62 62 elif self.xaxis == "time":
63 63 x = self.data.xrange[1]
64 64 self.xlabel = "Time (ms)"
65 65 else:
66 66 x = self.data.xrange[2]
67 67 self.xlabel = "Velocity (m/s)"
68 68
69 69 if (self.CODE == 'spc_moments') | (self.CODE == 'gaussian_fit'):
70 70 x = self.data.xrange[2]
71 71 self.xlabel = "Velocity (m/s)"
72 72
73 73 self.titles = []
74 74
75 75 y = self.data.yrange
76 76 self.y = y
77 77
78 78 data = self.data[-1]
79 79 z = data['spc']
80 80
81 81 for n, ax in enumerate(self.axes):
82 82 noise = data['noise'][n]
83 83 if self.CODE == 'spc_moments':
84 84 mean = data['moments'][n, 1]
85 if self.CODE == 'gaussian_fit':
85 if self.CODE == 'gaussian_fit':
86 86 # mean = data['moments'][n, 1]
87 87 gau0 = data['gaussfit'][n][2,:,0]
88 88 gau1 = data['gaussfit'][n][2,:,1]
89 89 if ax.firsttime:
90 90 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
91 91 self.xmin = self.xmin if self.xmin else -self.xmax
92 92 self.zmin = self.zmin if self.zmin else numpy.nanmin(z)
93 93 self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
94 94 ax.plt = ax.pcolormesh(x, y, z[n].T,
95 95 vmin=self.zmin,
96 96 vmax=self.zmax,
97 97 cmap=plt.get_cmap(self.colormap)
98 98 )
99 99
100 100 if self.showprofile:
101 101 ax.plt_profile = self.pf_axes[n].plot(
102 102 data['rti'][n], y)[0]
103 103 ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
104 104 color="k", linestyle="dashed", lw=1)[0]
105 105 if self.CODE == 'spc_moments':
106 106 ax.plt_mean = ax.plot(mean, y, color='k', lw=1)[0]
107 107 if self.CODE == 'gaussian_fit':
108 108 # ax.plt_mean = ax.plot(mean, y, color='k', lw=1)[0]
109 109 ax.plt_gau0 = ax.plot(gau0, y, color='r', lw=1)[0]
110 110 ax.plt_gau1 = ax.plot(gau1, y, color='y', lw=1)[0]
111 111 else:
112 112 ax.plt.set_array(z[n].T.ravel())
113 113 if self.showprofile:
114 114 ax.plt_profile.set_data(data['rti'][n], y)
115 115 ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y)
116 116 if self.CODE == 'spc_moments':
117 117 ax.plt_mean.set_data(mean, y)
118 118 if self.CODE == 'gaussian_fit':
119 119 # ax.plt_mean.set_data(mean, y)
120 120 ax.plt_gau0.set_data(gau0, y)
121 121 ax.plt_gau1.set_data(gau1, y)
122 122 self.titles.append('CH {}: {:3.2f}dB'.format(n, noise))
123 123
124 124
125 125 class CrossSpectraPlot(Plot):
126 126
127 127 CODE = 'cspc'
128 128 colormap = 'jet'
129 129 plot_type = 'pcolor'
130 130 zmin_coh = None
131 131 zmax_coh = None
132 132 zmin_phase = None
133 133 zmax_phase = None
134 134
135 135 def setup(self):
136 136
137 137 self.ncols = 4
138 138 self.nplots = len(self.data.pairs) * 2
139 139 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
140 140 self.width = 3.1 * self.ncols
141 141 self.height = 2.6 * self.nrows
142 142 self.ylabel = 'Range [km]'
143 143 self.showprofile = False
144 144 self.plots_adjust.update({'left': 0.08, 'right': 0.92, 'wspace': 0.5, 'hspace':0.4, 'top':0.95, 'bottom': 0.08})
145 145
146 146 def update(self, dataOut):
147 147
148 148 data = {}
149 149 meta = {}
150 150
151 151 spc = dataOut.data_spc
152 152 cspc = dataOut.data_cspc
153 153 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
154 154 meta['pairs'] = dataOut.pairsList
155 155
156 156 tmp = []
157 157
158 158 for n, pair in enumerate(meta['pairs']):
159 159 out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
160 160 coh = numpy.abs(out)
161 161 phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
162 162 tmp.append(coh)
163 163 tmp.append(phase)
164 164
165 165 data['cspc'] = numpy.array(tmp)
166 166
167 return data, meta
168
167 return data, meta
168
169 169 def plot(self):
170 170
171 171 if self.xaxis == "frequency":
172 172 x = self.data.xrange[0]
173 173 self.xlabel = "Frequency (kHz)"
174 174 elif self.xaxis == "time":
175 175 x = self.data.xrange[1]
176 176 self.xlabel = "Time (ms)"
177 177 else:
178 178 x = self.data.xrange[2]
179 179 self.xlabel = "Velocity (m/s)"
180
180
181 181 self.titles = []
182 182
183 183 y = self.data.yrange
184 184 self.y = y
185 185
186 186 data = self.data[-1]
187 187 cspc = data['cspc']
188 188
189 189 for n in range(len(self.data.pairs)):
190 190 pair = self.data.pairs[n]
191 191 coh = cspc[n*2]
192 192 phase = cspc[n*2+1]
193 193 ax = self.axes[2 * n]
194 194 if ax.firsttime:
195 195 ax.plt = ax.pcolormesh(x, y, coh.T,
196 196 vmin=0,
197 197 vmax=1,
198 198 cmap=plt.get_cmap(self.colormap_coh)
199 199 )
200 200 else:
201 201 ax.plt.set_array(coh.T.ravel())
202 202 self.titles.append(
203 203 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))
204
204
205 205 ax = self.axes[2 * n + 1]
206 206 if ax.firsttime:
207 207 ax.plt = ax.pcolormesh(x, y, phase.T,
208 208 vmin=-180,
209 209 vmax=180,
210 cmap=plt.get_cmap(self.colormap_phase)
210 cmap=plt.get_cmap(self.colormap_phase)
211 211 )
212 212 else:
213 213 ax.plt.set_array(phase.T.ravel())
214 214 self.titles.append('Phase CH{} * CH{}'.format(pair[0], pair[1]))
215 215
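# Editor's sketch (synthetic complex values, not part of the diff): the coherence is
# the magnitude and the phase is the angle (in degrees) of the normalized
# cross-spectrum computed in CrossSpectraPlot.update() above.
def _example_coherence_phase():
    out = numpy.array([0.6 + 0.6j, 0.3 - 0.3j])                  # assumed normalized cross-spectrum samples
    coh = numpy.abs(out)                                         # -> approx. [0.85, 0.42]
    phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi   # -> [ 45., -45.]
    return coh, phase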
216 216
217 217 class RTIPlot(Plot):
218 218 '''
219 219 Plot for RTI data
220 220 '''
221 221
222 222 CODE = 'rti'
223 223 colormap = 'jet'
224 224 plot_type = 'pcolorbuffer'
225 225
226 226 def setup(self):
227 227 self.xaxis = 'time'
228 228 self.ncols = 1
229 print("ch",self.data.channels)
229 230 self.nrows = len(self.data.channels)
230 231 self.nplots = len(self.data.channels)
231 232 self.ylabel = 'Range [km]'
232 233 self.xlabel = 'Time'
233 234 self.cb_label = 'dB'
234 235 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95})
235 236 self.titles = ['{} Channel {}'.format(
236 237 self.CODE.upper(), x) for x in range(self.nrows)]
237 238
238 239 def update(self, dataOut):
239 240
240 241 data = {}
241 242 meta = {}
242 243 data['rti'] = dataOut.getPower()
243 244 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
244 245
245 246 return data, meta
246 247
247 248 def plot(self):
248 249 self.x = self.data.times
249 250 self.y = self.data.yrange
250 251 self.z = self.data[self.CODE]
251 252 self.z = numpy.ma.masked_invalid(self.z)
252 253
253 254 if self.decimation is None:
254 255 x, y, z = self.fill_gaps(self.x, self.y, self.z)
255 256 else:
256 257 x, y, z = self.fill_gaps(*self.decimate())
257 258
258 259 for n, ax in enumerate(self.axes):
259 260 self.zmin = self.zmin if self.zmin else numpy.min(self.z)
260 261 self.zmax = self.zmax if self.zmax else numpy.max(self.z)
261 262 data = self.data[-1]
262 263 if ax.firsttime:
263 264 ax.plt = ax.pcolormesh(x, y, z[n].T,
264 265 vmin=self.zmin,
265 266 vmax=self.zmax,
266 267 cmap=plt.get_cmap(self.colormap)
267 268 )
268 269 if self.showprofile:
270 print("test-------------------------------------1")
269 271 ax.plot_profile = self.pf_axes[n].plot(
270 272 data['rti'][n], self.y)[0]
271 273 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(data['noise'][n], len(self.y)), self.y,
272 274 color="k", linestyle="dashed", lw=1)[0]
273 275 else:
274 276 ax.collections.remove(ax.collections[0])
275 277 ax.plt = ax.pcolormesh(x, y, z[n].T,
276 278 vmin=self.zmin,
277 279 vmax=self.zmax,
278 280 cmap=plt.get_cmap(self.colormap)
279 281 )
280 282 if self.showprofile:
281 283 ax.plot_profile.set_data(data['rti'][n], self.y)
282 284 ax.plot_noise.set_data(numpy.repeat(
283 285 data['noise'][n], len(self.y)), self.y)
284 286
285 287
286 288 class CoherencePlot(RTIPlot):
287 289 '''
288 290 Plot for Coherence data
289 291 '''
290 292
291 293 CODE = 'coh'
292 294
293 295 def setup(self):
294 296 self.xaxis = 'time'
295 297 self.ncols = 1
296 298 self.nrows = len(self.data.pairs)
297 299 self.nplots = len(self.data.pairs)
298 300 self.ylabel = 'Range [km]'
299 301 self.xlabel = 'Time'
300 302 self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1,'right':0.95})
301 303 if self.CODE == 'coh':
302 304 self.cb_label = ''
303 305 self.titles = [
304 306 'Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
305 307 else:
306 308 self.cb_label = 'Degrees'
307 309 self.titles = [
308 310 'Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
309 311
310 312 def update(self, dataOut):
311 313
312 314 data = {}
313 315 meta = {}
314 316 data['coh'] = dataOut.getCoherence()
315 317 meta['pairs'] = dataOut.pairsList
316 318
317 319 return data, meta
318 320
319 321 class PhasePlot(CoherencePlot):
320 322 '''
321 323 Plot for Phase map data
322 324 '''
323 325
324 326 CODE = 'phase'
325 327 colormap = 'seismic'
326 328
327 329 def update(self, dataOut):
328 330
329 331 data = {}
330 332 meta = {}
331 333 data['phase'] = dataOut.getCoherence(phase=True)
332 334 meta['pairs'] = dataOut.pairsList
333 335
334 336 return data, meta
335 337
336 338 class NoisePlot(Plot):
337 339 '''
338 Plot for noise
340 Plot for noise
339 341 '''
340 342
341 343 CODE = 'noise'
342 344 plot_type = 'scatterbuffer'
343 345
344 346 def setup(self):
345 347 self.xaxis = 'time'
346 348 self.ncols = 1
347 349 self.nrows = 1
348 350 self.nplots = 1
349 351 self.ylabel = 'Intensity [dB]'
350 352 self.xlabel = 'Time'
351 353 self.titles = ['Noise']
352 354 self.colorbar = False
353 355 self.plots_adjust.update({'right': 0.85 })
354 356
355 357 def update(self, dataOut):
356 358
357 359 data = {}
358 360 meta = {}
359 361 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor).reshape(dataOut.nChannels, 1)
360 362 meta['yrange'] = numpy.array([])
361 363
362 364 return data, meta
363 365
364 366 def plot(self):
365 367
366 368 x = self.data.times
367 369 xmin = self.data.min_time
368 370 xmax = xmin + self.xrange * 60 * 60
369 371 Y = self.data['noise']
370 372
371 373 if self.axes[0].firsttime:
372 374 self.ymin = numpy.nanmin(Y) - 5
373 375 self.ymax = numpy.nanmax(Y) + 5
374 376 for ch in self.data.channels:
375 377 y = Y[ch]
376 378 self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
377 379 plt.legend(bbox_to_anchor=(1.18, 1.0))
378 380 else:
379 381 for ch in self.data.channels:
380 382 y = Y[ch]
381 383 self.axes[0].lines[ch].set_data(x, y)
382 384
383
385
384 386 class PowerProfilePlot(Plot):
385 387
386 388 CODE = 'pow_profile'
387 389 plot_type = 'scatter'
388 390
389 391 def setup(self):
390 392
391 393 self.ncols = 1
392 394 self.nrows = 1
393 395 self.nplots = 1
394 396 self.height = 4
395 397 self.width = 3
396 398 self.ylabel = 'Range [km]'
397 399 self.xlabel = 'Intensity [dB]'
398 400 self.titles = ['Power Profile']
399 401 self.colorbar = False
400 402
401 403 def update(self, dataOut):
402 404
403 405 data = {}
404 406 meta = {}
405 407 data[self.CODE] = dataOut.getPower()
406 408
407 409 return data, meta
408 410
409 411 def plot(self):
410 412
411 413 y = self.data.yrange
412 414 self.y = y
413 415
414 416 x = self.data[-1][self.CODE]
415
417
416 418 if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9
417 419 if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1
418
420
419 421 if self.axes[0].firsttime:
420 422 for ch in self.data.channels:
421 423 self.axes[0].plot(x[ch], y, lw=1, label='Ch{}'.format(ch))
422 424 plt.legend()
423 425 else:
424 426 for ch in self.data.channels:
425 427 self.axes[0].lines[ch].set_data(x[ch], y)
426 428
427 429
428 430 class SpectraCutPlot(Plot):
429 431
430 432 CODE = 'spc_cut'
431 433 plot_type = 'scatter'
432 434 buffering = False
433 435
434 436 def setup(self):
435 437
436 438 self.nplots = len(self.data.channels)
437 439 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
438 440 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
439 441 self.width = 3.4 * self.ncols + 1.5
440 442 self.height = 3 * self.nrows
441 443 self.ylabel = 'Power [dB]'
442 444 self.colorbar = False
443 445 self.plots_adjust.update({'left':0.1, 'hspace':0.3, 'right': 0.75, 'bottom':0.08})
444 446
445 447 def update(self, dataOut):
446 448
447 449 data = {}
448 450 meta = {}
449 451 spc = 10*numpy.log10(dataOut.data_pre[0]/dataOut.normFactor)
450 452 data['spc'] = spc
451 453 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
452 454 if self.CODE == 'cut_gaussian_fit':
453 455 data['gauss_fit0'] = 10*numpy.log10(dataOut.GaussFit0/dataOut.normFactor)
454 456 data['gauss_fit1'] = 10*numpy.log10(dataOut.GaussFit1/dataOut.normFactor)
455 457 return data, meta
456 458
457 459 def plot(self):
458 460 if self.xaxis == "frequency":
459 461 x = self.data.xrange[0][1:]
460 462 self.xlabel = "Frequency (kHz)"
461 463 elif self.xaxis == "time":
462 464 x = self.data.xrange[1]
463 465 self.xlabel = "Time (ms)"
464 466 else:
465 467 x = self.data.xrange[2][:-1]
466 468 self.xlabel = "Velocity (m/s)"
467
469
468 470 if self.CODE == 'cut_gaussian_fit':
469 471 x = self.data.xrange[2][:-1]
470 472 self.xlabel = "Velocity (m/s)"
471 473
472 474 self.titles = []
473 475
474 476 y = self.data.yrange
475 477 data = self.data[-1]
476 478 z = data['spc']
477 479
478 480 if self.height_index:
479 481 index = numpy.array(self.height_index)
480 482 else:
481 483 index = numpy.arange(0, len(y), int((len(y))/9))
482 484
483 485 for n, ax in enumerate(self.axes):
484 if self.CODE == 'cut_gaussian_fit':
486 if self.CODE == 'cut_gaussian_fit':
485 487 gau0 = data['gauss_fit0']
486 488 gau1 = data['gauss_fit1']
487 489 if ax.firsttime:
488 490 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
489 491 self.xmin = self.xmin if self.xmin else -self.xmax
490 492 self.ymin = self.ymin if self.ymin else numpy.nanmin(z)
491 493 self.ymax = self.ymax if self.ymax else numpy.nanmax(z)
492 494 ax.plt = ax.plot(x, z[n, :, index].T, lw=0.25)
493 495 if self.CODE == 'cut_gaussian_fit':
494 496 ax.plt_gau0 = ax.plot(x, gau0[n, :, index].T, lw=1, linestyle='-.')
495 497 for i, line in enumerate(ax.plt_gau0):
496 line.set_color(ax.plt[i].get_color())
498 line.set_color(ax.plt[i].get_color())
497 499 ax.plt_gau1 = ax.plot(x, gau1[n, :, index].T, lw=1, linestyle='--')
498 500 for i, line in enumerate(ax.plt_gau1):
499 line.set_color(ax.plt[i].get_color())
501 line.set_color(ax.plt[i].get_color())
500 502 labels = ['Range = {:2.1f}km'.format(y[i]) for i in index]
501 503 self.figures[0].legend(ax.plt, labels, loc='center right')
502 504 else:
503 505 for i, line in enumerate(ax.plt):
504 506 line.set_data(x, z[n, :, index[i]].T)
505 507 for i, line in enumerate(ax.plt_gau0):
506 508 line.set_data(x, gau0[n, :, index[i]].T)
507 509 line.set_color(ax.plt[i].get_color())
508 510 for i, line in enumerate(ax.plt_gau1):
509 511 line.set_data(x, gau1[n, :, index[i]].T)
510 512 line.set_color(ax.plt[i].get_color())
511 513 self.titles.append('CH {}'.format(n))
512 514
513 515
514 516 class BeaconPhase(Plot):
515 517
516 518 __isConfig = None
517 519 __nsubplots = None
518 520
519 521 PREFIX = 'beacon_phase'
520 522
521 523 def __init__(self):
522 524 Plot.__init__(self)
523 525 self.timerange = 24*60*60
524 526 self.isConfig = False
525 527 self.__nsubplots = 1
526 528 self.counter_imagwr = 0
527 529 self.WIDTH = 800
528 530 self.HEIGHT = 400
529 531 self.WIDTHPROF = 120
530 532 self.HEIGHTPROF = 0
531 533 self.xdata = None
532 534 self.ydata = None
533 535
534 536 self.PLOT_CODE = BEACON_CODE
535 537
536 538 self.FTP_WEI = None
537 539 self.EXP_CODE = None
538 540 self.SUB_EXP_CODE = None
539 541 self.PLOT_POS = None
540 542
541 543 self.filename_phase = None
542 544
543 545 self.figfile = None
544 546
545 547 self.xmin = None
546 548 self.xmax = None
547 549
548 550 def getSubplots(self):
549 551
550 552 ncol = 1
551 553 nrow = 1
552 554
553 555 return nrow, ncol
554 556
555 557 def setup(self, id, nplots, wintitle, showprofile=True, show=True):
556 558
557 559 self.__showprofile = showprofile
558 560 self.nplots = nplots
559 561
560 562 ncolspan = 7
561 563 colspan = 6
562 564 self.__nsubplots = 2
563 565
564 566 self.createFigure(id = id,
565 567 wintitle = wintitle,
566 568 widthplot = self.WIDTH+self.WIDTHPROF,
567 569 heightplot = self.HEIGHT+self.HEIGHTPROF,
568 570 show=show)
569 571
570 572 nrow, ncol = self.getSubplots()
571 573
572 574 self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)
573 575
574 576 def save_phase(self, filename_phase):
575 577 f = open(filename_phase,'w+')
576 578 f.write('\n\n')
577 579 f.write('JICAMARCA RADIO OBSERVATORY - Beacon Phase \n')
578 580 f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n' )
579 581 f.close()
580 582
581 583 def save_data(self, filename_phase, data, data_datetime):
582 584 f=open(filename_phase,'a')
583 585 timetuple_data = data_datetime.timetuple()
584 586 day = str(timetuple_data.tm_mday)
585 587 month = str(timetuple_data.tm_mon)
586 588 year = str(timetuple_data.tm_year)
587 589 hour = str(timetuple_data.tm_hour)
588 590 minute = str(timetuple_data.tm_min)
589 591 second = str(timetuple_data.tm_sec)
590 592 f.write(day+' '+month+' '+year+' '+hour+' '+minute+' '+second+' '+str(data[0])+' '+str(data[1])+' '+str(data[2])+' '+str(data[3])+'\n')
591 593 f.close()
592 594
593 595 def plot(self):
594 596 log.warning('TODO: Not yet implemented...')
595 597
596 598 def run(self, dataOut, id, wintitle="", pairsList=None, showprofile='True',
597 599 xmin=None, xmax=None, ymin=None, ymax=None, hmin=None, hmax=None,
598 600 timerange=None,
599 601 save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1,
600 602 server=None, folder=None, username=None, password=None,
601 603 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0):
602 604
603 if dataOut.flagNoData:
605 if dataOut.flagNoData:
604 606 return dataOut
605 607
606 608 if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
607 609 return
608 610
609 611 if pairsList == None:
610 612 pairsIndexList = dataOut.pairsIndexList[:10]
611 613 else:
612 614 pairsIndexList = []
613 615 for pair in pairsList:
614 616 if pair not in dataOut.pairsList:
615 617 raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
616 618 pairsIndexList.append(dataOut.pairsList.index(pair))
617 619
618 620 if pairsIndexList == []:
619 621 return
620 622
621 623 # if len(pairsIndexList) > 4:
622 624 # pairsIndexList = pairsIndexList[0:4]
623 625
624 626 hmin_index = None
625 627 hmax_index = None
626 628
627 629 if hmin != None and hmax != None:
628 630 indexes = numpy.arange(dataOut.nHeights)
629 631 hmin_list = indexes[dataOut.heightList >= hmin]
630 632 hmax_list = indexes[dataOut.heightList <= hmax]
631 633
632 634 if hmin_list.any():
633 635 hmin_index = hmin_list[0]
634 636
635 637 if hmax_list.any():
636 638 hmax_index = hmax_list[-1]+1
637 639
638 640 x = dataOut.getTimeRange()
639 641
640 642 thisDatetime = dataOut.datatime
641 643
642 644 title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
643 645 xlabel = "Local Time"
644 646 ylabel = "Phase (degrees)"
645 647
646 648 update_figfile = False
647 649
648 650 nplots = len(pairsIndexList)
649 651 #phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
650 652 phase_beacon = numpy.zeros(len(pairsIndexList))
651 653 for i in range(nplots):
652 654 pair = dataOut.pairsList[pairsIndexList[i]]
653 655 ccf = numpy.average(dataOut.data_cspc[pairsIndexList[i], :, hmin_index:hmax_index], axis=0)
654 656 powa = numpy.average(dataOut.data_spc[pair[0], :, hmin_index:hmax_index], axis=0)
655 657 powb = numpy.average(dataOut.data_spc[pair[1], :, hmin_index:hmax_index], axis=0)
656 658 avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
657 659 phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real)*180/numpy.pi
658 660
659 661 if dataOut.beacon_heiIndexList:
660 662 phase_beacon[i] = numpy.average(phase[dataOut.beacon_heiIndexList])
661 663 else:
662 664 phase_beacon[i] = numpy.average(phase)
663 665
664 666 if not self.isConfig:
665 667
666 668 nplots = len(pairsIndexList)
667 669
668 670 self.setup(id=id,
669 671 nplots=nplots,
670 672 wintitle=wintitle,
671 673 showprofile=showprofile,
672 674 show=show)
673 675
674 676 if timerange != None:
675 677 self.timerange = timerange
676 678
677 679 self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange)
678 680
679 681 if ymin == None: ymin = 0
680 682 if ymax == None: ymax = 360
681 683
682 684 self.FTP_WEI = ftp_wei
683 685 self.EXP_CODE = exp_code
684 686 self.SUB_EXP_CODE = sub_exp_code
685 687 self.PLOT_POS = plot_pos
686 688
687 689 self.name = thisDatetime.strftime("%Y%m%d_%H%M%S")
688 690 self.isConfig = True
689 691 self.figfile = figfile
690 692 self.xdata = numpy.array([])
691 693 self.ydata = numpy.array([])
692 694
693 695 update_figfile = True
694 696
695 697 #open file beacon phase
696 698 path = '%s%03d' %(self.PREFIX, self.id)
697 699 beacon_file = os.path.join(path,'%s.txt'%self.name)
698 700 self.filename_phase = os.path.join(figpath,beacon_file)
699 701 #self.save_phase(self.filename_phase)
700 702
701 703
702 704 #store data beacon phase
703 705 #self.save_data(self.filename_phase, phase_beacon, thisDatetime)
704 706
705 707 self.setWinTitle(title)
706 708
707 709
708 710 title = "Phase Plot %s" %(thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
709 711
710 712 legendlabels = ["Pair (%d,%d)"%(pair[0], pair[1]) for pair in dataOut.pairsList]
711 713
712 714 axes = self.axesList[0]
713 715
714 716 self.xdata = numpy.hstack((self.xdata, x[0:1]))
715 717
716 718 if len(self.ydata)==0:
717 719 self.ydata = phase_beacon.reshape(-1,1)
718 720 else:
719 721 self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1,1)))
720 722
721 723
722 724 axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
723 725 xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax,
724 726 xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='x', markersize=8, linestyle="solid",
725 727 XAxisAsTime=True, grid='both'
726 728 )
727 729
728 730 self.draw()
729 731
730 732 if dataOut.ltctime >= self.xmax:
731 733 self.counter_imagwr = wr_period
732 734 self.isConfig = False
733 735 update_figfile = True
734 736
735 737 self.save(figpath=figpath,
736 738 figfile=figfile,
737 739 save=save,
738 740 ftp=ftp,
739 741 wr_period=wr_period,
740 742 thisDatetime=thisDatetime,
741 743 update_figfile=update_figfile)
742 744
743 return dataOut No newline at end of file
745 return dataOut
@@ -1,1575 +1,1609
1 1 """
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 """
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import zmq
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
18 18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
19 19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
20 20 from schainpy.utils import log
21 21 import schainpy.admin
22 22
23 23 LOCALTIME = True
24 24 DT_DIRECTIVES = {
25 25 '%Y': 4,
26 26 '%y': 2,
27 27 '%m': 2,
28 28 '%d': 2,
29 29 '%j': 3,
30 30 '%H': 2,
31 31 '%M': 2,
32 32 '%S': 2,
33 33 '%f': 6
34 34 }
35 35
36 36
37 37 def isNumber(cad):
38 38 """
39 39 Checks whether the characters that make up a string can be converted to a number.
40 40
41 41 Exceptions:
42 42 If a given string cannot be converted to a number
43 43 Input:
44 44 str, the string to be analyzed to determine whether it can be converted to a number
45 45
46 46 Return:
47 47 True : if the string is numeric
48 48 False : if the string is not numeric
49 49 """
50 50 try:
51 51 float(cad)
52 52 return True
53 53 except:
54 54 return False
55 55
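# Editor's note (assumed inputs, not part of the diff): isNumber() simply relies on
# float() conversion, so strings with units or letters return False.
def _example_isNumber():
    return isNumber('10.5'), isNumber('-3e2'), isNumber('20dB')   # -> (True, True, False)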
56 56
57 57 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
58 58 """
59 59 This function determines whether or not a data file falls within the specified date range.
60 60
61 61 Inputs:
62 62 filename : full name of the data file in Jicamarca format (.r)
63 63
64 64 startUTSeconds : start date of the selected range, given in
65 65 seconds counted from 01/01/1970.
66 66 endUTSeconds : end date of the selected range, given in
67 67 seconds counted from 01/01/1970.
68 68
69 69 Return:
70 70 Boolean : returns True if the data file contains data within the
71 71 specified date range, otherwise returns False.
72 72
73 73 Exceptions:
74 74 If the file does not exist or cannot be opened
75 75 If the header cannot be read.
76 76
77 77 """
78 78 basicHeaderObj = BasicHeader(LOCALTIME)
79 79
80 80 try:
81 81 fp = open(filename, 'rb')
82 82 except IOError:
83 83 print("The file %s can't be opened" % (filename))
84 84 return 0
85 85
86 86 sts = basicHeaderObj.read(fp)
87 87 fp.close()
88 88
89 89 if not(sts):
90 90 print("Skipping the file %s because it has not a valid header" % (filename))
91 91 return 0
92 92
93 93 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
94 94 return 0
95 95
96 96 return 1
97 97
98 98
99 99 def isTimeInRange(thisTime, startTime, endTime):
100 100 if endTime >= startTime:
101 101 if (thisTime < startTime) or (thisTime > endTime):
102 102 return 0
103 103 return 1
104 104 else:
105 105 if (thisTime < startTime) and (thisTime > endTime):
106 106 return 0
107 107 return 1
108 108
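# Editor's sketch (assumed times, not part of the diff): isTimeInRange() also handles
# observation windows that wrap past midnight (endTime < startTime).
def _example_isTimeInRange():
    night = (datetime.time(22, 0), datetime.time(6, 0))    # hypothetical 22:00-06:00 window
    return (isTimeInRange(datetime.time(1, 30), *night),   # -> 1, inside the wrapped range
            isTimeInRange(datetime.time(12, 0), *night))   # -> 0, outside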
109 109
110 110 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
111 111 """
112 112 Returns 1 if the data file falls within the specified time range.
113 113
114 114 Inputs:
115 115 filename : full name of the data file in Jicamarca format (.r)
116 116
117 117 startDate : start date of the selected range, as a datetime.date
118 118
119 119 endDate : end date of the selected range, as a datetime.date
120 120
121 121 startTime : start time of the selected range, as a datetime.time
122 122
123 123 endTime : end time of the selected range, as a datetime.time
124 124
125 125 Return:
126 126 Boolean : returns True if the data file contains data within the
127 127 specified date/time range, otherwise returns False.
128 128
129 129 Exceptions:
130 130 If the file does not exist or cannot be opened
131 131 If the header cannot be read.
132 132
133 133 """
134 134
135 135 try:
136 136 fp = open(filename, 'rb')
137 137 except IOError:
138 138 print("The file %s can't be opened" % (filename))
139 139 return None
140 140
141 141 firstBasicHeaderObj = BasicHeader(LOCALTIME)
142 142 systemHeaderObj = SystemHeader()
143 143 radarControllerHeaderObj = RadarControllerHeader()
144 144 processingHeaderObj = ProcessingHeader()
145 145
146 146 lastBasicHeaderObj = BasicHeader(LOCALTIME)
147 147
148 148 sts = firstBasicHeaderObj.read(fp)
149 149
150 150 if not(sts):
151 151 print("[Reading] Skipping the file %s because it has not a valid header" % (filename))
152 152 return None
153 153
154 154 if not systemHeaderObj.read(fp):
155 155 return None
156 156
157 157 if not radarControllerHeaderObj.read(fp):
158 158 return None
159 159
160 160 if not processingHeaderObj.read(fp):
161 161 return None
162 162
163 163 filesize = os.path.getsize(filename)
164 164
165 165 offset = processingHeaderObj.blockSize + 24 # header size
166 166
167 167 if filesize <= offset:
168 168 print("[Reading] %s: This file has not enough data" % filename)
169 169 return None
170 170
171 171 fp.seek(-offset, 2)
172 172
173 173 sts = lastBasicHeaderObj.read(fp)
174 174
175 175 fp.close()
176 176
177 177 thisDatetime = lastBasicHeaderObj.datatime
178 178 thisTime_last_block = thisDatetime.time()
179 179
180 180 thisDatetime = firstBasicHeaderObj.datatime
181 181 thisDate = thisDatetime.date()
182 182 thisTime_first_block = thisDatetime.time()
183 183
184 184 # General case
185 185 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
186 186 #-----------o----------------------------o-----------
187 187 # startTime endTime
188 188
189 189 if endTime >= startTime:
190 190 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
191 191 return None
192 192
193 193 return thisDatetime
194 194
195 195 # If endTime < startTime then endTime belongs to the next day
196 196
197 197 #<<<<<<<<<<<o o>>>>>>>>>>>
198 198 #-----------o----------------------------o-----------
199 199 # endTime startTime
200 200
201 201 if (thisDate == startDate) and (thisTime_last_block < startTime):
202 202 return None
203 203
204 204 if (thisDate == endDate) and (thisTime_first_block > endTime):
205 205 return None
206 206
207 207 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
208 208 return None
209 209
210 210 return thisDatetime
211 211
212 212
213 213 def isFolderInDateRange(folder, startDate=None, endDate=None):
214 214 """
215 215 Returns 1 if the data folder falls within the specified date range.
216 216
217 217 Inputs:
218 218 folder : full name of the directory.
219 219 Its format should be "/path_root/?YYYYDDD"
220 220
221 221 where:
222 222 YYYY : year (e.g. 2015)
223 223 DDD : day of year (e.g. 305)
224 224
225 225 startDate : start date of the selected range, as a datetime.date
226 226
227 227 endDate : end date of the selected range, as a datetime.date
228 228
229 229 Return:
230 230 Boolean : returns True if the folder contains data within the
231 231 specified date range, otherwise returns False.
232 232 Exceptions:
233 233 If the directory does not have the expected format
234 234 """
235 235
236 236 basename = os.path.basename(folder)
237 237
238 238 if not isRadarFolder(basename):
239 239 print("The folder %s has not the rigth format" % folder)
240 240 return 0
241 241
242 242 if startDate and endDate:
243 243 thisDate = getDateFromRadarFolder(basename)
244 244
245 245 if thisDate < startDate:
246 246 return 0
247 247
248 248 if thisDate > endDate:
249 249 return 0
250 250
251 251 return 1
252 252
253 253
254 254 def isFileInDateRange(filename, startDate=None, endDate=None):
255 255 """
256 256 Returns 1 if the data file falls within the specified date range.
257 257
258 258 Inputs:
259 259 filename : full name of the data file in Jicamarca format (.r)
260 260
261 261 Its format should be "?YYYYDDDsss"
262 262
263 263 where:
264 264 YYYY : year (e.g. 2015)
265 265 DDD : day of year (e.g. 305)
266 266 sss : set
267 267
268 268 startDate : start date of the selected range, as a datetime.date
269 269
270 270 endDate : end date of the selected range, as a datetime.date
271 271
272 272 Return:
273 273 Boolean : returns True if the data file contains data within the
274 274 specified date range, otherwise returns False.
275 275 Exceptions:
276 276 If the file does not have the expected format
277 277 """
278 278
279 279 basename = os.path.basename(filename)
280 280
281 281 if not isRadarFile(basename):
282 282 print("The filename %s has not the rigth format" % filename)
283 283 return 0
284 284
285 285 if startDate and endDate:
286 286 thisDate = getDateFromRadarFile(basename)
287 287
288 288 if thisDate < startDate:
289 289 return 0
290 290
291 291 if thisDate > endDate:
292 292 return 0
293 293
294 294 return 1
295 295
296 296
297 297 def getFileFromSet(path, ext, set):
298 298 validFilelist = []
299 299 fileList = os.listdir(path)
300 300
301 301 # 0 1234 567 89A BCDE
302 302 # H YYYY DDD SSS .ext
303 303
304 304 for thisFile in fileList:
305 305 try:
306 306 year = int(thisFile[1:5])
307 307 doy = int(thisFile[5:8])
308 308 except:
309 309 continue
310 310
311 311 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
312 312 continue
313 313
314 314 validFilelist.append(thisFile)
315 315
316 316 myfile = fnmatch.filter(
317 317 validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))
318 318
319 319 if len(myfile) != 0:
320 320 return myfile[0]
321 321 else:
322 322 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
323 323 print('the filename %s does not exist' % filename)
324 324 print('...going to the last file: ')
325 325
326 326 if validFilelist:
327 327 validFilelist = sorted(validFilelist, key=str.lower)
328 328 return validFilelist[-1]
329 329
330 330 return None
331 331
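# Editor's note (assumed values, not part of the diff): the wildcard built above to
# match a specific year/day-of-year/set looks like this:
def _example_set_pattern():
    return '*%4.4d%3.3d%3.3d*' % (2021, 200, 5)   # -> '*2021200005*'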
332 332
333 333 def getlastFileFromPath(path, ext):
334 334 """
335 335 Filters the fileList, keeping only the entries that match the "PYYYYDDDSSS.ext" format,
336 336 and after the filtering returns the last file left in the list.
337 337
338 338 Input:
339 339 fileList : list containing all the files (without path) of a given folder
340 340 ext : extension of the files contained in the folder
341 341
342 342 Return:
343 343 The last file of the given folder, without the path.
344 344 """
345 345 validFilelist = []
346 346 fileList = os.listdir(path)
347 347
348 348 # 0 1234 567 89A BCDE
349 349 # H YYYY DDD SSS .ext
350 350
351 351 for thisFile in fileList:
352 352
353 353 year = thisFile[1:5]
354 354 if not isNumber(year):
355 355 continue
356 356
357 357 doy = thisFile[5:8]
358 358 if not isNumber(doy):
359 359 continue
360 360
361 361 year = int(year)
362 362 doy = int(doy)
363 363
364 364 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
365 365 continue
366 366
367 367 validFilelist.append(thisFile)
368 368
369 369 if validFilelist:
370 370 validFilelist = sorted(validFilelist, key=str.lower)
371 371 return validFilelist[-1]
372 372
373 373 return None
374 374
375 375
376 376 def isRadarFolder(folder):
377 377 try:
378 378 year = int(folder[1:5])
379 379 doy = int(folder[5:8])
380 380 except:
381 381 return 0
382 382
383 383 return 1
384 384
385 385
386 386 def isRadarFile(file):
387 try:
387 try:
388 388 year = int(file[1:5])
389 389 doy = int(file[5:8])
390 390 set = int(file[8:11])
391 391 except:
392 392 return 0
393 393
394 394 return 1
395 395
396 396
397 397 def getDateFromRadarFile(file):
398 try:
398 try:
399 399 year = int(file[1:5])
400 400 doy = int(file[5:8])
401 set = int(file[8:11])
401 set = int(file[8:11])
402 402 except:
403 403 return None
404 404
405 405 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
406 406 return thisDate
407 407
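# Editor's sketch (hypothetical filename, not part of the diff): radar files are named
# ?YYYYDDDSSS.ext, so the date is recovered from the year and day-of-year fields.
def _example_getDateFromRadarFile():
    return getDateFromRadarFile('D2021200001.r')   # -> datetime.date(2021, 7, 19)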
408 408
409 409 def getDateFromRadarFolder(folder):
410 410 try:
411 411 year = int(folder[1:5])
412 412 doy = int(folder[5:8])
413 413 except:
414 414 return None
415 415
416 416 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
417 417 return thisDate
418 418
419 419 def parse_format(s, fmt):
420
420
421 421 for i in range(fmt.count('%')):
422 422 x = fmt.index('%')
423 423 d = DT_DIRECTIVES[fmt[x:x+2]]
424 424 fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
425 425 return fmt
426 426
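# Editor's sketch (hypothetical folder name, not part of the diff): parse_format()
# trims a folder/file name so that only the characters covered by the strptime
# directives remain, which lets the readers below parse names that carry extra suffixes.
def _example_parse_format():
    clean = parse_format('d2021200_raw', 'd%Y%j')                # -> 'd2021200'
    return datetime.datetime.strptime(clean, 'd%Y%j').date()     # -> datetime.date(2021, 7, 19)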
427 427 class Reader(object):
428 428
429 429 c = 3E8
430 430 isConfig = False
431 431 dtype = None
432 432 pathList = []
433 433 filenameList = []
434 434 datetimeList = []
435 435 filename = None
436 436 ext = None
437 437 flagIsNewFile = 1
438 438 flagDiscontinuousBlock = 0
439 439 flagIsNewBlock = 0
440 440 flagNoMoreFiles = 0
441 441 fp = None
442 442 firstHeaderSize = 0
443 443 basicHeaderSize = 24
444 444 versionFile = 1103
445 445 fileSize = None
446 446 fileSizeByHeader = None
447 447 fileIndex = -1
448 448 profileIndex = None
449 449 blockIndex = 0
450 450 nTotalBlocks = 0
451 451 maxTimeStep = 30
452 452 lastUTTime = None
453 453 datablock = None
454 454 dataOut = None
455 455 getByBlock = False
456 456 path = None
457 457 startDate = None
458 458 endDate = None
459 459 startTime = datetime.time(0, 0, 0)
460 460 endTime = datetime.time(23, 59, 59)
461 461 set = None
462 462 expLabel = ""
463 463 online = False
464 464 delay = 60
465 465 nTries = 3 # number of tries
466 466 nFiles = 3 # number of files for searching
467 467 walk = True
468 468 getblock = False
469 469 nTxs = 1
470 470 realtime = False
471 471 blocksize = 0
472 472 blocktime = None
473 473 warnings = True
474 474 verbose = True
475 475 server = None
476 476 format = None
477 477 oneDDict = None
478 478 twoDDict = None
479 479 independentParam = None
480 480 filefmt = None
481 481 folderfmt = None
482 482 open_file = open
483 483 open_mode = 'rb'
484 484
485 485 def run(self):
486 486
487 raise NotImplementedError
487 raise NotImplementedError
488 488
489 489 def getAllowedArgs(self):
490 490 if hasattr(self, '__attrs__'):
491 491 return self.__attrs__
492 492 else:
493 493 return inspect.getargspec(self.run).args
494 494
495 495 def set_kwargs(self, **kwargs):
496 496
497 497 for key, value in kwargs.items():
498 498 setattr(self, key, value)
499
499
500 500 def find_folders(self, path, startDate, endDate, folderfmt, last=False):
501 501
502 folders = [x for f in path.split(',')
502 folders = [x for f in path.split(',')
503 503 for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
504 504 folders.sort()
505 505
506 506 if last:
507 507 folders = [folders[-1]]
508 508
509 for folder in folders:
510 try:
511 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
509 for folder in folders:
510 try:
511 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
512 512 if dt >= startDate and dt <= endDate:
513 513 yield os.path.join(path, folder)
514 514 else:
515 515 log.log('Skipping folder {}'.format(folder), self.name)
516 516 except Exception as e:
517 517 log.log('Skipping folder {}'.format(folder), self.name)
518 518 continue
519 519 return
520
521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
520
521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
522 522 expLabel='', last=False):
523
524 for path in folders:
523
524 for path in folders:
525 525 files = glob.glob1(path, '*{}'.format(ext))
526 526 files.sort()
527 527 if last:
528 if files:
528 if files:
529 529 fo = files[-1]
530 try:
530 try:
531 531 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
532 yield os.path.join(path, expLabel, fo)
533 except Exception as e:
532 yield os.path.join(path, expLabel, fo)
533 except Exception as e:
534 534 pass
535 535 return
536 536 else:
537 537 return
538 538
539 539 for fo in files:
540 try:
541 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
540 try:
541 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
542 542 if dt >= startDate and dt <= endDate:
543 543 yield os.path.join(path, expLabel, fo)
544 544 else:
545 545 log.log('Skipping file {}'.format(fo), self.name)
546 546 except Exception as e:
547 547 log.log('Skipping file {}'.format(fo), self.name)
548 continue
548 continue
549 549
550 550 def searchFilesOffLine(self, path, startDate, endDate,
551 expLabel, ext, walk,
551 expLabel, ext, walk,
552 552 filefmt, folderfmt):
553 553 """Search files in offline mode for the given arguments
554 554
555 555 Return:
556 556 Generator of files
557 557 """
558 558
559 559 if walk:
560 560 folders = self.find_folders(
561 561 path, startDate, endDate, folderfmt)
562 562 else:
563 563 folders = path.split(',')
564
564
565 565 return self.find_files(
566 folders, ext, filefmt, startDate, endDate, expLabel)
566 folders, ext, filefmt, startDate, endDate, expLabel)
567 567
568 568 def searchFilesOnLine(self, path, startDate, endDate,
569 expLabel, ext, walk,
569 expLabel, ext, walk,
570 570 filefmt, folderfmt):
571 571 """Search for the last file of the last folder
572 572
573 573 Arguments:
574 574 path : folder that contains the data files
575 575 expLabel : name of the sub-experiment (subfolder)
576 576 ext : file extension
577 577 walk : if enabled, search for files inside the day subdirectories (doypath)
578 578
579 579 Return:
580 580 generator with the full path of last filename
581 581 """
582
582
583 583 if walk:
584 584 folders = self.find_folders(
585 585 path, startDate, endDate, folderfmt, last=True)
586 586 else:
587 587 folders = path.split(',')
588
588
589 589 return self.find_files(
590 590 folders, ext, filefmt, startDate, endDate, expLabel, last=True)
591 591
592 592 def setNextFile(self):
593 593 """Set the next file to be readed open it and parse de file header"""
594 594
595 595 while True:
596 596 if self.fp != None:
597 self.fp.close()
597 self.fp.close()
598 598
599 599 if self.online:
600 600 newFile = self.setNextFileOnline()
601 601 else:
602 602 newFile = self.setNextFileOffline()
603
603
604 604 if not(newFile):
605 605 if self.online:
606 606 raise schainpy.admin.SchainError('Time to wait for new files reached')
607 607 else:
608 608 if self.fileIndex == -1:
609 609 raise schainpy.admin.SchainWarning('No files found in the given path')
610 610 else:
611 611 raise schainpy.admin.SchainWarning('No more files to read')
612
612
613 613 if self.verifyFile(self.filename):
614 614 break
615
615
616 616 log.log('Opening file: %s' % self.filename, self.name)
617 617
618 618 self.readFirstHeader()
619 619 self.nReadBlocks = 0
620 620
621 621 def setNextFileOnline(self):
622 622 """Check for the next file to be readed in online mode.
623 623
624 624 Set:
625 625 self.filename
626 626 self.fp
627 627 self.filesize
628
628
629 629 Return:
630 630 boolean
631 631
632 632 """
633 633 nextFile = True
634 634 nextDay = False
635 635
636 for nFiles in range(self.nFiles+1):
636 for nFiles in range(self.nFiles+1):
637 637 for nTries in range(self.nTries):
638 638 fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
639 639 if fullfilename is not None:
640 640 break
641 641 log.warning(
642 642 "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
643 643 self.name)
644 644 time.sleep(self.delay)
645 645 nextFile = False
646 continue
647
646 continue
647
648 648 if fullfilename is not None:
649 649 break
650
650
651 651 self.nTries = 1
652 nextFile = True
652 nextFile = True
653 653
654 654 if nFiles == (self.nFiles - 1):
655 655 log.log('Trying with next day...', self.name)
656 656 nextDay = True
657 self.nTries = 3
657 self.nTries = 3
658 658
659 659 if fullfilename:
660 660 self.fileSize = os.path.getsize(fullfilename)
661 661 self.filename = fullfilename
662 662 self.flagIsNewFile = 1
663 663 if self.fp != None:
664 664 self.fp.close()
665 665 self.fp = self.open_file(fullfilename, self.open_mode)
666 666 self.flagNoMoreFiles = 0
667 667 self.fileIndex += 1
668 668 return 1
669 else:
669 else:
670 670 return 0
671
671
672 672 def setNextFileOffline(self):
673 673 """Open the next file to be readed in offline mode"""
674
674
675 675 try:
676 676 filename = next(self.filenameList)
677 677 self.fileIndex +=1
678 678 except StopIteration:
679 679 self.flagNoMoreFiles = 1
680 return 0
680 return 0
681 681
682 682 self.filename = filename
683 683 self.fileSize = os.path.getsize(filename)
684 684 self.fp = self.open_file(filename, self.open_mode)
685 685 self.flagIsNewFile = 1
686 686
687 687 return 1
688
688
689 689 @staticmethod
690 690 def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
691 691 """Check if the given datetime is in range"""
692
692
693 693 if startDate <= dt.date() <= endDate:
694 694 if startTime <= dt.time() <= endTime:
695 695 return True
696 696 return False
697
697
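A short usage sketch of the static range check above (all values illustrative):

    import datetime
    dt = datetime.datetime(2021, 7, 19, 10, 30, 0)
    ok = Reader.isDateTimeInRange(dt,
                                  datetime.date(2021, 7, 19), datetime.date(2021, 7, 19),
                                  datetime.time(0, 0, 0), datetime.time(23, 59, 59))
    # ok is True: the date is inside [startDate, endDate] and the time inside [startTime, endTime]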
698 698 def verifyFile(self, filename):
699 699 """Check for a valid file
700
700
701 701 Arguments:
702 702 filename -- full path filename
703
703
704 704 Return:
705 705 boolean
706 706 """
707 707
708 708 return True
709 709
710 710 def checkForRealPath(self, nextFile, nextDay):
711 711 """Check if the next file to be readed exists"""
712 if nextFile:
713 self.set += 1
714 if nextDay:
715 self.set = 0
716 self.doy += 1
717 foldercounter = 0
718 prefixDirList = [None, 'd', 'D']
719 if self.ext.lower() == ".r": # voltage
720 prefixFileList = ['d', 'D']
721 elif self.ext.lower() == ".pdata": # spectra
722 prefixFileList = ['p', 'P']
723 elif self.ext.lower() == ".hdf5": # HDF5
724 prefixFileList = ['D', 'P'] # HDF5
725
726 # iterate over the possible prefix combinations
727 for prefixDir in prefixDirList:
728 thispath = self.path
729 if prefixDir != None:
730 # build the directory name xYYYYDDD (x=d or x=D)
731 if foldercounter == 0:
732 thispath = os.path.join(self.path, "%s%04d%03d" %
733 (prefixDir, self.year, self.doy))
734 else:
735 thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
736 prefixDir, self.year, self.doy, foldercounter))
737 for prefixFile in prefixFileList: # try both possible file prefix cases
738 # build the file name xYYYYDDDSSS.ext
739 filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
740 fullfilename = os.path.join(
741 thispath, filename)
742
743 if os.path.exists(fullfilename):
744 return fullfilename, filename
745
746 return None, filename
747 #raise NotImplementedError
712 748
713 raise NotImplementedError
714
715 749 def readFirstHeader(self):
716 750 """Parse the file header"""
717 751
718 752 pass
719 753
720 754 def waitDataBlock(self, pointer_location, blocksize=None):
721 755 """
722 756 """
723 757
724 758 currentPointer = pointer_location
725 759 if blocksize is None:
726 760 neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
727 761 else:
728 762 neededSize = blocksize
729 763
730 764 for nTries in range(self.nTries):
731 765 self.fp.close()
732 766 self.fp = open(self.filename, 'rb')
733 767 self.fp.seek(currentPointer)
734 768
735 769 self.fileSize = os.path.getsize(self.filename)
736 770 currentSize = self.fileSize - currentPointer
737 771
738 772 if (currentSize >= neededSize):
739 773 return 1
740 774
741 775 log.warning(
742 776 "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
743 777 self.name
744 778 )
745 779 time.sleep(self.delay)
746 780
747 781 return 0
748 782
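As a rough guide (a sketch using the class defaults above), waitDataBlock re-opens the growing file at most nTries times and sleeps delay seconds between attempts, so the longest wait before it gives up on a block is about:

    nTries, delay = 3, 60        # Reader defaults
    max_wait = nTries * delay    # ~180 seconds, after which waitDataBlock returns 0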
749 783 class JRODataReader(Reader):
750 784
751 785 utc = 0
752 786 nReadBlocks = 0
753 787 foldercounter = 0
754 788 firstHeaderSize = 0
755 789 basicHeaderSize = 24
756 790 __isFirstTimeOnline = 1
757 791 filefmt = "*%Y%j***"
758 792 folderfmt = "*%Y%j"
759 793 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']
760 794
761 795 def getDtypeWidth(self):
762 796
763 797 dtype_index = get_dtype_index(self.dtype)
764 798 dtype_width = get_dtype_width(dtype_index)
765 799
766 800 return dtype_width
767 801
768 802 def checkForRealPath(self, nextFile, nextDay):
769 803 """Check if the next file to be readed exists.
770 804
771 805 Example :
772 806 the correct file name is .../.../D2009307/P2009307367.ext
773 807
774 808 The function then tries the following combinations
775 809 .../.../y2009307367.ext
776 810 .../.../Y2009307367.ext
777 811 .../.../x2009307/y2009307367.ext
778 812 .../.../x2009307/Y2009307367.ext
779 813 .../.../X2009307/y2009307367.ext
780 814 .../.../X2009307/Y2009307367.ext
781 815 for this case, the last letter combination is identical to the file being searched for
782 816
783 817 Return:
784 818 str -- fullpath of the file
785 819 """
786
787
820
821
788 822 if nextFile:
789 823 self.set += 1
790 824 if nextDay:
791 825 self.set = 0
792 826 self.doy += 1
793 827 foldercounter = 0
794 828 prefixDirList = [None, 'd', 'D']
795 829 if self.ext.lower() == ".r": # voltage
796 830 prefixFileList = ['d', 'D']
797 831 elif self.ext.lower() == ".pdata": # spectra
798 832 prefixFileList = ['p', 'P']
799
833
800 834 # iterate over the possible prefix combinations
801 835 for prefixDir in prefixDirList:
802 836 thispath = self.path
803 837 if prefixDir != None:
804 838 # build the directory name xYYYYDDD (x=d or x=D)
805 839 if foldercounter == 0:
806 840 thispath = os.path.join(self.path, "%s%04d%03d" %
807 841 (prefixDir, self.year, self.doy))
808 842 else:
809 843 thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
810 844 prefixDir, self.year, self.doy, foldercounter))
811 845 for prefixFile in prefixFileList: # try both possible file prefix cases
812 846 # build the file name xYYYYDDDSSS.ext
813 847 filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
814 848 fullfilename = os.path.join(
815 849 thispath, filename)
816 850
817 851 if os.path.exists(fullfilename):
818 852 return fullfilename, filename
819
820 return None, filename
821
853
854 return None, filename
855
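A sketch of the names this probing builds, with illustrative year/doy/set values; the directory prefix comes from prefixDirList and the file prefix from prefixFileList:

    year, doy, set_, ext = 2009, 307, 367, '.pdata'
    dirname = "%s%04d%03d" % ('D', year, doy)                    # 'D2009307'
    filename = "%s%04d%03d%03d%s" % ('P', year, doy, set_, ext)  # 'P2009307367.pdata'
    # checkForRealPath tries self.path itself and each d/D day folder for every prefix case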
822 856 def __waitNewBlock(self):
823 857 """
824 858 Return 1 si se encontro un nuevo bloque de datos, 0 de otra forma.
825 859
826 860 Si el modo de lectura es OffLine siempre retorn 0
827 861 """
828 862 if not self.online:
829 863 return 0
830 864
831 865 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
832 866 return 0
833 867
834 868 currentPointer = self.fp.tell()
835 869
836 870 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
837 871
838 872 for nTries in range(self.nTries):
839 873
840 874 self.fp.close()
841 875 self.fp = open(self.filename, 'rb')
842 876 self.fp.seek(currentPointer)
843 877
844 878 self.fileSize = os.path.getsize(self.filename)
845 879 currentSize = self.fileSize - currentPointer
846 880
847 881 if (currentSize >= neededSize):
848 882 self.basicHeaderObj.read(self.fp)
849 883 return 1
850 884
851 885 if self.fileSize == self.fileSizeByHeader:
852 886 # self.flagEoF = True
853 887 return 0
854 888
855 889 print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
856 890 time.sleep(self.delay)
857 891
858 892 return 0
859 893
860 894 def __setNewBlock(self):
861 895
862 896 if self.fp == None:
863 return 0
864
865 if self.flagIsNewFile:
897 return 0
898
899 if self.flagIsNewFile:
866 900 self.lastUTTime = self.basicHeaderObj.utc
867 901 return 1
868 902
869 903 if self.realtime:
870 904 self.flagDiscontinuousBlock = 1
871 905 if not(self.setNextFile()):
872 906 return 0
873 907 else:
874 908 return 1
875 909
876 910 currentSize = self.fileSize - self.fp.tell()
877 911 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
878
912
879 913 if (currentSize >= neededSize):
880 914 self.basicHeaderObj.read(self.fp)
881 915 self.lastUTTime = self.basicHeaderObj.utc
882 916 return 1
883
917
884 918 if self.__waitNewBlock():
885 919 self.lastUTTime = self.basicHeaderObj.utc
886 920 return 1
887 921
888 922 if not(self.setNextFile()):
889 923 return 0
890 924
891 925 deltaTime = self.basicHeaderObj.utc - self.lastUTTime
892 926 self.lastUTTime = self.basicHeaderObj.utc
893 927
894 928 self.flagDiscontinuousBlock = 0
895 929
896 930 if deltaTime > self.maxTimeStep:
897 931 self.flagDiscontinuousBlock = 1
898 932
899 933 return 1
900 934
901 935 def readNextBlock(self):
902 936
903 937 while True:
904 938 if not(self.__setNewBlock()):
905 939 continue
906 940
907 941 if not(self.readBlock()):
908 942 return 0
909 943
910 944 self.getBasicHeader()
911 945
912 946 if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
913 947 print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
914 948 self.processingHeaderObj.dataBlocksPerFile,
915 949 self.dataOut.datatime.ctime()))
916 950 continue
917 951
918 952 break
919 953
920 954 if self.verbose:
921 955 print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
922 956 self.processingHeaderObj.dataBlocksPerFile,
923 957 self.dataOut.datatime.ctime()))
924 958 return 1
925 959
926 960 def readFirstHeader(self):
927 961
928 962 self.basicHeaderObj.read(self.fp)
929 963 self.systemHeaderObj.read(self.fp)
930 964 self.radarControllerHeaderObj.read(self.fp)
931 965 self.processingHeaderObj.read(self.fp)
932 966 self.firstHeaderSize = self.basicHeaderObj.size
933 967
934 968 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
935 969 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
936 970 if datatype == 0:
937 971 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
938 972 elif datatype == 1:
939 973 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
940 974 elif datatype == 2:
941 975 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
942 976 elif datatype == 3:
943 977 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
944 978 elif datatype == 4:
945 979 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
946 980 elif datatype == 5:
947 981 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
948 982 else:
949 983 raise ValueError('Data type was not defined')
950 984
951 985 self.dtype = datatype_str
952 986 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
953 987 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
954 988 self.firstHeaderSize + self.basicHeaderSize * \
955 989 (self.processingHeaderObj.dataBlocksPerFile - 1)
956 990 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
957 991 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
958 992 self.getBlockDimension()
959 993
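The datatype decoding above can be summarised as follows (a sketch; it assumes, as the log2 arithmetic implies, that each DATATYPE_* flag is DATATYPE_CHAR shifted left by the type index):

    import numpy
    def datatype_index(processFlags):
        # 0: int8, 1: int16, 2: int32, 3: int64, 4: float32, 5: float64
        return int(numpy.log2(processFlags & PROCFLAG.DATATYPE_MASK) -
                   numpy.log2(PROCFLAG.DATATYPE_CHAR))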
960 994 def verifyFile(self, filename):
961 995
962 996 flag = True
963 997
964 998 try:
965 999 fp = open(filename, 'rb')
966 1000 except IOError:
967 1001 log.error("File {} can't be opened".format(filename), self.name)
968 1002 return False
969
1003
970 1004 if self.online and self.waitDataBlock(0):
971 1005 pass
972
1006
973 1007 basicHeaderObj = BasicHeader(LOCALTIME)
974 1008 systemHeaderObj = SystemHeader()
975 1009 radarControllerHeaderObj = RadarControllerHeader()
976 1010 processingHeaderObj = ProcessingHeader()
977 1011
978 1012 if not(basicHeaderObj.read(fp)):
979 1013 flag = False
980 1014 if not(systemHeaderObj.read(fp)):
981 1015 flag = False
982 1016 if not(radarControllerHeaderObj.read(fp)):
983 1017 flag = False
984 1018 if not(processingHeaderObj.read(fp)):
985 1019 flag = False
986 1020 if not self.online:
987 1021 dt1 = basicHeaderObj.datatime
988 1022 pos = self.fileSize-processingHeaderObj.blockSize-24
989 1023 if pos<0:
990 1024 flag = False
991 1025 log.error('Invalid size for file: {}'.format(self.filename), self.name)
992 1026 else:
993 1027 fp.seek(pos)
994 1028 if not(basicHeaderObj.read(fp)):
995 1029 flag = False
996 1030 dt2 = basicHeaderObj.datatime
997 1031 if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
998 1032 self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
999 flag = False
1033 flag = False
1000 1034
1001 1035 fp.close()
1002 1036 return flag
1003 1037
1004 1038 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1005 1039
1006 1040 path_empty = True
1007 1041
1008 1042 dateList = []
1009 1043 pathList = []
1010 1044
1011 1045 multi_path = path.split(',')
1012 1046
1013 1047 if not walk:
1014 1048
1015 1049 for single_path in multi_path:
1016 1050
1017 1051 if not os.path.isdir(single_path):
1018 1052 continue
1019 1053
1020 1054 fileList = glob.glob1(single_path, "*" + ext)
1021 1055
1022 1056 if not fileList:
1023 1057 continue
1024 1058
1025 1059 path_empty = False
1026 1060
1027 1061 fileList.sort()
1028 1062
1029 1063 for thisFile in fileList:
1030 1064
1031 1065 if not os.path.isfile(os.path.join(single_path, thisFile)):
1032 1066 continue
1033 1067
1034 1068 if not isRadarFile(thisFile):
1035 1069 continue
1036 1070
1037 1071 if not isFileInDateRange(thisFile, startDate, endDate):
1038 1072 continue
1039 1073
1040 1074 thisDate = getDateFromRadarFile(thisFile)
1041 1075
1042 1076 if thisDate in dateList or single_path in pathList:
1043 1077 continue
1044 1078
1045 1079 dateList.append(thisDate)
1046 1080 pathList.append(single_path)
1047 1081
1048 1082 else:
1049 1083 for single_path in multi_path:
1050 1084
1051 1085 if not os.path.isdir(single_path):
1052 1086 continue
1053 1087
1054 1088 dirList = []
1055 1089
1056 1090 for thisPath in os.listdir(single_path):
1057 1091
1058 1092 if not os.path.isdir(os.path.join(single_path, thisPath)):
1059 1093 continue
1060 1094
1061 1095 if not isRadarFolder(thisPath):
1062 1096 continue
1063 1097
1064 1098 if not isFolderInDateRange(thisPath, startDate, endDate):
1065 1099 continue
1066 1100
1067 1101 dirList.append(thisPath)
1068 1102
1069 1103 if not dirList:
1070 1104 continue
1071 1105
1072 1106 dirList.sort()
1073 1107
1074 1108 for thisDir in dirList:
1075 1109
1076 1110 datapath = os.path.join(single_path, thisDir, expLabel)
1077 1111 fileList = glob.glob1(datapath, "*" + ext)
1078 1112
1079 1113 if not fileList:
1080 1114 continue
1081 1115
1082 1116 path_empty = False
1083 1117
1084 1118 thisDate = getDateFromRadarFolder(thisDir)
1085 1119
1086 1120 pathList.append(datapath)
1087 1121 dateList.append(thisDate)
1088 1122
1089 1123 dateList.sort()
1090 1124
1091 1125 if walk:
1092 1126 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1093 1127 else:
1094 1128 pattern_path = multi_path[0]
1095 1129
1096 1130 if path_empty:
1097 1131 raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
1098 1132 else:
1099 1133 if not dateList:
1100 1134 raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))
1101 1135
1102 1136 if include_path:
1103 1137 return dateList, pathList
1104 1138
1105 1139 return dateList
1106 1140
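Typical use of findDatafiles (a sketch; the path is hypothetical and reader stands for any configured JRODataReader subclass):

    dates = reader.findDatafiles('/DATA_RM/TEST_HDF5', ext='.r', walk=True)
    # -> sorted list of datetime.date objects, one per day folder holding *.r files
    dates, paths = reader.findDatafiles('/DATA_RM/TEST_HDF5', ext='.r',
                                        walk=True, include_path=True)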
1107 1141 def setup(self, **kwargs):
1108
1142
1109 1143 self.set_kwargs(**kwargs)
1110 1144 if not self.ext.startswith('.'):
1111 1145 self.ext = '.{}'.format(self.ext)
1112
1146
1113 1147 if self.server is not None:
1114 1148 if 'tcp://' in self.server:
1115 1149 address = server
1116 1150 else:
1117 1151 address = 'ipc:///tmp/%s' % self.server
1118 1152 self.server = address
1119 1153 self.context = zmq.Context()
1120 1154 self.receiver = self.context.socket(zmq.PULL)
1121 1155 self.receiver.connect(self.server)
1122 1156 time.sleep(0.5)
1123 1157 print('[Starting] ReceiverData from {}'.format(self.server))
1124 1158 else:
1125 1159 self.server = None
1126 1160 if self.path == None:
1127 1161 raise ValueError("[Reading] The path is not valid")
1128 1162
1129 1163 if self.online:
1130 1164 log.log("[Reading] Searching files in online mode...", self.name)
1131 1165
1132 1166 for nTries in range(self.nTries):
1133 1167 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1134 self.endDate, self.expLabel, self.ext, self.walk,
1168 self.endDate, self.expLabel, self.ext, self.walk,
1135 1169 self.filefmt, self.folderfmt)
1136 1170
1137 1171 try:
1138 1172 fullpath = next(fullpath)
1139 1173 except:
1140 1174 fullpath = None
1141
1175
1142 1176 if fullpath:
1143 1177 break
1144 1178
1145 1179 log.warning(
1146 1180 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1147 self.delay, self.path, nTries + 1),
1181 self.delay, self.path, nTries + 1),
1148 1182 self.name)
1149 1183 time.sleep(self.delay)
1150 1184
1151 1185 if not(fullpath):
1152 1186 raise schainpy.admin.SchainError(
1153 'There isn\'t any valid file in {}'.format(self.path))
1187 'There isn\'t any valid file in {}'.format(self.path))
1154 1188
1155 1189 pathname, filename = os.path.split(fullpath)
1156 1190 self.year = int(filename[1:5])
1157 1191 self.doy = int(filename[5:8])
1158 self.set = int(filename[8:11]) - 1
1192 self.set = int(filename[8:11]) - 1
1159 1193 else:
1160 1194 log.log("Searching files in {}".format(self.path), self.name)
1161 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1195 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1162 1196 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1163
1197
1164 1198 self.setNextFile()
1165 1199
1166 1200 return
1167 1201
1168 1202 def getBasicHeader(self):
1169 1203
1170 1204 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1171 1205 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1172 1206
1173 1207 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1174 1208
1175 1209 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1176 1210
1177 1211 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1178 1212
1179 1213 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1180 1214
1181 1215 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1182 1216
1183 1217 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
1184
1218
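A worked example of the per-profile timestamp computed in getBasicHeader (numbers are illustrative):

    utc, miliSecond = 1626700000, 250      # block start taken from the basic header
    ippSeconds, profileIndex = 4e-4, 10    # 400 us inter-pulse period, 11th profile
    utctime = utc + miliSecond / 1000. + profileIndex * ippSeconds
    # -> 1626700000.254 : each profile is offset by profileIndex * ippSeconds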
1185 1219 def getFirstHeader(self):
1186 1220
1187 1221 raise NotImplementedError
1188 1222
1189 1223 def getData(self):
1190 1224
1191 1225 raise NotImplementedError
1192 1226
1193 1227 def hasNotDataInBuffer(self):
1194 1228
1195 1229 raise NotImplementedError
1196 1230
1197 1231 def readBlock(self):
1198 1232
1199 1233 raise NotImplementedError
1200 1234
1201 1235 def isEndProcess(self):
1202 1236
1203 1237 return self.flagNoMoreFiles
1204 1238
1205 1239 def printReadBlocks(self):
1206 1240
1207 1241 print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1208 1242
1209 1243 def printTotalBlocks(self):
1210 1244
1211 1245 print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1212 1246
1213 1247 def run(self, **kwargs):
1214 1248 """
1215 1249
1216 1250 Arguments:
1217 path :
1218 startDate :
1251 path :
1252 startDate :
1219 1253 endDate :
1220 1254 startTime :
1221 1255 endTime :
1222 1256 set :
1223 1257 expLabel :
1224 1258 ext :
1225 1259 online :
1226 1260 delay :
1227 1261 walk :
1228 1262 getblock :
1229 1263 nTxs :
1230 1264 realtime :
1231 1265 blocksize :
1232 1266 blocktime :
1233 1267 skip :
1234 1268 cursor :
1235 1269 warnings :
1236 1270 server :
1237 1271 verbose :
1238 1272 format :
1239 1273 oneDDict :
1240 1274 twoDDict :
1241 1275 independentParam :
1242 1276 """
1243 1277
1244 1278 if not(self.isConfig):
1245 1279 self.setup(**kwargs)
1246 1280 self.isConfig = True
1247 1281 if self.server is None:
1248 1282 self.getData()
1249 1283 else:
1250 1284 self.getFromServer()
1251 1285
1252 1286
1253 1287 class JRODataWriter(Reader):
1254 1288
1255 1289 """
1256 1290 This class writes data to processed files (.r or .pdata). The data is
1257 1291 always written in blocks.
1258 1292 """
1259 1293
1260 1294 setFile = None
1261 1295 profilesPerBlock = None
1262 1296 blocksPerFile = None
1263 1297 nWriteBlocks = 0
1264 1298 fileDate = None
1265 1299
1266 1300 def __init__(self, dataOut=None):
1267 1301 raise NotImplementedError
1268 1302
1269 1303 def hasAllDataInBuffer(self):
1270 1304 raise NotImplementedError
1271 1305
1272 1306 def setBlockDimension(self):
1273 1307 raise NotImplementedError
1274 1308
1275 1309 def writeBlock(self):
1276 1310 raise NotImplementedError
1277 1311
1278 1312 def putData(self):
1279 1313 raise NotImplementedError
1280 1314
1281 1315 def getDtypeWidth(self):
1282 1316
1283 1317 dtype_index = get_dtype_index(self.dtype)
1284 1318 dtype_width = get_dtype_width(dtype_index)
1285 1319
1286 1320 return dtype_width
1287
1321
1288 1322 def getProcessFlags(self):
1289 1323
1290 1324 processFlags = 0
1291 1325
1292 1326 dtype_index = get_dtype_index(self.dtype)
1293 1327 procflag_dtype = get_procflag_dtype(dtype_index)
1294 1328
1295 1329 processFlags += procflag_dtype
1296 1330
1297 1331 if self.dataOut.flagDecodeData:
1298 1332 processFlags += PROCFLAG.DECODE_DATA
1299 1333
1300 1334 if self.dataOut.flagDeflipData:
1301 1335 processFlags += PROCFLAG.DEFLIP_DATA
1302 1336
1303 1337 if self.dataOut.code is not None:
1304 1338 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1305 1339
1306 1340 if self.dataOut.nCohInt > 1:
1307 1341 processFlags += PROCFLAG.COHERENT_INTEGRATION
1308 1342
1309 1343 if self.dataOut.type == "Spectra":
1310 1344 if self.dataOut.nIncohInt > 1:
1311 1345 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1312 1346
1313 1347 if self.dataOut.data_dc is not None:
1314 1348 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1315 1349
1316 1350 if self.dataOut.flagShiftFFT:
1317 1351 processFlags += PROCFLAG.SHIFT_FFT_DATA
1318 1352
1319 1353 return processFlags
1320 1354
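Since the additive composition above only works if every PROCFLAG constant is a distinct bit (which the header definitions are assumed to guarantee), a reader can test the resulting word with bitwise AND, e.g.:

    flags = writer.getProcessFlags()          # 'writer' is any configured JRODataWriter subclass
    was_decoded = bool(flags & PROCFLAG.DECODE_DATA)
    used_coh_int = bool(flags & PROCFLAG.COHERENT_INTEGRATION)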
1321 1355 def setBasicHeader(self):
1322 1356
1323 1357 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1324 1358 self.basicHeaderObj.version = self.versionFile
1325 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1359 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1326 1360 utc = numpy.floor(self.dataOut.utctime)
1327 milisecond = (self.dataOut.utctime - utc) * 1000.0
1361 milisecond = (self.dataOut.utctime - utc) * 1000.0
1328 1362 self.basicHeaderObj.utc = utc
1329 1363 self.basicHeaderObj.miliSecond = milisecond
1330 1364 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1331 1365 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1332 1366 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1333 1367
1334 1368 def setFirstHeader(self):
1335 1369 """
1336 1370 Gets a copy of the First Header
1337 1371
1338 1372 Affected:
1339 1373
1340 1374 self.basicHeaderObj
1341 1375 self.systemHeaderObj
1342 1376 self.radarControllerHeaderObj
1343 1377 self.processingHeaderObj
1344 1378
1345 1379 Return:
1346 1380 None
1347 1381 """
1348 1382
1349 1383 raise NotImplementedError
1350 1384
1351 1385 def __writeFirstHeader(self):
1352 1386 """
1353 1387 Writes the first header of the file, i.e. the Basic Header followed by the Long Header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1354 1388
1355 1389 Affected:
1356 1390 __dataType
1357 1391
1358 1392 Return:
1359 1393 None
1360 1394 """
1361 1395
1362 1396 # COMPUTE PARAMETERS
1363 1397
1364 1398 sizeLongHeader = self.systemHeaderObj.size + \
1365 1399 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1366 1400 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1367 1401
1368 1402 self.basicHeaderObj.write(self.fp)
1369 1403 self.systemHeaderObj.write(self.fp)
1370 1404 self.radarControllerHeaderObj.write(self.fp)
1371 1405 self.processingHeaderObj.write(self.fp)
1372 1406
1373 1407 def __setNewBlock(self):
1374 1408 """
1375 1409 If it is a new file, write the First Header; otherwise write only the Basic Header
1376 1410
1377 1411 Return:
1378 1412 0 : if nothing could be written
1379 1413 1 : if the Basic or the First Header was written
1380 1414 """
1381 1415 if self.fp == None:
1382 1416 self.setNextFile()
1383 1417
1384 1418 if self.flagIsNewFile:
1385 1419 return 1
1386 1420
1387 1421 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1388 1422 self.basicHeaderObj.write(self.fp)
1389 1423 return 1
1390 1424
1391 1425 if not(self.setNextFile()):
1392 1426 return 0
1393 1427
1394 1428 return 1
1395 1429
1396 1430 def writeNextBlock(self):
1397 1431 """
1398 1432 Selects the next data block and writes it to a file
1399 1433
1400 1434 Return:
1401 1435 0 : if the data block could not be written
1402 1436 1 : if the data block was written
1403 1437 """
1404 1438 if not(self.__setNewBlock()):
1405 1439 return 0
1406 1440
1407 1441 self.writeBlock()
1408 1442
1409 1443 print("[Writing] Block No. %d/%d" % (self.blockIndex,
1410 1444 self.processingHeaderObj.dataBlocksPerFile))
1411 1445
1412 1446 return 1
1413 1447
1414 1448 def setNextFile(self):
1415 1449 """Determina el siguiente file que sera escrito
1416 1450
1417 1451 Affected:
1418 1452 self.filename
1419 1453 self.subfolder
1420 1454 self.fp
1421 1455 self.setFile
1422 1456 self.flagIsNewFile
1423 1457
1424 1458 Return:
1425 1459 0 : if the file cannot be written
1426 1460 1 : if the file is ready to be written
1427 1461 """
1428 1462 ext = self.ext
1429 1463 path = self.path
1430 1464
1431 1465 if self.fp != None:
1432 1466 self.fp.close()
1433 1467
1434 1468 if not os.path.exists(path):
1435 1469 os.mkdir(path)
1436 1470
1437 1471 timeTuple = time.localtime(self.dataOut.utctime)
1438 1472 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1439 1473
1440 1474 fullpath = os.path.join(path, subfolder)
1441 1475 setFile = self.setFile
1442 1476
1443 1477 if not(os.path.exists(fullpath)):
1444 1478 os.mkdir(fullpath)
1445 1479 setFile = -1 # initialize the set counter
1446 1480 else:
1447 1481 filesList = os.listdir(fullpath)
1448 1482 if len(filesList) > 0:
1449 1483 filesList = sorted(filesList, key=str.lower)
1450 1484 filen = filesList[-1]
1451 1485 # the filename must have the following format
1452 1486 # 0 1234 567 89A BCDE (hex)
1453 1487 # x YYYY DDD SSS .ext
1454 1488 if isNumber(filen[8:11]):
1455 1489 # initialize the set counter from the set of the last file
1456 1490 setFile = int(filen[8:11])
1457 1491 else:
1458 1492 setFile = -1
1459 1493 else:
1460 1494 setFile = -1 # initialize the set counter
1461 1495
1462 1496 setFile += 1
1463 1497
1464 1498 # If this is a new day it resets some values
1465 1499 if self.dataOut.datatime.date() > self.fileDate:
1466 1500 setFile = 0
1467 1501 self.nTotalBlocks = 0
1468
1502
1469 1503 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1470 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1504 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1471 1505
1472 1506 filename = os.path.join(path, subfolder, filen)
1473 1507
1474 1508 fp = open(filename, 'wb')
1475 1509
1476 1510 self.blockIndex = 0
1477 1511 self.filename = filename
1478 1512 self.subfolder = subfolder
1479 1513 self.fp = fp
1480 1514 self.setFile = setFile
1481 1515 self.flagIsNewFile = 1
1482 1516 self.fileDate = self.dataOut.datatime.date()
1483 1517 self.setFirstHeader()
1484 1518
1485 1519 print('[Writing] Opening file: %s' % self.filename)
1486 1520
1487 1521 self.__writeFirstHeader()
1488 1522
1489 1523 return 1
1490 1524
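The resulting on-disk layout, with illustrative values ('/DATA_RM/OUT' and the 'D' option character are assumptions, since optchar is set by the concrete writer subclass):

    import os
    subfolder = 'd%4.4d%3.3d' % (2021, 200)                              # 'd2021200'
    filen = '{}{:04d}{:03d}{:03d}{}'.format('D', 2021, 200, 0, '.pdata')
    os.path.join('/DATA_RM/OUT', subfolder, filen)
    # -> '/DATA_RM/OUT/d2021200/D2021200000.pdata'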
1491 1525 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1492 1526 """
1493 1527 Sets the format in which the data will be saved and writes the First Header
1494 1528
1495 1529 Inputs:
1496 1530 path : directory where data will be saved
1497 1531 profilesPerBlock : number of profiles per block
1498 1532 set : initial file set
1499 1533 datatype : An integer number that defines data type:
1500 1534 0 : int8 (1 byte)
1501 1535 1 : int16 (2 bytes)
1502 1536 2 : int32 (4 bytes)
1503 1537 3 : int64 (8 bytes)
1504 1538 4 : float32 (4 bytes)
1505 1539 5 : double64 (8 bytes)
1506 1540
1507 1541 Return:
1508 1542 0 : if the setup failed
1509 1543 1 : if the setup succeeded
1510 1544 """
1511 1545
1512 1546 if ext == None:
1513 1547 ext = self.ext
1514 1548
1515 1549 self.ext = ext.lower()
1516 1550
1517 1551 self.path = path
1518
1552
1519 1553 if set is None:
1520 1554 self.setFile = -1
1521 1555 else:
1522 self.setFile = set - 1
1556 self.setFile = set - 1
1523 1557
1524 1558 self.blocksPerFile = blocksPerFile
1525 1559 self.profilesPerBlock = profilesPerBlock
1526 1560 self.dataOut = dataOut
1527 1561 self.fileDate = self.dataOut.datatime.date()
1528 1562 self.dtype = self.dataOut.dtype
1529 1563
1530 1564 if datatype is not None:
1531 1565 self.dtype = get_numpy_dtype(datatype)
1532 1566
1533 1567 if not(self.setNextFile()):
1534 1568 print("[Writing] There isn't a next file")
1535 1569 return 0
1536 1570
1537 1571 self.setBlockDimension()
1538 1572
1539 1573 return 1
1540 1574
1541 1575 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1542 1576
1543 1577 if not(self.isConfig):
1544 1578
1545 1579 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1546 1580 set=set, ext=ext, datatype=datatype, **kwargs)
1547 1581 self.isConfig = True
1548 1582
1549 1583 self.dataOut = dataOut
1550 1584 self.putData()
1551 1585 return self.dataOut
1552 1586
1553 1587 @MPDecorator
1554 1588 class printInfo(Operation):
1555 1589
1556 1590 def __init__(self):
1557 1591
1558 1592 Operation.__init__(self)
1559 1593 self.__printInfo = True
1560 1594
1561 1595 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1562 1596 if self.__printInfo == False:
1563 1597 return
1564 1598
1565 1599 for header in headers:
1566 1600 if hasattr(dataOut, header):
1567 1601 obj = getattr(dataOut, header)
1568 1602 if hasattr(obj, 'printInfo'):
1569 1603 obj.printInfo()
1570 1604 else:
1571 1605 print(obj)
1572 1606 else:
1573 1607 log.warning('Header {} Not found in object'.format(header))
1574 1608
1575 1609 self.__printInfo = False
@@ -1,793 +1,798
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 # SUBCHANNELS EN VEZ DE CHANNELS
7 7 # BENCHMARKS -> PROBLEMAS CON ARCHIVOS GRANDES -> INCONSTANTE EN EL TIEMPO
8 8 # ACTUALIZACION DE VERSION
9 9 # HEADERS
10 10 # MODULO DE ESCRITURA
11 11 # METADATA
12 12
13 13 import os
14 14 import time
15 15 import datetime
16 16 import numpy
17 17 import timeit
18 18 from fractions import Fraction
19 19 from time import time
20 20 from time import sleep
21 21
22 22 import schainpy.admin
23 23 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
24 24 from schainpy.model.data.jrodata import Voltage
25 25 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
26 26
27 27 import pickle
28 28 try:
29 29 import digital_rf
30 30 except:
31 31 pass
32 32
33 33
34 34 class DigitalRFReader(ProcessingUnit):
35 35 '''
36 36 classdocs
37 37 '''
38 38
39 39 def __init__(self):
40 40 '''
41 41 Constructor
42 42 '''
43 43
44 44 ProcessingUnit.__init__(self)
45 45
46 46 self.dataOut = Voltage()
47 47 self.__printInfo = True
48 48 self.__flagDiscontinuousBlock = False
49 49 self.__bufferIndex = 9999999
50 50 self.__codeType = 0
51 51 self.__ippKm = None
52 52 self.__nCode = None
53 53 self.__nBaud = None
54 54 self.__code = None
55 55 self.dtype = None
56 56 self.oldAverage = None
57 57 self.path = None
58 58
59 59 def close(self):
60 60 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
61 61 return
62 62
63 63 def __getCurrentSecond(self):
64 64
65 65 return self.__thisUnixSample / self.__sample_rate
66 66
67 67 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
68 68
69 69 def __setFileHeader(self):
70 70 '''
71 71 In this method every parameter of the dataOut object is initialized (header only, no data)
72 72 '''
73 73 ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
74 74
75 75 nProfiles = 1.0 / ippSeconds # Number of profiles in one second
76 76
77 77 try:
78 78 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
79 79 self.__radarControllerHeader)
80 80 except:
81 81 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
82 82 txA=0,
83 83 txB=0,
84 84 nWindows=1,
85 85 nHeights=self.__nSamples,
86 86 firstHeight=self.__firstHeigth,
87 87 deltaHeight=self.__deltaHeigth,
88 88 codeType=self.__codeType,
89 89 nCode=self.__nCode, nBaud=self.__nBaud,
90 90 code=self.__code)
91 91
92 92 try:
93 93 self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
94 94 except:
95 95 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
96 96 nProfiles=nProfiles,
97 97 nChannels=len(
98 98 self.__channelList),
99 99 adcResolution=14)
100 100 self.dataOut.type = "Voltage"
101 101
102 102 self.dataOut.data = None
103 103
104 104 self.dataOut.dtype = self.dtype
105 105
106 106 # self.dataOut.nChannels = 0
107 107
108 108 # self.dataOut.nHeights = 0
109 109
110 110 self.dataOut.nProfiles = int(nProfiles)
111 111
112 112 self.dataOut.heightList = self.__firstHeigth + \
113 113 numpy.arange(self.__nSamples, dtype=numpy.float) * \
114 114 self.__deltaHeigth
115 115
116 self.dataOut.channelList = list(range(self.__num_subchannels))
117
116 #self.dataOut.channelList = list(range(self.__num_subchannels))
117 self.dataOut.channelList = list(range(len(self.__channelList)))
118 118 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
119 119
120 120 # self.dataOut.channelIndexList = None
121 121
122 122 self.dataOut.flagNoData = True
123 123
124 124 self.dataOut.flagDataAsBlock = False
125 125 # Set to TRUE if the data is discontinuous
126 126 self.dataOut.flagDiscontinuousBlock = False
127 127
128 128 self.dataOut.utctime = None
129 129
130 130 # timezone like jroheader, difference in minutes between UTC and localtime
131 131 self.dataOut.timeZone = self.__timezone / 60
132 132
133 133 self.dataOut.dstFlag = 0
134 134
135 135 self.dataOut.errorCount = 0
136 136
137 137 try:
138 138 self.dataOut.nCohInt = self.fixed_metadata_dict.get(
139 139 'nCohInt', self.nCohInt)
140 140
141 141 # assume the data is already decoded
142 142 self.dataOut.flagDecodeData = self.fixed_metadata_dict.get(
143 143 'flagDecodeData', self.flagDecodeData)
144 144
145 145 # assume the data has not been flipped
146 146 self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']
147 147
148 148 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
149 149
150 150 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
151 151 except:
152 152 pass
153 153
154 154 self.dataOut.ippSeconds = ippSeconds
155 155
156 156 # Time interval between profiles
157 157 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
158 158
159 159 self.dataOut.frequency = self.__frequency
160 160
161 161 self.dataOut.realtime = self.__online
162 162
163 163 def findDatafiles(self, path, startDate=None, endDate=None):
164 164
165 165 if not os.path.isdir(path):
166 166 return []
167 167
168 168 try:
169 169 digitalReadObj = digital_rf.DigitalRFReader(
170 170 path, load_all_metadata=True)
171 171 except:
172 172 digitalReadObj = digital_rf.DigitalRFReader(path)
173 173
174 174 channelNameList = digitalReadObj.get_channels()
175 175
176 176 if not channelNameList:
177 177 return []
178 178
179 179 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
180 180
181 181 sample_rate = metadata_dict['sample_rate'][0]
182 182
183 183 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
184 184
185 185 try:
186 186 timezone = this_metadata_file['timezone'].value
187 187 except:
188 188 timezone = 0
189 189
190 190 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
191 191 channelNameList[0]) / sample_rate - timezone
192 192
193 193 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
194 194 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
195 195
196 196 if not startDate:
197 197 startDate = startDatetime.date()
198 198
199 199 if not endDate:
200 200 endDate = endDatatime.date()
201 201
202 202 dateList = []
203 203
204 204 thisDatetime = startDatetime
205 205
206 206 while(thisDatetime <= endDatatime):
207 207
208 208 thisDate = thisDatetime.date()
209 209
210 210 if thisDate < startDate:
211 211 continue
212 212
213 213 if thisDate > endDate:
214 214 break
215 215
216 216 dateList.append(thisDate)
217 217 thisDatetime += datetime.timedelta(1)
218 218
219 219 return dateList
220 220
221 221 def setup(self, path=None,
222 222 startDate=None,
223 223 endDate=None,
224 224 startTime=datetime.time(0, 0, 0),
225 225 endTime=datetime.time(23, 59, 59),
226 226 channelList=None,
227 227 nSamples=None,
228 228 online=False,
229 229 delay=60,
230 230 buffer_size=1024,
231 231 ippKm=None,
232 232 nCohInt=1,
233 233 nCode=1,
234 234 nBaud=1,
235 235 flagDecodeData=False,
236 236 code=numpy.ones((1, 1), dtype=numpy.int),
237 237 **kwargs):
238 238 '''
239 239 In this method we should set all initial parameters.
240 240
241 241 Inputs:
242 242 path
243 243 startDate
244 244 endDate
245 245 startTime
246 246 endTime
247 247 set
248 248 expLabel
249 249 ext
250 250 online
251 251 delay
252 252 '''
253 253 self.path = path
254 254 self.nCohInt = nCohInt
255 255 self.flagDecodeData = flagDecodeData
256 256 self.i = 0
257 257 if not os.path.isdir(path):
258 258 raise ValueError("[Reading] Directory %s does not exist" % path)
259 259
260 260 try:
261 261 self.digitalReadObj = digital_rf.DigitalRFReader(
262 262 path, load_all_metadata=True)
263 263 except:
264 264 self.digitalReadObj = digital_rf.DigitalRFReader(path)
265 265
266 266 channelNameList = self.digitalReadObj.get_channels()
267 267
268 268 if not channelNameList:
269 269 raise ValueError("[Reading] Directory %s does not have any files" % path)
270 270
271 271 if not channelList:
272 272 channelList = list(range(len(channelNameList)))
273 273
274 274 ########## Reading metadata ######################
275 275
276 276 top_properties = self.digitalReadObj.get_properties(
277 277 channelNameList[channelList[0]])
278 278
279 279 self.__num_subchannels = top_properties['num_subchannels']
280 280 self.__sample_rate = 1.0 * \
281 281 top_properties['sample_rate_numerator'] / \
282 282 top_properties['sample_rate_denominator']
283 283 # self.__samples_per_file = top_properties['samples_per_file'][0]
284 284 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # 0.15 km/us = c/2, range covered per sample
285 285
286 286 this_metadata_file = self.digitalReadObj.get_digital_metadata(
287 287 channelNameList[channelList[0]])
288 288 metadata_bounds = this_metadata_file.get_bounds()
289 289 self.fixed_metadata_dict = this_metadata_file.read(
290 290 metadata_bounds[0])[metadata_bounds[0]] # GET FIRST HEADER
291 291
292 292 try:
293 293 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
294 294 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
295 295 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
296 296 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
297 297 except:
298 298 pass
299 299
300 300 self.__frequency = None
301 301
302 302 self.__frequency = self.fixed_metadata_dict.get('frequency', 1)
303 303
304 304 self.__timezone = self.fixed_metadata_dict.get('timezone', 18000)
305 305
306 306 try:
307 307 nSamples = self.fixed_metadata_dict['nSamples']
308 308 except:
309 309 nSamples = None
310 310
311 311 self.__firstHeigth = 0
312 312
313 313 try:
314 314 codeType = self.__radarControllerHeader['codeType']
315 315 except:
316 316 codeType = 0
317 317
318 318 try:
319 319 if codeType:
320 320 nCode = self.__radarControllerHeader['nCode']
321 321 nBaud = self.__radarControllerHeader['nBaud']
322 322 code = self.__radarControllerHeader['code']
323 323 except:
324 324 pass
325 325
326 326 if not ippKm:
327 327 try:
328 328 # seconds to km
329 329 ippKm = self.__radarControllerHeader['ipp']
330 330 except:
331 331 ippKm = None
332 332 ####################################################
333 333 self.__ippKm = ippKm
334 334 startUTCSecond = None
335 335 endUTCSecond = None
336 336
337 337 if startDate:
338 338 startDatetime = datetime.datetime.combine(startDate, startTime)
339 339 startUTCSecond = (
340 340 startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
341 341
342 342 if endDate:
343 343 endDatetime = datetime.datetime.combine(endDate, endTime)
344 344 endUTCSecond = (endDatetime - datetime.datetime(1970,
345 345 1, 1)).total_seconds() + self.__timezone
346 346
347
348 print(startUTCSecond,endUTCSecond)
347 349 start_index, end_index = self.digitalReadObj.get_bounds(
348 350 channelNameList[channelList[0]])
349 351
352 print("*****",start_index,end_index)
350 353 if not startUTCSecond:
351 354 startUTCSecond = start_index / self.__sample_rate
352 355
353 356 if start_index > startUTCSecond * self.__sample_rate:
354 357 startUTCSecond = start_index / self.__sample_rate
355 358
356 359 if not endUTCSecond:
357 360 endUTCSecond = end_index / self.__sample_rate
358 361
359 362 if end_index < endUTCSecond * self.__sample_rate:
360 363 endUTCSecond = end_index / self.__sample_rate
361 364 if not nSamples:
362 365 if not ippKm:
363 366 raise ValueError("[Reading] nSamples or ippKm should be defined")
364 367 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
365 368 channelBoundList = []
366 369 channelNameListFiltered = []
367 370
368 371 for thisIndexChannel in channelList:
369 372 thisChannelName = channelNameList[thisIndexChannel]
370 373 start_index, end_index = self.digitalReadObj.get_bounds(
371 374 thisChannelName)
372 375 channelBoundList.append((start_index, end_index))
373 376 channelNameListFiltered.append(thisChannelName)
374 377
375 378 self.profileIndex = 0
376 379 self.i = 0
377 380 self.__delay = delay
378 381
379 382 self.__codeType = codeType
380 383 self.__nCode = nCode
381 384 self.__nBaud = nBaud
382 385 self.__code = code
383 386
384 387 self.__datapath = path
385 388 self.__online = online
386 389 self.__channelList = channelList
387 390 self.__channelNameList = channelNameListFiltered
388 391 self.__channelBoundList = channelBoundList
389 392 self.__nSamples = nSamples
390 393 self.__samples_to_read = int(nSamples) # FIXED: currently 40
391 394 self.__nChannels = len(self.__channelList)
392 395
393 396 self.__startUTCSecond = startUTCSecond
394 397 self.__endUTCSecond = endUTCSecond
395 398
396 399 self.__timeInterval = 1.0 * self.__samples_to_read / \
397 400 self.__sample_rate # Time interval
398 401
399 402 if online:
400 403 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
401 404 startUTCSecond = numpy.floor(endUTCSecond)
402 405
403 406 # because in the other method the first thing done is to add samples_to_read
404 407 self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
405 408
406 self.__data_buffer = numpy.zeros(
407 (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
409 #self.__data_buffer = numpy.zeros(
410 # (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
411 self.__data_buffer = numpy.zeros((int(len(channelList)), self.__samples_to_read), dtype=numpy.complex)
412
408 413
409 414 self.__setFileHeader()
410 415 self.isConfig = True
411 416
412 417 print("[Reading] Digital RF Data was found from %s to %s " % (
413 418 datetime.datetime.utcfromtimestamp(
414 419 self.__startUTCSecond - self.__timezone),
415 420 datetime.datetime.utcfromtimestamp(
416 421 self.__endUTCSecond - self.__timezone)
417 422 ))
418 423
419 424 print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
420 425 datetime.datetime.utcfromtimestamp(
421 426 endUTCSecond - self.__timezone)
422 427 ))
423 428 self.oldAverage = None
424 429 self.count = 0
425 430 self.executionTime = 0
426 431
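A quick sanity check of the nSamples fallback used in setup above (sample rate and IPP are illustrative):

    sample_rate = 1e6                       # 1 MHz complex sampling
    deltaH_km = 1e6 * 0.15 / sample_rate    # 0.15 km of range per sample (c/2)
    ippKm = 60.0                            # 60 km inter-pulse period
    nSamples = int(ippKm / deltaH_km)       # 400 samples per IPP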
427 432 def __reload(self):
428 433 # print
429 434 # print "%s not in range [%s, %s]" %(
430 435 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
431 436 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
432 437 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
433 438 # )
434 439 print("[Reading] reloading metadata ...")
435 440
436 441 try:
437 442 self.digitalReadObj.reload(complete_update=True)
438 443 except:
439 self.digitalReadObj = digital_rf.DigitalRFReader(self.path)
444 self.digitalReadObj = digital_rf.DigitalRFReader(self.path)
440 445
441 446 start_index, end_index = self.digitalReadObj.get_bounds(
442 447 self.__channelNameList[self.__channelList[0]])
443 448
444 449 if start_index > self.__startUTCSecond * self.__sample_rate:
445 450 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
446 451
447 452 if end_index > self.__endUTCSecond * self.__sample_rate:
448 453 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
449 454 print()
450 455 print("[Reading] New timerange found [%s, %s] " % (
451 456 datetime.datetime.utcfromtimestamp(
452 457 self.__startUTCSecond - self.__timezone),
453 458 datetime.datetime.utcfromtimestamp(
454 459 self.__endUTCSecond - self.__timezone)
455 460 ))
456 461
457 462 return True
458 463
459 464 return False
460 465
461 466 def timeit(self, toExecute):
462 467 t0 = time.time()
463 468 toExecute()
464 469 self.executionTime = time.time() - t0
465 470 if self.oldAverage is None:
466 471 self.oldAverage = self.executionTime
467 472 self.oldAverage = (self.executionTime + self.count *
468 473 self.oldAverage) / (self.count + 1.0)
469 474 self.count = self.count + 1.0
470 475 return
471 476
472 477 def __readNextBlock(self, seconds=30, volt_scale=1):
473 478 '''
474 479 '''
475 480
476 481 # Set the next data
477 482 self.__flagDiscontinuousBlock = False
478 483 self.__thisUnixSample += self.__samples_to_read
479
484
480 485 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
481 486 print ("[Reading] There are no more data into selected time-range")
482 487 if self.__online:
483 488 sleep(3)
484 489 self.__reload()
485 490 else:
486 491 return False
487 492
488 493 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
489 494 return False
490 495 self.__thisUnixSample -= self.__samples_to_read
491 496
492 497 indexChannel = 0
493 498
494 499 dataOk = False
495
500
496 501 for thisChannelName in self.__channelNameList: # TODO: multiple channels?
497 502 for indexSubchannel in range(self.__num_subchannels):
498 503 try:
499 504 t0 = time()
500 505 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
501 506 self.__samples_to_read,
502 507 thisChannelName, sub_channel=indexSubchannel)
503 508 self.executionTime = time() - t0
504 509 if self.oldAverage is None:
505 510 self.oldAverage = self.executionTime
506 511 self.oldAverage = (
507 512 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
508 513 self.count = self.count + 1.0
509 514
510 515 except IOError as e:
511 516 # read next profile
512 517 self.__flagDiscontinuousBlock = True
513 518 print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
514 519 break
515 520
516 521 if result.shape[0] != self.__samples_to_read:
517 522 self.__flagDiscontinuousBlock = True
518 523 print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
519 524 result.shape[0],
520 525 self.__samples_to_read))
521 526 break
522
523 self.__data_buffer[indexSubchannel, :] = result * volt_scale
527
528 self.__data_buffer[indexChannel, :] = result * volt_scale
524 529 indexChannel+=1
525 530
526 531 dataOk = True
527 532
528 533 self.__utctime = self.__thisUnixSample / self.__sample_rate
529 534
530 535 if not dataOk:
531 536 return False
532 537
533 538 print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
534 539 self.__samples_to_read,
535 540 self.__timeInterval))
536 541
537 542 self.__bufferIndex = 0
538 543
539 544 return True
540 545
541 546 def __isBufferEmpty(self):
542 547 return self.__bufferIndex > self.__samples_to_read - self.__nSamples # 40960 - 40
543 548
544 549 def getData(self, seconds=30, nTries=5):
545 550 '''
546 551 This method gets the data from files and puts it into the dataOut object
547 552
548 553 In addition, the buffer counter is increased by one.
549 554
550 555 Return:
551 556 data : returns a profile of voltages (heights x channels) copied from the
552 557 buffer. If there are no more files to read it returns None.
553 558
554 559 Affected:
555 560 self.dataOut
556 561 self.profileIndex
557 562 self.flagDiscontinuousBlock
558 563 self.flagIsNewBlock
559 564 '''
560 565 #print("getdata")
561 566 err_counter = 0
562 567 self.dataOut.flagNoData = True
563 568
564 569 if self.__isBufferEmpty():
565 570 #print("hi")
566 571 self.__flagDiscontinuousBlock = False
567 572
568 573 while True:
569 574 #print ("q ha pasado")
570 575 if self.__readNextBlock():
571 576 break
572 577 if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
573 578 raise schainpy.admin.SchainError('Error')
574 579 return
575 580
576 581 if self.__flagDiscontinuousBlock:
577 582 raise schainpy.admin.SchainError('discontinuous block found')
578 583 return
579 584
580 585 if not self.__online:
581 586 raise schainpy.admin.SchainError('Online?')
582 587 return
583 588
584 589 err_counter += 1
585 590 if err_counter > nTries:
586 591 raise schainpy.admin.SchainError('Max retries reached')
587 592 return
588 593
589 594 print('[Reading] waiting %d seconds to read a new block' % seconds)
590 time.sleep(seconds)
595 sleep(seconds)
591 596
592 597 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
593 598 self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
594 599 self.dataOut.flagNoData = False
595 600 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
596 601 self.dataOut.profileIndex = self.profileIndex
597 602
598 603 self.__bufferIndex += self.__nSamples
599 604 self.profileIndex += 1
600 605
601 606 if self.profileIndex == self.dataOut.nProfiles:
602 607 self.profileIndex = 0
603 608
604 609 return True
605 610
606 611 def printInfo(self):
607 612 '''
608 613 '''
609 614 if self.__printInfo == False:
610 615 return
611 616
612 617 # self.systemHeaderObj.printInfo()
613 618 # self.radarControllerHeaderObj.printInfo()
614 619
615 620 self.__printInfo = False
616 621
617 622 def printNumberOfBlock(self):
618 623 '''
619 624 '''
620 625 return
621 626 # print self.profileIndex
622 627
623 628 def run(self, **kwargs):
624 629 '''
625 630 This method will be called many times so here you should put all your code
626 631 '''
627
632
628 633 if not self.isConfig:
629 634 self.setup(**kwargs)
630 635 #self.i = self.i+1
631 636 self.getData(seconds=self.__delay)
632
637
633 638 return
634 639
635 640 @MPDecorator
636 641 class DigitalRFWriter(Operation):
637 642 '''
638 643 classdocs
639 644 '''
640 645
641 646 def __init__(self, **kwargs):
642 647 '''
643 648 Constructor
644 649 '''
645 650 Operation.__init__(self, **kwargs)
646 651 self.metadata_dict = {}
647 652 self.dataOut = None
648 653 self.dtype = None
649 654 self.oldAverage = 0
650 655
651 656 def setHeader(self):
652 657
653 658 self.metadata_dict['frequency'] = self.dataOut.frequency
654 659 self.metadata_dict['timezone'] = self.dataOut.timeZone
655 660 self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
656 661 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
657 662 self.metadata_dict['heightList'] = self.dataOut.heightList
658 663 self.metadata_dict['channelList'] = self.dataOut.channelList
659 664 self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
660 665 self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
661 666 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
662 667 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
663 668 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
664 669 self.metadata_dict['type'] = self.dataOut.type
665 670 self.metadata_dict['flagDataAsBlock']= getattr(
666 671 self.dataOut, 'flagDataAsBlock', None) # check
667 672
668 673 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
669 674 '''
670 675 In this method we should set all initial parameters.
671 676 Input:
672 677 dataOut: Input data will also be output data
673 678 '''
674 679 self.setHeader()
675 680 self.__ippSeconds = dataOut.ippSeconds
676 681 self.__deltaH = dataOut.getDeltaH()
677 682 self.__sample_rate = 1e6 * 0.15 / self.__deltaH
678 683 self.__dtype = dataOut.dtype
679 684 if len(dataOut.dtype) == 2:
680 685 self.__dtype = dataOut.dtype[0]
681 686 self.__nSamples = dataOut.systemHeaderObj.nSamples
682 687 self.__nProfiles = dataOut.nProfiles
683 688
684 689 if self.dataOut.type != 'Voltage':
685 690 raise ValueError('Digital RF cannot be used with this data type')
686 691 self.arr_data = numpy.ones((1, dataOut.nFFTPoints * len(
687 692 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
688 693 else:
689 694 self.arr_data = numpy.ones((self.__nSamples, len(
690 695 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
691 696
692 697 file_cadence_millisecs = 1000
693 698
694 699 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
695 700 sample_rate_numerator = int(sample_rate_fraction.numerator)
696 701 sample_rate_denominator = int(sample_rate_fraction.denominator)
697 702 start_global_index = dataOut.utctime * self.__sample_rate
698 703
699 704 uuid = 'prueba'
700 705 compression_level = 0
701 706 checksum = False
702 707 is_complex = True
703 708 num_subchannels = len(dataOut.channelList)
704 709 is_continuous = True
705 710 marching_periods = False
706 711
707 712 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
708 713 fileCadence, start_global_index,
709 714 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
710 715 is_complex, num_subchannels, is_continuous, marching_periods)
711 716 metadata_dir = os.path.join(path, 'metadata')
712 717 os.makedirs(metadata_dir, exist_ok=True)
713 718 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, # 236, file_cadence_millisecs / 1000
714 719 sample_rate_numerator, sample_rate_denominator,
715 720 metadataFile)
716 721 self.isConfig = True
717 722 self.currentSample = 0
718 723 self.oldAverage = 0
719 724 self.count = 0
720 725 return
721 726
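# Standalone sketch (assumptions only, not schainpy code) of how setup() above derives the
# rational sample rate and the starting sample index handed to digital_rf: the sample rate
# in Hz is split into an integer numerator/denominator with fractions.Fraction, and the
# start index is the UTC time multiplied by the sample rate.
from fractions import Fraction

def sample_rate_as_fraction(sample_rate_hz):
    frac = Fraction(sample_rate_hz).limit_denominator()
    return int(frac.numerator), int(frac.denominator)

def start_global_index(utctime, sample_rate_hz):
    return int(utctime * sample_rate_hz)

if __name__ == "__main__":
    # e.g. an assumed deltaH of 1.5 km gives sample_rate = 1e6 * 0.15 / 1.5 = 100 kHz
    num, den = sample_rate_as_fraction(1e6 * 0.15 / 1.5)
    print(num, den)                                   # -> 100000 1
    print(start_global_index(1617490240.48, 100000))  # sample index at that UTC time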
722 727 def writeMetadata(self):
723 728 start_idx = self.__sample_rate * self.dataOut.utctime
724 729
725 730 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
726 731 )
727 732 self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
728 733 )
729 734 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
730 735 )
731 736 self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
732 737 return
733 738
734 739 def timeit(self, toExecute):
735 740 t0 = time()
736 741 toExecute()
737 742 self.executionTime = time() - t0
738 743 if self.oldAverage is None:
739 744 self.oldAverage = self.executionTime
740 745 self.oldAverage = (self.executionTime + self.count *
741 746 self.oldAverage) / (self.count + 1.0)
742 747 self.count = self.count + 1.0
743 748 return
744 749
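# Minimal sketch of the running-average update used by timeit() above: each new execution
# time is folded into the mean without storing the history. Illustrative names only.
def update_running_mean(old_mean, count, new_value):
    return (new_value + count * old_mean) / (count + 1.0)

if __name__ == "__main__":
    mean, count = 0.0, 0
    for t in (0.10, 0.20, 0.30):
        mean = update_running_mean(mean, count, t)
        count += 1
    print(round(mean, 3))  # -> 0.2, the plain average of the three samples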
745 750 def writeData(self):
746 751 if self.dataOut.type != 'Voltage':
747 752 raise ValueError('Digital RF cannot be used with this data type')
748 753 for channel in self.dataOut.channelList:
749 754 for i in range(self.dataOut.nFFTPoints):
750 755 self.arr_data[1][channel * self.dataOut.nFFTPoints +
751 756 i]['r'] = self.dataOut.data[channel][i].real
752 757 self.arr_data[1][channel * self.dataOut.nFFTPoints +
753 758 i]['i'] = self.dataOut.data[channel][i].imag
754 759 else:
755 760 for i in range(self.dataOut.systemHeaderObj.nSamples):
756 761 for channel in self.dataOut.channelList:
757 762 self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
758 763 self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag
759 764
760 765 def f(): return self.digitalWriteObj.rf_write(self.arr_data)
761 766 self.timeit(f)
762 767
763 768 return
764 769
765 770 def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=1000, dirCadence=36000, metadataCadence=1, **kwargs):
766 771 '''
767 772 This method will be called many times so here you should put all your code
768 773 Inputs:
769 774 dataOut: object with the data
770 775 '''
771 776 # print dataOut.__dict__
772 777 self.dataOut = dataOut
773 778 if not self.isConfig:
774 779 self.setup(dataOut, path, frequency, fileCadence,
775 780 dirCadence, metadataCadence, **kwargs)
776 781 self.writeMetadata()
777 782
778 783 self.writeData()
779 784
780 785 ## self.currentSample += 1
781 786 # if self.dataOut.flagDataAsBlock or self.currentSample == 1:
782 787 # self.writeMetadata()
783 788 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
784 789
785 790 return dataOut # this return does not appear in version 2.7
786 791
787 792 def close(self):
788 793 print('[Writing] - Closing files ')
789 794 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
790 795 try:
791 796 self.digitalWriteObj.close()
792 797 except:
793 798 pass
@@ -1,347 +1,351
1 1 import numpy
2 2
3 3 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
4 4 from schainpy.model.data.jrodata import SpectraHeis
5 5 from schainpy.utils import log
6 6
7 7
8 8
9 9 class SpectraHeisProc(ProcessingUnit):
10 10
11 11 def __init__(self):#, **kwargs):
12 12
13 13 ProcessingUnit.__init__(self)#, **kwargs)
14 14
15 15 # self.buffer = None
16 16 # self.firstdatatime = None
17 17 # self.profIndex = 0
18 18 self.dataOut = SpectraHeis()
19 19
20 20 def __updateObjFromVoltage(self):
21 21
22 22 self.dataOut.timeZone = self.dataIn.timeZone
23 23 self.dataOut.dstFlag = self.dataIn.dstFlag
24 24 self.dataOut.errorCount = self.dataIn.errorCount
25 25 self.dataOut.useLocalTime = self.dataIn.useLocalTime
26 26
27 27 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()#
28 28 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()#
29 29 self.dataOut.channelList = self.dataIn.channelList
30 30 self.dataOut.heightList = self.dataIn.heightList
31 31 # self.dataOut.dtype = self.dataIn.dtype
32 32 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
33 33 # self.dataOut.nHeights = self.dataIn.nHeights
34 34 # self.dataOut.nChannels = self.dataIn.nChannels
35 35 self.dataOut.nBaud = self.dataIn.nBaud
36 36 self.dataOut.nCode = self.dataIn.nCode
37 37 self.dataOut.code = self.dataIn.code
38 38 # self.dataOut.nProfiles = 1
39 39 self.dataOut.ippFactor = 1
40 40 self.dataOut.noise_estimation = None
41 41 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
42 42 self.dataOut.nFFTPoints = self.dataIn.nHeights
43 43 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
44 44 # self.dataOut.flagNoData = self.dataIn.flagNoData
45 45 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
46 46 self.dataOut.utctime = self.dataIn.utctime
47 47 # self.dataOut.utctime = self.firstdatatime
48 48 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assume the data is already decoded
49 49 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assume the data is not flipped
50 50 # self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
51 51 self.dataOut.nCohInt = self.dataIn.nCohInt
52 52 self.dataOut.nIncohInt = 1
53 53 # self.dataOut.ippSeconds= self.dataIn.ippSeconds
54 54 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
55 55
56 56 # self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nIncohInt
57 57 # self.dataOut.set=self.dataIn.set
58 58 # self.dataOut.deltaHeight=self.dataIn.deltaHeight
59 59
60 60
61 61 def __updateObjFromFits(self):
62 62
63 63 self.dataOut.utctime = self.dataIn.utctime
64 64 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
65 65
66 66 self.dataOut.channelList = self.dataIn.channelList
67 67 self.dataOut.heightList = self.dataIn.heightList
68 68 self.dataOut.data_spc = self.dataIn.data
69 69 self.dataOut.ippSeconds = self.dataIn.ippSeconds
70 70 self.dataOut.nCohInt = self.dataIn.nCohInt
71 71 self.dataOut.nIncohInt = self.dataIn.nIncohInt
72 72 # self.dataOut.timeInterval = self.dataIn.timeInterval
73 73 self.dataOut.timeZone = self.dataIn.timeZone
74 74 self.dataOut.useLocalTime = True
75 75 # self.dataOut.
76 76 # self.dataOut.
77 77
78 78 def __getFft(self):
79 79
80 80 fft_volt = numpy.fft.fft(self.dataIn.data, axis=1)
81 81 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
82 82 spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt))/(self.dataOut.nFFTPoints)
83 83 self.dataOut.data_spc = spc
84 84
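# Standalone sketch (not schainpy code) of the spectrum computed by __getFft() above:
# an FFT along the profile axis, fftshift to centre zero frequency, and the power
# |F|^2 normalised by the number of FFT points.
import numpy as np

def heis_power_spectrum(volts, n_fft):
    """volts: complex array (channels x samples); returns the power spectrum, same shape."""
    fft_volt = np.fft.fftshift(np.fft.fft(volts, axis=1), axes=(1,))
    return np.abs(fft_volt * np.conjugate(fft_volt)) / n_fft

if __name__ == "__main__":
    n = 64
    t = np.arange(n)
    tone = np.exp(2j * np.pi * 8 * t / n)             # single complex tone, one channel
    spc = heis_power_spectrum(tone[np.newaxis, :], n)
    print(spc.shape, int(np.argmax(spc[0])))          # -> (1, 64) 40  (bin 32 + 8 after the shift)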
85 85 def run(self):
86 86
87 87 self.dataOut.flagNoData = True
88 88
89 89 if self.dataIn.type == "Fits":
90 90 self.__updateObjFromFits()
91 91 self.dataOut.flagNoData = False
92 return
92 return
93 93
94 94 if self.dataIn.type == "SpectraHeis":
95 95 self.dataOut.copy(self.dataIn)
96 96 return
97 97
98 98 if self.dataIn.type == "Voltage":
99 99 self.__updateObjFromVoltage()
100 100 self.__getFft()
101 101 self.dataOut.flagNoData = False
102 102
103 103 return
104 104
105 105 raise ValueError("The type object %s is not valid"%(self.dataIn.type))
106 106
107 107
108 108 def selectChannels(self, channelList):
109 109
110 110 channelIndexList = []
111 111
112 112 for channel in channelList:
113 113 index = self.dataOut.channelList.index(channel)
114 114 channelIndexList.append(index)
115 115
116 116 self.selectChannelsByIndex(channelIndexList)
117 117
118 118 def selectChannelsByIndex(self, channelIndexList):
119 119 """
120 120 Selects a block of data based on the channels given in channelIndexList
121 121
122 122 Input:
123 123 channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
124 124
125 125 Affected:
126 126 self.dataOut.data
127 127 self.dataOut.channelIndexList
128 128 self.dataOut.nChannels
129 129 self.dataOut.m_ProcessingHeader.totalSpectra
130 130 self.dataOut.systemHeaderObj.numChannels
131 131 self.dataOut.m_ProcessingHeader.blockSize
132 132
133 133 Return:
134 134 None
135 135 """
136 136
137 137 for channelIndex in channelIndexList:
138 138 if channelIndex not in self.dataOut.channelIndexList:
139 139 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
140 140
141 141 data_spc = self.dataOut.data_spc[channelIndexList,:]
142 142
143 143 self.dataOut.data_spc = data_spc
144 144 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
145 145
146 146 return 1
147 147
148 148
149 149 class IncohInt4SpectraHeis(Operation):
150 150
151 151 isConfig = False
152 152
153 153 __profIndex = 0
154 154 __withOverapping = False
155 155
156 156 __byTime = False
157 157 __initime = None
158 158 __lastdatatime = None
159 159 __integrationtime = None
160 160
161 161 __buffer = None
162 162
163 163 __dataReady = False
164 164
165 165 n = None
166 166
167 167 def __init__(self):#, **kwargs):
168 168
169 169 Operation.__init__(self)#, **kwargs)
170 170 # self.isConfig = False
171 171
172 172 def setup(self, n=None, timeInterval=None, overlapping=False):
173 173 """
174 174 Set the parameters of the integration class.
175 175
176 176 Inputs:
177 177
178 178 n : Number of incoherent integrations
179 179 timeInterval : Integration time. If the parameter "n" is given, this one is ignored
180 180 overlapping :
181 181
182 182 """
183 183
184 184 self.__initime = None
185 185 self.__lastdatatime = 0
186 186 self.__buffer = None
187 187 self.__dataReady = False
188 188
189 189
190 190 if n == None and timeInterval == None:
191 191 raise ValueError("n or timeInterval should be specified ...")
192 192
193 193 if n != None:
194 194 self.n = n
195 195 self.__byTime = False
196 196 else:
197 197 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
198 198 self.n = 9999
199 199 self.__byTime = True
200 200
201 201 if overlapping:
202 202 self.__withOverapping = True
203 203 self.__buffer = None
204 204 else:
205 205 self.__withOverapping = False
206 206 self.__buffer = 0
207 207
208 208 self.__profIndex = 0
209 209
210 210 def putData(self, data):
211 211
212 212 """
213 213 Add a profile to the __buffer and increase the __profIndex by one
214 214
215 215 """
216 216
217 217 if not self.__withOverapping:
218 218 self.__buffer += data.copy()
219 219 self.__profIndex += 1
220 220 return
221 221
222 222 #Overlapping data
223 223 nChannels, nHeis = data.shape
224 224 data = numpy.reshape(data, (1, nChannels, nHeis))
225 225
226 226 #If the buffer is empty then it takes the data value
227 227 if self.__buffer is None:
228 228 self.__buffer = data
229 229 self.__profIndex += 1
230 230 return
231 231
232 232 #If the buffer length is lower than n then stack the data value
233 233 if self.__profIndex < self.n:
234 234 self.__buffer = numpy.vstack((self.__buffer, data))
235 235 self.__profIndex += 1
236 236 return
237 237
238 238 #If the buffer length is equal to n then replacing the last buffer value with the data value
239 239 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
240 240 self.__buffer[self.n-1] = data
241 241 self.__profIndex = self.n
242 242 return
243 243
244 244
245 245 def pushData(self):
246 246 """
247 247 Return the sum of the last profiles and the profiles used in the sum.
248 248
249 249 Affected:
250 250
251 251 self.__profileIndex
252 252
253 253 """
254 254
255 255 if not self.__withOverapping:
256 256 data = self.__buffer
257 257 n = self.__profIndex
258 258
259 259 self.__buffer = 0
260 260 self.__profIndex = 0
261 261
262 262 return data, n
263 263
264 264 #Integration with Overlapping
265 265 data = numpy.sum(self.__buffer, axis=0)
266 266 n = self.__profIndex
267 267
268 268 return data, n
269 269
270 270 def byProfiles(self, data):
271 271
272 272 self.__dataReady = False
273 273 avgdata = None
274 274 # n = None
275 275
276 276 self.putData(data)
277 277
278 278 if self.__profIndex == self.n:
279 279
280 280 avgdata, n = self.pushData()
281 281 self.__dataReady = True
282 282
283 283 return avgdata
284 284
285 285 def byTime(self, data, datatime):
286 286
287 287 self.__dataReady = False
288 288 avgdata = None
289 289 n = None
290 290
291 291 self.putData(data)
292 292
293 293 if (datatime - self.__initime) >= self.__integrationtime:
294 294 avgdata, n = self.pushData()
295 295 self.n = n
296 296 self.__dataReady = True
297 297
298 298 return avgdata
299 299
300 300 def integrate(self, data, datatime=None):
301 301
302 302 if self.__initime == None:
303 303 self.__initime = datatime
304 304
305 #if self.__profIndex == 0:
306 # self.__initime = datatime
307
305 308 if self.__byTime:
306 309 avgdata = self.byTime(data, datatime)
307 310 else:
308 311 avgdata = self.byProfiles(data)
309 312
310 313
311 314 self.__lastdatatime = datatime
312 315
313 316 if avgdata is None:
314 317 return None, None
315 318
316 319 avgdatatime = self.__initime
317 320
318 321 deltatime = datatime -self.__lastdatatime
319 322
320 323 if not self.__withOverapping:
321 324 self.__initime = datatime
322 325 else:
323 326 self.__initime += deltatime
324 327
325 328 return avgdata, avgdatatime
326 329
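# Minimal sketch (illustrative only) of the non-overlapping incoherent integration done by
# putData()/pushData() above: n successive spectra are accumulated and released as one sum,
# and nIncohInt is scaled by n downstream.
import numpy as np

def incoherent_integrate(spectra_stream, n):
    """Yield the sum of every n consecutive spectra from an iterable of 1-D arrays."""
    acc, count = 0, 0
    for spc in spectra_stream:
        acc = acc + spc
        count += 1
        if count == n:
            yield acc
            acc, count = 0, 0

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    stream = (rng.random(8) for _ in range(6))
    sums = list(incoherent_integrate(stream, n=3))
    print(len(sums), sums[0].shape)  # -> 2 (8,)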
327 330 def run(self, dataOut, n=None, timeInterval=None, overlapping=False, **kwargs):
328 331
329 332 if not self.isConfig:
330 333 self.setup(n=n, timeInterval=timeInterval, overlapping=overlapping)
331 334 self.isConfig = True
332 335
336 #print("utc_time",dataOut.utctime)
333 337 avgdata, avgdatatime = self.integrate(dataOut.data_spc, dataOut.utctime)
334 338
335 339 # dataOut.timeInterval *= n
336 340 dataOut.flagNoData = True
337 341
338 342 if self.__dataReady:
339 343 dataOut.data_spc = avgdata
340 344 dataOut.nIncohInt *= self.n
341 345 # dataOut.nCohInt *= self.n
342 346 dataOut.utctime = avgdatatime
343 347 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nIncohInt
344 348 # dataOut.timeInterval = self.__timeInterval*self.n
345 349 dataOut.flagNoData = False
346
347 return dataOut No newline at end of file
350
351 return dataOut
This diff has been collapsed as it changes many lines (630 lines changed).
@@ -1,3886 +1,4466
1 import numpy
1 import numpy,os,h5py
2 2 import math
3 3 from scipy import optimize, interpolate, signal, stats, ndimage
4 4 import scipy
5 5 import re
6 6 import datetime
7 7 import copy
8 8 import sys
9 9 import importlib
10 10 import itertools
11 11 from multiprocessing import Pool, TimeoutError
12 12 from multiprocessing.pool import ThreadPool
13 13 import time
14 14
15 15 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state parameters
16 16 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
17 17 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
18 18 from scipy import asarray as ar,exp
19 19 from scipy.optimize import curve_fit
20 20 from schainpy.utils import log
21 21 import warnings
22 22 from numpy import NaN
23 23 from scipy.optimize.optimize import OptimizeWarning
24 24 warnings.filterwarnings('ignore')
25 25
26 26 import matplotlib.pyplot as plt
27 27
28 28 SPEED_OF_LIGHT = 299792458
29 29
30 30 '''solving pickling issue'''
31 31
32 32 def _pickle_method(method):
33 33 func_name = method.__func__.__name__
34 34 obj = method.__self__
35 35 cls = method.__self__.__class__
36 36 return _unpickle_method, (func_name, obj, cls)
37 37
38 38 def _unpickle_method(func_name, obj, cls):
39 39 for cls in cls.mro():
40 40 try:
41 41 func = cls.__dict__[func_name]
42 42 except KeyError:
43 43 pass
44 44 else:
45 45 break
46 46 return func.__get__(obj, cls)
47 47
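# The two helpers above only take effect once they are registered with the pickling
# machinery; that registration is not visible in this hunk. An assumed, typical wiring is:
#
#     import copyreg, types
#     copyreg.pickle(types.MethodType, _pickle_method, _unpickle_method)
#
# after which bound methods (e.g. self.FitGau) can be shipped to multiprocessing.Pool workers.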
48 def isNumber(value):
49 try:
50 float(value)
51 return True
52 except (TypeError, ValueError):
53 return False
48 54
49 55 class ParametersProc(ProcessingUnit):
50 56
51 57 METHODS = {}
52 58 nSeconds = None
53 59
54 60 def __init__(self):
55 61 ProcessingUnit.__init__(self)
56 62
57 63 # self.objectDict = {}
58 64 self.buffer = None
59 65 self.firstdatatime = None
60 66 self.profIndex = 0
61 67 self.dataOut = Parameters()
62 68 self.setupReq = False # add to all processing units
63 69
64 70 def __updateObjFromInput(self):
65 71
66 72 self.dataOut.inputUnit = self.dataIn.type
67 73
68 74 self.dataOut.timeZone = self.dataIn.timeZone
69 75 self.dataOut.dstFlag = self.dataIn.dstFlag
70 76 self.dataOut.errorCount = self.dataIn.errorCount
71 77 self.dataOut.useLocalTime = self.dataIn.useLocalTime
72 78
73 79 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
74 80 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
75 81 self.dataOut.channelList = self.dataIn.channelList
76 82 self.dataOut.heightList = self.dataIn.heightList
77 83 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
78 84 # self.dataOut.nHeights = self.dataIn.nHeights
79 85 # self.dataOut.nChannels = self.dataIn.nChannels
80 86 # self.dataOut.nBaud = self.dataIn.nBaud
81 87 # self.dataOut.nCode = self.dataIn.nCode
82 88 # self.dataOut.code = self.dataIn.code
83 89 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
84 90 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
85 91 # self.dataOut.utctime = self.firstdatatime
86 92 self.dataOut.utctime = self.dataIn.utctime
87 93 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assume the data is already decoded
88 94 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assume the data is not flipped
89 95 self.dataOut.nCohInt = self.dataIn.nCohInt
90 96 # self.dataOut.nIncohInt = 1
91 97 # self.dataOut.ippSeconds = self.dataIn.ippSeconds
92 98 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
93 99 self.dataOut.timeInterval1 = self.dataIn.timeInterval
94 100 self.dataOut.heightList = self.dataIn.heightList
95 101 self.dataOut.frequency = self.dataIn.frequency
96 102 # self.dataOut.noise = self.dataIn.noise
97 103
98 104 def run(self):
99 105
100 106
101 107
102 108 #---------------------- Voltage Data ---------------------------
103 109
104 110 if self.dataIn.type == "Voltage":
105 111
106 112 self.__updateObjFromInput()
107 113 self.dataOut.data_pre = self.dataIn.data.copy()
108 114 self.dataOut.flagNoData = False
109 115 self.dataOut.utctimeInit = self.dataIn.utctime
110 116 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
117
118 if hasattr(self.dataIn, 'flagDataAsBlock'):
119 self.dataOut.flagDataAsBlock = self.dataIn.flagDataAsBlock
120
121 if hasattr(self.dataIn, 'profileIndex'):
122 self.dataOut.profileIndex = self.dataIn.profileIndex
123
111 124 if hasattr(self.dataIn, 'dataPP_POW'):
112 125 self.dataOut.dataPP_POW = self.dataIn.dataPP_POW
113 126
114 127 if hasattr(self.dataIn, 'dataPP_POWER'):
115 128 self.dataOut.dataPP_POWER = self.dataIn.dataPP_POWER
116 129
117 130 if hasattr(self.dataIn, 'dataPP_DOP'):
118 131 self.dataOut.dataPP_DOP = self.dataIn.dataPP_DOP
119 132
120 133 if hasattr(self.dataIn, 'dataPP_SNR'):
121 134 self.dataOut.dataPP_SNR = self.dataIn.dataPP_SNR
122 135
123 136 if hasattr(self.dataIn, 'dataPP_WIDTH'):
124 137 self.dataOut.dataPP_WIDTH = self.dataIn.dataPP_WIDTH
125 138 return
126 139
127 140 #---------------------- Spectra Data ---------------------------
128 141
129 142 if self.dataIn.type == "Spectra":
130 143
131 144 self.dataOut.data_pre = [self.dataIn.data_spc, self.dataIn.data_cspc]
132 145 self.dataOut.data_spc = self.dataIn.data_spc
133 146 self.dataOut.data_cspc = self.dataIn.data_cspc
134 147 self.dataOut.nProfiles = self.dataIn.nProfiles
135 148 self.dataOut.nIncohInt = self.dataIn.nIncohInt
136 149 self.dataOut.nFFTPoints = self.dataIn.nFFTPoints
137 150 self.dataOut.ippFactor = self.dataIn.ippFactor
138 151 self.dataOut.abscissaList = self.dataIn.getVelRange(1)
139 152 self.dataOut.spc_noise = self.dataIn.getNoise()
140 153 self.dataOut.spc_range = (self.dataIn.getFreqRange(1) , self.dataIn.getAcfRange(1) , self.dataIn.getVelRange(1))
141 154 # self.dataOut.normFactor = self.dataIn.normFactor
142 155 self.dataOut.pairsList = self.dataIn.pairsList
143 156 self.dataOut.groupList = self.dataIn.pairsList
144 157 self.dataOut.flagNoData = False
145 158
159 if hasattr(self.dataIn, 'flagDataAsBlock'):
160 self.dataOut.flagDataAsBlock = self.dataIn.flagDataAsBlock
161
146 162 if hasattr(self.dataIn, 'ChanDist'): #Distances of receiver channels
147 163 self.dataOut.ChanDist = self.dataIn.ChanDist
148 164 else: self.dataOut.ChanDist = None
149 165
150 166 #if hasattr(self.dataIn, 'VelRange'): #Velocities range
151 167 # self.dataOut.VelRange = self.dataIn.VelRange
152 168 #else: self.dataOut.VelRange = None
153 169
154 170 if hasattr(self.dataIn, 'RadarConst'): #Radar Constant
155 171 self.dataOut.RadarConst = self.dataIn.RadarConst
156 172
157 173 if hasattr(self.dataIn, 'NPW'): #NPW
158 174 self.dataOut.NPW = self.dataIn.NPW
159 175
160 176 if hasattr(self.dataIn, 'COFA'): #COFA
161 177 self.dataOut.COFA = self.dataIn.COFA
162 178
163 179
164 180
165 181 #---------------------- Correlation Data ---------------------------
166 182
167 183 if self.dataIn.type == "Correlation":
168 184 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.dataIn.splitFunctions()
169 185
170 186 self.dataOut.data_pre = (self.dataIn.data_cf[acf_ind,:], self.dataIn.data_cf[ccf_ind,:,:])
171 187 self.dataOut.normFactor = (self.dataIn.normFactor[acf_ind,:], self.dataIn.normFactor[ccf_ind,:])
172 188 self.dataOut.groupList = (acf_pairs, ccf_pairs)
173 189
174 190 self.dataOut.abscissaList = self.dataIn.lagRange
175 191 self.dataOut.noise = self.dataIn.noise
176 192 self.dataOut.data_snr = self.dataIn.SNR
177 193 self.dataOut.flagNoData = False
178 194 self.dataOut.nAvg = self.dataIn.nAvg
179 195
180 196 #---------------------- Parameters Data ---------------------------
181 197
182 198 if self.dataIn.type == "Parameters":
183 199 self.dataOut.copy(self.dataIn)
184 200 self.dataOut.flagNoData = False
185 201
186 202 return True
187 203
188 204 self.__updateObjFromInput()
189 205 self.dataOut.utctimeInit = self.dataIn.utctime
190 206 self.dataOut.paramInterval = self.dataIn.timeInterval
191 207
192 208 return
193 209
194 210
195 211 def target(tups):
196 212
197 213 obj, args = tups
198 214
199 215 return obj.FitGau(args)
200 216
201 217 class RemoveWideGC(Operation):
202 218 ''' This class removes the wide clutter and replaces it with simple interpolation points
203 219 This mainly applies to CLAIRE radar
204 220
205 221 ClutterWidth : Width to look for the clutter peak
206 222
207 223 Input:
208 224
209 225 self.dataOut.data_pre : SPC and CSPC
210 226 self.dataOut.spc_range : To select wind and rainfall velocities
211 227
212 228 Affected:
213 229
214 230 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
215 231
216 232 Written by D. Scipión 25.02.2021
217 233 '''
218 234 def __init__(self):
219 235 Operation.__init__(self)
220 236 self.i = 0
221 237 self.ich = 0
222 238 self.ir = 0
223
239
224 240 def run(self, dataOut, ClutterWidth=2.5):
225 241 # print ('Entering RemoveWideGC ... ')
226 242
227 243 self.spc = dataOut.data_pre[0].copy()
228 244 self.spc_out = dataOut.data_pre[0].copy()
229 245 self.Num_Chn = self.spc.shape[0]
230 246 self.Num_Hei = self.spc.shape[2]
231 247 VelRange = dataOut.spc_range[2][:-1]
232 248 dv = VelRange[1]-VelRange[0]
233 249
234 250 # Find the velocities that corresponds to zero
235 251 gc_values = numpy.squeeze(numpy.where(numpy.abs(VelRange) <= ClutterWidth))
236 252
237 253 # Removing novalid data from the spectra
238 254 for ich in range(self.Num_Chn) :
239 255 for ir in range(self.Num_Hei) :
240 256 # Estimate the noise at each range
241 257 HSn = hildebrand_sekhon(self.spc[ich,:,ir],dataOut.nIncohInt)
242 258
243 259 # Removing the noise floor at each range
244 260 novalid = numpy.where(self.spc[ich,:,ir] < HSn)
245 261 self.spc[ich,novalid,ir] = HSn
246 262
247 263 junk = numpy.append(numpy.insert(numpy.squeeze(self.spc[ich,gc_values,ir]),0,HSn),HSn)
248 264 j1index = numpy.squeeze(numpy.where(numpy.diff(junk)>0))
249 265 j2index = numpy.squeeze(numpy.where(numpy.diff(junk)<0))
250 if ((numpy.size(j1index)<=1) | (numpy.size(j2index)<=1)) :
266 if ((numpy.size(j1index)<=1) | (numpy.size(j2index)<=1)) :
251 267 continue
252 268 junk3 = numpy.squeeze(numpy.diff(j1index))
253 269 junk4 = numpy.squeeze(numpy.diff(j2index))
254
270
255 271 valleyindex = j2index[numpy.where(junk4>1)]
256 272 peakindex = j1index[numpy.where(junk3>1)]
257 273
258 274 isvalid = numpy.squeeze(numpy.where(numpy.abs(VelRange[gc_values[peakindex]]) <= 2.5*dv))
259 275 if numpy.size(isvalid) == 0 :
260 276 continue
261 277 if numpy.size(isvalid) >1 :
262 278 vindex = numpy.argmax(self.spc[ich,gc_values[peakindex[isvalid]],ir])
263 279 isvalid = isvalid[vindex]
264
280
265 281 # clutter peak
266 282 gcpeak = peakindex[isvalid]
267 283 vl = numpy.where(valleyindex < gcpeak)
268 284 if numpy.size(vl) == 0:
269 285 continue
270 286 gcvl = valleyindex[vl[0][-1]]
271 287 vr = numpy.where(valleyindex > gcpeak)
272 288 if numpy.size(vr) == 0:
273 289 continue
274 290 gcvr = valleyindex[vr[0][0]]
275 291
276 292 # Removing the clutter
277 293 interpindex = numpy.array([gc_values[gcvl], gc_values[gcvr]])
278 294 gcindex = gc_values[gcvl+1:gcvr-1]
279 295 self.spc_out[ich,gcindex,ir] = numpy.interp(VelRange[gcindex],VelRange[interpindex],self.spc[ich,interpindex,ir])
280 296
281 297 dataOut.data_pre[0] = self.spc_out
282 298 #print ('Leaving RemoveWideGC ... ')
283 299 return dataOut
284 300
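# Standalone sketch (illustrative, not schainpy code) of the clutter patch applied above:
# the spectral bins between the two valleys around the ground-clutter peak are replaced by
# a linear interpolation between the valley values, via numpy.interp.
import numpy as np

def patch_gap(velocity, power, gap_lo, gap_hi):
    """Replace power[gap_lo+1:gap_hi] with a line joining power[gap_lo] and power[gap_hi]."""
    patched = power.copy()
    gap = np.arange(gap_lo + 1, gap_hi)
    patched[gap] = np.interp(velocity[gap], velocity[[gap_lo, gap_hi]], power[[gap_lo, gap_hi]])
    return patched

if __name__ == "__main__":
    v = np.linspace(-5, 5, 21)
    p = np.ones(21); p[9:12] = 50.0                      # artificial clutter spike around v = 0
    print(patch_gap(v, p, gap_lo=8, gap_hi=12)[8:13])    # spike replaced by the 1.0 baseline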
285 301 class SpectralFilters(Operation):
286 ''' This class replaces the non-valid values with noise for each channel
302 ''' This class replaces the non-valid values with noise for each channel
287 303 This applies to CLAIRE RADAR
288 304
289 305 PositiveLimit : RightLimit of novalid data
290 306 NegativeLimit : LeftLimit of novalid data
291 307
292 308 Input:
293 309
294 310 self.dataOut.data_pre : SPC and CSPC
295 311 self.dataOut.spc_range : To select wind and rainfall velocities
296 312
297 313 Affected:
298 314
299 315 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
300 316
301 317 Written by D. Scipión 29.01.2021
302 318 '''
303 319 def __init__(self):
304 320 Operation.__init__(self)
305 321 self.i = 0
306
322
307 323 def run(self, dataOut, ):
308 324
309 325 self.spc = dataOut.data_pre[0].copy()
310 326 self.Num_Chn = self.spc.shape[0]
311 327 VelRange = dataOut.spc_range[2]
312 328
313 329 # novalid corresponds to data within the Negative and PositiveLimit
314
330
315 331
316 332 # Removing novalid data from the spectra
317 333 for i in range(self.Num_Chn):
318 334 self.spc[i,novalid,:] = dataOut.noise[i]
319 335 dataOut.data_pre[0] = self.spc
320 336 return dataOut
321 337
322 338 class GaussianFit(Operation):
323 339
324 340 '''
325 341 Function that fit of one and two generalized gaussians (gg) based
326 342 on the PSD shape across an "power band" identified from a cumsum of
327 343 the measured spectrum - noise.
328 344
329 345 Input:
330 346 self.dataOut.data_pre : SelfSpectra
331 347
332 348 Output:
333 349 self.dataOut.SPCparam : SPC_ch1, SPC_ch2
334 350
335 351 '''
336 352 def __init__(self):
337 353 Operation.__init__(self)
338 354 self.i=0
339 355
340 356
341 357 # def run(self, dataOut, num_intg=7, pnoise=1., SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the ftt points
342 358 def run(self, dataOut, SNRdBlimit=-9, method='generalized'):
343 359 """This routine fits a pair of generalized Gaussians to a power spectrum
344 360 methods: generalized, squared
345 361 input: spc
346 362 output:
347 363 noise, amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1
348 364 """
349 365 print ('Entering ',method,' double Gaussian fit')
350 366 self.spc = dataOut.data_pre[0].copy()
351 367 self.Num_Hei = self.spc.shape[2]
352 368 self.Num_Bin = self.spc.shape[1]
353 369 self.Num_Chn = self.spc.shape[0]
354 370
355 371 start_time = time.time()
356 372
357 373 pool = Pool(processes=self.Num_Chn)
358 374 args = [(dataOut.spc_range[2], ich, dataOut.spc_noise[ich], dataOut.nIncohInt, SNRdBlimit) for ich in range(self.Num_Chn)]
359 375 objs = [self for __ in range(self.Num_Chn)]
360 376 attrs = list(zip(objs, args))
361 377 DGauFitParam = pool.map(target, attrs)
362 378 # Parameters:
363 379 # 0. Noise, 1. Amplitude, 2. Shift, 3. Width 4. Power
364 380 dataOut.DGauFitParams = numpy.asarray(DGauFitParam)
365 381
366 382 # Double Gaussian Curves
367 383 gau0 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
368 384 gau0[:] = numpy.NaN
369 385 gau1 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
370 386 gau1[:] = numpy.NaN
371 387 x_mtr = numpy.transpose(numpy.tile(dataOut.getVelRange(1)[:-1], (self.Num_Hei,1)))
372 388 for iCh in range(self.Num_Chn):
373 389 N0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,0]] * self.Num_Bin))
374 390 N1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,1]] * self.Num_Bin))
375 391 A0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,0]] * self.Num_Bin))
376 392 A1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,1]] * self.Num_Bin))
377 393 v0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,0]] * self.Num_Bin))
378 394 v1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,1]] * self.Num_Bin))
379 395 s0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,0]] * self.Num_Bin))
380 396 s1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,1]] * self.Num_Bin))
381 397 if method == 'generalized':
382 398 p0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,0]] * self.Num_Bin))
383 399 p1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,1]] * self.Num_Bin))
384 400 elif method == 'squared':
385 401 p0 = 2.
386 p1 = 2.
402 p1 = 2.
387 403 gau0[iCh] = A0*numpy.exp(-0.5*numpy.abs((x_mtr-v0)/s0)**p0)+N0
388 404 gau1[iCh] = A1*numpy.exp(-0.5*numpy.abs((x_mtr-v1)/s1)**p1)+N1
389 405 dataOut.GaussFit0 = gau0
390 406 dataOut.GaussFit1 = gau1
391 407
392 408 print('Leaving ',method ,' double Gaussian fit')
393 409 return dataOut
394 410
395 411 def FitGau(self, X):
396 412 # print('Entering FitGau')
397 413 # Assigning the variables
398 414 Vrange, ch, wnoise, num_intg, SNRlimit = X
399 415 # Noise Limits
400 416 noisebl = wnoise * 0.9
401 417 noisebh = wnoise * 1.1
402 418 # Radar Velocity
403 419 Va = max(Vrange)
404 420 deltav = Vrange[1] - Vrange[0]
405 421 x = numpy.arange(self.Num_Bin)
406 422
407 423 # print ('stop 0')
408 424
409 425 # 5 parameters, 2 Gaussians
410 426 DGauFitParam = numpy.zeros([5, self.Num_Hei,2])
411 427 DGauFitParam[:] = numpy.NaN
412 428
413 429 # SPCparam = []
414 430 # SPC_ch1 = numpy.zeros([self.Num_Bin,self.Num_Hei])
415 431 # SPC_ch2 = numpy.zeros([self.Num_Bin,self.Num_Hei])
416 432 # SPC_ch1[:] = 0 #numpy.NaN
417 433 # SPC_ch2[:] = 0 #numpy.NaN
418 434 # print ('stop 1')
419 435 for ht in range(self.Num_Hei):
420 436 # print (ht)
421 437 # print ('stop 2')
422 438 # Spectra at each range
423 439 spc = numpy.asarray(self.spc)[ch,:,ht]
424 440 snr = ( spc.mean() - wnoise ) / wnoise
425 441 snrdB = 10.*numpy.log10(snr)
426 442
427 443 #print ('stop 3')
428 444 if snrdB < SNRlimit :
429 445 # snr = numpy.NaN
430 446 # SPC_ch1[:,ht] = 0#numpy.NaN
431 447 # SPC_ch1[:,ht] = 0#numpy.NaN
432 448 # SPCparam = (SPC_ch1,SPC_ch2)
433 449 # print ('SNR less than SNRth')
434 450 continue
435 451 # wnoise = hildebrand_sekhon(spc,num_intg)
436 452 # print ('stop 2.01')
437 453 #############################################
438 454 # normalizing spc and noise
439 455 # This part differs from gg1
440 456 # spc_norm_max = max(spc) #commented by D. Scipión 19.03.2021
441 457 #spc = spc / spc_norm_max
442 458 # pnoise = pnoise #/ spc_norm_max #commented by D. Scipión 19.03.2021
443 459 #############################################
444 460
445 461 # print ('stop 2.1')
446 462 fatspectra=1.0
447 463 # noise per channel.... we might want to use the noise at each range
448
464
449 465 # wnoise = noise_ #/ spc_norm_max #commented by D. Scipión 19.03.2021
450 466 #wnoise,stdv,i_max,index =enoise(spc,num_intg) #noise estimate using Hildebrand Sekhon, only wnoise is used
451 467 #if wnoise>1.1*pnoise: # to be tested later
452 468 # wnoise=pnoise
453 469 # noisebl = wnoise*0.9
454 470 # noisebh = wnoise*1.1
455 471 spc = spc - wnoise # signal
456 472
457 473 # print ('stop 2.2')
458 474 minx = numpy.argmin(spc)
459 475 #spcs=spc.copy()
460 476 spcs = numpy.roll(spc,-minx)
461 477 cum = numpy.cumsum(spcs)
462 478 # tot_noise = wnoise * self.Num_Bin #64;
463 479
464 480 # print ('stop 2.3')
465 481 # snr = sum(spcs) / tot_noise
466 482 # snrdB = 10.*numpy.log10(snr)
467 483 #print ('stop 3')
468 484 # if snrdB < SNRlimit :
469 485 # snr = numpy.NaN
470 486 # SPC_ch1[:,ht] = 0#numpy.NaN
471 487 # SPC_ch1[:,ht] = 0#numpy.NaN
472 488 # SPCparam = (SPC_ch1,SPC_ch2)
473 489 # print ('SNR less than SNRth')
474 490 # continue
475 491
476 492
477 493 #if snrdB<-18 or numpy.isnan(snrdB) or num_intg<4:
478 494 # return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
479 495 # print ('stop 4')
480 496 cummax = max(cum)
481 497 epsi = 0.08 * fatspectra # cumsum to narrow down the energy region
482 498 cumlo = cummax * epsi
483 499 cumhi = cummax * (1-epsi)
484 500 powerindex = numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cum<cumhi))[0])
485 501
486 502 # print ('stop 5')
487 503 if len(powerindex) < 1:# case for powerindex 0
488 504 # print ('powerindex < 1')
489 505 continue
490 506 powerlo = powerindex[0]
491 507 powerhi = powerindex[-1]
492 508 powerwidth = powerhi-powerlo
493 509 if powerwidth <= 1:
494 510 # print('powerwidth <= 1')
495 511 continue
496
512
497 513 # print ('stop 6')
498 514 firstpeak = powerlo + powerwidth/10.# first gaussian energy location
499 515 secondpeak = powerhi - powerwidth/10. #second gaussian energy location
500 516 midpeak = (firstpeak + secondpeak)/2.
501 517 firstamp = spcs[int(firstpeak)]
502 518 secondamp = spcs[int(secondpeak)]
503 519 midamp = spcs[int(midpeak)]
504 520
505 521 y_data = spc + wnoise
506 522
507 523 ''' single Gaussian '''
508 524 shift0 = numpy.mod(midpeak+minx, self.Num_Bin )
509 525 width0 = powerwidth/4.#Initialization entire power of spectrum divided by 4
510 526 power0 = 2.
511 527 amplitude0 = midamp
512 528 state0 = [shift0,width0,amplitude0,power0,wnoise]
513 529 bnds = ((0,self.Num_Bin-1),(1,powerwidth),(0,None),(0.5,3.),(noisebl,noisebh))
514 530 lsq1 = fmin_l_bfgs_b(self.misfit1, state0, args=(y_data,x,num_intg), bounds=bnds, approx_grad=True)
515 531 # print ('stop 7.1')
516 532 # print (bnds)
517 533
518 534 chiSq1=lsq1[1]
519 535
520 536 # print ('stop 8')
521 537 if fatspectra<1.0 and powerwidth<4:
522 538 choice=0
523 539 Amplitude0=lsq1[0][2]
524 540 shift0=lsq1[0][0]
525 541 width0=lsq1[0][1]
526 542 p0=lsq1[0][3]
527 543 Amplitude1=0.
528 544 shift1=0.
529 545 width1=0.
530 546 p1=0.
531 547 noise=lsq1[0][4]
532 548 #return (numpy.array([shift0,width0,Amplitude0,p0]),
533 549 # numpy.array([shift1,width1,Amplitude1,p1]),noise,snrdB,chiSq1,6.,sigmas1,[None,]*9,choice)
534
550
535 551 # print ('stop 9')
536 552 ''' two Gaussians '''
537 553 #shift0=numpy.mod(firstpeak+minx,64); shift1=numpy.mod(secondpeak+minx,64)
538 554 shift0 = numpy.mod(firstpeak+minx, self.Num_Bin )
539 555 shift1 = numpy.mod(secondpeak+minx, self.Num_Bin )
540 556 width0 = powerwidth/6.
541 557 width1 = width0
542 558 power0 = 2.
543 559 power1 = power0
544 560 amplitude0 = firstamp
545 561 amplitude1 = secondamp
546 562 state0 = [shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,wnoise]
547 563 #bnds=((0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
548 564 bnds=((0,self.Num_Bin-1),(1,powerwidth/2.),(0,None),(0.5,3.),(0,self.Num_Bin-1),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
549 565 #bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(0.1,0.5))
550 566
551 567 # print ('stop 10')
552 568 lsq2 = fmin_l_bfgs_b( self.misfit2 , state0 , args=(y_data,x,num_intg) , bounds=bnds , approx_grad=True )
553 569
554 570 # print ('stop 11')
555 571 chiSq2 = lsq2[1]
556 572
557 573 # print ('stop 12')
558 574
559 575 oneG = (chiSq1<5 and chiSq1/chiSq2<2.0) and (abs(lsq2[0][0]-lsq2[0][4])<(lsq2[0][1]+lsq2[0][5])/3. or abs(lsq2[0][0]-lsq2[0][4])<10)
560 576
561 577 # print ('stop 13')
562 578 if snrdB>-12: # when SNR is strong pick the peak with least shift (LOS velocity) error
563 579 if oneG:
564 580 choice = 0
565 581 else:
566 582 w1 = lsq2[0][1]; w2 = lsq2[0][5]
567 583 a1 = lsq2[0][2]; a2 = lsq2[0][6]
568 584 p1 = lsq2[0][3]; p2 = lsq2[0][7]
569 585 s1 = (2**(1+1./p1))*scipy.special.gamma(1./p1)/p1
570 586 s2 = (2**(1+1./p2))*scipy.special.gamma(1./p2)/p2
571 587 gp1 = a1*w1*s1; gp2 = a2*w2*s2 # power content of each ggaussian with proper p scaling
572 588
573 589 if gp1>gp2:
574 590 if a1>0.7*a2:
575 591 choice = 1
576 592 else:
577 593 choice = 2
578 594 elif gp2>gp1:
579 595 if a2>0.7*a1:
580 596 choice = 2
581 597 else:
582 598 choice = 1
583 599 else:
584 600 choice = numpy.argmax([a1,a2])+1
585 601 #else:
586 602 #choice=argmin([std2a,std2b])+1
587 603
588 604 else: # with low SNR go to the most energetic peak
589 605 choice = numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
590 606
591 607 # print ('stop 14')
592 608 shift0 = lsq2[0][0]
593 609 vel0 = Vrange[0] + shift0 * deltav
594 610 shift1 = lsq2[0][4]
595 611 # vel1=Vrange[0] + shift1 * deltav
596 612
597 613 # max_vel = 1.0
598 614 # Va = max(Vrange)
599 615 # deltav = Vrange[1]-Vrange[0]
600 616 # print ('stop 15')
601 617 #first peak will be 0, second peak will be 1
602 618 # if vel0 > -1.0 and vel0 < max_vel : #first peak is in the correct range # Commented by D.Scipión 19.03.2021
603 619 if vel0 > -Va and vel0 < Va : #first peak is in the correct range
604 620 shift0 = lsq2[0][0]
605 621 width0 = lsq2[0][1]
606 622 Amplitude0 = lsq2[0][2]
607 623 p0 = lsq2[0][3]
608 624
609 625 shift1 = lsq2[0][4]
610 626 width1 = lsq2[0][5]
611 627 Amplitude1 = lsq2[0][6]
612 628 p1 = lsq2[0][7]
613 629 noise = lsq2[0][8]
614 630 else:
615 631 shift1 = lsq2[0][0]
616 632 width1 = lsq2[0][1]
617 633 Amplitude1 = lsq2[0][2]
618 634 p1 = lsq2[0][3]
619 635
620 636 shift0 = lsq2[0][4]
621 637 width0 = lsq2[0][5]
622 638 Amplitude0 = lsq2[0][6]
623 639 p0 = lsq2[0][7]
624 640 noise = lsq2[0][8]
625 641
626 642 if Amplitude0<0.05: # in case the peak is noise
627 643 shift0,width0,Amplitude0,p0 = 4*[numpy.NaN]
628 644 if Amplitude1<0.05:
629 645 shift1,width1,Amplitude1,p1 = 4*[numpy.NaN]
630 646
631 # print ('stop 16 ')
647 # print ('stop 16 ')
632 648 # SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0)/width0)**p0)
633 649 # SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1)/width1)**p1)
634 650 # SPCparam = (SPC_ch1,SPC_ch2)
635 651
636 652 DGauFitParam[0,ht,0] = noise
637 653 DGauFitParam[0,ht,1] = noise
638 654 DGauFitParam[1,ht,0] = Amplitude0
639 655 DGauFitParam[1,ht,1] = Amplitude1
640 656 DGauFitParam[2,ht,0] = Vrange[0] + shift0 * deltav
641 657 DGauFitParam[2,ht,1] = Vrange[0] + shift1 * deltav
642 658 DGauFitParam[3,ht,0] = width0 * deltav
643 659 DGauFitParam[3,ht,1] = width1 * deltav
644 660 DGauFitParam[4,ht,0] = p0
645 661 DGauFitParam[4,ht,1] = p1
646 662
647 663 # print (DGauFitParam.shape)
648 664 # print ('Leaving FitGau')
649 665 return DGauFitParam
650 666 # return SPCparam
651 667 # return GauSPC
652 668
653 669 def y_model1(self,x,state):
654 670 shift0, width0, amplitude0, power0, noise = state
655 671 model0 = amplitude0*numpy.exp(-0.5*abs((x - shift0)/width0)**power0)
656 672 model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
657 673 model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
658 674 return model0 + model0u + model0d + noise
659 675
660 676 def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist
661 677 shift0, width0, amplitude0, power0, shift1, width1, amplitude1, power1, noise = state
662 678 model0 = amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
663 679 model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
664 680 model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
665
681
666 682 model1 = amplitude1*numpy.exp(-0.5*abs((x - shift1)/width1)**power1)
667 683 model1u = amplitude1*numpy.exp(-0.5*abs((x - shift1 - self.Num_Bin)/width1)**power1)
668 684 model1d = amplitude1*numpy.exp(-0.5*abs((x - shift1 + self.Num_Bin)/width1)**power1)
669 685 return model0 + model0u + model0d + model1 + model1u + model1d + noise
670 686
671 687 def misfit1(self,state,y_data,x,num_intg): # This function compares how close real data is with the model data, the close it is, the better it is.
672 688
673 689 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model1(x,state)))**2)#/(64-5.) # /(64-5.) can be commented
674 690
675 691 def misfit2(self,state,y_data,x,num_intg):
676 692 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model2(x,state)))**2)#/(64-9.)
677 693
678 694
679 695
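# Standalone sketch (assumptions only, not part of GaussianFit) of the single
# generalized-Gaussian fit performed by FitGau()/misfit1() above: a log-domain
# least-squares misfit minimised with scipy.optimize.fmin_l_bfgs_b under box
# constraints, run here on a synthetic spectrum.
import numpy as np
from scipy.optimize import fmin_l_bfgs_b

def gg_model(x, shift, width, amp, power, noise):
    return amp * np.exp(-0.5 * np.abs((x - shift) / width) ** power) + noise

def misfit(state, y_data, x):
    return np.sum((np.log(y_data) - np.log(gg_model(x, *state))) ** 2)

if __name__ == "__main__":
    x = np.arange(64.0)
    truth = (30.0, 4.0, 10.0, 2.0, 1.0)            # shift, width, amplitude, power, noise
    y = gg_model(x, *truth)
    state0 = (25.0, 6.0, 5.0, 2.0, 1.5)            # rough initial guess
    bnds = ((0, 63), (1, 20), (0, None), (0.5, 3.0), (0.5, 2.0))
    best, chi2, _ = fmin_l_bfgs_b(misfit, state0, args=(y, x), bounds=bnds, approx_grad=True)
    print(np.round(best, 2), round(chi2, 4))       # should land close to `truth`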
680 696 class PrecipitationProc(Operation):
681 697
682 698 '''
683 699 Operator that estimates the reflectivity factor (Z) and the rainfall rate (R)
684 700
685 701 Input:
686 702 self.dataOut.data_pre : SelfSpectra
687 703
688 704 Output:
689 705
690 706 self.dataOut.data_output : Reflectivity factor, rainfall Rate
691 707
692 708
693 709 Parameters affected:
694 710 '''
695 711
696 712 def __init__(self):
697 713 Operation.__init__(self)
698 714 self.i=0
699 715
700 716 def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
701 717 tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km2 = 0.93, Altitude=3350,SNRdBlimit=-30):
702 718
703 719 # print ('Entering PrecepitationProc ... ')
704 720
705 721 if radar == "MIRA35C" :
706 722
707 723 self.spc = dataOut.data_pre[0].copy()
708 724 self.Num_Hei = self.spc.shape[2]
709 725 self.Num_Bin = self.spc.shape[1]
710 726 self.Num_Chn = self.spc.shape[0]
711 727 Ze = self.dBZeMODE2(dataOut)
712 728
713 729 else:
714 730
715 731 self.spc = dataOut.data_pre[0].copy()
716 732
717 733 # NOTE: the TX pulse range must be removed
718 734 self.spc[:,:,0:7]= numpy.NaN
719 735
720 736 self.Num_Hei = self.spc.shape[2]
721 737 self.Num_Bin = self.spc.shape[1]
722 738 self.Num_Chn = self.spc.shape[0]
723 739
724 740 VelRange = dataOut.spc_range[2]
725 741
726 742 ''' The radar constant is obtained '''
727 743
728 744 self.Pt = Pt
729 745 self.Gt = Gt
730 746 self.Gr = Gr
731 747 self.Lambda = Lambda
732 748 self.aL = aL
733 749 self.tauW = tauW
734 self.ThetaT = ThetaT
750 self.ThetaT = ThetaT
735 751 self.ThetaR = ThetaR
736 752 self.GSys = 10**(36.63/10) # LNA gain 36.63 dB
737 753 self.lt = 10**(1.67/10) # Tx cable losses 1.67 dB
738 754 self.lr = 10**(5.73/10) # Rx cable losses 5.73 dB
739 755
740 756 Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
741 757 Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR)
742 758 RadarConstant = 10e-26 * Numerator / Denominator #
743 759 ExpConstant = 10**(40/10) # experimental constant
744 760
745 761 SignalPower = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
746 762 for i in range(self.Num_Chn):
747 763 SignalPower[i,:,:] = self.spc[i,:,:] - dataOut.noise[i]
748 764 SignalPower[numpy.where(SignalPower < 0)] = 1e-20
749 765
750 766 SPCmean = numpy.mean(SignalPower, 0)
751 767 Pr = SPCmean[:,:]/dataOut.normFactor
752 768
753 769 # Declaring auxiliary variables
754 770 Range = dataOut.heightList*1000. #Range in m
755 771 # replicate the heightlist to obtain a matrix [Num_Bin,Num_Hei]
756 772 rMtrx = numpy.transpose(numpy.transpose([dataOut.heightList*1000.] * self.Num_Bin))
757 773 zMtrx = rMtrx+Altitude
758 774 # replicate the VelRange to obtain a matrix [Num_Bin,Num_Hei]
759 775 VelMtrx = numpy.transpose(numpy.tile(VelRange[:-1], (self.Num_Hei,1)))
760 776
761 777 # height dependence to air density Foote and Du Toit (1969)
762 778 delv_z = 1 + 3.68e-5 * zMtrx + 1.71e-9 * zMtrx**2
763 779 VMtrx = VelMtrx / delv_z #Normalized velocity
764 780 VMtrx[numpy.where(VMtrx> 9.6)] = numpy.NaN
765 781 # Diameter is related to the fall speed of falling drops
766 782 D_Vz = -1.667 * numpy.log( 0.9369 - 0.097087 * VMtrx ) # D in [mm]
767 783 # Only valid for D>= 0.16 mm
768 784 D_Vz[numpy.where(D_Vz < 0.16)] = numpy.NaN
769 785
770 786 #Calculate Radar Reflectivity ETAn
771 787 ETAn = (RadarConstant *ExpConstant) * Pr * rMtrx**2 #Reflectivity (ETA)
772 788 ETAd = ETAn * 6.18 * exp( -0.6 * D_Vz ) * delv_z
773 789 # Radar Cross Section
774 sigmaD = Km2 * (D_Vz * 1e-3 )**6 * numpy.pi**5 / Lambda**4
790 sigmaD = Km2 * (D_Vz * 1e-3 )**6 * numpy.pi**5 / Lambda**4
775 791 # Drop Size Distribution
776 792 DSD = ETAn / sigmaD
777 793 # Equivalent reflectivity
778 794 Ze_eqn = numpy.nansum( DSD * D_Vz**6 ,axis=0)
779 795 Ze_org = numpy.nansum(ETAn * Lambda**4, axis=0) / (1e-18*numpy.pi**5 * Km2) # [mm^6 /m^3]
780 796 # RainFall Rate
781 797 RR = 0.0006*numpy.pi * numpy.nansum( D_Vz**3 * DSD * VelMtrx ,0) #mm/hr
782 798
783 799 # Censoring the data
784 800 # Removing data with SNR below SNRth; the SNR should be considered per channel
785 801 SNRth = 10**(SNRdBlimit/10) #-30dB
786 802 novalid = numpy.where((dataOut.data_snr[0,:] <SNRth) | (dataOut.data_snr[1,:] <SNRth) | (dataOut.data_snr[2,:] <SNRth)) # AND condition. Maybe OR condition better
787 803 W = numpy.nanmean(dataOut.data_dop,0)
788 804 W[novalid] = numpy.NaN
789 805 Ze_org[novalid] = numpy.NaN
790 806 RR[novalid] = numpy.NaN
791 807
792 808 dataOut.data_output = RR[8]
793 809 dataOut.data_param = numpy.ones([3,self.Num_Hei])
794 810 dataOut.channelList = [0,1,2]
795
811
796 812 dataOut.data_param[0]=10*numpy.log10(Ze_org)
797 813 dataOut.data_param[1]=-W
798 814 dataOut.data_param[2]=RR
799 815
800 816 # print ('Leaving PrecepitationProc ... ')
801 817 return dataOut
802 818
803 819 def dBZeMODE2(self, dataOut): # Processing for MIRA35C
804 820
805 821 NPW = dataOut.NPW
806 822 COFA = dataOut.COFA
807 823
808 824 SNR = numpy.array([self.spc[0,:,:] / NPW[0]]) #, self.spc[1,:,:] / NPW[1]])
809 825 RadarConst = dataOut.RadarConst
810 826 #frequency = 34.85*10**9
811 827
812 828 ETA = numpy.zeros(([self.Num_Chn ,self.Num_Hei]))
813 829 data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN
814 830
815 831 ETA = numpy.sum(SNR,1)
816 832
817 833 ETA = numpy.where(ETA != 0. , ETA, numpy.NaN)
818 834
819 835 Ze = numpy.ones([self.Num_Chn, self.Num_Hei] )
820 836
821 837 for r in range(self.Num_Hei):
822 838
823 839 Ze[0,r] = ( ETA[0,r] ) * COFA[0,r][0] * RadarConst * ((r/5000.)**2)
824 840 #Ze[1,r] = ( ETA[1,r] ) * COFA[1,r][0] * RadarConst * ((r/5000.)**2)
825 841
826 842 return Ze
827 843
828 844 # def GetRadarConstant(self):
829 845 #
830 846 # """
831 847 # Constants:
832 848 #
833 849 # Pt: Transmission Power dB 5kW 5000
834 850 # Gt: Transmission Gain dB 24.7 dB 295.1209
835 851 # Gr: Reception Gain dB 18.5 dB 70.7945
836 852 # Lambda: Wavelength m 0.6741 m 0.6741
837 853 # aL: Attenuation losses dB 4dB 2.5118
838 854 # tauW: Width of transmission pulse s 4us 4e-6
839 855 # ThetaT: Transmission antenna beam angle rad 0.1656317 rad 0.1656317
840 856 # ThetaR: Reception antenna beam angle rad 0.36774087 rad 0.36774087
841 857 #
842 858 # """
843 859 #
844 860 # Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
845 861 # Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * TauW * numpy.pi * ThetaT * TheraR)
846 862 # RadarConstant = Numerator / Denominator
847 863 #
848 864 # return RadarConstant
849 865
850 866
851 867
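# Minimal sketch (illustrative only) of the radar-constant expression used in
# PrecipitationProc.run() above, evaluated with the default parameters of that method.
import numpy as np

SPEED_OF_LIGHT_M_S = 299792458.0

def radar_constant(Pt, Gt, Gr, Lambda, aL, tauW, ThetaT, ThetaR):
    numerator = (4 * np.pi) ** 3 * aL ** 2 * 16 * np.log(2)
    denominator = Pt * Gt * Gr * Lambda ** 2 * SPEED_OF_LIGHT_M_S * tauW * np.pi * ThetaT * ThetaR
    return 10e-26 * numerator / denominator

if __name__ == "__main__":
    rc = radar_constant(Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741,
                        aL=2.5118, tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087)
    print("{:.3e}".format(rc))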
852 868 class FullSpectralAnalysis(Operation):
853 869
854 870 """
855 871 Function that implements Full Spectral Analysis technique.
856 872
857 873 Input:
858 874 self.dataOut.data_pre : SelfSpectra and CrossSpectra data
859 875 self.dataOut.groupList : Pairlist of channels
860 876 self.dataOut.ChanDist : Physical distance between receivers
861 877
862 878
863 879 Output:
864 880
865 881 self.dataOut.data_output : Zonal wind, Meridional wind, and Vertical wind
866 882
867 883
868 884 Parameters affected: Winds, height range, SNR
869 885
870 886 """
871 def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRdBlimit=-30,
887 def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRdBlimit=-30,
872 888 minheight=None, maxheight=None, NegativeLimit=None, PositiveLimit=None):
873 889
874 890 spc = dataOut.data_pre[0].copy()
875 891 cspc = dataOut.data_pre[1]
876 892 nHeights = spc.shape[2]
877 893
878 894 # first_height = 0.75 #km (ref: data header 20170822)
879 895 # resolution_height = 0.075 #km
880 896 '''
881 897 finding height range. check this when radar parameters are changed!
882 898 '''
883 899 if maxheight is not None:
884 900 # range_max = math.ceil((maxheight - first_height) / resolution_height) # theoretical
885 901 range_max = math.ceil(13.26 * maxheight - 3) # empirical, works better
886 902 else:
887 903 range_max = nHeights
888 904 if minheight is not None:
889 905 # range_min = int((minheight - first_height) / resolution_height) # theoretical
890 906 range_min = int(13.26 * minheight - 5) # empirical, works better
891 907 if range_min < 0:
892 908 range_min = 0
893 909 else:
894 910 range_min = 0
895 911
896 912 pairsList = dataOut.groupList
897 913 if dataOut.ChanDist is not None :
898 914 ChanDist = dataOut.ChanDist
899 915 else:
900 916 ChanDist = numpy.array([[Xi01, Eta01],[Xi02,Eta02],[Xi12,Eta12]])
901 917
902 918 # 4 variables: zonal, meridional, vertical, and average SNR
903 919 data_param = numpy.zeros([4,nHeights]) * numpy.NaN
904 920 velocityX = numpy.zeros([nHeights]) * numpy.NaN
905 921 velocityY = numpy.zeros([nHeights]) * numpy.NaN
906 922 velocityZ = numpy.zeros([nHeights]) * numpy.NaN
907 923
908 924 dbSNR = 10*numpy.log10(numpy.average(dataOut.data_snr,0))
909 925
910 926 '''***********************************************WIND ESTIMATION**************************************'''
911 927 for Height in range(nHeights):
912 928
913 929 if Height >= range_min and Height < range_max:
914 930 # error_code will be useful in future analysis
915 [Vzon,Vmer,Vver, error_code] = self.WindEstimation(spc[:,:,Height], cspc[:,:,Height], pairsList,
931 [Vzon,Vmer,Vver, error_code] = self.WindEstimation(spc[:,:,Height], cspc[:,:,Height], pairsList,
916 932 ChanDist, Height, dataOut.noise, dataOut.spc_range, dbSNR[Height], SNRdBlimit, NegativeLimit, PositiveLimit,dataOut.frequency)
917 933
918 934 if abs(Vzon) < 100. and abs(Vmer) < 100.:
919 935 velocityX[Height] = Vzon
920 936 velocityY[Height] = -Vmer
921 937 velocityZ[Height] = Vver
922
938
923 939 # Censoring data with SNR threshold
924 940 dbSNR [dbSNR < SNRdBlimit] = numpy.NaN
925 941
926 942 data_param[0] = velocityX
927 943 data_param[1] = velocityY
928 944 data_param[2] = velocityZ
929 945 data_param[3] = dbSNR
930 946 dataOut.data_param = data_param
931 947 return dataOut
932 948
933 949 def moving_average(self,x, N=2):
934 950 """ convolution for smoothing data. note that the last N-1 values are a convolution with zeroes """
935 951 return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):]
936 952
937 953 def gaus(self,xSamples,Amp,Mu,Sigma):
938 954 return Amp * numpy.exp(-0.5*((xSamples - Mu)/Sigma)**2)
939 955
940 956 def Moments(self, ySamples, xSamples):
941 957 Power = numpy.nanmean(ySamples) # Power, 0th Moment
942 958 yNorm = ySamples / numpy.nansum(ySamples)
943 959 RadVel = numpy.nansum(xSamples * yNorm) # Radial Velocity, 1st Moment
944 960 Sigma2 = numpy.nansum(yNorm * (xSamples - RadVel)**2) # Spectral Width, 2nd Moment
945 961 StdDev = numpy.sqrt(numpy.abs(Sigma2)) # Desv. Estandar, Ancho espectral
946 962 return numpy.array([Power,RadVel,StdDev])
947 963
948 964 def StopWindEstimation(self, error_code):
949 965 Vzon = numpy.NaN
950 966 Vmer = numpy.NaN
951 967 Vver = numpy.NaN
952 968 return Vzon, Vmer, Vver, error_code
953 969
954 970 def AntiAliasing(self, interval, maxstep):
955 971 """
956 972 function to prevent errors from aliased values when computing the phase slope
957 973 """
958 974 antialiased = numpy.zeros(len(interval))
959 975 copyinterval = interval.copy()
960 976
961 977 antialiased[0] = copyinterval[0]
962 978
963 979 for i in range(1,len(antialiased)):
964 980 step = interval[i] - interval[i-1]
965 981 if step > maxstep:
966 982 copyinterval -= 2*numpy.pi
967 983 antialiased[i] = copyinterval[i]
968 984 elif step < maxstep*(-1):
969 985 copyinterval += 2*numpy.pi
970 986 antialiased[i] = copyinterval[i]
971 987 else:
972 988 antialiased[i] = copyinterval[i].copy()
973 989
974 990 return antialiased
975 991
976 992 def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit, NegativeLimit, PositiveLimit, radfreq):
977 993 """
978 994 Function that Calculates Zonal, Meridional and Vertical wind velocities.
979 995 Initial Version by E. Bocanegra updated by J. Zibell until Nov. 2019.
980 996
981 997 Input:
982 998 spc, cspc : self spectra and cross spectra data. In Briggs notation something like S_i*(S_i)_conj, (S_j)_conj respectively.
983 999 pairsList : Pairlist of channels
984 1000 ChanDist : array of xi_ij and eta_ij
985 1001 Height : height at which data is processed
986 1002 noise : noise in [channels] format for specific height
987 1003 AbbsisaRange : range of the frequencies or velocities
988 1004 dbSNR, SNRlimit : signal-to-noise ratio in dB and its lower limit
989 1005
990 1006 Output:
991 1007 Vzon, Vmer, Vver : wind velocities
992 1008 error_code : int that states where code is terminated
993 1009
994 1010 0 : no error detected
995 1011 1 : Gaussian of mean spc exceeds widthlimit
996 1012 2 : no Gaussian of mean spc found
997 1013 3 : SNR too low or velocity too high -> e.g. precipitation
998 1014 4 : at least one Gaussian of cspc exceeds widthlimit
999 1015 5 : zero out of three cspc Gaussian fits converged
1000 1016 6 : phase slope fit could not be found
1001 1017 7 : arrays used to fit phase have different length
1002 1018 8 : frequency range is either too short (len <= 5) or very long (> 30% of cspc)
1003 1019
1004 1020 """
1005 1021
1006 1022 error_code = 0
1007 1023
1008 1024 nChan = spc.shape[0]
1009 1025 nProf = spc.shape[1]
1010 1026 nPair = cspc.shape[0]
1011 1027
1012 1028 SPC_Samples = numpy.zeros([nChan, nProf]) # for normalized spc values for one height
1013 1029 CSPC_Samples = numpy.zeros([nPair, nProf], dtype=numpy.complex_) # for normalized cspc values
1014 1030 phase = numpy.zeros([nPair, nProf]) # phase between channels
1015 1031 PhaseSlope = numpy.zeros(nPair) # slope of the phases, channelwise
1016 1032 PhaseInter = numpy.zeros(nPair) # intercept to the slope of the phases, channelwise
1017 1033 xFrec = AbbsisaRange[0][:-1] # frequency range
1018 1034 xVel = AbbsisaRange[2][:-1] # velocity range
1019 1035 xSamples = xFrec # the frequency range is taken
1020 1036 delta_x = xSamples[1] - xSamples[0] # delta_f or delta_x
1021 1037
1038 # only consider velocities with in NegativeLimit and PositiveLimit
1023 1039 if (NegativeLimit is None):
1024 1040 NegativeLimit = numpy.min(xVel)
1025 1041 if (PositiveLimit is None):
1026 1042 PositiveLimit = numpy.max(xVel)
1027 1043 xvalid = numpy.where((xVel > NegativeLimit) & (xVel < PositiveLimit))
1028 1044 xSamples_zoom = xSamples[xvalid]
1029 1045
1030 1046 '''Getting Eij and Nij'''
1031 1047 Xi01, Xi02, Xi12 = ChanDist[:,0]
1032 1048 Eta01, Eta02, Eta12 = ChanDist[:,1]
1033 1049
1034 1050 # spwd limit - updated by D. Scipión 30.03.2021
1035 1051 widthlimit = 10
1036 1052 '''************************* SPC is normalized ********************************'''
1053 spc_norm = spc.copy()
1038 1054 # For each channel
1039 1055 for i in range(nChan):
1040 1056 spc_sub = spc_norm[i,:] - noise[i] # only the signal power
1041 1057 SPC_Samples[i] = spc_sub / (numpy.nansum(spc_sub) * delta_x)
1042 1058
1043 1059 '''********************** FITTING MEAN SPC GAUSSIAN **********************'''
1044 1060
1045 1061 """ the gaussian of the mean: first subtract noise, then normalize. this is legal because
1046 1062 you only fit the curve and don't need the absolute value of height for calculation,
1047 1063 only for estimation of width. for normalization of cross spectra, you need initial,
1048 1064 unnormalized self-spectra with noise.
1049 1065
1050 1066 Technically, you don't even need to normalize the self-spectra, as you only need the
1051 1067 width of the peak. However, it was left this way. Note that the normalization has a flaw:
1052 1068 due to subtraction of the noise, some values are below zero. Raw "spc" values should be
1053 1069 >= 0, as each is the modulus squared of a signal (a complex value times its conjugate).
1054 1070 """
1055 1071 # initial conditions
1072 popt = [1e-10,0,1e-10]
1057 1073 # Spectra average
1074 SPCMean = numpy.average(SPC_Samples,0)
1059 1075 # Moments in frequency
1060 1076 SPCMoments = self.Moments(SPCMean[xvalid], xSamples_zoom)
1061 1077
1062 1078 # Gauss Fit SPC in frequency domain
1063 1079 if dbSNR > SNRlimit: # only if SNR > SNRth
1064 1080 try:
1065 1081 popt,pcov = curve_fit(self.gaus,xSamples_zoom,SPCMean[xvalid],p0=SPCMoments)
1066 1082 if popt[2] <= 0 or popt[2] > widthlimit: # CONDITION
1067 1083 return self.StopWindEstimation(error_code = 1)
1068 1084 FitGauss = self.gaus(xSamples_zoom,*popt)
1069 1085 except :#RuntimeError:
1070 1086 return self.StopWindEstimation(error_code = 2)
1071 1087 else:
1072 1088 return self.StopWindEstimation(error_code = 3)
1073 1089
1074 1090 '''***************************** CSPC Normalization *************************
1075 1091 The SPC spectra are used to normalize the cross-spectra. Peaks from precipitation
1076 1092 influence the norm which is not desired. First, a range is identified where the
1077 1093 wind peak is estimated -> sum_wind is sum of those frequencies. Next, the area
1078 1094 around it gets cut off and values replaced by mean determined by the boundary
1079 1095 data -> sum_noise (spc is not normalized here, that's why the noise is important)
1080 1096
1081 1097 The sums are then added and multiplied by range/datapoints, because you need
1082 1098 an integral and not a sum for normalization.
1083 1099
1084 1100 A norm is found according to Briggs 92.
1085 1101 '''
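# The normalization actually applied in the loop below is
#   CSPC_ij_norm = CSPC_ij / ( sqrt( sum(SPC_i) * sum(SPC_j) ) * delta_x ),
# i.e. each cross-spectrum is scaled by the geometric mean of the two raw self-spectra
# integrals (noise included, as discussed above); the pair phase is then
# arctan2(imag, real) of the normalized cross-spectrum.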
1086 1102 # for each pair
1087 1103 for i in range(nPair):
1088 1104 cspc_norm = cspc[i,:].copy()
1089 1105 chan_index0 = pairsList[i][0]
1090 1106 chan_index1 = pairsList[i][1]
1091 1107 CSPC_Samples[i] = cspc_norm / (numpy.sqrt(numpy.nansum(spc_norm[chan_index0])*numpy.nansum(spc_norm[chan_index1])) * delta_x)
1092 1108 phase[i] = numpy.arctan2(CSPC_Samples[i].imag, CSPC_Samples[i].real)
1093 1109
1094 1110 CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPC_Samples[0,xvalid]), xSamples_zoom),
1095 1111 self.Moments(numpy.abs(CSPC_Samples[1,xvalid]), xSamples_zoom),
1096 1112 self.Moments(numpy.abs(CSPC_Samples[2,xvalid]), xSamples_zoom)])
1097 1113
1098 1114 popt01, popt02, popt12 = [1e-10,0,1e-10], [1e-10,0,1e-10] ,[1e-10,0,1e-10]
1099 1115 FitGauss01, FitGauss02, FitGauss12 = numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples))
1100 1116
1101 1117 '''*******************************FIT GAUSS CSPC************************************'''
1102 1118 try:
1103 1119 popt01,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[0][xvalid]),p0=CSPCmoments[0])
1104 1120 if popt01[2] > widthlimit: # CONDITION
1105 1121 return self.StopWindEstimation(error_code = 4)
1106 1122 popt02,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[1][xvalid]),p0=CSPCmoments[1])
1107 1123 if popt02[2] > widthlimit: # CONDITION
1108 1124 return self.StopWindEstimation(error_code = 4)
1109 1125 popt12,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[2][xvalid]),p0=CSPCmoments[2])
1110 1126 if popt12[2] > widthlimit: # CONDITION
1111 1127 return self.StopWindEstimation(error_code = 4)
1112 1128
1113 1129 FitGauss01 = self.gaus(xSamples_zoom, *popt01)
1114 1130 FitGauss02 = self.gaus(xSamples_zoom, *popt02)
1115 1131 FitGauss12 = self.gaus(xSamples_zoom, *popt12)
1116 1132 except:
1117 1133 return self.StopWindEstimation(error_code = 5)
1118 1134
1119 1135
1120 1136 '''************* Getting Fij ***************'''
1121 1137 # x-axis point of the gaussian where the center is located from GaussFit of spectra
1122 1138 GaussCenter = popt[1]
1123 1139 ClosestCenter = xSamples_zoom[numpy.abs(xSamples_zoom-GaussCenter).argmin()]
1124 1140 PointGauCenter = numpy.where(xSamples_zoom==ClosestCenter)[0][0]
1125 1141
1126 1142 # Point where e^-1 is located in the gaussian
1127 1143 PeMinus1 = numpy.max(FitGauss) * numpy.exp(-1)
1128 1144 FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # The closest point to "PeMinus1" in "FitGauss"
1129 1145 PointFij = numpy.where(FitGauss==FijClosest)[0][0]
1130 1146 Fij = numpy.abs(xSamples_zoom[PointFij] - xSamples_zoom[PointGauCenter])
1131 1147
1132 1148 '''********** Taking frequency ranges from mean SPCs **********'''
1133 1149 GauWidth = popt[2] * 3/2 # Bandwidth of Gau01
1134 1150 Range = numpy.empty(2)
1135 1151 Range[0] = GaussCenter - GauWidth
1136 1152 Range[1] = GaussCenter + GauWidth
1137 1153 # Point in x-axis where the bandwidth is located (min:max)
1138 1154 ClosRangeMin = xSamples_zoom[numpy.abs(xSamples_zoom-Range[0]).argmin()]
1139 1155 ClosRangeMax = xSamples_zoom[numpy.abs(xSamples_zoom-Range[1]).argmin()]
1140 1156 PointRangeMin = numpy.where(xSamples_zoom==ClosRangeMin)[0][0]
1141 1157 PointRangeMax = numpy.where(xSamples_zoom==ClosRangeMax)[0][0]
1142 1158 Range = numpy.array([ PointRangeMin, PointRangeMax ])
1143 1159 FrecRange = xSamples_zoom[ Range[0] : Range[1] ]
1144 1160
1145 1161 '''************************** Getting Phase Slope ***************************'''
1146 1162 for i in range(nPair):
1147 1163 if len(FrecRange) > 5:
1148 1164 PhaseRange = phase[i, xvalid[0][Range[0]:Range[1]]].copy()
1149 1165 mask = ~numpy.isnan(FrecRange) & ~numpy.isnan(PhaseRange)
1150 1166 if len(FrecRange) == len(PhaseRange):
1151 1167 try:
1152 1168 slope, intercept, _, _, _ = stats.linregress(FrecRange[mask], self.AntiAliasing(PhaseRange[mask], 4.5))
1153 1169 PhaseSlope[i] = slope
1154 1170 PhaseInter[i] = intercept
1155 1171 except:
1156 1172 return self.StopWindEstimation(error_code = 6)
1157 1173 else:
1158 1174 return self.StopWindEstimation(error_code = 7)
1159 1175 else:
1160 1176 return self.StopWindEstimation(error_code = 8)
1161 1177
1162 1178 '''*** Constants A-H correspond to the convention as in Briggs and Vincent 1992 ***'''
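# Sketch of the linear algebra that follows (assuming pairsList orders the pairs as
# (0,1), (0,2), (1,2)): with C = (Fij*pi)^2,
#   [F, G] solves  [[Xi02, Eta02], [Xi12, Eta12]] . [F, G]^T = -C/(2*pi) * [slope02, slope12]^T
#   [A, B, H] solves the 3x3 system built from the fitted cross-spectral peak widths W01, W02, W12
#   [[A, H], [H, B]] . [Vmer, Vzon]^T = [-F, -G]^T  finally gives the horizontal wind.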
1163 1179
1164 1180 '''Getting constant C'''
1165 1181 cC=(Fij*numpy.pi)**2
1166 1182
1167 1183 '''****** Getting constants F and G ******'''
1168 1184 MijEijNij = numpy.array([[Xi02,Eta02], [Xi12,Eta12]])
1169 1185 # MijEijNij = numpy.array([[Xi01,Eta01], [Xi02,Eta02], [Xi12,Eta12]])
1170 1186 # MijResult0 = (-PhaseSlope[0] * cC) / (2*numpy.pi)
1171 1187 MijResult1 = (-PhaseSlope[1] * cC) / (2*numpy.pi)
1172 1188 MijResult2 = (-PhaseSlope[2] * cC) / (2*numpy.pi)
1173 1189 # MijResults = numpy.array([MijResult0, MijResult1, MijResult2])
1174 1190 MijResults = numpy.array([MijResult1, MijResult2])
1175 1191 (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
1176 1192
1177 1193 '''****** Getting constants A, B and H ******'''
1178 1194 W01 = numpy.nanmax( FitGauss01 )
1179 1195 W02 = numpy.nanmax( FitGauss02 )
1180 1196 W12 = numpy.nanmax( FitGauss12 )
1181 1197
1182 1198 WijResult01 = ((cF * Xi01 + cG * Eta01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi / cC))
1183 1199 WijResult02 = ((cF * Xi02 + cG * Eta02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi / cC))
1184 1200 WijResult12 = ((cF * Xi12 + cG * Eta12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi / cC))
1185 1201 WijResults = numpy.array([WijResult01, WijResult02, WijResult12])
1186 1202
1187 1203 WijEijNij = numpy.array([ [Xi01**2, Eta01**2, 2*Xi01*Eta01] , [Xi02**2, Eta02**2, 2*Xi02*Eta02] , [Xi12**2, Eta12**2, 2*Xi12*Eta12] ])
1188 1204 (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
1189 1205
1190 1206 VxVy = numpy.array([[cA,cH],[cH,cB]])
1191 1207 VxVyResults = numpy.array([-cF,-cG])
1192 1208 (Vmer,Vzon) = numpy.linalg.solve(VxVy, VxVyResults)
1193 1209 Vver = -SPCMoments[1]*SPEED_OF_LIGHT/(2*radfreq)
1194 1210 error_code = 0
1195 1211
1196 1212 return Vzon, Vmer, Vver, error_code
1197 1213
1198 1214 class SpectralMoments(Operation):
1199 1215
1200 1216 '''
1201 1217 Function SpectralMoments()
1202 1218
1203 1219 Calculates moments (power, mean, standard deviation) and SNR of the signal
1204 1220
1205 1221 Type of dataIn: Spectra
1206 1222
1207 1223 Configuration Parameters:
1208 1224
1209 1225 dirCosx : Direction cosine in the X axis
1210 1226 dirCosy : Direction cosine in the Y axis
1211 1227
1212 1228 elevation :
1213 1229 azimuth :
1214 1230
1215 1231 Input:
1216 1232 channelList : simple channel list to select e.g. [2,3,7]
1217 1233 self.dataOut.data_pre : Spectral data
1218 1234 self.dataOut.abscissaList : List of frequencies
1219 1235 self.dataOut.noise : Noise level per channel
1220 1236
1221 1237 Affected:
1222 1238 self.dataOut.moments : Parameters per channel
1223 1239 self.dataOut.data_snr : SNR per channel
1224 1240
1225 1241 '''
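# Note on the output layout of run(): per channel a 4 x nHeights block ordered as
# [SNR, power, Doppler shift, spectral width]; dataOut.moments keeps the last three,
# while data_snr / data_pow / data_dop / data_width expose the individual rows.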
1226 1242
1227 1243 def run(self, dataOut):
1228 1244
1229 1245 data = dataOut.data_pre[0]
1230 1246 absc = dataOut.abscissaList[:-1]
1231 1247 noise = dataOut.noise
1232 1248 nChannel = data.shape[0]
1233 1249 data_param = numpy.zeros((nChannel, 4, data.shape[2]))
1234 1250
1235 1251 for ind in range(nChannel):
1236 1252 data_param[ind,:,:] = self.__calculateMoments( data[ind,:,:] , absc , noise[ind] )
1237 1253
1238 1254 dataOut.moments = data_param[:,1:,:]
1239 1255 dataOut.data_snr = data_param[:,0]
1240 1256 dataOut.data_pow = data_param[:,1]
1241 1257 dataOut.data_dop = data_param[:,2]
1242 1258 dataOut.data_width = data_param[:,3]
1243 1259
1244 1260 return dataOut
1245 1261
1246 1262 def __calculateMoments(self, oldspec, oldfreq, n0,
1247 1263 nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
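# For each height the code below locates the spectral peak, walks outward until the
# (optionally smoothed) spectrum drops below the noise level n0, and over that valid
# window computes, with w = fwindow:
#   power = mean(spec * w)
#   fd    = sum((spec - n0) * f * w) / sum((spec - n0) * w)
#   width = sqrt( sum((spec - n0) * w * (f - fd)^2) / sum((spec - n0) * w) )
#   snr   = (mean(spec) - n0) / n0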
1248 1264
1249 1265 if (nicoh is None): nicoh = 1
1250 1266 if (graph is None): graph = 0
1251 1267 if (smooth is None): smooth = 0
1252 1268 elif (smooth < 3): smooth = 0
1253 1269
1254 1270 if (type1 is None): type1 = 0
1255 1271 if (fwindow is None): fwindow = numpy.zeros(oldfreq.size) + 1
1256 1272 if (snrth is None): snrth = -3
1257 1273 if (dc is None): dc = 0
1258 1274 if (aliasing is None): aliasing = 0
1259 1275 if (oldfd is None): oldfd = 0
1260 1276 if (wwauto is None): wwauto = 0
1261 1277
1262 1278 if (n0 < 1.e-20): n0 = 1.e-20
1263 1279
1264 1280 freq = oldfreq
1265 1281 vec_power = numpy.zeros(oldspec.shape[1])
1266 1282 vec_fd = numpy.zeros(oldspec.shape[1])
1267 1283 vec_w = numpy.zeros(oldspec.shape[1])
1268 1284 vec_snr = numpy.zeros(oldspec.shape[1])
1269 1285
1270 1286 # oldspec = numpy.ma.masked_invalid(oldspec)
1271 1287
1272 1288 for ind in range(oldspec.shape[1]):
1273 1289
1274 1290 spec = oldspec[:,ind]
1275 1291 aux = spec*fwindow
1276 1292 max_spec = aux.max()
1277 1293 m = aux.tolist().index(max_spec)
1278 1294
1279 1295 # Smooth
1280 1296 if (smooth == 0):
1281 1297 spec2 = spec
1282 1298 else:
1283 1299 spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1284 1300
1285 1301 # Moments Estimation
1286 1302 bb = spec2[numpy.arange(m,spec2.size)]
1287 1303 bb = (bb<n0).nonzero()
1288 1304 bb = bb[0]
1289 1305
1290 1306 ss = spec2[numpy.arange(0,m + 1)]
1291 1307 ss = (ss<n0).nonzero()
1292 1308 ss = ss[0]
1293 1309
1294 1310 if (bb.size == 0):
1295 1311 bb0 = spec.size - 1 - m
1296 1312 else:
1297 1313 bb0 = bb[0] - 1
1298 1314 if (bb0 < 0):
1299 1315 bb0 = 0
1300 1316
1301 1317 if (ss.size == 0):
1302 1318 ss1 = 1
1303 1319 else:
1304 1320 ss1 = max(ss) + 1
1305 1321
1306 1322 if (ss1 > m):
1307 1323 ss1 = m
1308 1324
1309 1325 valid = numpy.arange(int(m + bb0 - ss1 + 1)) + ss1
1310 1326
1311 1327 signal_power = ((spec2[valid] - n0) * fwindow[valid]).mean() # D. Scipión added with correct definition
1312 1328 total_power = (spec2[valid] * fwindow[valid]).mean() # D. Scipión added with correct definition
1329 power = ((spec2[valid] - n0) * fwindow[valid]).sum()
1314 1330 fd = ((spec2[valid]- n0)*freq[valid] * fwindow[valid]).sum() / power
1315 1331 w = numpy.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum() / power)
1316 1332 snr = (spec2.mean()-n0)/n0
1317 1333 if (snr < 1.e-20) :
1318 1334 snr = 1.e-20
1319 1335
1320 1336 # vec_power[ind] = power #D. Scipión replaced with the line below
1321 1337 vec_power[ind] = total_power
1322 1338 vec_fd[ind] = fd
1323 1339 vec_w[ind] = w
1324 1340 vec_snr[ind] = snr
1325 1341
1326 1342 return numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
1327 1343
1328 1344 #------------------ Get SA Parameters --------------------------
1329 1345
1330 1346 def GetSAParameters(self):
1331 1347 #SA in the frequency domain
1332 1348 pairslist = self.dataOut.groupList
1333 1349 num_pairs = len(pairslist)
1334 1350
1335 1351 vel = self.dataOut.abscissaList
1336 1352 spectra = self.dataOut.data_pre
1337 1353 cspectra = self.dataIn.data_cspc
1338 1354 delta_v = vel[1] - vel[0]
1339 1355
1340 1356 #Calculating the power spectrum
1341 1357 spc_pow = numpy.sum(spectra, 3)*delta_v
1342 1358 #Normalizing Spectra
1343 1359 norm_spectra = spectra/spc_pow
1344 1360 #Calculating the norm_spectra at peak
1345 1361 max_spectra = numpy.max(norm_spectra, 3)
1346 1362
1347 1363 #Normalizing Cross Spectra
1348 1364 norm_cspectra = numpy.zeros(cspectra.shape)
1349 1365
1350 1366 for i in range(num_pairs):
1351 1367 norm_cspectra[i,:,:] = cspectra[i,:,:]/numpy.sqrt(spc_pow[pairslist[i][0],:]*spc_pow[pairslist[i][1],:])
1352 1368
1353 1369 max_cspectra = numpy.max(norm_cspectra,2)
1354 1370 max_cspectra_index = numpy.argmax(norm_cspectra, 2)
1355 1371
1356 1372 for i in range(num_pairs):
1357 1373 cspc_par[i,:,:] = __calculateMoments(norm_cspectra)
1358 1374 #------------------- Get Lags ----------------------------------
1359 1375
1360 1376 class SALags(Operation):
1361 1377 '''
1362 1378 Function SALags()
1363 1379
1364 1380 Input:
1365 1381 self.dataOut.data_pre
1366 1382 self.dataOut.abscissaList
1367 1383 self.dataOut.noise
1368 1384 self.dataOut.normFactor
1369 1385 self.dataOut.data_snr
1370 1386 self.dataOut.groupList
1371 1387 self.dataOut.nChannels
1372 1388
1373 1389 Affected:
1374 1390 self.dataOut.data_param
1375 1391
1376 1392 '''
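# Output layout (see __calculateTaus below): data_param stacks the cross-correlation
# lags tau_ccf, the matching auto-correlation lags tau_acf and, as the last row, the
# lag-1 phase of the mean ACF, giving (2*len(pairs_ccf) + 1) rows by nHeights columns.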
1377 1393 def run(self, dataOut):
1378 1394 data_acf = dataOut.data_pre[0]
1379 1395 data_ccf = dataOut.data_pre[1]
1380 1396 normFactor_acf = dataOut.normFactor[0]
1381 1397 normFactor_ccf = dataOut.normFactor[1]
1382 1398 pairs_acf = dataOut.groupList[0]
1383 1399 pairs_ccf = dataOut.groupList[1]
1384 1400
1385 1401 nHeights = dataOut.nHeights
1386 1402 absc = dataOut.abscissaList
1387 1403 noise = dataOut.noise
1388 1404 SNR = dataOut.data_snr
1389 1405 nChannels = dataOut.nChannels
1390 1406 # pairsList = dataOut.groupList
1391 1407 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairsList, nChannels)
1392 1408
1393 1409 for l in range(len(pairs_acf)):
1394 1410 data_acf[l,:,:] = data_acf[l,:,:]/normFactor_acf[l,:]
1395 1411
1396 1412 for l in range(len(pairs_ccf)):
1397 1413 data_ccf[l,:,:] = data_ccf[l,:,:]/normFactor_ccf[l,:]
1398 1414
1399 1415 dataOut.data_param = numpy.zeros((len(pairs_ccf)*2 + 1, nHeights))
1400 1416 dataOut.data_param[:-1,:] = self.__calculateTaus(data_acf, data_ccf, absc)
1401 1417 dataOut.data_param[-1,:] = self.__calculateLag1Phase(data_acf, absc)
1402 1418 return
1403 1419
1404 1420 # def __getPairsAutoCorr(self, pairsList, nChannels):
1405 1421 #
1406 1422 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1407 1423 #
1408 1424 # for l in range(len(pairsList)):
1409 1425 # firstChannel = pairsList[l][0]
1410 1426 # secondChannel = pairsList[l][1]
1411 1427 #
1412 1428 # #Getting autocorrelation pairs
1413 1429 # if firstChannel == secondChannel:
1414 1430 # pairsAutoCorr[firstChannel] = int(l)
1415 1431 #
1416 1432 # pairsAutoCorr = pairsAutoCorr.astype(int)
1417 1433 #
1418 1434 # pairsCrossCorr = range(len(pairsList))
1419 1435 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1420 1436 #
1421 1437 # return pairsAutoCorr, pairsCrossCorr
1422 1438
1423 1439 def __calculateTaus(self, data_acf, data_ccf, lagRange):
1424 1440
1425 1441 lag0 = data_acf.shape[1]//2 # integer index of the zero lag
1426 1442 #Autocorrelation function
1427 1443 mean_acf = stats.nanmean(data_acf, axis = 0)
1428 1444
1429 1445 #Get TauCross index
1430 1446 ind_ccf = data_ccf.argmax(axis = 1)
1431 1447 #Get TauAuto index
1432 1448 ind_acf = numpy.zeros(ind_ccf.shape,dtype = 'int')
1433 1449 ccf_lag0 = data_ccf[:,lag0,:]
1434 1450
1435 1451 for i in range(ccf_lag0.shape[0]):
1436 1452 ind_acf[i,:] = numpy.abs(mean_acf - ccf_lag0[i,:]).argmin(axis = 0)
1437 1453
1438 1454 #Get TauCross and TauAuto
1439 1455 tau_ccf = lagRange[ind_ccf]
1440 1456 tau_acf = lagRange[ind_acf]
1441 1457
1442 1458 Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0])
1443 1459
1444 1460 tau_ccf[Nan1,Nan2] = numpy.nan
1445 1461 tau_acf[Nan1,Nan2] = numpy.nan
1446 1462 tau = numpy.vstack((tau_ccf,tau_acf))
1447 1463
1448 1464 return tau
1449 1465
1450 1466 def __calculateLag1Phase(self, data, lagTRange):
1451 1467 data1 = stats.nanmean(data, axis = 0)
1452 1468 lag1 = numpy.where(lagTRange == 0)[0][0] + 1
1453 1469
1454 1470 phase = numpy.angle(data1[lag1,:])
1455 1471
1456 1472 return phase
1457 1473
1458 1474 class SpectralFitting(Operation):
1459 1475 '''
1460 1476 Function SpectralFitting()
1461 1477
1462 1478 Input:
1463 1479 Output:
1464 1480 Variables modified:
1465 1481 '''
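# Rough picture of run() below: for every channel group and height a covariance matrix D
# is built from the (cross-)spectra, the data are whitened with the Cholesky factor of
# D^-1 (dp = L^T d), and the model from the external "library" module is least-squares
# fitted so that L^T f(p) matches dp; the fitted parameters go to dataOut.data_param and
# the chi-square / Jacobian errors to dataOut.data_error.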
1466 1482
1467 1483 def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
1468 1484
1469 1485
1470 1486 if path is not None:
1471 1487 sys.path.append(path)
1472 1488 self.dataOut.library = importlib.import_module(file)
1473 1489
1474 1490 #To be inserted as a parameter
1475 1491 groupArray = numpy.array(groupList)
1476 1492 # groupArray = numpy.array([[0,1],[2,3]])
1477 1493 self.dataOut.groupList = groupArray
1478 1494
1479 1495 nGroups = groupArray.shape[0]
1480 1496 nChannels = self.dataIn.nChannels
1481 1497 nHeights=self.dataIn.heightList.size
1482 1498
1483 1499 #Parameters Array
1484 1500 self.dataOut.data_param = None
1485 1501
1486 1502 #Set constants
1487 1503 constants = self.dataOut.library.setConstants(self.dataIn)
1488 1504 self.dataOut.constants = constants
1489 1505 M = self.dataIn.normFactor
1490 1506 N = self.dataIn.nFFTPoints
1491 1507 ippSeconds = self.dataIn.ippSeconds
1492 1508 K = self.dataIn.nIncohInt
1493 1509 pairsArray = numpy.array(self.dataIn.pairsList)
1494 1510
1495 1511 #List of possible combinations
1496 1512 listComb = list(itertools.combinations(numpy.arange(groupArray.shape[1]),2)) # materialized so it can be iterated more than once below
1497 1513 indCross = numpy.zeros(len(listComb), dtype = 'int')
1498 1514
1499 1515 if getSNR:
1500 1516 listChannels = groupArray.reshape((groupArray.size))
1501 1517 listChannels.sort()
1502 1518 noise = self.dataIn.getNoise()
1503 1519 self.dataOut.data_snr = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels])
1504 1520
1505 1521 for i in range(nGroups):
1506 1522 coord = groupArray[i,:]
1507 1523
1508 1524 #Input data array
1509 1525 data = self.dataIn.data_spc[coord,:,:]/(M*N)
1510 1526 data = data.reshape((data.shape[0]*data.shape[1],data.shape[2]))
1511 1527
1512 1528 #Cross Spectra data array for Covariance Matrixes
1513 1529 ind = 0
1514 1530 for pairs in listComb:
1515 1531 pairsSel = numpy.array([coord[pairs[0]],coord[pairs[1]]])
1516 1532 indCross[ind] = int(numpy.where(numpy.all(pairsArray == pairsSel, axis = 1))[0][0])
1517 1533 ind += 1
1518 1534 dataCross = self.dataIn.data_cspc[indCross,:,:]/(M*N)
1519 1535 dataCross = dataCross**2/K
1520 1536
1521 1537 for h in range(nHeights):
1522 1538
1523 1539 #Input
1524 1540 d = data[:,h]
1525 1541
1526 1542 #Covariance Matrix
1527 1543 D = numpy.diag(d**2/K)
1528 1544 ind = 0
1529 1545 for pairs in listComb:
1530 1546 #Coordinates in Covariance Matrix
1531 1547 x = pairs[0]
1532 1548 y = pairs[1]
1533 1549 #Channel Index
1534 1550 S12 = dataCross[ind,:,h]
1535 1551 D12 = numpy.diag(S12)
1536 1552 #Completing Covariance Matrix with Cross Spectras
1537 1553 D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
1538 1554 D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
1539 1555 ind += 1
1540 1556 Dinv=numpy.linalg.inv(D)
1541 1557 L=numpy.linalg.cholesky(Dinv)
1542 1558 LT=L.T
1543 1559
1544 1560 dp = numpy.dot(LT,d)
1545 1561
1546 1562 #Initial values
1547 1563 data_spc = self.dataIn.data_spc[coord,:,h]
1548 1564
1549 1565 if (h>0)and(error1[3]<5):
1550 1566 p0 = self.dataOut.data_param[i,:,h-1]
1551 1567 else:
1552 1568 p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i))
1553 1569
1554 1570 try:
1555 1571 #Least Squares
1556 1572 minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
1557 1573 # minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
1558 1574 #Chi square error
1559 1575 error0 = numpy.sum(infodict['fvec']**2)/(2*N)
1560 1576 #Error with Jacobian
1561 1577 error1 = self.dataOut.library.errorFunction(minp,constants,LT)
1562 1578 except:
1563 1579 minp = p0*numpy.nan
1564 1580 error0 = numpy.nan
1565 1581 error1 = p0*numpy.nan
1566 1582
1567 1583 #Save
1568 1584 if self.dataOut.data_param is None:
1569 1585 self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
1570 1586 self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
1571 1587
1572 1588 self.dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
1573 1589 self.dataOut.data_param[i,:,h] = minp
1574 1590 return
1575 1591
1576 1592 def __residFunction(self, p, dp, LT, constants):
1577 1593
1578 1594 fm = self.dataOut.library.modelFunction(p, constants)
1579 1595 fmp=numpy.dot(LT,fm)
1580 1596
1581 1597 return dp-fmp
1582 1598
1583 1599 def __getSNR(self, z, noise):
1584 1600
1585 1601 avg = numpy.average(z, axis=1)
1586 1602 SNR = (avg.T-noise)/noise
1587 1603 SNR = SNR.T
1588 1604 return SNR
1589 1605
1590 1606 def __chisq(p,chindex,hindex):
1591 1607 #similar to Resid but calculates CHI**2
1592 1608 [LT,d,fm]=setupLTdfm(p,chindex,hindex)
1593 1609 dp=numpy.dot(LT,d)
1594 1610 fmp=numpy.dot(LT,fm)
1595 1611 chisq=numpy.dot((dp-fmp).T,(dp-fmp))
1596 1612 return chisq
1597 1613
1598 1614 class WindProfiler(Operation):
1599 1615
1600 1616 __isConfig = False
1601 1617
1602 1618 __initime = None
1603 1619 __lastdatatime = None
1604 1620 __integrationtime = None
1605 1621
1606 1622 __buffer = None
1607 1623
1608 1624 __dataReady = False
1609 1625
1610 1626 __firstdata = None
1611 1627
1612 1628 n = None
1613 1629
1614 1630 def __init__(self):
1615 1631 Operation.__init__(self)
1616 1632
1617 1633 def __calculateCosDir(self, elev, azim):
1618 1634 zen = (90 - elev)*numpy.pi/180
1619 1635 azim = azim*numpy.pi/180
1620 1636 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
1621 1637 cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2)
1622 1638
1623 1639 signX = numpy.sign(numpy.cos(azim))
1624 1640 signY = numpy.sign(numpy.sin(azim))
1625 1641
1626 1642 cosDirX = numpy.copysign(cosDirX, signX)
1627 1643 cosDirY = numpy.copysign(cosDirY, signY)
1628 1644 return cosDirX, cosDirY
1629 1645
1630 1646 def __calculateAngles(self, theta_x, theta_y, azimuth):
1631 1647
1632 1648 dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2)
1633 1649 zenith_arr = numpy.arccos(dir_cosw)
1634 1650 azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180
1635 1651
1636 1652 dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr)
1637 1653 dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr)
1638 1654
1639 1655 return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw
1640 1656
1641 1657 def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly):
1642 1658
1643 1659 #
1644 1660 if horOnly:
1645 1661 A = numpy.c_[dir_cosu,dir_cosv]
1646 1662 else:
1647 1663 A = numpy.c_[dir_cosu,dir_cosv,dir_cosw]
1648 1664 A = numpy.asmatrix(A)
1649 1665 A1 = numpy.linalg.inv(A.transpose()*A)*A.transpose()
1650 1666
1651 1667 return A1
1652 1668
1653 1669 def __correctValues(self, heiRang, phi, velRadial, SNR):
1654 1670 listPhi = phi.tolist()
1655 1671 maxid = listPhi.index(max(listPhi))
1656 1672 minid = listPhi.index(min(listPhi))
1657 1673
1658 1674 rango = list(range(len(phi)))
1659 1675 # rango = numpy.delete(rango,maxid)
1660 1676
1661 1677 heiRang1 = heiRang*math.cos(phi[maxid])
1662 1678 heiRangAux = heiRang*math.cos(phi[minid])
1663 1679 indOut = (heiRang1 < heiRangAux[0]).nonzero()
1664 1680 heiRang1 = numpy.delete(heiRang1,indOut)
1665 1681
1666 1682 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
1667 1683 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
1668 1684
1669 1685 for i in rango:
1670 1686 x = heiRang*math.cos(phi[i])
1671 1687 y1 = velRadial[i,:]
1672 1688 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
1673 1689
1674 1690 x1 = heiRang1
1675 1691 y11 = f1(x1)
1676 1692
1677 1693 y2 = SNR[i,:]
1678 1694 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
1679 1695 y21 = f2(x1)
1680 1696
1681 1697 velRadial1[i,:] = y11
1682 1698 SNR1[i,:] = y21
1683 1699
1684 1700 return heiRang1, velRadial1, SNR1
1685 1701
1686 1702 def __calculateVelUVW(self, A, velRadial):
1687 1703
1688 1704 #Matrix operation
1689 1705 # velUVW = numpy.zeros((velRadial.shape[1],3))
1690 1706 # for ind in range(velRadial.shape[1]):
1691 1707 # velUVW[ind,:] = numpy.dot(A,velRadial[:,ind])
1692 1708 # velUVW = velUVW.transpose()
1693 1709 velUVW = numpy.zeros((A.shape[0],velRadial.shape[1]))
1694 1710 velUVW[:,:] = numpy.dot(A,velRadial)
1695 1711
1696 1712
1697 1713 return velUVW
1698 1714
1699 1715 # def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0):
1700 1716
1701 1717 def techniqueDBS(self, kwargs):
1702 1718 """
1703 1719 Function that implements Doppler Beam Swinging (DBS) technique.
1704 1720
1705 1721 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1706 1722 Direction correction (if necessary), Ranges and SNR
1707 1723
1708 1724 Output: Winds estimation (Zonal, Meridional and Vertical)
1709 1725
1710 1726 Parameters affected: Winds, height range, SNR
1711 1727 """
1712 1728 velRadial0 = kwargs['velRadial']
1713 1729 heiRang = kwargs['heightList']
1714 1730 SNR0 = kwargs['SNR']
1715 1731
1716 1732 if 'dirCosx' in kwargs and 'dirCosy' in kwargs:
1717 1733 theta_x = numpy.array(kwargs['dirCosx'])
1718 1734 theta_y = numpy.array(kwargs['dirCosy'])
1719 1735 else:
1720 1736 elev = numpy.array(kwargs['elevation'])
1721 1737 azim = numpy.array(kwargs['azimuth'])
1722 1738 theta_x, theta_y = self.__calculateCosDir(elev, azim)
1723 1739 azimuth = kwargs['correctAzimuth']
1724 1740 if 'horizontalOnly' in kwargs:
1725 1741 horizontalOnly = kwargs['horizontalOnly']
1726 1742 else: horizontalOnly = False
1727 1743 if 'correctFactor' in kwargs:
1728 1744 correctFactor = kwargs['correctFactor']
1729 1745 else: correctFactor = 1
1730 1746 if 'channelList' in kwargs:
1731 1747 channelList = kwargs['channelList']
1732 1748 if len(channelList) == 2:
1733 1749 horizontalOnly = True
1734 1750 arrayChannel = numpy.array(channelList)
1735 1751 param = param[arrayChannel,:,:]
1736 1752 theta_x = theta_x[arrayChannel]
1737 1753 theta_y = theta_y[arrayChannel]
1738 1754
1739 1755 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
1740 1756 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
1741 1757 A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly)
1742 1758
1743 1759 #Computing the velocity components with DBS
1744 1760 winds = self.__calculateVelUVW(A,velRadial1)
1745 1761
1746 1762 return winds, heiRang1, SNR1
1747 1763
1748 1764 def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None):
1749 1765
1750 1766 nPairs = len(pairs_ccf)
1751 1767 posx = numpy.asarray(posx)
1752 1768 posy = numpy.asarray(posy)
1753 1769
1754 1770 #Inverse rotation to align with the azimuth
1755 1771 if azimuth is not None:
1756 1772 azimuth = azimuth*math.pi/180
1757 1773 posx1 = posx*math.cos(azimuth) + posy*math.sin(azimuth)
1758 1774 posy1 = -posx*math.sin(azimuth) + posy*math.cos(azimuth)
1759 1775 else:
1760 1776 posx1 = posx
1761 1777 posy1 = posy
1762 1778
1763 1779 #Distance computation
1764 1780 distx = numpy.zeros(nPairs)
1765 1781 disty = numpy.zeros(nPairs)
1766 1782 dist = numpy.zeros(nPairs)
1767 1783 ang = numpy.zeros(nPairs)
1768 1784
1769 1785 for i in range(nPairs):
1770 1786 distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]]
1771 1787 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
1772 1788 dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2)
1773 1789 ang[i] = numpy.arctan2(disty[i],distx[i])
1774 1790
1775 1791 return distx, disty, dist, ang
1776 1792 #Matrix computation
1777 1793 # nPairs = len(pairs)
1778 1794 # ang1 = numpy.zeros((nPairs, 2, 1))
1779 1795 # dist1 = numpy.zeros((nPairs, 2, 1))
1780 1796 #
1781 1797 # for j in range(nPairs):
1782 1798 # dist1[j,0,0] = dist[pairs[j][0]]
1783 1799 # dist1[j,1,0] = dist[pairs[j][1]]
1784 1800 # ang1[j,0,0] = ang[pairs[j][0]]
1785 1801 # ang1[j,1,0] = ang[pairs[j][1]]
1786 1802 #
1787 1803 # return distx,disty, dist1,ang1
1788 1804
1789 1805
1790 1806 def __calculateVelVer(self, phase, lagTRange, _lambda):
1791 1807
1792 1808 Ts = lagTRange[1] - lagTRange[0]
1793 1809 velW = -_lambda*phase/(4*math.pi*Ts)
1794 1810
1795 1811 return velW
1796 1812
1797 1813 def __calculateVelHorDir(self, dist, tau1, tau2, ang):
1798 1814 nPairs = tau1.shape[0]
1799 1815 nHeights = tau1.shape[1]
1800 1816 vel = numpy.zeros((nPairs,3,nHeights))
1801 1817 dist1 = numpy.reshape(dist, (dist.size,1))
1802 1818
1803 1819 angCos = numpy.cos(ang)
1804 1820 angSin = numpy.sin(ang)
1805 1821
1806 1822 vel0 = dist1*tau1/(2*tau2**2)
1807 1823 vel[:,0,:] = (vel0*angCos).sum(axis = 1)
1808 1824 vel[:,1,:] = (vel0*angSin).sum(axis = 1)
1809 1825
1810 1826 ind = numpy.where(numpy.isinf(vel))
1811 1827 vel[ind] = numpy.nan
1812 1828
1813 1829 return vel
1814 1830
1815 1831 # def __getPairsAutoCorr(self, pairsList, nChannels):
1816 1832 #
1817 1833 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1818 1834 #
1819 1835 # for l in range(len(pairsList)):
1820 1836 # firstChannel = pairsList[l][0]
1821 1837 # secondChannel = pairsList[l][1]
1822 1838 #
1823 1839 # #Getting autocorrelation pairs
1824 1840 # if firstChannel == secondChannel:
1825 1841 # pairsAutoCorr[firstChannel] = int(l)
1826 1842 #
1827 1843 # pairsAutoCorr = pairsAutoCorr.astype(int)
1828 1844 #
1829 1845 # pairsCrossCorr = range(len(pairsList))
1830 1846 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1831 1847 #
1832 1848 # return pairsAutoCorr, pairsCrossCorr
1833 1849
1834 1850 # def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor):
1835 1851 def techniqueSA(self, kwargs):
1836 1852
1837 1853 """
1838 1854 Function that implements Spaced Antenna (SA) technique.
1839 1855
1840 1856 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1841 1857 Direction correction (if necessary), Ranges and SNR
1842 1858
1843 1859 Output: Winds estimation (Zonal, Meridional and Vertical)
1844 1860
1845 1861 Parameters affected: Winds
1846 1862 """
1847 1863 position_x = kwargs['positionX']
1848 1864 position_y = kwargs['positionY']
1849 1865 azimuth = kwargs['azimuth']
1850 1866
1851 1867 if 'correctFactor' in kwargs:
1852 1868 correctFactor = kwargs['correctFactor']
1853 1869 else:
1854 1870 correctFactor = 1
1855 1871
1856 1872 groupList = kwargs['groupList']
1857 1873 pairs_ccf = groupList[1]
1858 1874 tau = kwargs['tau']
1859 1875 _lambda = kwargs['_lambda']
lagTRange = kwargs['lagTRange'] # lag-time axis, used by __calculateVelVer below
1860 1876
1861 1877 #Cross Correlation pairs obtained
1862 1878 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels)
1863 1879 # pairsArray = numpy.array(pairsList)[pairsCrossCorr]
1864 1880 # pairsSelArray = numpy.array(pairsSelected)
1865 1881 # pairs = []
1866 1882 #
1867 1883 # #Wind estimation pairs obtained
1868 1884 # for i in range(pairsSelArray.shape[0]/2):
1869 1885 # ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0]
1870 1886 # ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0]
1871 1887 # pairs.append((ind1,ind2))
1872 1888
1873 1889 indtau = tau.shape[0]//2 # integer split point between the tau1 and tau2 blocks
1874 1890 tau1 = tau[:indtau,:]
1875 1891 tau2 = tau[indtau:-1,:]
1876 1892 # tau1 = tau1[pairs,:]
1877 1893 # tau2 = tau2[pairs,:]
1878 1894 phase1 = tau[-1,:]
1879 1895
1880 1896 #---------------------------------------------------------------------
1881 1897 #Direct method
1882 1898 distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth)
1883 1899 winds = self.__calculateVelHorDir(dist, tau1, tau2, ang)
1884 1900 winds = stats.nanmean(winds, axis=0)
1885 1901 #---------------------------------------------------------------------
1886 1902 #General method
1887 1903 # distx, disty, dist = self.calculateDistance(position_x,position_y,pairsCrossCorr, pairsList, azimuth)
1888 1904 # #Computing the correlation-function coefficients
1889 1905 # F,G,A,B,H = self.calculateCoef(tau1,tau2,distx,disty,n)
1890 1906 # #Velocity computation
1891 1907 # winds = self.calculateVelUV(F,G,A,B,H)
1892 1908
1893 1909 #---------------------------------------------------------------------
1894 1910 winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda)
1895 1911 winds = correctFactor*winds
1896 1912 return winds
1897 1913
1898 1914 def __checkTime(self, currentTime, paramInterval, outputInterval):
1899 1915
1900 1916 dataTime = currentTime + paramInterval
1901 1917 deltaTime = dataTime - self.__initime
1902 1918
1903 1919 if deltaTime >= outputInterval or deltaTime < 0:
1904 1920 self.__dataReady = True
1905 1921 return
1906 1922
1907 1923 def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax):
1908 1924 '''
1909 1925 Function that implements winds estimation technique with detected meteors.
1910 1926
1911 1927 Input: Detected meteors, Minimum meteor quantity to wind estimation
1912 1928
1913 1929 Output: Winds estimation (Zonal and Meridional)
1914 1930
1915 1931 Parameters affected: Winds
1916 1932 '''
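# Sketch: meteors are grouped into (heightMax - heightMin)/2 height bins (2 km wide for
# the default 70-110 km range); in each bin with at least meteorThresh detections the
# radial velocities are fitted to  vrad ~= l*u + m*v, with l = sin(zen)*sin(azim) and
# m = sin(zen)*cos(azim), and the zonal/meridional winds are the least-squares solution
# (A^T A)^-1 A^T vrad.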
1917 1933 #Settings
1918 1934 nInt = (heightMax - heightMin)/2
1919 1935 nInt = int(nInt)
1920 1936 winds = numpy.zeros((2,nInt))*numpy.nan
1921 1937
1922 1938 #Filter errors
1923 1939 error = numpy.where(arrayMeteor[:,-1] == 0)[0]
1924 1940 finalMeteor = arrayMeteor[error,:]
1925 1941
1926 1942 #Meteor Histogram
1927 1943 finalHeights = finalMeteor[:,2]
1928 1944 hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax))
1929 1945 nMeteorsPerI = hist[0]
1930 1946 heightPerI = hist[1]
1931 1947
1932 1948 #Sort meteors by height
1933 1949 indSort = finalHeights.argsort()
1934 1950 finalMeteor2 = finalMeteor[indSort,:]
1935 1951
1936 1952 # Calculating winds
1937 1953 ind1 = 0
1938 1954 ind2 = 0
1939 1955
1940 1956 for i in range(nInt):
1941 1957 nMet = nMeteorsPerI[i]
1942 1958 ind1 = ind2
1943 1959 ind2 = ind1 + nMet
1944 1960
1945 1961 meteorAux = finalMeteor2[ind1:ind2,:]
1946 1962
1947 1963 if meteorAux.shape[0] >= meteorThresh:
1948 1964 vel = meteorAux[:, 6]
1949 1965 zen = meteorAux[:, 4]*numpy.pi/180
1950 1966 azim = meteorAux[:, 3]*numpy.pi/180
1951 1967
1952 1968 n = numpy.cos(zen)
1953 1969 # m = (1 - n**2)/(1 - numpy.tan(azim)**2)
1954 1970 # l = m*numpy.tan(azim)
1955 1971 l = numpy.sin(zen)*numpy.sin(azim)
1956 1972 m = numpy.sin(zen)*numpy.cos(azim)
1957 1973
1958 1974 A = numpy.vstack((l, m)).transpose()
1959 1975 A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose())
1960 1976 windsAux = numpy.dot(A1, vel)
1961 1977
1962 1978 winds[0,i] = windsAux[0]
1963 1979 winds[1,i] = windsAux[1]
1964 1980
1965 1981 return winds, heightPerI[:-1]
1966 1982
1967 1983 def techniqueNSM_SA(self, **kwargs):
1968 1984 metArray = kwargs['metArray']
1969 1985 heightList = kwargs['heightList']
1970 1986 timeList = kwargs['timeList']
1971 1987
1972 1988 rx_location = kwargs['rx_location']
1973 1989 groupList = kwargs['groupList']
1974 1990 azimuth = kwargs['azimuth']
1975 1991 dfactor = kwargs['dfactor']
1976 1992 k = kwargs['k']
1977 1993
1978 1994 azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth)
1979 1995 d = dist*dfactor
1980 1996 #Phase calculation
1981 1997 metArray1 = self.__getPhaseSlope(metArray, heightList, timeList)
1982 1998
1983 1999 metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities
1984 2000
1985 2001 velEst = numpy.zeros((heightList.size,2))*numpy.nan
1986 2002 azimuth1 = azimuth1*numpy.pi/180
1987 2003
1988 2004 for i in range(heightList.size):
1989 2005 h = heightList[i]
1990 2006 indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0]
1991 2007 metHeight = metArray1[indH,:]
1992 2008 if metHeight.shape[0] >= 2:
1993 2009 velAux = numpy.asmatrix(metHeight[:,-2]).T #Radial Velocities
1994 2010 iazim = metHeight[:,1].astype(int)
1995 2011 azimAux = numpy.asmatrix(azimuth1[iazim]).T #Azimuths
1996 2012 A = numpy.hstack((numpy.cos(azimAux),numpy.sin(azimAux)))
1997 2013 A = numpy.asmatrix(A)
1998 2014 A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose()
1999 2015 velHor = numpy.dot(A1,velAux)
2000 2016
2001 2017 velEst[i,:] = numpy.squeeze(velHor)
2002 2018 return velEst
2003 2019
2004 2020 def __getPhaseSlope(self, metArray, heightList, timeList):
2005 2021 meteorList = []
2006 2022 #utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2
2007 2023 #Putting back together the meteor matrix
2008 2024 utctime = metArray[:,0]
2009 2025 uniqueTime = numpy.unique(utctime)
2010 2026
2011 2027 phaseDerThresh = 0.5
2012 2028 ippSeconds = timeList[1] - timeList[0]
2013 2029 sec = numpy.where(timeList>1)[0][0]
2014 2030 nPairs = metArray.shape[1] - 6
2015 2031 nHeights = len(heightList)
2016 2032
2017 2033 for t in uniqueTime:
2018 2034 metArray1 = metArray[utctime==t,:]
2019 2035 # phaseDerThresh = numpy.pi/4 #reducir Phase thresh
2020 2036 tmet = metArray1[:,1].astype(int)
2021 2037 hmet = metArray1[:,2].astype(int)
2022 2038
2023 2039 metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1))
2024 2040 metPhase[:,:] = numpy.nan
2025 2041 metPhase[:,hmet,tmet] = metArray1[:,6:].T
2026 2042
2027 2043 #Delete short trails
2028 2044 metBool = ~numpy.isnan(metPhase[0,:,:])
2029 2045 heightVect = numpy.sum(metBool, axis = 1)
2030 2046 metBool[heightVect<sec,:] = False
2031 2047 metPhase[:,heightVect<sec,:] = numpy.nan
2032 2048
2033 2049 #Derivative
2034 2050 metDer = numpy.abs(metPhase[:,:,1:] - metPhase[:,:,:-1])
2035 2051 phDerAux = numpy.dstack((numpy.full((nPairs,nHeights,1), False, dtype=bool),metDer > phaseDerThresh))
2036 2052 metPhase[phDerAux] = numpy.nan
2037 2053
2038 2054 #--------------------------METEOR DETECTION -----------------------------------------
2039 2055 indMet = numpy.where(numpy.any(metBool,axis=1))[0]
2040 2056
2041 2057 for p in numpy.arange(nPairs):
2042 2058 phase = metPhase[p,:,:]
2043 2059 phDer = metDer[p,:,:]
2044 2060
2045 2061 for h in indMet:
2046 2062 height = heightList[h]
2047 2063 phase1 = phase[h,:] #82
2048 2064 phDer1 = phDer[h,:]
2049 2065
2050 2066 phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap
2051 2067
2052 2068 indValid = numpy.where(~numpy.isnan(phase1))[0]
2053 2069 initMet = indValid[0]
2054 2070 endMet = 0
2055 2071
2056 2072 for i in range(len(indValid)-1):
2057 2073
2058 2074 #Time difference
2059 2075 inow = indValid[i]
2060 2076 inext = indValid[i+1]
2061 2077 idiff = inext - inow
2062 2078 #Phase difference
2063 2079 phDiff = numpy.abs(phase1[inext] - phase1[inow])
2064 2080
2065 2081 if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor
2066 2082 sizeTrail = inow - initMet + 1
2067 2083 if sizeTrail>3*sec: #Too short meteors
2068 2084 x = numpy.arange(initMet,inow+1)*ippSeconds
2069 2085 y = phase1[initMet:inow+1]
2070 2086 ynnan = ~numpy.isnan(y)
2071 2087 x = x[ynnan]
2072 2088 y = y[ynnan]
2073 2089 slope, intercept, r_value, p_value, std_err = stats.linregress(x,y)
2074 2090 ylin = x*slope + intercept
2075 2091 rsq = r_value**2
2076 2092 if rsq > 0.5:
2077 2093 vel = slope#*height*1000/(k*d)
2078 2094 estAux = numpy.array([utctime,p,height, vel, rsq])
2079 2095 meteorList.append(estAux)
2080 2096 initMet = inext
2081 2097 metArray2 = numpy.array(meteorList)
2082 2098
2083 2099 return metArray2
2084 2100
2085 2101 def __calculateAzimuth1(self, rx_location, pairslist, azimuth0):
2086 2102
2087 2103 azimuth1 = numpy.zeros(len(pairslist))
2088 2104 dist = numpy.zeros(len(pairslist))
2089 2105
2090 2106 for i in range(len(rx_location)):
2091 2107 ch0 = pairslist[i][0]
2092 2108 ch1 = pairslist[i][1]
2093 2109
2094 2110 diffX = rx_location[ch0][0] - rx_location[ch1][0]
2095 2111 diffY = rx_location[ch0][1] - rx_location[ch1][1]
2096 2112 azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi
2097 2113 dist[i] = numpy.sqrt(diffX**2 + diffY**2)
2098 2114
2099 2115 azimuth1 -= azimuth0
2100 2116 return azimuth1, dist
2101 2117
2102 2118 def techniqueNSM_DBS(self, **kwargs):
2103 2119 metArray = kwargs['metArray']
2104 2120 heightList = kwargs['heightList']
2105 2121 timeList = kwargs['timeList']
2106 2122 azimuth = kwargs['azimuth']
2107 2123 theta_x = numpy.array(kwargs['theta_x'])
2108 2124 theta_y = numpy.array(kwargs['theta_y'])
2109 2125
2110 2126 utctime = metArray[:,0]
2111 2127 cmet = metArray[:,1].astype(int)
2112 2128 hmet = metArray[:,3].astype(int)
2113 2129 SNRmet = metArray[:,4]
2114 2130 vmet = metArray[:,5]
2115 2131 spcmet = metArray[:,6]
2116 2132
2117 2133 nChan = numpy.max(cmet) + 1
2118 2134 nHeights = len(heightList)
2119 2135
2120 2136 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
2121 2137 hmet = heightList[hmet]
2122 2138 h1met = hmet*numpy.cos(zenith_arr[cmet]) #Corrected heights
2123 2139
2124 2140 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2125 2141
2126 2142 for i in range(nHeights - 1):
2127 2143 hmin = heightList[i]
2128 2144 hmax = heightList[i + 1]
2129 2145
2130 2146 thisH = (h1met>=hmin) & (h1met<hmax) & (cmet!=2) & (SNRmet>8) & (vmet<50) & (spcmet<10)
2131 2147 indthisH = numpy.where(thisH)
2132 2148
2133 2149 if numpy.size(indthisH) > 3:
2134 2150
2135 2151 vel_aux = vmet[thisH]
2136 2152 chan_aux = cmet[thisH]
2137 2153 cosu_aux = dir_cosu[chan_aux]
2138 2154 cosv_aux = dir_cosv[chan_aux]
2139 2155 cosw_aux = dir_cosw[chan_aux]
2140 2156
2141 2157 nch = numpy.size(numpy.unique(chan_aux))
2142 2158 if nch > 1:
2143 2159 A = self.__calculateMatA(cosu_aux, cosv_aux, cosw_aux, True)
2144 2160 velEst[i,:] = numpy.dot(A,vel_aux)
2145 2161
2146 2162 return velEst
2147 2163
2148 2164 def run(self, dataOut, technique, nHours=1, hmin=70, hmax=110, **kwargs):
2149 2165
2150 2166 param = dataOut.data_param
2151 2167 if dataOut.abscissaList is not None:
2152 2168 absc = dataOut.abscissaList[:-1]
2153 2169 # noise = dataOut.noise
2154 2170 heightList = dataOut.heightList
2155 2171 SNR = dataOut.data_snr
2156 2172
2157 2173 if technique == 'DBS':
2158 2174
2159 2175 kwargs['velRadial'] = param[:,1,:] #Radial velocity
2160 2176 kwargs['heightList'] = heightList
2161 2177 kwargs['SNR'] = SNR
2162 2178
2163 2179 dataOut.data_output, dataOut.heightList, dataOut.data_snr = self.techniqueDBS(kwargs) #DBS Function
2164 2180 dataOut.utctimeInit = dataOut.utctime
2165 2181 dataOut.outputInterval = dataOut.paramInterval
2166 2182
2167 2183 elif technique == 'SA':
2168 2184
2169 2185 #Parameters
2170 2186 # position_x = kwargs['positionX']
2171 2187 # position_y = kwargs['positionY']
2172 2188 # azimuth = kwargs['azimuth']
2173 2189 #
2174 2190 # if kwargs.has_key('crosspairsList'):
2175 2191 # pairs = kwargs['crosspairsList']
2176 2192 # else:
2177 2193 # pairs = None
2178 2194 #
2179 2195 # if kwargs.has_key('correctFactor'):
2180 2196 # correctFactor = kwargs['correctFactor']
2181 2197 # else:
2182 2198 # correctFactor = 1
2183 2199
2184 2200 # tau = dataOut.data_param
2185 2201 # _lambda = dataOut.C/dataOut.frequency
2186 2202 # pairsList = dataOut.groupList
2187 2203 # nChannels = dataOut.nChannels
2188 2204
2189 2205 kwargs['groupList'] = dataOut.groupList
2190 2206 kwargs['tau'] = dataOut.data_param
2191 2207 kwargs['_lambda'] = dataOut.C/dataOut.frequency
kwargs['lagTRange'] = absc # lag-time axis (abscissaList) for the vertical-velocity estimate
2192 2208 # dataOut.data_output = self.techniqueSA(pairs, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, absc, correctFactor)
2193 2209 dataOut.data_output = self.techniqueSA(kwargs)
2194 2210 dataOut.utctimeInit = dataOut.utctime
2195 2211 dataOut.outputInterval = dataOut.timeInterval
2196 2212
2197 2213 elif technique == 'Meteors':
2198 2214 dataOut.flagNoData = True
2199 2215 self.__dataReady = False
2200 2216
2201 2217 if 'nHours' in kwargs:
2202 2218 nHours = kwargs['nHours']
2203 2219 else:
2204 2220 nHours = 1
2205 2221
2206 2222 if 'meteorsPerBin' in kwargs:
2207 2223 meteorThresh = kwargs['meteorsPerBin']
2208 2224 else:
2209 2225 meteorThresh = 6
2210 2226
2211 2227 if 'hmin' in kwargs:
2212 2228 hmin = kwargs['hmin']
2213 2229 else: hmin = 70
2214 2230 if 'hmax' in kwargs:
2215 2231 hmax = kwargs['hmax']
2216 2232 else: hmax = 110
2217 2233
2218 2234 dataOut.outputInterval = nHours*3600
2219 2235
2220 2236 if self.__isConfig == False:
2221 2237 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2222 2238 #Get Initial LTC time
2223 2239 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2224 2240 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2225 2241
2226 2242 self.__isConfig = True
2227 2243
2228 2244 if self.__buffer is None:
2229 2245 self.__buffer = dataOut.data_param
2230 2246 self.__firstdata = copy.copy(dataOut)
2231 2247
2232 2248 else:
2233 2249 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2234 2250
2235 2251 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2236 2252
2237 2253 if self.__dataReady:
2238 2254 dataOut.utctimeInit = self.__initime
2239 2255
2240 2256 self.__initime += dataOut.outputInterval #to erase time offset
2241 2257
2242 2258 dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax)
2243 2259 dataOut.flagNoData = False
2244 2260 self.__buffer = None
2245 2261
2246 2262 elif technique == 'Meteors1':
2247 2263 dataOut.flagNoData = True
2248 2264 self.__dataReady = False
2249 2265
2250 2266 if 'nMins' in kwargs:
2251 2267 nMins = kwargs['nMins']
2252 2268 else: nMins = 20
2253 2269 if 'rx_location' in kwargs:
2254 2270 rx_location = kwargs['rx_location']
2255 2271 else: rx_location = [(0,1),(1,1),(1,0)]
2256 2272 if 'azimuth' in kwargs:
2257 2273 azimuth = kwargs['azimuth']
2258 2274 else: azimuth = 51.06
2259 2275 if 'dfactor' in kwargs:
2260 2276 dfactor = kwargs['dfactor']
2261 2277 if 'mode' in kwargs:
2262 2278 mode = kwargs['mode']
2263 2279 else: mode = 'SA'
2264 2280 if 'theta_x' in kwargs:
2265 2281 theta_x = kwargs['theta_x']
2266 2282 if 'theta_y' in kwargs:
2267 2283 theta_y = kwargs['theta_y']
2268 2284
2269 2285 #TODO: remove this later
2270 2286 if dataOut.groupList is None:
2271 2287 dataOut.groupList = [(0,1),(0,2),(1,2)]
2272 2288 groupList = dataOut.groupList
2273 2289 C = 3e8
2274 2290 freq = 50e6
2275 2291 lamb = C/freq
2276 2292 k = 2*numpy.pi/lamb
2277 2293
2278 2294 timeList = dataOut.abscissaList
2279 2295 heightList = dataOut.heightList
2280 2296
2281 2297 if self.__isConfig == False:
2282 2298 dataOut.outputInterval = nMins*60
2283 2299 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2284 2300 #Get Initial LTC time
2285 2301 initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2286 2302 minuteAux = initime.minute
2287 2303 minuteNew = int(numpy.floor(minuteAux/nMins)*nMins)
2288 2304 self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2289 2305
2290 2306 self.__isConfig = True
2291 2307
2292 2308 if self.__buffer is None:
2293 2309 self.__buffer = dataOut.data_param
2294 2310 self.__firstdata = copy.copy(dataOut)
2295 2311
2296 2312 else:
2297 2313 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2298 2314
2299 2315 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2300 2316
2301 2317 if self.__dataReady:
2302 2318 dataOut.utctimeInit = self.__initime
2303 2319 self.__initime += dataOut.outputInterval #to erase time offset
2304 2320
2305 2321 metArray = self.__buffer
2306 2322 if mode == 'SA':
2307 2323 dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList)
2308 2324 elif mode == 'DBS':
2309 2325 dataOut.data_output = self.techniqueNSM_DBS(metArray=metArray,heightList=heightList,timeList=timeList, azimuth=azimuth, theta_x=theta_x, theta_y=theta_y)
2310 2326 dataOut.data_output = dataOut.data_output.T
2311 2327 dataOut.flagNoData = False
2312 2328 self.__buffer = None
2313 2329
2314 2330 return
2315 2331
2316 2332 class EWDriftsEstimation(Operation):
2317 2333
2318 2334 def __init__(self):
2319 2335 Operation.__init__(self)
2320 2336
2321 2337 def __correctValues(self, heiRang, phi, velRadial, SNR):
2322 2338 listPhi = phi.tolist()
2323 2339 maxid = listPhi.index(max(listPhi))
2324 2340 minid = listPhi.index(min(listPhi))
2325 2341
2326 2342 rango = list(range(len(phi)))
2327 2343 # rango = numpy.delete(rango,maxid)
2328 2344
2329 2345 heiRang1 = heiRang*math.cos(phi[maxid])
2330 2346 heiRangAux = heiRang*math.cos(phi[minid])
2331 2347 indOut = (heiRang1 < heiRangAux[0]).nonzero()
2332 2348 heiRang1 = numpy.delete(heiRang1,indOut)
2333 2349
2334 2350 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
2335 2351 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
2336 2352
2337 2353 for i in rango:
2338 2354 x = heiRang*math.cos(phi[i])
2339 2355 y1 = velRadial[i,:]
2340 2356 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
2341 2357
2342 2358 x1 = heiRang1
2343 2359 y11 = f1(x1)
2344 2360
2345 2361 y2 = SNR[i,:]
2346 2362 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
2347 2363 y21 = f2(x1)
2348 2364
2349 2365 velRadial1[i,:] = y11
2350 2366 SNR1[i,:] = y21
2351 2367
2352 2368 return heiRang1, velRadial1, SNR1
2353 2369
2354 2370 def run(self, dataOut, zenith, zenithCorrection):
2355 2371 heiRang = dataOut.heightList
2356 2372 velRadial = dataOut.data_param[:,3,:]
2357 2373 SNR = dataOut.data_snr
2358 2374
2359 2375 zenith = numpy.array(zenith)
2360 2376 zenith -= zenithCorrection
2361 2377 zenith *= numpy.pi/180
2362 2378
2363 2379 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
2364 2380
2365 2381 alp = zenith[0]
2366 2382 bet = zenith[1]
2367 2383
2368 2384 w_w = velRadial1[0,:]
2369 2385 w_e = velRadial1[1,:]
2370 2386
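# The two off-vertical beams measure w_w = u*sin(alp) + w*cos(alp) and
# w_e = u*sin(bet) + w*cos(bet); the expressions below are the closed-form solution of
# that 2x2 system for the vertical (w) and zonal (u) components.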
2371 2387 w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
2372 2388 u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
2373 2389
2374 2390 winds = numpy.vstack((u,w))
2375 2391
2376 2392 dataOut.heightList = heiRang1
2377 2393 dataOut.data_output = winds
2378 2394 dataOut.data_snr = SNR1
2379 2395
2380 2396 dataOut.utctimeInit = dataOut.utctime
2381 2397 dataOut.outputInterval = dataOut.timeInterval
2382 2398 return
2383 2399
2384 2400 #--------------- Non Specular Meteor ----------------
2385 2401
2386 2402 class NonSpecularMeteorDetection(Operation):
2387 2403
2388 2404 def run(self, dataOut, mode, SNRthresh=8, phaseDerThresh=0.5, cohThresh=0.8, allData = False):
2389 2405 data_acf = dataOut.data_pre[0]
2390 2406 data_ccf = dataOut.data_pre[1]
2391 2407 pairsList = dataOut.groupList[1]
2392 2408
2393 2409 lamb = dataOut.C/dataOut.frequency
2394 2410 tSamp = dataOut.ippSeconds*dataOut.nCohInt
2395 2411 paramInterval = dataOut.paramInterval
2396 2412
2397 2413 nChannels = data_acf.shape[0]
2398 2414 nLags = data_acf.shape[1]
2399 2415 nProfiles = data_acf.shape[2]
2400 2416 nHeights = dataOut.nHeights
2401 2417 nCohInt = dataOut.nCohInt
2402 2418 sec = numpy.round(nProfiles/dataOut.paramInterval)
2403 2419 heightList = dataOut.heightList
2404 2420 ippSeconds = dataOut.ippSeconds*dataOut.nCohInt*dataOut.nAvg
2405 2421 utctime = dataOut.utctime
2406 2422
2407 2423 dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds)
2408 2424
2409 2425 #------------------------ SNR --------------------------------------
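# The noise of each channel is estimated from the lag-0 power with the Hildebrand-Sekhon
# criterion, then SNR = (P - N) / N; SNRm is the channel-averaged SNR later stored per
# detected meteor (column 3 of data_param).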
2410 2426 power = data_acf[:,0,:,:].real
2411 2427 noise = numpy.zeros(nChannels)
2412 2428 SNR = numpy.zeros(power.shape)
2413 2429 for i in range(nChannels):
2414 2430 noise[i] = hildebrand_sekhon(power[i,:], nCohInt)
2415 2431 SNR[i] = (power[i]-noise[i])/noise[i]
2416 2432 SNRm = numpy.nanmean(SNR, axis = 0)
2417 2433 SNRdB = 10*numpy.log10(SNR)
2418 2434
2419 2435 if mode == 'SA':
2420 2436 dataOut.groupList = dataOut.groupList[1]
2421 2437 nPairs = data_ccf.shape[0]
2422 2438 #---------------------- Coherence and Phase --------------------------
2423 2439 phase = numpy.zeros(data_ccf[:,0,:,:].shape)
2424 2440 # phase1 = numpy.copy(phase)
2425 2441 coh1 = numpy.zeros(data_ccf[:,0,:,:].shape)
2426 2442
2427 2443 for p in range(nPairs):
2428 2444 ch0 = pairsList[p][0]
2429 2445 ch1 = pairsList[p][1]
2430 2446 ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:])
2431 2447 phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
2432 2448 # phase1[p,:,:] = numpy.angle(ccf) #median filter
2433 2449 coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
2434 2450 # coh1[p,:,:] = numpy.abs(ccf) #median filter
2435 2451 coh = numpy.nanmax(coh1, axis = 0)
2436 2452 # struc = numpy.ones((5,1))
2437 2453 # coh = ndimage.morphology.grey_dilation(coh, size=(10,1))
2438 2454 #---------------------- Radial Velocity ----------------------------
2439 2455 phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0)
2440 2456 velRad = phaseAux*lamb/(4*numpy.pi*tSamp)
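            # The lag-1 ACF phase above maps to radial (Doppler) velocity through
            # v = lamb*phase/(4*pi*tSamp). Quick sanity check with illustrative
            # values (not taken from any dataset): lamb = 6 m (a 50 MHz radar) and
            # tSamp = 1 ms give, for a mean phase of 0.02 rad,
            #     0.02*6.0/(4*numpy.pi*1e-3) ~ 9.5 m/s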
2441 2457
2442 2458 if allData:
2443 2459 boolMetFin = ~numpy.isnan(SNRm)
2444 2460 # coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2445 2461 else:
2446 2462 #------------------------ Meteor mask ---------------------------------
2447 2463 # #SNR mask
2448 2464 # boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB))
2449 2465 #
2450 2466 # #Erase small objects
2451 2467 # boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
2452 2468 #
2453 2469 # auxEEJ = numpy.sum(boolMet1,axis=0)
2454 2470 # indOver = auxEEJ>nProfiles*0.8 #Use this later
2455 2471 # indEEJ = numpy.where(indOver)[0]
2456 2472 # indNEEJ = numpy.where(~indOver)[0]
2457 2473 #
2458 2474 # boolMetFin = boolMet1
2459 2475 #
2460 2476 # if indEEJ.size > 0:
2461 2477 # boolMet1[:,indEEJ] = False #Erase heights with EEJ
2462 2478 #
2463 2479 # boolMet2 = coh > cohThresh
2464 2480 # boolMet2 = self.__erase_small(boolMet2, 2*sec,5)
2465 2481 #
2466 2482 # #Final Meteor mask
2467 2483 # boolMetFin = boolMet1|boolMet2
2468 2484
2469 2485 #Coherence mask
2470 2486 boolMet1 = coh > 0.75
2471 2487 struc = numpy.ones((30,1))
2472 2488 boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc)
2473 2489
2474 2490 #Derivative mask
2475 2491 derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2476 2492 boolMet2 = derPhase < 0.2
2477 2493 # boolMet2 = ndimage.morphology.binary_opening(boolMet2)
2478 2494 # boolMet2 = ndimage.morphology.binary_closing(boolMet2, structure = numpy.ones((10,1)))
2479 2495 boolMet2 = ndimage.median_filter(boolMet2,size=5)
2480 2496 boolMet2 = numpy.vstack((boolMet2,numpy.full((1,nHeights), True, dtype=bool)))
2481 2497 # #Final mask
2482 2498 # boolMetFin = boolMet2
2483 2499 boolMetFin = boolMet1&boolMet2
2484 2500 # boolMetFin = ndimage.morphology.binary_dilation(boolMetFin)
2485 2501 #Creating data_param
2486 2502 coordMet = numpy.where(boolMetFin)
2487 2503
2488 2504 tmet = coordMet[0]
2489 2505 hmet = coordMet[1]
2490 2506
2491 2507 data_param = numpy.zeros((tmet.size, 6 + nPairs))
2492 2508 data_param[:,0] = utctime
2493 2509 data_param[:,1] = tmet
2494 2510 data_param[:,2] = hmet
2495 2511 data_param[:,3] = SNRm[tmet,hmet]
2496 2512 data_param[:,4] = velRad[tmet,hmet]
2497 2513 data_param[:,5] = coh[tmet,hmet]
2498 2514 data_param[:,6:] = phase[:,tmet,hmet].T
2499 2515
2500 2516 elif mode == 'DBS':
2501 2517 dataOut.groupList = numpy.arange(nChannels)
2502 2518
2503 2519 #Radial Velocities
2504 2520 phase = numpy.angle(data_acf[:,1,:,:])
2505 2521 # phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1))
2506 2522 velRad = phase*lamb/(4*numpy.pi*tSamp)
2507 2523
2508 2524 #Spectral width
2509 2525 # acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1))
2510 2526 # acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1))
2511 2527 acf1 = data_acf[:,1,:,:]
2512 2528 acf2 = data_acf[:,2,:,:]
2513 2529
2514 2530 spcWidth = (lamb/(2*numpy.sqrt(6)*numpy.pi*tSamp))*numpy.sqrt(numpy.log(acf1/acf2))
2515 2531 # velRad = ndimage.median_filter(velRad, size = (1,5,1))
2516 2532 if allData:
2517 2533 boolMetFin = ~numpy.isnan(SNRdB)
2518 2534 else:
2519 2535 #SNR
2520 2536 boolMet1 = (SNRdB>SNRthresh) #SNR mask
2521 2537 boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5))
2522 2538
2523 2539 #Radial velocity
2524 2540 boolMet2 = numpy.abs(velRad) < 20
2525 2541 boolMet2 = ndimage.median_filter(boolMet2, (1,5,5))
2526 2542
2527 2543 #Spectral Width
2528 2544 boolMet3 = spcWidth < 30
2529 2545 boolMet3 = ndimage.median_filter(boolMet3, (1,5,5))
2530 2546 # boolMetFin = self.__erase_small(boolMet1, 10,5)
2531 2547 boolMetFin = boolMet1&boolMet2&boolMet3
2532 2548
2533 2549 #Creating data_param
2534 2550 coordMet = numpy.where(boolMetFin)
2535 2551
2536 2552 cmet = coordMet[0]
2537 2553 tmet = coordMet[1]
2538 2554 hmet = coordMet[2]
2539 2555
2540 2556 data_param = numpy.zeros((tmet.size, 7))
2541 2557 data_param[:,0] = utctime
2542 2558 data_param[:,1] = cmet
2543 2559 data_param[:,2] = tmet
2544 2560 data_param[:,3] = hmet
2545 2561 data_param[:,4] = SNR[cmet,tmet,hmet].T
2546 2562 data_param[:,5] = velRad[cmet,tmet,hmet].T
2547 2563 data_param[:,6] = spcWidth[cmet,tmet,hmet].T
2548 2564
2549 2565 # self.dataOut.data_param = data_int
2550 2566 if len(data_param) == 0:
2551 2567 dataOut.flagNoData = True
2552 2568 else:
2553 2569 dataOut.data_param = data_param
2554 2570
2555 2571 def __erase_small(self, binArray, threshX, threshY):
2556 2572 labarray, numfeat = ndimage.measurements.label(binArray)
2557 2573 binArray1 = numpy.copy(binArray)
2558 2574
2559 2575 for i in range(1,numfeat + 1):
2560 2576 auxBin = (labarray==i)
2561 2577 auxSize = auxBin.sum()
2562 2578
2563 2579 x,y = numpy.where(auxBin)
2564 2580 widthX = x.max() - x.min()
2565 2581 widthY = y.max() - y.min()
2566 2582
2567 2583 #width X: 3 sec -> 12.5*3
2568 2584 #width Y:
2569 2585
2570 2586 if (auxSize < 50) or (widthX < threshX) or (widthY < threshY):
2571 2587 binArray1[auxBin] = False
2572 2588
2573 2589 return binArray1
2574 2590
2575 2591 #--------------- Specular Meteor ----------------
2576 2592
2577 2593 class SMDetection(Operation):
2578 2594 '''
2579 2595 Function DetectMeteors()
2580 2596 Detection method developed following the paper:
2581 2597 HOLDSWORTH ET AL. 2004
2582 2598
2583 2599 Input:
2584 2600 self.dataOut.data_pre
2585 2601
2586 2602 centerReceiverIndex: Index of the center receiver among the channels
2587 2603
2588 2604 hei_ref: Height reference for the Beacon signal extraction
2589 2605 tauindex:
2590 2606 predefinedPhaseShifts: Predefined phase offsets for the voltage signals
2591 2607
2592 2608 cohDetection: Whether to use coherent detection or not
2593 2609 cohDet_timeStep: Coherent Detection calculation time step
2594 2610 cohDet_thresh: Coherent Detection phase threshold to correct phases
2595 2611
2596 2612 noise_timeStep: Noise calculation time step
2597 2613 noise_multiple: Noise multiple to define signal threshold
2598 2614
2599 2615 multDet_timeLimit: Multiple Detection Removal time limit in seconds
2600 2616 multDet_rangeLimit: Multiple Detection Removal range limit in km
2601 2617
2602 2618 phaseThresh: Maximum phase difference between receivers for an echo to be considered a meteor
2603 2619 SNRThresh: Minimum SNR of the meteor signal for an echo to be considered a meteor
2604 2620
2605 2621 hmin: Minimum height of the meteor to be used in further wind estimations
2606 2622 hmax: Maximum height of the meteor to be used in further wind estimations
2607 2623 azimuth: Azimuth angle correction
2608 2624
2609 2625 Affected:
2610 2626 self.dataOut.data_param
2611 2627
2612 2628 Rejection Criteria (Errors):
2613 2629 0: No error; analysis OK
2614 2630 1: SNR < SNR threshold
2615 2631 2: angle of arrival (AOA) ambiguously determined
2616 2632 3: AOA estimate not feasible
2617 2633 4: Large difference in AOAs obtained from different antenna baselines
2618 2634 5: echo at start or end of time series
2619 2635 6: echo less than 5 samples long; too short for analysis
2620 2636 7: echo rise exceeds 0.3s
2621 2637 8: echo decay time less than twice rise time
2622 2638 9: large power level before echo
2623 2639 10: large power level after echo
2624 2640 11: poor fit to amplitude for estimation of decay time
2625 2641 12: poor fit to CCF phase variation for estimation of radial drift velocity
2626 2642 13: height unresolvable echo: not valid height within 70 to 110 km
2627 2643 14: height ambiguous echo: more than one possible height within 70 to 110 km
2628 2644 15: radial drift velocity or projected horizontal velocity exceeds 200 m/s
2629 2645 16: oscillatory echo, indicating the event is most likely not an underdense echo
2630 2646
2631 2647 17: phase difference in meteor Reestimation
2632 2648
2633 2649 Data Storage:
2634 2650 Meteors for Wind Estimation (8):
2635 2651 Utc Time | Range | Height
2636 2652 Azimuth | Zenith | errorCosDir
2637 2653 VelRad | errorVelRad
2638 2654 Phase0 | Phase1 | Phase2 | Phase3
2639 2655 TypeError
2640 2656
2641 2657 '''
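    # Minimal usage sketch (a sketch only, with illustrative parameter values; it
    # assumes dataOut already carries the complex voltages in data_pre with shape
    # (channels, profiles, heights) plus heightList, timeInterval, frequency and
    # utctime, which are the attributes read below):
    #
    #   op = SMDetection()
    #   op.run(dataOut,
    #          cohDetection=True, cohDet_timeStep=1, cohDet_thresh=25, # coherent detection (3.4)
    #          noise_timeStep=4, noise_multiple=4,                     # noise and signal threshold
    #          multDet_timeLimit=1, multDet_rangeLimit=3,              # multiple-detection removal (3.5)
    #          phaseThresh=20, SNRThresh=5,                            # reestimation rejection (3.7-3.9)
    #          hmin=70, hmax=110, azimuth=45)                          # height window and azimuth correction
    #
    #   # On success dataOut.data_param holds one row per detected meteor, roughly
    #   # [utc, range, height, azimuth, zenith, cosDirError, velRad, errVelRad,
    #   #  phase0..phase3, errorCode], following the Data Storage layout above.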
2642 2658
2643 2659 def run(self, dataOut, hei_ref = None, tauindex = 0,
2644 2660 phaseOffsets = None,
2645 2661 cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
2646 2662 noise_timeStep = 4, noise_multiple = 4,
2647 2663 multDet_timeLimit = 1, multDet_rangeLimit = 3,
2648 2664 phaseThresh = 20, SNRThresh = 5,
2649 2665 hmin = 50, hmax=150, azimuth = 0,
2650 2666 channelPositions = None) :
2651 2667
2652 2668
2653 2669 #Getting Pairslist
2654 2670 if channelPositions is None:
2655 2671 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
2656 2672 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
2657 2673 meteorOps = SMOperations()
2658 2674 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
2659 2675 heiRang = dataOut.heightList
2660 2676 #Get Beacon signal - No Beacon signal anymore
2661 2677 # newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
2662 2678 #
2663 2679 # if hei_ref != None:
2664 2680 # newheis = numpy.where(self.dataOut.heightList>hei_ref)
2665 2681 #
2666 2682
2667 2683
2668 2684 #****************REMOVING HARDWARE PHASE DIFFERENCES***************
2669 2685 # see if the user passed in pre-defined phase shifts
2670 2686 voltsPShift = dataOut.data_pre.copy()
2671 2687
2672 2688 # if predefinedPhaseShifts != None:
2673 2689 # hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180
2674 2690 #
2675 2691 # # elif beaconPhaseShifts:
2676 2692 # # #get hardware phase shifts using beacon signal
2677 2693 # # hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10)
2678 2694 # # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0)
2679 2695 #
2680 2696 # else:
2681 2697 # hardwarePhaseShifts = numpy.zeros(5)
2682 2698 #
2683 2699 # voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex')
2684 2700 # for i in range(self.dataOut.data_pre.shape[0]):
2685 2701 # voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i])
2686 2702
2687 2703 #******************END OF REMOVING HARDWARE PHASE DIFFERENCES*********
2688 2704
2689 2705 #Remove DC
2690 2706 voltsDC = numpy.mean(voltsPShift,1)
2691 2707 voltsDC = numpy.mean(voltsDC,1)
2692 2708 for i in range(voltsDC.shape[0]):
2693 2709 voltsPShift[i] = voltsPShift[i] - voltsDC[i]
2694 2710
2695 2711 #Don't consider the last heights; they're used to calculate the hardware phase shift
2696 2712 # voltsPShift = voltsPShift[:,:,:newheis[0][0]]
2697 2713
2698 2714 #************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) **********
2699 2715 #Coherent Detection
2700 2716 if cohDetection:
2701 2717 #use coherent detection to get the net power
2702 2718 cohDet_thresh = cohDet_thresh*numpy.pi/180
2703 2719 voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh)
2704 2720
2705 2721 #Non-coherent detection!
2706 2722 powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0)
2707 2723 #********** END OF COH/NON-COH POWER CALCULATION**********************
2708 2724
2709 2725 #********** FIND THE NOISE LEVEL AND POSSIBLE METEORS ****************
2710 2726 #Get noise
2711 2727 noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval)
2712 2728 # noise = self.getNoise1(powerNet, noise_timeStep, self.dataOut.timeInterval)
2713 2729 #Get signal threshold
2714 2730 signalThresh = noise_multiple*noise
2715 2731 #Meteor echoes detection
2716 2732 listMeteors = self.__findMeteors(powerNet, signalThresh)
2717 2733 #******* END OF NOISE LEVEL AND POSSIBLE METEORS CALCULATION **********
2718 2734
2719 2735 #************** REMOVE MULTIPLE DETECTIONS (3.5) ***************************
2720 2736 #Parameters
2721 2737 heiRange = dataOut.heightList
2722 2738 rangeInterval = heiRange[1] - heiRange[0]
2723 2739 rangeLimit = multDet_rangeLimit/rangeInterval
2724 2740 timeLimit = multDet_timeLimit/dataOut.timeInterval
2725 2741 #Multiple detection removals
2726 2742 listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit)
2727 2743 #************ END OF REMOVE MULTIPLE DETECTIONS **********************
2728 2744
2729 2745 #********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ********************
2730 2746 #Parameters
2731 2747 phaseThresh = phaseThresh*numpy.pi/180
2732 2748 thresh = [phaseThresh, noise_multiple, SNRThresh]
2733 2749 #Meteor reestimation (Errors N 1, 6, 12, 17)
2734 2750 listMeteors2, listMeteorsPower, listMeteorsVolts = self.__meteorReestimation(listMeteors1, voltsPShift, pairslist0, thresh, noise, dataOut.timeInterval, dataOut.frequency)
2735 2751 # listMeteors2, listMeteorsPower, listMeteorsVolts = self.meteorReestimation3(listMeteors2, listMeteorsPower, listMeteorsVolts, voltsPShift, pairslist, thresh, noise)
2736 2752 #Estimation of decay times (Errors N 7, 8, 11)
2737 2753 listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency)
2738 2754 #******************* END OF METEOR REESTIMATION *******************
2739 2755
2740 2756 #********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) **************************
2741 2757 #Calculating Radial Velocity (Error N 15)
2742 2758 radialStdThresh = 10
2743 2759 listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
2744 2760
2745 2761 if len(listMeteors4) > 0:
2746 2762 #Setting New Array
2747 2763 date = dataOut.utctime
2748 2764 arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang)
2749 2765
2750 2766 #Correcting phase offset
2751 2767 if phaseOffsets is not None:
2752 2768 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
2753 2769 arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
2754 2770
2755 2771 #Second Pairslist
2756 2772 pairsList = []
2757 2773 pairx = (0,1)
2758 2774 pairy = (2,3)
2759 2775 pairsList.append(pairx)
2760 2776 pairsList.append(pairy)
2761 2777
2762 2778 jph = numpy.array([0,0,0,0])
2763 2779 h = (hmin,hmax)
2764 2780 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
2765 2781
2766 2782 # #Calculate AOA (Error N 3, 4)
2767 2783 # #JONES ET AL. 1998
2768 2784 # error = arrayParameters[:,-1]
2769 2785 # AOAthresh = numpy.pi/8
2770 2786 # phases = -arrayParameters[:,9:13]
2771 2787 # arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth)
2772 2788 #
2773 2789 # #Calculate Heights (Error N 13 and 14)
2774 2790 # error = arrayParameters[:,-1]
2775 2791 # Ranges = arrayParameters[:,2]
2776 2792 # zenith = arrayParameters[:,5]
2777 2793 # arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax)
2778 2794 # error = arrayParameters[:,-1]
2779 2795 #********************* END OF PARAMETERS CALCULATION **************************
2780 2796
2781 2797 #***************************+ PASS DATA TO NEXT STEP **********************
2782 2798 # arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1]))
2783 2799 dataOut.data_param = arrayParameters
2784 2800
2785 2801 if arrayParameters is None:
2786 2802 dataOut.flagNoData = True
2787 2803 else:
2788 2804 dataOut.flagNoData = False #valid meteor parameters: clear the no-data flag
2789 2805
2790 2806 return
2791 2807
2792 2808 def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n):
2793 2809
2794 2810 minIndex = min(newheis[0])
2795 2811 maxIndex = max(newheis[0])
2796 2812
2797 2813 voltage = voltage0[:,:,minIndex:maxIndex+1]
2798 2814 nLength = voltage.shape[1]//n #integer block length: nMin/nMax are used as slice indices below
2799 2815 nMin = 0
2800 2816 nMax = 0
2801 2817 phaseOffset = numpy.zeros((len(pairslist),n))
2802 2818
2803 2819 for i in range(n):
2804 2820 nMax += nLength
2805 2821 phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:,nMin:nMax,:], pairslist, [0]))
2806 2822 phaseCCF = numpy.mean(phaseCCF, axis = 2)
2807 2823 phaseOffset[:,i] = phaseCCF.transpose()
2808 2824 nMin = nMax
2809 2825 # phaseDiff, phaseArrival = self.estimatePhaseDifference(voltage, pairslist)
2810 2826
2811 2827 #Remove Outliers
2812 2828 factor = 2
2813 2829 wt = phaseOffset - signal.medfilt(phaseOffset,(1,5))
2814 2830 dw = numpy.std(wt,axis = 1)
2815 2831 dw = dw.reshape((dw.size,1))
2816 2832 ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
2817 2833 phaseOffset[ind] = numpy.nan
2818 2834 phaseOffset = numpy.nanmean(phaseOffset, axis=1) #scipy.stats.nanmean is no longer available in current SciPy; numpy.nanmean behaves the same here
2819 2835
2820 2836 return phaseOffset
2821 2837
2822 2838 def __shiftPhase(self, data, phaseShift):
2823 2839 #this will shift the phase of a complex number
2824 2840 dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
2825 2841 return dataShifted
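        # Quick numeric check (synthetic value): shifting the sample 1+0j by
        # phaseShift = numpy.pi/2 returns approximately 0+1j, i.e. the magnitude
        # is preserved and only the argument is rotated.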
2826 2842
2827 2843 def __estimatePhaseDifference(self, array, pairslist):
2828 2844 nChannel = array.shape[0]
2829 2845 nHeights = array.shape[2]
2830 2846 numPairs = len(pairslist)
2831 2847 # phaseCCF = numpy.zeros((nChannel, 5, nHeights))
2832 2848 phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2]))
2833 2849
2834 2850 #Correct phases
2835 2851 derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:]
2836 2852 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
2837 2853
2838 2854 if indDer[0].shape[0] > 0:
2839 2855 for i in range(indDer[0].shape[0]):
2840 2856 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]])
2841 2857 phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi
2842 2858
2843 2859 # for j in range(numSides):
2844 2860 # phaseCCFAux = self.calculateCCF(arrayCenter, arraySides[j,:,:], [-2,1,0,1,2])
2845 2861 # phaseCCF[j,:,:] = numpy.angle(phaseCCFAux)
2846 2862 #
2847 2863 #Linear
2848 2864 phaseInt = numpy.zeros((numPairs,1))
2849 2865 angAllCCF = phaseCCF[:,[0,1,3,4],0]
2850 2866 for j in range(numPairs):
2851 2867 fit = stats.linregress([-2,-1,1,2],angAllCCF[j,:])
2852 2868 phaseInt[j] = fit[1]
2853 2869 #Phase Differences
2854 2870 phaseDiff = phaseInt - phaseCCF[:,2,:]
2855 2871 phaseArrival = phaseInt.reshape(phaseInt.size)
2856 2872
2857 2873 #Dealias
2858 2874 phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival))
2859 2875 # indAlias = numpy.where(phaseArrival > numpy.pi)
2860 2876 # phaseArrival[indAlias] -= 2*numpy.pi
2861 2877 # indAlias = numpy.where(phaseArrival < -numpy.pi)
2862 2878 # phaseArrival[indAlias] += 2*numpy.pi
2863 2879
2864 2880 return phaseDiff, phaseArrival
2865 2881
2866 2882 def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh):
2867 2883 #this function will run the coherent detection used in Holdsworth et al. 2004 and return the net power
2868 2884 #find the phase shifts of each channel over 1 second intervals
2869 2885 #only look at ranges below the beacon signal
2870 2886 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2871 2887 numBlocks = int(volts.shape[1]/numProfPerBlock)
2872 2888 numHeights = volts.shape[2]
2873 2889 nChannel = volts.shape[0]
2874 2890 voltsCohDet = volts.copy()
2875 2891
2876 2892 pairsarray = numpy.array(pairslist)
2877 2893 indSides = pairsarray[:,1]
2878 2894 # indSides = numpy.array(range(nChannel))
2879 2895 # indSides = numpy.delete(indSides, indCenter)
2880 2896 #
2881 2897 # listCenter = numpy.array_split(volts[indCenter,:,:], numBlocks, 0)
2882 2898 listBlocks = numpy.array_split(volts, numBlocks, 1)
2883 2899
2884 2900 startInd = 0
2885 2901 endInd = 0
2886 2902
2887 2903 for i in range(numBlocks):
2888 2904 startInd = endInd
2889 2905 endInd = endInd + listBlocks[i].shape[1]
2890 2906
2891 2907 arrayBlock = listBlocks[i]
2892 2908 # arrayBlockCenter = listCenter[i]
2893 2909
2894 2910 #Estimate the Phase Difference
2895 2911 phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist)
2896 2912 #Phase Difference RMS
2897 2913 arrayPhaseRMS = numpy.abs(phaseDiff)
2898 2914 phaseRMSaux = numpy.sum(arrayPhaseRMS < thresh,0)
2899 2915 indPhase = numpy.where(phaseRMSaux==4)
2900 2916 #Shifting
2901 2917 if indPhase[0].shape[0] > 0:
2902 2918 for j in range(indSides.size):
2903 2919 arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose())
2904 2920 voltsCohDet[:,startInd:endInd,:] = arrayBlock
2905 2921
2906 2922 return voltsCohDet
2907 2923
2908 2924 def __calculateCCF(self, volts, pairslist ,laglist):
2909 2925
2910 2926 nHeights = volts.shape[2]
2911 2927 nPoints = volts.shape[1]
2912 2928 voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights),dtype = 'complex')
2913 2929
2914 2930 for i in range(len(pairslist)):
2915 2931 volts1 = volts[pairslist[i][0]]
2916 2932 volts2 = volts[pairslist[i][1]]
2917 2933
2918 2934 for t in range(len(laglist)):
2919 2935 idxT = laglist[t]
2920 2936 if idxT >= 0:
2921 2937 vStacked = numpy.vstack((volts2[idxT:,:],
2922 2938 numpy.zeros((idxT, nHeights),dtype='complex')))
2923 2939 else:
2924 2940 vStacked = numpy.vstack((numpy.zeros((-idxT, nHeights),dtype='complex'),
2925 2941 volts2[:(nPoints + idxT),:]))
2926 2942 voltsCCF[i,t,:] = numpy.sum((numpy.conjugate(volts1)*vStacked),axis=0)
2927 2943
2928 2944 vStacked = None
2929 2945 return voltsCCF
2930 2946
2931 2947 def __getNoise(self, power, timeSegment, timeInterval):
2932 2948 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2933 2949 numBlocks = int(power.shape[0]/numProfPerBlock)
2934 2950 numHeights = power.shape[1]
2935 2951
2936 2952 listPower = numpy.array_split(power, numBlocks, 0)
2937 2953 noise = numpy.zeros((power.shape[0], power.shape[1]))
2938 2954 noise1 = numpy.zeros((power.shape[0], power.shape[1]))
2939 2955
2940 2956 startInd = 0
2941 2957 endInd = 0
2942 2958
2943 2959 for i in range(numBlocks): #loop over the time blocks
2944 2960 startInd = endInd
2945 2961 endInd = endInd + listPower[i].shape[0]
2946 2962
2947 2963 arrayBlock = listPower[i]
2948 2964 noiseAux = numpy.mean(arrayBlock, 0)
2949 2965 # noiseAux = numpy.median(noiseAux)
2950 2966 # noiseAux = numpy.mean(arrayBlock)
2951 2967 noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
2952 2968
2953 2969 noiseAux1 = numpy.mean(arrayBlock)
2954 2970 noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
2955 2971
2956 2972 return noise, noise1
2957 2973
2958 2974 def __findMeteors(self, power, thresh):
2959 2975 nProf = power.shape[0]
2960 2976 nHeights = power.shape[1]
2961 2977 listMeteors = []
2962 2978
2963 2979 for i in range(nHeights):
2964 2980 powerAux = power[:,i]
2965 2981 threshAux = thresh[:,i]
2966 2982
2967 2983 indUPthresh = numpy.where(powerAux > threshAux)[0]
2968 2984 indDNthresh = numpy.where(powerAux <= threshAux)[0]
2969 2985
2970 2986 j = 0
2971 2987
2972 2988 while (j < indUPthresh.size - 2):
2973 2989 if (indUPthresh[j + 2] == indUPthresh[j] + 2):
2974 2990 indDNAux = numpy.where(indDNthresh > indUPthresh[j])
2975 2991 indDNthresh = indDNthresh[indDNAux]
2976 2992
2977 2993 if (indDNthresh.size > 0):
2978 2994 indEnd = indDNthresh[0] - 1
2979 2995 indInit = indUPthresh[j]
2980 2996
2981 2997 meteor = powerAux[indInit:indEnd + 1]
2982 2998 indPeak = meteor.argmax() + indInit
2983 2999 FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0)))
2984 3000
2985 3001 listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #CHECK THIS!!!!!
2986 3002 j = numpy.where(indUPthresh == indEnd)[0] + 1
2987 3003 else: j+=1
2988 3004 else: j+=1
2989 3005
2990 3006 return listMeteors
2991 3007
2992 3008 def __removeMultipleDetections(self,listMeteors, rangeLimit, timeLimit):
2993 3009
2994 3010 arrayMeteors = numpy.asarray(listMeteors)
2995 3011 listMeteors1 = []
2996 3012
2997 3013 while arrayMeteors.shape[0] > 0:
2998 3014 FLAs = arrayMeteors[:,4]
2999 3015 maxFLA = FLAs.argmax()
3000 3016 listMeteors1.append(arrayMeteors[maxFLA,:])
3001 3017
3002 3018 MeteorInitTime = arrayMeteors[maxFLA,1]
3003 3019 MeteorEndTime = arrayMeteors[maxFLA,3]
3004 3020 MeteorHeight = arrayMeteors[maxFLA,0]
3005 3021
3006 3022 #Check neighborhood
3007 3023 maxHeightIndex = MeteorHeight + rangeLimit
3008 3024 minHeightIndex = MeteorHeight - rangeLimit
3009 3025 minTimeIndex = MeteorInitTime - timeLimit
3010 3026 maxTimeIndex = MeteorEndTime + timeLimit
3011 3027
3012 3028 #Check Heights
3013 3029 indHeight = numpy.logical_and(arrayMeteors[:,0] >= minHeightIndex, arrayMeteors[:,0] <= maxHeightIndex)
3014 3030 indTime = numpy.logical_and(arrayMeteors[:,3] >= minTimeIndex, arrayMeteors[:,1] <= maxTimeIndex)
3015 3031 indBoth = numpy.where(numpy.logical_and(indTime,indHeight))
3016 3032
3017 3033 arrayMeteors = numpy.delete(arrayMeteors, indBoth, axis = 0)
3018 3034
3019 3035 return listMeteors1
3020 3036
3021 3037 def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval,frequency):
3022 3038 numHeights = volts.shape[2]
3023 3039 nChannel = volts.shape[0]
3024 3040
3025 3041 thresholdPhase = thresh[0]
3026 3042 thresholdNoise = thresh[1]
3027 3043 thresholdDB = float(thresh[2])
3028 3044
3029 3045 thresholdDB1 = 10**(thresholdDB/10)
3030 3046 pairsarray = numpy.array(pairslist)
3031 3047 indSides = pairsarray[:,1]
3032 3048
3033 3049 pairslist1 = list(pairslist)
3034 3050 pairslist1.append((0,1))
3035 3051 pairslist1.append((3,4))
3036 3052
3037 3053 listMeteors1 = []
3038 3054 listPowerSeries = []
3039 3055 listVoltageSeries = []
3040 3056 #volts has the raw data
3041 3057
3042 3058 if frequency == 30e6:
3043 3059 timeLag = 45*10**-3
3044 3060 else:
3045 3061 timeLag = 15*10**-3
3046 3062 lag = int(numpy.ceil(timeLag/timeInterval)) #integer lag: it is used below as a slice offset
3047 3063
3048 3064 for i in range(len(listMeteors)):
3049 3065
3050 3066 ###################### 3.6 - 3.7 PARAMETERS REESTIMATION #########################
3051 3067 meteorAux = numpy.zeros(16)
3052 3068
3053 3069 #Loading meteor Data (mHeight, mStart, mPeak, mEnd)
3054 3070 mHeight = listMeteors[i][0]
3055 3071 mStart = listMeteors[i][1]
3056 3072 mPeak = listMeteors[i][2]
3057 3073 mEnd = listMeteors[i][3]
3058 3074
3059 3075 #get the volt data between the start and end times of the meteor
3060 3076 meteorVolts = volts[:,mStart:mEnd+1,mHeight]
3061 3077 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3062 3078
3063 3079 #3.6. Phase Difference estimation
3064 3080 phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist)
3065 3081
3066 3082 #3.7. Phase difference removal & meteor start, peak and end times reestimated
3067 3083 #meteorVolts0.- all Channels, all Profiles
3068 3084 meteorVolts0 = volts[:,:,mHeight]
3069 3085 meteorThresh = noise[:,mHeight]*thresholdNoise
3070 3086 meteorNoise = noise[:,mHeight]
3071 3087 meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting
3072 3088 powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power
3073 3089
3074 3090 #Times reestimation
3075 3091 mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0]
3076 3092 if mStart1.size > 0:
3077 3093 mStart1 = mStart1[-1] + 1
3078 3094
3079 3095 else:
3080 3096 mStart1 = mPeak
3081 3097
3082 3098 mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1
3083 3099 mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0]
3084 3100 if mEndDecayTime1.size == 0:
3085 3101 mEndDecayTime1 = powerNet0.size
3086 3102 else:
3087 3103 mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1
3088 3104 # mPeak1 = meteorVolts0[mStart1:mEnd1 + 1].argmax()
3089 3105
3090 3106 #meteorVolts1.- all Channels, from start to end
3091 3107 meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1]
3092 3108 meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1]
3093 3109 if meteorVolts2.shape[1] == 0:
3094 3110 meteorVolts2 = meteorVolts0[:,mPeak:mEnd1 + 1]
3095 3111 meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1)
3096 3112 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1)
3097 3113 ##################### END PARAMETERS REESTIMATION #########################
3098 3114
3099 3115 ##################### 3.8 PHASE DIFFERENCE REESTIMATION ########################
3100 3116 # if mEnd1 - mStart1 > 4: #Error Number 6: echo less than 5 samples long; too short for analysis
3101 3117 if meteorVolts2.shape[1] > 0:
3102 3118 #Phase Difference re-estimation
3103 3119 phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation
3104 3120 # phaseDiff1, phaseDiffint = self.estimatePhaseDifference(meteorVolts2, pairslist)
3105 3121 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1])
3106 3122 phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1))
3107 3123 meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting
3108 3124
3109 3125 #Phase Difference RMS
3110 3126 phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1)))
3111 3127 powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0)
3112 3128 #Data from Meteor
3113 3129 mPeak1 = powerNet1.argmax() + mStart1
3114 3130 mPeakPower1 = powerNet1.max()
3115 3131 noiseAux = sum(noise[mStart1:mEnd1 + 1,mHeight])
3116 3132 mSNR1 = (sum(powerNet1)-noiseAux)/noiseAux
3117 3133 Meteor1 = numpy.array([mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1])
3118 3134 Meteor1 = numpy.hstack((Meteor1,phaseDiffint))
3119 3135 PowerSeries = powerNet0[mStart1:mEndDecayTime1 + 1]
3120 3136 #Vectorize
3121 3137 meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1]
3122 3138 meteorAux[7:11] = phaseDiffint[0:4]
3123 3139
3124 3140 #Rejection Criterions
3125 3141 if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation
3126 3142 meteorAux[-1] = 17
3127 3143 elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB
3128 3144 meteorAux[-1] = 1
3129 3145
3130 3146
3131 3147 else:
3132 3148 meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd]
3133 3149 meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis
3134 3150 PowerSeries = 0
3135 3151
3136 3152 listMeteors1.append(meteorAux)
3137 3153 listPowerSeries.append(PowerSeries)
3138 3154 listVoltageSeries.append(meteorVolts1)
3139 3155
3140 3156 return listMeteors1, listPowerSeries, listVoltageSeries
3141 3157
3142 3158 def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency):
3143 3159
3144 3160 threshError = 10
3145 3161 #Depending if it is 30 or 50 MHz
3146 3162 if frequency == 30e6:
3147 3163 timeLag = 45*10**-3
3148 3164 else:
3149 3165 timeLag = 15*10**-3
3150 3166 lag = int(numpy.ceil(timeLag/timeInterval)) #integer lag: indlag below is used as a slice index
3151 3167
3152 3168 listMeteors1 = []
3153 3169
3154 3170 for i in range(len(listMeteors)):
3155 3171 meteorPower = listPower[i]
3156 3172 meteorAux = listMeteors[i]
3157 3173
3158 3174 if meteorAux[-1] == 0:
3159 3175
3160 3176 try:
3161 3177 indmax = meteorPower.argmax()
3162 3178 indlag = indmax + lag
3163 3179
3164 3180 y = meteorPower[indlag:]
3165 3181 x = numpy.arange(0, y.size)*timeLag
3166 3182
3167 3183 #first guess
3168 3184 a = y[0]
3169 3185 tau = timeLag
3170 3186 #exponential fit
3171 3187 popt, pcov = optimize.curve_fit(self.__exponential_function, x, y, p0 = [a, tau])
3172 3188 y1 = self.__exponential_function(x, *popt)
3173 3189 #error estimation
3174 3190 error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size))
3175 3191
3176 3192 decayTime = popt[1]
3177 3193 riseTime = indmax*timeInterval
3178 3194 meteorAux[11:13] = [decayTime, error]
3179 3195
3180 3196 #Table items 7, 8 and 11
3181 3197 if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s
3182 3198 meteorAux[-1] = 7
3183 3199 elif (decayTime < 2*riseTime) : #Number 8: Echo decay time less than than twice rise time
3184 3200 meteorAux[-1] = 8
3185 3201 if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time
3186 3202 meteorAux[-1] = 11
3187 3203
3188 3204
3189 3205 except:
3190 3206 meteorAux[-1] = 11
3191 3207
3192 3208
3193 3209 listMeteors1.append(meteorAux)
3194 3210
3195 3211 return listMeteors1
3196 3212
3197 3213 #Exponential Function
3198 3214
3199 3215 def __exponential_function(self, x, a, tau):
3200 3216 y = a*numpy.exp(-x/tau)
3201 3217 return y
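        # Hedged worked example of the decay fit performed in __estimateDecayTime
        # (synthetic numbers, not radar data): with a = 100 and tau = 0.05 s the
        # model gives y(0) = 100, y(0.05) ~ 36.8 and y(0.10) ~ 13.5, and
        #   optimize.curve_fit(self.__exponential_function, x, y, p0=[y[0], 0.045])
        # on noise-free samples of that curve recovers popt ~ [100, 0.05], so the
        # normalized residual used as "error" above is ~0.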
3202 3218
3203 3219 def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval):
3204 3220
3205 3221 pairslist1 = list(pairslist)
3206 3222 pairslist1.append((0,1))
3207 3223 pairslist1.append((3,4))
3208 3224 numPairs = len(pairslist1)
3209 3225 #Time Lag
3210 3226 timeLag = 45*10**-3
3211 3227 c = 3e8
3212 3228 lag = numpy.ceil(timeLag/timeInterval)
3213 3229 freq = 30e6
3214 3230
3215 3231 listMeteors1 = []
3216 3232
3217 3233 for i in range(len(listMeteors)):
3218 3234 meteorAux = listMeteors[i]
3219 3235 if meteorAux[-1] == 0:
3220 3236 mStart = listMeteors[i][1]
3221 3237 mPeak = listMeteors[i][2]
3222 3238 mLag = mPeak - mStart + lag
3223 3239
3224 3240 #get the volt data between the start and end times of the meteor
3225 3241 meteorVolts = listVolts[i]
3226 3242 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3227 3243
3228 3244 #Get CCF
3229 3245 allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2])
3230 3246
3231 3247 #Method 2
3232 3248 slopes = numpy.zeros(numPairs)
3233 3249 time = numpy.array([-2,-1,1,2])*timeInterval
3234 3250 angAllCCF = numpy.angle(allCCFs[:,[0,1,3,4],0])
3235 3251
3236 3252 #Correct phases
3237 3253 derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1]
3238 3254 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
3239 3255
3240 3256 if indDer[0].shape[0] > 0:
3241 3257 for i in range(indDer[0].shape[0]):
3242 3258 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i]])
3243 3259 angAllCCF[indDer[0][i],indDer[1][i]+1:] += signo*2*numpy.pi
3244 3260
3245 3261 # fit = scipy.stats.linregress(numpy.array([-2,-1,1,2])*timeInterval, numpy.array([phaseLagN2s[i],phaseLagN1s[i],phaseLag1s[i],phaseLag2s[i]]))
3246 3262 for j in range(numPairs):
3247 3263 fit = stats.linregress(time, angAllCCF[j,:])
3248 3264 slopes[j] = fit[0]
3249 3265
3250 3266 #Remove Outlier
3251 3267 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3252 3268 # slopes = numpy.delete(slopes,indOut)
3253 3269 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3254 3270 # slopes = numpy.delete(slopes,indOut)
3255 3271
3256 3272 radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq)
3257 3273 radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq)
3258 3274 meteorAux[-2] = radialError
3259 3275 meteorAux[-3] = radialVelocity
3260 3276
3261 3277 #Setting Error
3262 3278 #Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s
3263 3279 if numpy.abs(radialVelocity) > 200:
3264 3280 meteorAux[-1] = 15
3265 3281 #Number 12: Poor fit to CCF variation for estimation of radial drift velocity
3266 3282 elif radialError > radialStdThresh:
3267 3283 meteorAux[-1] = 12
3268 3284
3269 3285 listMeteors1.append(meteorAux)
3270 3286 return listMeteors1
3271 3287
3272 3288 def __setNewArrays(self, listMeteors, date, heiRang):
3273 3289
3274 3290 #New arrays
3275 3291 arrayMeteors = numpy.array(listMeteors)
3276 3292 arrayParameters = numpy.zeros((len(listMeteors), 13))
3277 3293
3278 3294 #Date inclusion
3279 3295 # date = re.findall(r'\((.*?)\)', date)
3280 3296 # date = date[0].split(',')
3281 3297 # date = map(int, date)
3282 3298 #
3283 3299 # if len(date)<6:
3284 3300 # date.append(0)
3285 3301 #
3286 3302 # date = [date[0]*10000 + date[1]*100 + date[2], date[3]*10000 + date[4]*100 + date[5]]
3287 3303 # arrayDate = numpy.tile(date, (len(listMeteors), 1))
3288 3304 arrayDate = numpy.tile(date, (len(listMeteors)))
3289 3305
3290 3306 #Meteor array
3291 3307 # arrayMeteors[:,0] = heiRang[arrayMeteors[:,0].astype(int)]
3292 3308 # arrayMeteors = numpy.hstack((arrayDate, arrayMeteors))
3293 3309
3294 3310 #Parameters Array
3295 3311 arrayParameters[:,0] = arrayDate #Date
3296 3312 arrayParameters[:,1] = heiRang[arrayMeteors[:,0].astype(int)] #Range
3297 3313 arrayParameters[:,6:8] = arrayMeteors[:,-3:-1] #Radial velocity and its error
3298 3314 arrayParameters[:,8:12] = arrayMeteors[:,7:11] #Phases
3299 3315 arrayParameters[:,-1] = arrayMeteors[:,-1] #Error
3300 3316
3301 3317
3302 3318 return arrayParameters
3303 3319
3304 3320 class CorrectSMPhases(Operation):
3305 3321
3306 3322 def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None):
3307 3323
3308 3324 arrayParameters = dataOut.data_param
3309 3325 pairsList = []
3310 3326 pairx = (0,1)
3311 3327 pairy = (2,3)
3312 3328 pairsList.append(pairx)
3313 3329 pairsList.append(pairy)
3314 3330 jph = numpy.zeros(4)
3315 3331
3316 3332 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
3317 3333 # arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
3318 3334 arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + phaseOffsets)))
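        # numpy.angle(numpy.exp(1j*x)) wraps the corrected phases back into (-pi, pi];
        # as a quick check with made-up numbers, a stored phase of 3.0 rad plus a
        # 1.0 rad offset becomes 4.0 - 2*pi ~ -2.28 rad instead of leaving the interval.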
3319 3335
3320 3336 meteorOps = SMOperations()
3321 3337 if channelPositions is None:
3322 3338 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3323 3339 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3324 3340
3325 3341 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3326 3342 h = (hmin,hmax)
3327 3343
3328 3344 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
3329 3345
3330 3346 dataOut.data_param = arrayParameters
3331 3347 return
3332 3348
3333 3349 class SMPhaseCalibration(Operation):
3334 3350
3335 3351 __buffer = None
3336 3352
3337 3353 __initime = None
3338 3354
3339 3355 __dataReady = False
3340 3356
3341 3357 __isConfig = False
3342 3358
3343 3359 def __checkTime(self, currentTime, initTime, paramInterval, outputInterval):
3344 3360
3345 3361 dataTime = currentTime + paramInterval
3346 3362 deltaTime = dataTime - initTime
3347 3363
3348 3364 if deltaTime >= outputInterval or deltaTime < 0:
3349 3365 return True
3350 3366
3351 3367 return False
3352 3368
3353 3369 def __getGammas(self, pairs, d, phases):
3354 3370 gammas = numpy.zeros(2)
3355 3371
3356 3372 for i in range(len(pairs)):
3357 3373
3358 3374 pairi = pairs[i]
3359 3375
3360 3376 phip3 = phases[:,pairi[0]]
3361 3377 d3 = d[pairi[0]]
3362 3378 phip2 = phases[:,pairi[1]]
3363 3379 d2 = d[pairi[1]]
3364 3380 #Calculating gamma
3365 3381 # jdcos = alp1/(k*d1)
3366 3382 # jgamma = numpy.angle(numpy.exp(1j*(d0*alp1/d1 - alp0)))
3367 3383 jgamma = -phip2*d3/d2 - phip3
3368 3384 jgamma = numpy.angle(numpy.exp(1j*jgamma))
3369 3385 # jgamma[jgamma>numpy.pi] -= 2*numpy.pi
3370 3386 # jgamma[jgamma<-numpy.pi] += 2*numpy.pi
3371 3387
3372 3388 #Revised distribution
3373 3389 jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi))
3374 3390
3375 3391 #Histogram
3376 3392 nBins = 64
3377 3393 rmin = -0.5*numpy.pi
3378 3394 rmax = 0.5*numpy.pi
3379 3395 phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax))
3380 3396
3381 3397 meteorsY = phaseHisto[0]
3382 3398 phasesX = phaseHisto[1][:-1]
3383 3399 width = phasesX[1] - phasesX[0]
3384 3400 phasesX += width/2
3385 3401
3386 3402 #Gaussian approximation
3387 3403 bpeak = meteorsY.argmax()
3388 3404 peak = meteorsY.max()
3389 3405 jmin = bpeak - 5
3390 3406 jmax = bpeak + 5 + 1
3391 3407
3392 3408 if jmin<0:
3393 3409 jmin = 0
3394 3410 jmax = 6
3395 3411 elif jmax > meteorsY.size:
3396 3412 jmin = meteorsY.size - 6
3397 3413 jmax = meteorsY.size
3398 3414
3399 3415 x0 = numpy.array([peak,bpeak,50])
3400 3416 coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax]))
3401 3417
3402 3418 #Gammas
3403 3419 gammas[i] = coeff[0][1]
3404 3420
3405 3421 return gammas
3406 3422
3407 3423 def __residualFunction(self, coeffs, y, t):
3408 3424
3409 3425 return y - self.__gauss_function(t, coeffs)
3410 3426
3411 3427 def __gauss_function(self, t, coeffs):
3412 3428
3413 3429 return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2)
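        # __getGammas above fits this Gaussian to the histogram of candidate phase
        # offsets with optimize.leastsq and keeps only the fitted centre coeff[0][1].
        # Small sanity check (synthetic values): __gauss_function(t, [50, 0.3, 0.1])
        # returns 50 at t = 0.3 and ~30.3 at t = 0.4 (one sigma away, 50*exp(-0.5)).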
3414 3430
3415 3431 def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray):
3416 3432 meteorOps = SMOperations()
3417 3433 nchan = 4
3418 3434 pairx = pairsList[0] #x es 0
3419 3435 pairy = pairsList[1] #y es 1
3420 3436 center_xangle = 0
3421 3437 center_yangle = 0
3422 3438 range_angle = numpy.array([10*numpy.pi,numpy.pi,numpy.pi/2,numpy.pi/4])
3423 3439 ntimes = len(range_angle)
3424 3440
3425 3441 nstepsx = 20
3426 3442 nstepsy = 20
3427 3443
3428 3444 for iz in range(ntimes):
3429 3445 min_xangle = -range_angle[iz]/2 + center_xangle
3430 3446 max_xangle = range_angle[iz]/2 + center_xangle
3431 3447 min_yangle = -range_angle[iz]/2 + center_yangle
3432 3448 max_yangle = range_angle[iz]/2 + center_yangle
3433 3449
3434 3450 inc_x = (max_xangle-min_xangle)/nstepsx
3435 3451 inc_y = (max_yangle-min_yangle)/nstepsy
3436 3452
3437 3453 alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle
3438 3454 alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle
3439 3455 penalty = numpy.zeros((nstepsx,nstepsy))
3440 3456 jph_array = numpy.zeros((nchan,nstepsx,nstepsy))
3441 3457 jph = numpy.zeros(nchan)
3442 3458
3443 3459 # Iterations looking for the offset
3444 3460 for iy in range(int(nstepsy)):
3445 3461 for ix in range(int(nstepsx)):
3446 3462 d3 = d[pairsList[1][0]]
3447 3463 d2 = d[pairsList[1][1]]
3448 3464 d5 = d[pairsList[0][0]]
3449 3465 d4 = d[pairsList[0][1]]
3450 3466
3451 3467 alp2 = alpha_y[iy] #gamma 1
3452 3468 alp4 = alpha_x[ix] #gamma 0
3453 3469
3454 3470 alp3 = -alp2*d3/d2 - gammas[1]
3455 3471 alp5 = -alp4*d5/d4 - gammas[0]
3456 3472 # jph[pairy[1]] = alpha_y[iy]
3457 3473 # jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
3458 3474
3459 3475 # jph[pairx[1]] = alpha_x[ix]
3460 3476 # jph[pairx[0]] = -gammas[0] - alpha_x[ix]*d[pairx[1]]/d[pairx[0]]
3461 3477 jph[pairsList[0][1]] = alp4
3462 3478 jph[pairsList[0][0]] = alp5
3463 3479 jph[pairsList[1][0]] = alp3
3464 3480 jph[pairsList[1][1]] = alp2
3465 3481 jph_array[:,ix,iy] = jph
3466 3482 # d = [2.0,2.5,2.5,2.0]
3467 3483 #TODO: check that the meteors are read in correctly
3468 3484 meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph)
3469 3485 error = meteorsArray1[:,-1]
3470 3486 ind1 = numpy.where(error==0)[0]
3471 3487 penalty[ix,iy] = ind1.size
3472 3488
3473 3489 i,j = numpy.unravel_index(penalty.argmax(), penalty.shape)
3474 3490 phOffset = jph_array[:,i,j]
3475 3491
3476 3492 center_xangle = phOffset[pairx[1]]
3477 3493 center_yangle = phOffset[pairy[1]]
3478 3494
3479 3495 phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j]))
3480 3496 phOffset = phOffset*180/numpy.pi
3481 3497 return phOffset
3482 3498
3483 3499
3484 3500 def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1):
3485 3501
3486 3502 dataOut.flagNoData = True
3487 3503 self.__dataReady = False
3488 3504 dataOut.outputInterval = nHours*3600
3489 3505
3490 3506 if self.__isConfig == False:
3491 3507 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
3492 3508 #Get Initial LTC time
3493 3509 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
3494 3510 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
3495 3511
3496 3512 self.__isConfig = True
3497 3513
3498 3514 if self.__buffer is None:
3499 3515 self.__buffer = dataOut.data_param.copy()
3500 3516
3501 3517 else:
3502 3518 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
3503 3519
3504 3520 self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
3505 3521
3506 3522 if self.__dataReady:
3507 3523 dataOut.utctimeInit = self.__initime
3508 3524 self.__initime += dataOut.outputInterval #to erase time offset
3509 3525
3510 3526 freq = dataOut.frequency
3511 3527 c = dataOut.C #m/s
3512 3528 lamb = c/freq
3513 3529 k = 2*numpy.pi/lamb
3514 3530 azimuth = 0
3515 3531 h = (hmin, hmax)
3516 3532 # pairs = ((0,1),(2,3)) #Estrella
3517 3533 # pairs = ((1,0),(2,3)) #T
3518 3534
3519 3535 if channelPositions is None:
3520 3536 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3521 3537 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3522 3538 meteorOps = SMOperations()
3523 3539 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3524 3540
3525 3541 #Checking correct order of pairs
3526 3542 pairs = []
3527 3543 if distances[1] > distances[0]:
3528 3544 pairs.append((1,0))
3529 3545 else:
3530 3546 pairs.append((0,1))
3531 3547
3532 3548 if distances[3] > distances[2]:
3533 3549 pairs.append((3,2))
3534 3550 else:
3535 3551 pairs.append((2,3))
3536 3552 # distances1 = [-distances[0]*lamb, distances[1]*lamb, -distances[2]*lamb, distances[3]*lamb]
3537 3553
3538 3554 meteorsArray = self.__buffer
3539 3555 error = meteorsArray[:,-1]
3540 3556 boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14)
3541 3557 ind1 = numpy.where(boolError)[0]
3542 3558 meteorsArray = meteorsArray[ind1,:]
3543 3559 meteorsArray[:,-1] = 0
3544 3560 phases = meteorsArray[:,8:12]
3545 3561
3546 3562 #Calculate Gammas
3547 3563 gammas = self.__getGammas(pairs, distances, phases)
3548 3564 # gammas = numpy.array([-21.70409463,45.76935864])*numpy.pi/180
3549 3565 #Calculate Phases
3550 3566 phasesOff = self.__getPhases(azimuth, h, pairs, distances, gammas, meteorsArray)
3551 3567 phasesOff = phasesOff.reshape((1,phasesOff.size))
3552 3568 dataOut.data_output = -phasesOff
3553 3569 dataOut.flagNoData = False
3554 3570 self.__buffer = None
3555 3571
3556 3572
3557 3573 return
3558 3574
3559 3575 class SMOperations():
3560 3576
3561 3577 def __init__(self):
3562 3578
3563 3579 return
3564 3580
3565 3581 def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph):
3566 3582
3567 3583 arrayParameters = arrayParameters0.copy()
3568 3584 hmin = h[0]
3569 3585 hmax = h[1]
3570 3586
3571 3587 #Calculate AOA (Error N 3, 4)
3572 3588 #JONES ET AL. 1998
3573 3589 AOAthresh = numpy.pi/8
3574 3590 error = arrayParameters[:,-1]
3575 3591 phases = -arrayParameters[:,8:12] + jph
3576 3592 # phases = numpy.unwrap(phases)
3577 3593 arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth)
3578 3594
3579 3595 #Calculate Heights (Error N 13 and 14)
3580 3596 error = arrayParameters[:,-1]
3581 3597 Ranges = arrayParameters[:,1]
3582 3598 zenith = arrayParameters[:,4]
3583 3599 arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax)
3584 3600
3585 3601 #----------------------- Get Final data ------------------------------------
3586 3602 # error = arrayParameters[:,-1]
3587 3603 # ind1 = numpy.where(error==0)[0]
3588 3604 # arrayParameters = arrayParameters[ind1,:]
3589 3605
3590 3606 return arrayParameters
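        # Minimal usage sketch (a sketch only; the inputs mirror how SMDetection and
        # CorrectSMPhases call this method elsewhere in this file):
        #
        #   meteorOps = SMOperations()
        #   pairslist0, distances = meteorOps.getPhasePairs([(4.5,2), (2,4.5), (2,2), (2,0), (0,2)])
        #   pairsList = [(0,1), (2,3)]            # x-axis and y-axis baseline pairs
        #   jph = numpy.zeros(4)                  # per-channel phase offsets in radians
        #   params = meteorOps.getMeteorParams(arrayParameters, 45, (50,150),
        #                                      pairsList, distances, jph)
        #
        #   # columns 3:6 then hold azimuth/zenith/cosDirError, column 2 the resolved
        #   # height, and the last column the updated rejection code.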
3591 3607
3592 3608 def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth):
3593 3609
3594 3610 arrayAOA = numpy.zeros((phases.shape[0],3))
3595 3611 cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList,directions)
3596 3612
3597 3613 arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3598 3614 cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3599 3615 arrayAOA[:,2] = cosDirError
3600 3616
3601 3617 azimuthAngle = arrayAOA[:,0]
3602 3618 zenithAngle = arrayAOA[:,1]
3603 3619
3604 3620 #Setting Error
3605 3621 indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0]
3606 3622 error[indError] = 0
3607 3623 #Number 3: AOA estimate not feasible
3608 3624 indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3609 3625 error[indInvalid] = 3
3610 3626 #Number 4: Large difference in AOAs obtained from different antenna baselines
3611 3627 indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3612 3628 error[indInvalid] = 4
3613 3629 return arrayAOA, error
3614 3630
3615 3631 def __getDirectionCosines(self, arrayPhase, pairsList, distances):
3616 3632
3617 3633 #Initializing some variables
3618 3634 ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3619 3635 ang_aux = ang_aux.reshape(1,ang_aux.size)
3620 3636
3621 3637 cosdir = numpy.zeros((arrayPhase.shape[0],2))
3622 3638 cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3623 3639
3624 3640
3625 3641 for i in range(2):
3626 3642 ph0 = arrayPhase[:,pairsList[i][0]]
3627 3643 ph1 = arrayPhase[:,pairsList[i][1]]
3628 3644 d0 = distances[pairsList[i][0]]
3629 3645 d1 = distances[pairsList[i][1]]
3630 3646
3631 3647 ph0_aux = ph0 + ph1
3632 3648 ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux))
3633 3649 # ph0_aux[ph0_aux > numpy.pi] -= 2*numpy.pi
3634 3650 # ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
3635 3651 #First Estimation
3636 3652 cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1))
3637 3653
3638 3654 #Most-Accurate Second Estimation
3639 3655 phi1_aux = ph0 - ph1
3640 3656 phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3641 3657 #Direction Cosine 1
3642 3658 cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1))
3643 3659
3644 3660 #Searching the correct Direction Cosine
3645 3661 cosdir0_aux = cosdir0[:,i]
3646 3662 cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3647 3663 #Minimum Distance
3648 3664 cosDiff = (cosdir1 - cosdir0_aux)**2
3649 3665 indcos = cosDiff.argmin(axis = 1)
3650 3666 #Saving Value obtained
3651 3667 cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3652 3668
3653 3669 return cosdir0, cosdir
3654 3670
3655 3671 def __calculateAOA(self, cosdir, azimuth):
3656 3672 cosdirX = cosdir[:,0]
3657 3673 cosdirY = cosdir[:,1]
3658 3674
3659 3675 zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3660 3676 azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth#0 deg north, 90 deg east
3661 3677 angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3662 3678
3663 3679 return angles
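        # Quick numeric check of the geometry above (illustrative direction cosines):
        # cosdirX = 0.5, cosdirY = 0 gives zenithAngle = arccos(sqrt(0.75)) = 30 deg
        # and azimuthAngle = arctan2(0.5, 0)*180/pi + azimuth = 90 deg + azimuth,
        # i.e. an echo 30 deg off zenith toward 90 deg (east) when azimuth = 0.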
3664 3680
3665 3681 def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3666 3682
3667 3683 Ramb = 375 #Ramb = c/(2*PRF)
3668 3684 Re = 6371 #Earth Radius
3669 3685 heights = numpy.zeros(Ranges.shape)
3670 3686
3671 3687 R_aux = numpy.array([0,1,2])*Ramb
3672 3688 R_aux = R_aux.reshape(1,R_aux.size)
3673 3689
3674 3690 Ranges = Ranges.reshape(Ranges.size,1)
3675 3691
3676 3692 Ri = Ranges + R_aux
3677 3693 hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3678 3694
3679 3695 #Check if there is a height between 70 and 110 km
3680 3696 h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3681 3697 ind_h = numpy.where(h_bool == 1)[0]
3682 3698
3683 3699 hCorr = hi[ind_h, :]
3684 3700 ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3685 3701
3686 3702 hCorr = hi[ind_hCorr][:len(ind_h)]
3687 3703 heights[ind_h] = hCorr
3688 3704
3689 3705 #Setting Error
3690 3706 #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
3691 3707 #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3692 3708 indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0]
3693 3709 error[indError] = 0
3694 3710 indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3695 3711 error[indInvalid2] = 14
3696 3712 indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3697 3713 error[indInvalid1] = 13
3698 3714
3699 3715 return heights, error
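        # Worked example of the range-height geometry above (numbers rounded): a
        # meteor at a measured range of 300 km and zenith angle 70 deg gives, for
        # the unaliased trip (R_aux = 0),
        #     sqrt(6371**2 + 300**2 + 2*6371*300*cos(70 deg)) - 6371 ~ 109 km,
        # inside the 70-110 km window, so the echo keeps error = 0; the aliased
        # ranges 675 km and 1050 km map to ~260 km and ~430 km and are rejected.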
3700 3716
3701 3717 def getPhasePairs(self, channelPositions):
3702 3718 chanPos = numpy.array(channelPositions)
3703 3719 listOper = list(itertools.combinations(list(range(5)),2))
3704 3720
3705 3721 distances = numpy.zeros(4)
3706 3722 axisX = []
3707 3723 axisY = []
3708 3724 distX = numpy.zeros(3)
3709 3725 distY = numpy.zeros(3)
3710 3726 ix = 0
3711 3727 iy = 0
3712 3728
3713 3729 pairX = numpy.zeros((2,2))
3714 3730 pairY = numpy.zeros((2,2))
3715 3731
3716 3732 for i in range(len(listOper)):
3717 3733 pairi = listOper[i]
3718 3734
3719 3735 posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:])
3720 3736
3721 3737 if posDif[0] == 0:
3722 3738 axisY.append(pairi)
3723 3739 distY[iy] = posDif[1]
3724 3740 iy += 1
3725 3741 elif posDif[1] == 0:
3726 3742 axisX.append(pairi)
3727 3743 distX[ix] = posDif[0]
3728 3744 ix += 1
3729 3745
3730 3746 for i in range(2):
3731 3747 if i==0:
3732 3748 dist0 = distX
3733 3749 axis0 = axisX
3734 3750 else:
3735 3751 dist0 = distY
3736 3752 axis0 = axisY
3737 3753
3738 3754 side = numpy.argsort(dist0)[:-1]
3739 3755 axis0 = numpy.array(axis0)[side,:]
3740 3756 chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0])
3741 3757 axis1 = numpy.unique(numpy.reshape(axis0,4))
3742 3758 side = axis1[axis1 != chanC]
3743 3759 diff1 = chanPos[chanC,i] - chanPos[side[0],i]
3744 3760 diff2 = chanPos[chanC,i] - chanPos[side[1],i]
3745 3761 if diff1<0:
3746 3762 chan2 = side[0]
3747 3763 d2 = numpy.abs(diff1)
3748 3764 chan1 = side[1]
3749 3765 d1 = numpy.abs(diff2)
3750 3766 else:
3751 3767 chan2 = side[1]
3752 3768 d2 = numpy.abs(diff2)
3753 3769 chan1 = side[0]
3754 3770 d1 = numpy.abs(diff1)
3755 3771
3756 3772 if i==0:
3757 3773 chanCX = chanC
3758 3774 chan1X = chan1
3759 3775 chan2X = chan2
3760 3776 distances[0:2] = numpy.array([d1,d2])
3761 3777 else:
3762 3778 chanCY = chanC
3763 3779 chan1Y = chan1
3764 3780 chan2Y = chan2
3765 3781 distances[2:4] = numpy.array([d1,d2])
3766 3782 # axisXsides = numpy.reshape(axisX[ix,:],4)
3767 3783 #
3768 3784 # channelCentX = int(numpy.intersect1d(pairX[0,:], pairX[1,:])[0])
3769 3785 # channelCentY = int(numpy.intersect1d(pairY[0,:], pairY[1,:])[0])
3770 3786 #
3771 3787 # ind25X = numpy.where(pairX[0,:] != channelCentX)[0][0]
3772 3788 # ind20X = numpy.where(pairX[1,:] != channelCentX)[0][0]
3773 3789 # channel25X = int(pairX[0,ind25X])
3774 3790 # channel20X = int(pairX[1,ind20X])
3775 3791 # ind25Y = numpy.where(pairY[0,:] != channelCentY)[0][0]
3776 3792 # ind20Y = numpy.where(pairY[1,:] != channelCentY)[0][0]
3777 3793 # channel25Y = int(pairY[0,ind25Y])
3778 3794 # channel20Y = int(pairY[1,ind20Y])
3779 3795
3780 3796 # pairslist = [(channelCentX, channel25X),(channelCentX, channel20X),(channelCentY,channel25Y),(channelCentY, channel20Y)]
3781 3797 pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
3782 3798
3783 3799 return pairslist, distances
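    # Hand-traced example with the default "Estrella" layout used elsewhere in this
    # file, channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] (same distance
    # units as the input positions): channel 2 is the common centre antenna, and the
    # method returns
    #     pairslist = [(2,4), (2,0), (2,3), (2,1)]
    #     distances = [2.0, 2.5, 2.0, 2.5]
    # i.e. the two x-axis baselines (channel 2 with channels 4 and 0) followed by the
    # two y-axis baselines (channel 2 with channels 3 and 1).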
3784 3800 # def __getAOA(self, phases, pairsList, error, AOAthresh, azimuth):
3785 3801 #
3786 3802 # arrayAOA = numpy.zeros((phases.shape[0],3))
3787 3803 # cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList)
3788 3804 #
3789 3805 # arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3790 3806 # cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3791 3807 # arrayAOA[:,2] = cosDirError
3792 3808 #
3793 3809 # azimuthAngle = arrayAOA[:,0]
3794 3810 # zenithAngle = arrayAOA[:,1]
3795 3811 #
3796 3812 # #Setting Error
3797 3813 # #Number 3: AOA not fesible
3798 3814 # indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3799 3815 # error[indInvalid] = 3
3800 3816 # #Number 4: Large difference in AOAs obtained from different antenna baselines
3801 3817 # indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3802 3818 # error[indInvalid] = 4
3803 3819 # return arrayAOA, error
3804 3820 #
3805 3821 # def __getDirectionCosines(self, arrayPhase, pairsList):
3806 3822 #
3807 3823 # #Initializing some variables
3808 3824 # ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3809 3825 # ang_aux = ang_aux.reshape(1,ang_aux.size)
3810 3826 #
3811 3827 # cosdir = numpy.zeros((arrayPhase.shape[0],2))
3812 3828 # cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3813 3829 #
3814 3830 #
3815 3831 # for i in range(2):
3816 3832 # #First Estimation
3817 3833 # phi0_aux = arrayPhase[:,pairsList[i][0]] + arrayPhase[:,pairsList[i][1]]
3818 3834 # #Dealias
3819 3835 # indcsi = numpy.where(phi0_aux > numpy.pi)
3820 3836 # phi0_aux[indcsi] -= 2*numpy.pi
3821 3837 # indcsi = numpy.where(phi0_aux < -numpy.pi)
3822 3838 # phi0_aux[indcsi] += 2*numpy.pi
3823 3839 # #Direction Cosine 0
3824 3840 # cosdir0[:,i] = -(phi0_aux)/(2*numpy.pi*0.5)
3825 3841 #
3826 3842 # #Most-Accurate Second Estimation
3827 3843 # phi1_aux = arrayPhase[:,pairsList[i][0]] - arrayPhase[:,pairsList[i][1]]
3828 3844 # phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3829 3845 # #Direction Cosine 1
3830 3846 # cosdir1 = -(phi1_aux + ang_aux)/(2*numpy.pi*4.5)
3831 3847 #
3832 3848 # #Searching the correct Direction Cosine
3833 3849 # cosdir0_aux = cosdir0[:,i]
3834 3850 # cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3835 3851 # #Minimum Distance
3836 3852 # cosDiff = (cosdir1 - cosdir0_aux)**2
3837 3853 # indcos = cosDiff.argmin(axis = 1)
3838 3854 # #Saving Value obtained
3839 3855 # cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3840 3856 #
3841 3857 # return cosdir0, cosdir
3842 3858 #
3843 3859 # def __calculateAOA(self, cosdir, azimuth):
3844 3860 # cosdirX = cosdir[:,0]
3845 3861 # cosdirY = cosdir[:,1]
3846 3862 #
3847 3863 # zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3848 3864 # azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east
3849 3865 # angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3850 3866 #
3851 3867 # return angles
3852 3868 #
3853 3869 # def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3854 3870 #
3855 3871 # Ramb = 375 #Ramb = c/(2*PRF)
3856 3872 # Re = 6371 #Earth Radius
3857 3873 # heights = numpy.zeros(Ranges.shape)
3858 3874 #
3859 3875 # R_aux = numpy.array([0,1,2])*Ramb
3860 3876 # R_aux = R_aux.reshape(1,R_aux.size)
3861 3877 #
3862 3878 # Ranges = Ranges.reshape(Ranges.size,1)
3863 3879 #
3864 3880 # Ri = Ranges + R_aux
3865 3881 # hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3866 3882 #
3867 3883 # #Check if there is a height between 70 and 110 km
3868 3884 # h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3869 3885 # ind_h = numpy.where(h_bool == 1)[0]
3870 3886 #
3871 3887 # hCorr = hi[ind_h, :]
3872 3888 # ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3873 3889 #
3874 3890 # hCorr = hi[ind_hCorr]
3875 3891 # heights[ind_h] = hCorr
3876 3892 #
3877 3893 # #Setting Error
3878 3894 # #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
3879 3895 # #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3880 3896 #
3881 3897 # indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3882 3898 # error[indInvalid2] = 14
3883 3899 # indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3884 3900 # error[indInvalid1] = 13
3885 3901 #
3886 3902 # return heights, error
3903
3904
3905 class WeatherRadar(Operation):
3906 '''
3907 Operation that implements weather-radar processing: converts received power into the equivalent reflectivity factor (dBZe) using the radar equation.
3908 Input: dataOut carrying pulse-pair (Voltage) or spectral (Spectra) moments
3909 Output: dataOut.factor_Zeh (dBZe per channel and height)
3910 Parameters affected: factor_Zeh
3911 '''
3912 isConfig = False
3913
3914 def __init__(self):
3915 Operation.__init__(self)
3916
3917 def setup(self,dataOut,Pt=0,Gt=0,Gr=0,lambda_=0, aL=0,
3918 tauW= 0,thetaT=0,thetaR=0,Km =0):
3919 self.nCh = dataOut.nChannels
3920 self.nHeis = dataOut.nHeights
3921 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
3922 self.Range = numpy.arange(dataOut.nHeights)*deltaHeight + dataOut.heightList[0]
3923 self.Range = self.Range.reshape(1,self.nHeis)
3924 self.Range = numpy.tile(self.Range,[self.nCh,1])
3925 '''-----------1 Radar constant----------'''
3926 self.Pt = Pt
3927 self.Gt = Gt
3928 self.Gr = Gr
3929 self.lambda_ = lambda_
3930 self.aL = aL
3931 self.tauW = tauW
3932 self.thetaT = thetaT
3933 self.thetaR = thetaR
3934 self.Km = Km
3935 Numerator = ((4*numpy.pi)**3 * aL**2 * 16 *numpy.log(2))
3936 Denominator = (Pt * Gt * Gr * lambda_**2 * SPEED_OF_LIGHT * tauW * numpy.pi*thetaT*thetaR)
3937 self.RadarConstant = Numerator/Denominator
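# This is the constant of the standard weather-radar equation,
#   eta = RadarConstant * Pr * R**2
# with RadarConstant = (4*pi)**3 * aL**2 * 16*ln(2) / (Pt*Gt*Gr*lambda_**2*c*tauW*pi*thetaT*thetaR),
# i.e. exactly the Numerator/Denominator computed above (c = SPEED_OF_LIGHT).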
3938 '''-----------2 Radar reflectivity and reflectivity factor------'''
3939 self.n_radar = numpy.zeros((self.nCh,self.nHeis))
3940 self.Z_radar = numpy.zeros((self.nCh,self.nHeis))
3941
3942 def setMoments(self,dataOut,i):
3943
3944 type = dataOut.inputUnit
3945 nCh = dataOut.nChannels
3946 nHeis= dataOut.nHeights
3947 data_param = numpy.zeros((nCh,4,nHeis))
3948 if type == "Voltage":
3949 data_param[:,0,:] = dataOut.dataPP_POW/(dataOut.nCohInt**2)
3950 data_param[:,1,:] = dataOut.dataPP_DOP
3951 data_param[:,2,:] = dataOut.dataPP_WIDTH
3952 data_param[:,3,:] = dataOut.dataPP_SNR
3953 if type == "Spectra":
3954 data_param[:,0,:] = dataOut.data_POW
3955 data_param[:,1,:] = dataOut.data_DOP
3956 data_param[:,2,:] = dataOut.data_WIDTH
3958 data_param[:,3,:] = dataOut.data_SNR
3959
3960 return data_param[:,i,:]
3961
3962
3963 def run(self,dataOut,Pt=25,Gt=200.0,Gr=50.0,lambda_=0.32, aL=2.5118,
3964 tauW= 4.0e-6,thetaT=0.165,thetaR=0.367,Km =0.93):
3965
3966 if not self.isConfig:
3967 self.setup(dataOut= dataOut,Pt=Pt,Gt=Gt,Gr=Gr,lambda_=lambda_, aL=aL,
3968 tauW= tauW,thetaT=thetaT,thetaR=thetaR,Km =Km)
3969 self.isConfig = True
3970 '''-----------------------------Radar power - signal S-----------------------------'''
3971 Pr = self.setMoments(dataOut,0)
3972
3973 for R in range(self.nHeis):
3974 self.n_radar[:,R] = self.RadarConstant*Pr[:,R]* (self.Range[:,R])**2
3975
3976 self.Z_radar[:,R] = self.n_radar[:,R]* self.lambda_**4/( numpy.pi**5 * self.Km**2)
3977
3978 '''----------- Equivalent reflectivity factor, lambda_ < 10 cm (here lambda_ = 3.2 cm) -------'''
3979 Zeh = self.Z_radar
3980 dBZeh = 10*numpy.log10(Zeh)
3981 dataOut.factor_Zeh= dBZeh
3982 self.n_radar = numpy.zeros((self.nCh,self.nHeis))
3983 self.Z_radar = numpy.zeros((self.nCh,self.nHeis))
3984
3985 return dataOut
3986
3987 class PedestalInformation(Operation):
3988 path_ped = None
3989 path_adq = None
3990 t_Interval_p = None
3991 n_Muestras_p = None
3992 isConfig = False
3993 blocksPerfile= None
3994 f_a_p = None
3995 online = None
3996 angulo_adq = None
3997 nro_file = None
3998 nro_key_p = None
3999
4000
4001 def __init__(self):
4002 Operation.__init__(self)
4003
4004 def getfirstFilefromPath(self,path,meta,ext):
4005 validFilelist = []
4006 #print("SEARH",path)
4007 try:
4008 fileList = os.listdir(path)
4009 except:
4010 print("check path - fileList")
return None
4011 if len(fileList)<1:
4012 return None
4013 # meta 1234 567 8-18 BCDE
4014 # H,D,PE YYYY DDD EPOC .ext
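# e.g. a hypothetical pedestal file name following this pattern would be
#   PE20212271629000000.hdf5  -> meta="PE", year 2021, day-of-year 227, 10-digit epoch 1629000000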
4015
4016 for thisFile in fileList:
4017 #print("HI",thisFile)
4018 if meta =="PE":
4019 try:
4020 number= int(thisFile[len(meta)+7:len(meta)+17])
4021 except:
4022 print("There is a file or folder with different format")
continue
4023 if meta == "D":
4024 try:
4025 number= int(thisFile[8:11])
4026 except:
4027 print("There is a file or folder with different format")
continue
4028
4029 if not isNumber(str=number):
4030 continue
4031 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
4032 continue
4034 validFilelist.append(thisFile)
4035 if len(validFilelist)>0:
4036 validFilelist = sorted(validFilelist,key=str.lower)
4037 return validFilelist
4038 return None
4039
4040 def gettimeutcfromDirFilename(self,path,file):
4041 dir_file= path+"/"+file
4042 fp = h5py.File(dir_file,'r')
4043 #epoc = fp['Metadata'].get('utctimeInit')[()]
4044 epoc = fp['Data'].get('utc')[()]
4045 fp.close()
4046 return epoc
4047
4048 def getDatavaluefromDirFilename(self,path,file,value):
4049 dir_file= path+"/"+file
4050 fp = h5py.File(dir_file,'r')
4051 array = fp['Data'].get(value)[()]
4052 fp.close()
4053 return array
4054
4055 def getFile_KeyP(self,list_pedestal,list_adq):
4056 print(list_pedestal)
4057 print(list_adq)
4058
4059 def getNROFile(self,utc_adq,utc_ped_list):
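# Given an acquisition time utc_adq and a list of pedestal start times (assumed to be
# sorted in ascending order), return the index of the last pedestal file that starts
# before utc_adq, together with that start time and the following one (linear scan).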
4060 c=0
4061 for i in range(len(utc_ped_list)):
4062 if utc_adq>utc_ped_list[i]:
4063 c +=1
4064
4065 return c-1,utc_ped_list[c-1],utc_ped_list[c]
4066
4067
4068 def setup_offline(self,list_pedestal,list_adq):
4069 print("SETUP OFFLINE")
4070 print(self.path_ped)
4071 print(self.path_adq)
4072 print(len(self.list_pedestal))
4073 print(len(self.list_adq))
4074 utc_ped_list=[]
4075 for i in range(len(self.list_pedestal)):
4076 utc_ped_list.append(self.gettimeutcfromDirFilename(path=self.path_ped,file=self.list_pedestal[i]))
4077
4078 #utc_ped_list= utc_ped_list
4079 utc_adq = self.gettimeutcfromDirFilename(path=self.path_adq,file=self.list_adq[0])
4080 #print("utc_ped_list",utc_ped_list)
4081 print("utc_adq",utc_adq)
4082 nro_file,utc_ped,utc_ped_1 = self.getNROFile(utc_adq=utc_adq, utc_ped_list= utc_ped_list)
4083
4084 print("nro_file",nro_file,"utc_ped",utc_ped)
4085 print("nro_file",i)
4086 nro_key_p = int((utc_adq-utc_ped)/self.t_Interval_p)
4087 print("nro_key_p",nro_key_p)
4088
4089 ff_pedestal = self.list_pedestal[nro_file]
4090 #angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azimuth")
4091 angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azi_pos")
4092
4093 print("utc_pedestal_init :",utc_ped+nro_key_p*self.t_Interval_p)
4094 print("angulo_array :",angulo[nro_key_p])
4095 self.nro_file = nro_file
4096 self.nro_key_p = nro_key_p
4097
4098 def setup_online(self,dataOut):
4099 utc_adq =dataOut.utctime
4100 print("Online-utc_adq",utc_adq)
4101 print(len(self.list_pedestal))
4102 utc_ped_list=[]
4103 for i in range(len(self.list_pedestal)):
4104 utc_ped_list.append(self.gettimeutcfromDirFilename(path=self.path_ped,file=self.list_pedestal[i]))
4105 print(utc_ped_list[:20])
4106 #print(utc_ped_list[488:498])
4107 print("ultimo UTC-PEDESTAL",utc_ped_list[-1])
4108 nro_file,utc_ped,utc_ped_1 = self.getNROFile(utc_adq=utc_adq, utc_ped_list= utc_ped_list)
4109 print("nro_file",nro_file,"utc_ped",utc_ped,"utc_ped_1",utc_ped_1)
4110 print("name_PEDESTAL",self.list_pedestal[nro_file])
4111 nro_key_p = int((utc_adq-utc_ped)/self.t_Interval_p)
4112 print("nro_key_p",nro_key_p)
4113 ff_pedestal = self.list_pedestal[nro_file]
4114 #angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azimuth")
4115 angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azi_pos")
4116
4117 print("utc_pedestal_init :",utc_ped+nro_key_p*self.t_Interval_p)
4118 print("angulo_array :",angulo[nro_key_p])
4119 self.nro_file = nro_file
4120 self.nro_key_p = nro_key_p
4121
4122
4123 '''
4124 print("############################")
4125 utc_adq = dataOut.utctime
4126 print("ONLINE",dataOut.utctime)
4127 print("utc_adq" , utc_adq)
4128 utc_pedestal= self.gettimeutcfromDirFilename(path=self.path_ped,file=self.list_pedestal[0])
4129 print("utc_pedestal", utc_pedestal)
4130 flag_i = 0
4131 flag = 0
4132 ready = 0
4133 if len(self.list_pedestal)!=0:
4134 enable_p=1
4135 if (enable_p!=0):
4136 while(flag_i==0):
4137 if utc_adq>utc_pedestal:
4138 nro_file = int((utc_adq - utc_pedestal)/(self.t_Interval_p*self.n_Muestras_p))
4139 print("nro_file--------------------",nro_file)
4140 print(len(self.list_pedestal))
4141 if nro_file> len(self.list_pedestal):
4142 nro_file = len(self.list_pedestal)-1
4143 ff_pedestal = self.list_pedestal[nro_file]
4144 print(ff_pedestal)
4145 utc_pedestal = self.gettimeutcfromDirFilename(path=self.path_ped,file=ff_pedestal)
4146 while(flag==0):
4147 print("adq",utc_adq)
4148 print("ped",utc_pedestal)
4149 print("nro_file",nro_file)
4150 if utc_adq >utc_pedestal:
4151 print("DENTRO DEL IF-SETUP")
4152 ff_pedestal = self.list_pedestal[nro_file]
4153 if 0<(utc_adq - utc_pedestal)<(self.t_Interval_p*self.n_Muestras_p):
4154 nro_file= nro_file
4155 ff_pedestal = self.list_pedestal[nro_file]
4156 ready = 1
4157 if (utc_adq-utc_pedestal)>(self.t_Interval_p*self.n_Muestras_p):
4158 nro_tmp= int((utc_adq-utc_pedestal)/(self.n_Muestras_p))
4159 nro_file= nro_file+1*nro_tmp # TODO: check this condition
4160 if nro_tmp==0:
4161 nro_file= nro_file +1
4162 ff_pedestal = self.list_pedestal[nro_file]
4163 print("",ff_pedestal)
4164 utc_pedestal = self.gettimeutcfromDirFilename(path=self.path_ped,file=ff_pedestal)
4165 else:
4166 print("DENTRO DEL ELSE-SETUP")
4167 nro_tmp= int((utc_pedestal-utc_adq)/(self.n_Muestras_p))
4168 if utc_pedestal>utc_adq and nro_tmp==0:
4169 nro_tmp= int((utc_pedestal-utc_adq))
4170 print("nro_tmp",nro_tmp)
4171 if nro_file>nro_tmp:
4172 nro_file = nro_file-1*nro_tmp
4173 else:
4174 nro_file =nro_file -1
4175
4176 ff_pedestal = self.list_pedestal[nro_file]
4177 utc_pedestal = self.gettimeutcfromDirFilename(path=self.path_ped,file=ff_pedestal)
4178
4179 if ready:
4180 angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azimuth")
4181 nro_key_p = int((utc_adq-utc_pedestal)/self.t_Interval_p)
4182 print("nro_file :",nro_file)
4183 print("name_file :",ff_pedestal)
4184 print("utc_pedestal_file :",utc_pedestal)
4185 print("nro_key_p :",nro_key_p)
4186 print("utc_pedestal_init :",utc_pedestal+nro_key_p*self.t_Interval_p)
4187 print("angulo_array :",angulo[nro_key_p])
4188 flag=1
4189 flag_i=1
4190 else:
4191 print("La lista de archivos de pedestal o adq esta vacia")
4192 nro_file=None
4193 nro_key_p=None
4194 self.nro_file = nro_file
4195 self.nro_key_p = nro_key_p
4196 '''
4197
4198 def setup(self,dataOut,path_ped,path_adq,t_Interval_p,n_Muestras_p,blocksPerfile,f_a_p,online):
4199 self.__dataReady = False
4200 self.path_ped = path_ped
4201 self.path_adq = path_adq
4202 self.t_Interval_p = t_Interval_p
4203 self.n_Muestras_p = n_Muestras_p
4204 self.blocksPerfile= blocksPerfile
4205 self.f_a_p = f_a_p
4206 self.online = online
4207 self.angulo_adq = numpy.zeros(self.blocksPerfile)
4208 self.__profIndex = 0
4209 print(self.path_ped)
4210 print(self.path_adq)
4211 self.list_pedestal = self.getfirstFilefromPath(path=self.path_ped,meta="PE",ext=".hdf5")
4212 print("LIST NEW", self.list_pedestal[:20])
4213 self.list_adq = self.getfirstFilefromPath(path=self.path_adq,meta="D",ext=".hdf5")
4214 print("*************Longitud list pedestal****************",len(self.list_pedestal))
4215
4216 if self.online:
4217 print("Enable Online")
4218 self.setup_online(dataOut)
4219 else:
4220 self.setup_offline(list_pedestal=self.list_pedestal,list_adq=self.list_adq)
4221
4222 def setNextFileP(self,dataOut):
4223 if self.online:
4224 data_pedestal = self.setNextFileonline()
4225 else:
4226 data_pedestal = self.setNextFileoffline()
4227
4228 return data_pedestal
4229
4230
4231 def setNextFileoffline(self):
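# For each of the blocksPerfile blocks, pick the pedestal sample at index
# nro_key_p + f_a_p*(j - tmp) from the current pedestal file; when that index goes
# past n_Muestras_p (samples per pedestal file), move on to the next file and rebase
# the index. Returns one azimuth angle per block (or NaNs if the file is missing).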
4232 tmp =0
4233 for j in range(self.blocksPerfile):
4234 #print("NUMERO DEL BLOQUE:",j)
4235 iterador = self.nro_key_p +self.f_a_p*(j-tmp)
4236 #print("iterador",iterador)
4237 if iterador < self.n_Muestras_p:
4238 self.nro_file = self.nro_file
4239 else:
4240 self.nro_file = self.nro_file+1
4241 dif = self.blocksPerfile-(self.nro_key_p+self.f_a_p*(j-tmp-1))
4242 tmp = j
4243 self.nro_key_p= self.f_a_p-dif
4244 iterador = self.nro_key_p
4245 #print("nro_file",self.nro_file)
4246 try:
4247 ff_pedestal = self.list_pedestal[self.nro_file]
4248 except:
4249 return numpy.ones(self.blocksPerfile)*numpy.nan
4250
4251 angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azimuth")
4252 self.angulo_adq[j]= angulo[iterador]
4253
4254 return self.angulo_adq
4255
4256 def setNextFileonline(self):
4257 tmp = 0
4258 self.nTries_p = 3
4259 self.delay = 3
4260 ready = 1
4261 for j in range(self.blocksPerfile):
4262 iterador = self.nro_key_p +self.f_a_p*(j-tmp)
4263 if iterador < self.n_Muestras_p:
4264 self.nro_file = self.nro_file
4265 else:
4266 self.nro_file = self.nro_file+1
4267 dif = self.blocksPerfile-(self.nro_key_p+self.f_a_p*(j-tmp-1))
4268 tmp = j
4269 self.nro_key_p= self.f_a_p-dif
4270 iterador = self.nro_key_p
4271 print("nro_file---------------- :",self.nro_file)
4272 try:
4273 # update list_pedestal
4274 self.list_pedestal = self.getfirstFilefromPath(path=self.path_ped,meta="PE",ext=".hdf5")
4275 ff_pedestal = self.list_pedestal[self.nro_file]
4276 except:
4277 ff_pedestal = None
4278 ready = 0
4279 for nTries_p in range(self.nTries_p):
4280 try:
4281 # update list_pedestal
4282 self.list_pedestal = self.getfirstFilefromPath(path=self.path_ped,meta="PE",ext=".hdf5")
4283 ff_pedestal = self.list_pedestal[self.nro_file]
4284 except:
4285 ff_pedestal = None
4286 if ff_pedestal is not None:
4287 ready=1
4288 break
4289 log.warning("Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, self.nro_file, nTries_p + 1))
4290 time.sleep(self.delay)
4291 continue
4292 #return numpy.ones(self.blocksPerfile)*numpy.nan
4293
4294 if ready == 1:
4295 #angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azimuth")
4296 angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azi_pos")
4297
4298 else:
4299 print("there is no pedestal file")
4300 angulo = numpy.ones(self.n_Muestras_p)*numpy.nan
4301 self.angulo_adq[j]= angulo[iterador]
4302 print("Angulo",self.angulo_adq)
4303 print("Angulo",len(self.angulo_adq))
4304 #self.nro_key_p=iterador + self.f_a_p
4305 #if self.nro_key_p< self.n_Muestras_p:
4306 # self.nro_file = self.nro_file
4307 #else:
4308 # self.nro_file = self.nro_file+1
4309 # self.nro_key_p= self.nro_key_p
4310 return self.angulo_adq
4311
4312
4313 def run(self, dataOut,path_ped,path_adq,t_Interval_p,n_Muestras_p,blocksPerfile,f_a_p,online):
4314 if not self.isConfig:
4315 self.setup( dataOut, path_ped,path_adq,t_Interval_p,n_Muestras_p,blocksPerfile,f_a_p,online)
4316 self.isConfig = True
4317
4318 dataOut.flagNoData = True
4319 #print("profIndex",self.__profIndex)
4320
4321 if self.__profIndex==0:
4322 angulo_adq = self.setNextFileP(dataOut)
4323 dataOut.azimuth = angulo_adq
4324 self.__dataReady = True
4325 self.__profIndex += 1
4326 if self.__profIndex== blocksPerfile:
4327 self.__profIndex = 0
4328 if self.__dataReady:
4329 #print(self.__profIndex,dataOut.azimuth[:10])
4330 dataOut.flagNoData = False
4331 return dataOut
4332
4333
4334 class Block360(Operation):
4335 '''
4336 Accumulates n consecutive profiles (dataPP_POW) and their pedestal azimuth angles into a single block, e.g. one full antenna rotation. '''
4337 isConfig = False
4338 __profIndex = 0
4339 __initime = None
4340 __lastdatatime = None
4341 __buffer = None
4342 __dataReady = False
4343 n = None
4344 __nch = 0
4345 __nHeis = 0
4346 index = 0
4347
4348 def __init__(self,**kwargs):
4349 Operation.__init__(self,**kwargs)
4350
4351 def setup(self, dataOut, n = None):
4352 '''
4353 n = number of input profiles (PRFs) to accumulate per block
4354 '''
4355 self.__initime = None
4356 self.__lastdatatime = 0
4357 self.__dataReady = False
4358 self.__buffer = 0
4359 self.__buffer_1D = 0
4360 self.__profIndex = 0
4361 self.index = 0
4362 self.__nch = dataOut.nChannels
4363 self.__nHeis = dataOut.nHeights
4364 ##print("ELVALOR DE n es:", n)
4365 if n == None:
4366 raise ValueError("n should be specified.")
4367
4368 if n != None:
4369 if n<1:
4370 print("n should be at least 1")
4371 raise ValueError("n should be at least 1")
4372
4373 self.n = n
4374 #print("nHeights")
4375 self.__buffer = numpy.zeros(( dataOut.nChannels,n, dataOut.nHeights))
4376 self.__buffer2= numpy.zeros(n)
4377
4378 def putData(self,data):
4379 '''
4380 Add a profile to the __buffer and increase the __profIndex by one
4381 '''
4382 #print("line 4049",data.dataPP_POW.shape,data.dataPP_POW[:10])
4383 #print("line 4049",data.azimuth.shape,data.azimuth)
4384 self.__buffer[:,self.__profIndex,:]= data.dataPP_POW
4385 #print("me casi",self.index,data.azimuth[self.index])
4386 #print(self.__profIndex, self.index , data.azimuth[self.index] )
4387 #print("magic",data.profileIndex)
4388 #print(data.azimuth[self.index])
4389 #print("index",self.index)
4390
4391 self.__buffer2[self.__profIndex] = data.azimuth[self.index]
4392 #print("q pasa")
4393 self.index+=1
4394 #print("index",self.index,data.azimuth[:10])
4395 self.__profIndex += 1
4396 return
4397
4398 def pushData(self,data):
4399 '''
4400 Return the accumulated data block, the number of profiles used and the azimuth buffer
4401 Affected : self.__profIndex
4402 '''
4403 #print("pushData")
4404
4405 data_360 = self.__buffer
4406 data_p = self.__buffer2
4407 n = self.__profIndex
4408
4409 self.__buffer = numpy.zeros((self.__nch, self.n,self.__nHeis))
4410 self.__buffer2 = numpy.zeros(self.n)
4411 self.__profIndex = 0
4412 #print("pushData")
4413 return data_360,n,data_p
4414
4415
4416 def byProfiles(self,dataOut):
4417
4418 self.__dataReady = False
4419 data_360 = None
4420 data_p = None
4421 #print("dataOu",dataOut.dataPP_POW)
4422 self.putData(data=dataOut)
4423 #print("profIndex",self.__profIndex)
4424 if self.__profIndex == self.n:
4425 data_360,n,data_p = self.pushData(data=dataOut)
4426 self.__dataReady = True
4427
4428 return data_360,data_p
4429
4430
4431 def blockOp(self, dataOut, datatime= None):
4432 if self.__initime == None:
4433 self.__initime = datatime
4434 data_360,data_p = self.byProfiles(dataOut)
4435 self.__lastdatatime = datatime
4436
4437 if data_360 is None:
4438 return None, None,None
4439
4440 avgdatatime = self.__initime
4441 deltatime = datatime - self.__lastdatatime
4442 self.__initime = datatime
4443 #print(data_360.shape,avgdatatime,data_p.shape)
4444 return data_360,avgdatatime,data_p
4445
4446 def run(self, dataOut,n = None,**kwargs):
4447
4448 if not self.isConfig:
4449 self.setup(dataOut = dataOut, n = n , **kwargs)
4450 self.index = 0
4451 #print("comova",self.isConfig)
4452 self.isConfig = True
4453 if self.index==dataOut.azimuth.shape[0]:
4454 self.index=0
4455 data_360, avgdatatime,data_p = self.blockOp(dataOut, dataOut.utctime)
4456 dataOut.flagNoData = True
4457
4458 if self.__dataReady:
4459 dataOut.data_360 = data_360 # S
4460 #print("DATAREADY---------------------------------------------")
4461 print("data_360",dataOut.data_360.shape)
4462 dataOut.data_azi = data_p
4463 #print("jroproc_parameters",data_p[0],data_p[-1])#,data_360.shape,avgdatatime)
4464 dataOut.utctime = avgdatatime
4465 dataOut.flagNoData = False
4466 return dataOut
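# Summary of the Block360 flow above: putData() stacks dataPP_POW profiles into a
# (nChannels, n, nHeights) buffer together with one pedestal azimuth per profile;
# once n profiles have been collected, pushData() flushes them and run() publishes
# the result as dataOut.data_360 and dataOut.data_azi.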
@@ -1,1625 +1,1627
1 1 import sys
2 2 import numpy,math
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 5 from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon
6 6 from schainpy.utils import log
7 7 from time import time
8 8
9 9
10 10
11 11 class VoltageProc(ProcessingUnit):
12 12
13 13 def __init__(self):
14 14
15 15 ProcessingUnit.__init__(self)
16 16
17 17 self.dataOut = Voltage()
18 18 self.flip = 1
19 19 self.setupReq = False
20 20
21 21 def run(self):
22 22
23 23 if self.dataIn.type == 'AMISR':
24 24 self.__updateObjFromAmisrInput()
25 25
26 26 if self.dataIn.type == 'Voltage':
27 27 self.dataOut.copy(self.dataIn)
28 28
29 29 def __updateObjFromAmisrInput(self):
30 30
31 31 self.dataOut.timeZone = self.dataIn.timeZone
32 32 self.dataOut.dstFlag = self.dataIn.dstFlag
33 33 self.dataOut.errorCount = self.dataIn.errorCount
34 34 self.dataOut.useLocalTime = self.dataIn.useLocalTime
35 35
36 36 self.dataOut.flagNoData = self.dataIn.flagNoData
37 37 self.dataOut.data = self.dataIn.data
38 38 self.dataOut.utctime = self.dataIn.utctime
39 39 self.dataOut.channelList = self.dataIn.channelList
40 40 #self.dataOut.timeInterval = self.dataIn.timeInterval
41 41 self.dataOut.heightList = self.dataIn.heightList
42 42 self.dataOut.nProfiles = self.dataIn.nProfiles
43 43
44 44 self.dataOut.nCohInt = self.dataIn.nCohInt
45 45 self.dataOut.ippSeconds = self.dataIn.ippSeconds
46 46 self.dataOut.frequency = self.dataIn.frequency
47 47
48 48 self.dataOut.azimuth = self.dataIn.azimuth
49 49 self.dataOut.zenith = self.dataIn.zenith
50 50
51 51 self.dataOut.beam.codeList = self.dataIn.beam.codeList
52 52 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
53 53 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
54 54
55 55
56 56 class selectChannels(Operation):
57 57
58 58 def run(self, dataOut, channelList):
59 59
60 60 channelIndexList = []
61 61 self.dataOut = dataOut
62 62 for channel in channelList:
63 63 if channel not in self.dataOut.channelList:
64 64 raise ValueError("Channel %d is not in %s" %(channel, str(self.dataOut.channelList)))
65 65
66 66 index = self.dataOut.channelList.index(channel)
67 67 channelIndexList.append(index)
68 68 self.selectChannelsByIndex(channelIndexList)
69 69 return self.dataOut
70 70
71 71 def selectChannelsByIndex(self, channelIndexList):
72 72 """
73 73 Selects a block of data by channel, according to channelIndexList
74 74
75 75 Input:
76 76 channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
77 77
78 78 Affected:
79 79 self.dataOut.data
80 80 self.dataOut.channelIndexList
81 81 self.dataOut.nChannels
82 82 self.dataOut.m_ProcessingHeader.totalSpectra
83 83 self.dataOut.systemHeaderObj.numChannels
84 84 self.dataOut.m_ProcessingHeader.blockSize
85 85
86 86 Return:
87 87 None
88 88 """
89 89
90 90 for channelIndex in channelIndexList:
91 91 if channelIndex not in self.dataOut.channelIndexList:
92 92 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
93 93
94 94 if self.dataOut.type == 'Voltage':
95 95 if self.dataOut.flagDataAsBlock:
96 96 """
97 97 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
98 98 """
99 99 data = self.dataOut.data[channelIndexList,:,:]
100 100 else:
101 101 data = self.dataOut.data[channelIndexList,:]
102 102
103 103 self.dataOut.data = data
104 104 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
105 105 self.dataOut.channelList = range(len(channelIndexList))
106 106
107 107 elif self.dataOut.type == 'Spectra':
108 108 data_spc = self.dataOut.data_spc[channelIndexList, :]
109 109 data_dc = self.dataOut.data_dc[channelIndexList, :]
110 110
111 111 self.dataOut.data_spc = data_spc
112 112 self.dataOut.data_dc = data_dc
113 113
114 114 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
115 115 self.dataOut.channelList = range(len(channelIndexList))
116 116 self.__selectPairsByChannel(channelIndexList)
117 117
118 118 return 1
119 119
120 120 def __selectPairsByChannel(self, channelList=None):
121 121
122 122 if channelList == None:
123 123 return
124 124
125 125 pairsIndexListSelected = []
126 126 for pairIndex in self.dataOut.pairsIndexList:
127 127 # First pair
128 128 if self.dataOut.pairsList[pairIndex][0] not in channelList:
129 129 continue
130 130 # Second pair
131 131 if self.dataOut.pairsList[pairIndex][1] not in channelList:
132 132 continue
133 133
134 134 pairsIndexListSelected.append(pairIndex)
135 135
136 136 if not pairsIndexListSelected:
137 137 self.dataOut.data_cspc = None
138 138 self.dataOut.pairsList = []
139 139 return
140 140
141 141 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndexListSelected]
142 142 self.dataOut.pairsList = [self.dataOut.pairsList[i]
143 143 for i in pairsIndexListSelected]
144 144
145 145 return
146 146
147 147 class selectHeights(Operation):
148 148
149 149 def run(self, dataOut, minHei=None, maxHei=None, minIndex=None, maxIndex=None):
150 150 """
151 151 Selects a block of data by height, keeping the heights within the range
152 152 minHei <= height <= maxHei
153 153
154 154 Input:
155 155 minHei : minimum height to consider
156 156 maxHei : maximum height to consider
157 157
158 158 Affected:
159 159 Several values are changed indirectly through the selectHeightsByIndex method
160 160
161 161 Return:
162 162 1 if the method executed successfully, otherwise returns 0
163 163 """
164 164
165 165 self.dataOut = dataOut
166 166
167 167 if minHei and maxHei:
168 168
169 169 if (minHei < self.dataOut.heightList[0]):
170 170 minHei = self.dataOut.heightList[0]
171 171
172 172 if (maxHei > self.dataOut.heightList[-1]):
173 173 maxHei = self.dataOut.heightList[-1]
174 174
175 175 minIndex = 0
176 176 maxIndex = 0
177 177 heights = self.dataOut.heightList
178 178
179 179 inda = numpy.where(heights >= minHei)
180 180 indb = numpy.where(heights <= maxHei)
181 181
182 182 try:
183 183 minIndex = inda[0][0]
184 184 except:
185 185 minIndex = 0
186 186
187 187 try:
188 188 maxIndex = indb[0][-1]
189 189 except:
190 190 maxIndex = len(heights)
191 191
192 192 self.selectHeightsByIndex(minIndex, maxIndex)
193 193
194 194 return self.dataOut
195 195
196 196 def selectHeightsByIndex(self, minIndex, maxIndex):
197 197 """
198 198 Selects a block of data by height index, keeping the indexes within the range
199 199 minIndex <= index <= maxIndex
200 200
201 201 Input:
202 202 minIndex : minimum height index to consider
203 203 maxIndex : maximum height index to consider
204 204
205 205 Affected:
206 206 self.dataOut.data
207 207 self.dataOut.heightList
208 208
209 209 Return:
210 210 1 if the method executed successfully, otherwise returns 0
211 211 """
212 212
213 213 if self.dataOut.type == 'Voltage':
214 214 if (minIndex < 0) or (minIndex > maxIndex):
215 215 raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))
216 216
217 217 if (maxIndex >= self.dataOut.nHeights):
218 218 maxIndex = self.dataOut.nHeights
219 219
220 220 #voltage
221 221 if self.dataOut.flagDataAsBlock:
222 222 """
223 223 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
224 224 """
225 225 data = self.dataOut.data[:,:, minIndex:maxIndex]
226 226 else:
227 227 data = self.dataOut.data[:, minIndex:maxIndex]
228 228
229 229 # firstHeight = self.dataOut.heightList[minIndex]
230 230
231 231 self.dataOut.data = data
232 232 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
233 233
234 234 if self.dataOut.nHeights <= 1:
235 235 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
236 236 elif self.dataOut.type == 'Spectra':
237 237 if (minIndex < 0) or (minIndex > maxIndex):
238 238 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
239 239 minIndex, maxIndex))
240 240
241 241 if (maxIndex >= self.dataOut.nHeights):
242 242 maxIndex = self.dataOut.nHeights - 1
243 243
244 244 # Spectra
245 245 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
246 246
247 247 data_cspc = None
248 248 if self.dataOut.data_cspc is not None:
249 249 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
250 250
251 251 data_dc = None
252 252 if self.dataOut.data_dc is not None:
253 253 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
254 254
255 255 self.dataOut.data_spc = data_spc
256 256 self.dataOut.data_cspc = data_cspc
257 257 self.dataOut.data_dc = data_dc
258 258
259 259 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
260 260
261 261 return 1
262 262
263 263
264 264 class filterByHeights(Operation):
265 265
266 266 def run(self, dataOut, window):
267 267
268 268 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
269 269
270 270 if window == None:
271 271 window = (dataOut.radarControllerHeaderObj.txA/dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
272 272
273 273 newdelta = deltaHeight * window
274 274 r = dataOut.nHeights % window
275 275 newheights = (dataOut.nHeights-r)/window
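# e.g. (hypothetical numbers) with nHeights=1000 and window=4: r=0, newheights=250,
# and the summation below collapses groups of 4 consecutive gates into 250 gates
# spaced 4*deltaHeight apart.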
276 276
277 277 if newheights <= 1:
278 278 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(dataOut.nHeights, window))
279 279
280 280 if dataOut.flagDataAsBlock:
281 281 """
282 282 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
283 283 """
284 284 buffer = dataOut.data[:, :, 0:int(dataOut.nHeights-r)]
285 285 buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights/window), window)
286 286 buffer = numpy.sum(buffer,3)
287 287
288 288 else:
289 289 buffer = dataOut.data[:,0:int(dataOut.nHeights-r)]
290 290 buffer = buffer.reshape(dataOut.nChannels,int(dataOut.nHeights/window),int(window))
291 291 buffer = numpy.sum(buffer,2)
292 292
293 293 dataOut.data = buffer
294 294 dataOut.heightList = dataOut.heightList[0] + numpy.arange( newheights )*newdelta
295 295 dataOut.windowOfFilter = window
296 296
297 297 return dataOut
298 298
299 299
300 300 class setH0(Operation):
301 301
302 302 def run(self, dataOut, h0, deltaHeight = None):
303 303
304 304 if not deltaHeight:
305 305 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
306 306
307 307 nHeights = dataOut.nHeights
308 308
309 309 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
310 310
311 311 dataOut.heightList = newHeiRange
312 312
313 313 return dataOut
314 314
315 315
316 316 class deFlip(Operation):
317 317
318 318 def run(self, dataOut, channelList = []):
319 319
320 320 data = dataOut.data.copy()
321 321
322 322 if dataOut.flagDataAsBlock:
323 323 flip = self.flip
324 324 profileList = list(range(dataOut.nProfiles))
325 325
326 326 if not channelList:
327 327 for thisProfile in profileList:
328 328 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
329 329 flip *= -1.0
330 330 else:
331 331 for thisChannel in channelList:
332 332 if thisChannel not in dataOut.channelList:
333 333 continue
334 334
335 335 for thisProfile in profileList:
336 336 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
337 337 flip *= -1.0
338 338
339 339 self.flip = flip
340 340
341 341 else:
342 342 if not channelList:
343 343 data[:,:] = data[:,:]*self.flip
344 344 else:
345 345 for thisChannel in channelList:
346 346 if thisChannel not in dataOut.channelList:
347 347 continue
348 348
349 349 data[thisChannel,:] = data[thisChannel,:]*self.flip
350 350
351 351 self.flip *= -1.
352 352
353 353 dataOut.data = data
354 354
355 355 return dataOut
356 356
357 357
358 358 class setAttribute(Operation):
359 359 '''
360 360 Set an arbitrary attribute(s) to dataOut
361 361 '''
362 362
363 363 def __init__(self):
364 364
365 365 Operation.__init__(self)
366 366 self._ready = False
367 367
368 368 def run(self, dataOut, **kwargs):
369 369
370 370 for key, value in kwargs.items():
371 371 setattr(dataOut, key, value)
372 372
373 373 return dataOut
374 374
375 375
376 376 @MPDecorator
377 377 class printAttribute(Operation):
378 378 '''
379 379 Print an arbitrary attribute of dataOut
380 380 '''
381 381
382 382 def __init__(self):
383 383
384 384 Operation.__init__(self)
385 385
386 386 def run(self, dataOut, attributes):
387 387
388 388 if isinstance(attributes, str):
389 389 attributes = [attributes]
390 390 for attr in attributes:
391 391 if hasattr(dataOut, attr):
392 392 log.log(getattr(dataOut, attr), attr)
393 393
394 394
395 395 class interpolateHeights(Operation):
396 396
397 397 def run(self, dataOut, topLim, botLim):
398 398 #69 to 72 for julia
399 399 #82-84 for meteors
400 400 if len(numpy.shape(dataOut.data))==2:
401 401 sampInterp = (dataOut.data[:,botLim-1] + dataOut.data[:,topLim+1])/2
402 402 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
403 403 #dataOut.data[:,botLim:limSup+1] = sampInterp
404 404 dataOut.data[:,botLim:topLim+1] = sampInterp
405 405 else:
406 406 nHeights = dataOut.data.shape[2]
407 407 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
408 408 y = dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
409 409 f = interpolate.interp1d(x, y, axis = 2)
410 410 xnew = numpy.arange(botLim,topLim+1)
411 411 ynew = f(xnew)
412 412 dataOut.data[:,:,botLim:topLim+1] = ynew
413 413
414 414 return dataOut
415 415
416 416
417 417 class CohInt(Operation):
418 418
419 419 isConfig = False
420 420 __profIndex = 0
421 421 __byTime = False
422 422 __initime = None
423 423 __lastdatatime = None
424 424 __integrationtime = None
425 425 __buffer = None
426 426 __bufferStride = []
427 427 __dataReady = False
428 428 __profIndexStride = 0
429 429 __dataToPutStride = False
430 430 n = None
431 431
432 432 def __init__(self, **kwargs):
433 433
434 434 Operation.__init__(self, **kwargs)
435 435
436 436 def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
437 437 """
438 438 Set the parameters of the integration class.
439 439
440 440 Inputs:
441 441
442 442 n : Number of coherent integrations
443 443 timeInterval : Time of integration. If the parameter "n" is given, this one is ignored
444 444 overlapping :
445 445 """
446 446
447 447 self.__initime = None
448 448 self.__lastdatatime = 0
449 449 self.__buffer = None
450 450 self.__dataReady = False
451 451 self.byblock = byblock
452 452 self.stride = stride
453 453
454 454 if n == None and timeInterval == None:
455 455 raise ValueError("n or timeInterval should be specified ...")
456 456
457 457 if n != None:
458 458 self.n = n
459 459 self.__byTime = False
460 460 else:
461 461 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
462 462 self.n = 9999
463 463 self.__byTime = True
464 464
465 465 if overlapping:
466 466 self.__withOverlapping = True
467 467 self.__buffer = None
468 468 else:
469 469 self.__withOverlapping = False
470 470 self.__buffer = 0
471 471
472 472 self.__profIndex = 0
473 473
474 474 def putData(self, data):
475 475
476 476 """
477 477 Add a profile to the __buffer and increase in one the __profileIndex
478 478
479 479 """
480 480
481 481 if not self.__withOverlapping:
482 482 self.__buffer += data.copy()
483 483 self.__profIndex += 1
484 484 return
485 485
486 486 #Overlapping data
487 487 nChannels, nHeis = data.shape
488 488 data = numpy.reshape(data, (1, nChannels, nHeis))
489 489
490 490 #If the buffer is empty then it takes the data value
491 491 if self.__buffer is None:
492 492 self.__buffer = data
493 493 self.__profIndex += 1
494 494 return
495 495
496 496 #If the buffer length is lower than n then stacking the data value
497 497 if self.__profIndex < self.n:
498 498 self.__buffer = numpy.vstack((self.__buffer, data))
499 499 self.__profIndex += 1
500 500 return
501 501
502 502 #If the buffer length is equal to n then replacing the last buffer value with the data value
503 503 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
504 504 self.__buffer[self.n-1] = data
505 505 self.__profIndex = self.n
506 506 return
507 507
508 508
509 509 def pushData(self):
510 510 """
511 511 Return the sum of the last profiles and the profiles used in the sum.
512 512
513 513 Affected:
514 514
515 515 self.__profileIndex
516 516
517 517 """
518 518
519 519 if not self.__withOverlapping:
520 520 data = self.__buffer
521 521 n = self.__profIndex
522 522
523 523 self.__buffer = 0
524 524 self.__profIndex = 0
525 525
526 526 return data, n
527 527
528 528 #Integration with Overlapping
529 529 data = numpy.sum(self.__buffer, axis=0)
530 530 # print data
531 531 # raise
532 532 n = self.__profIndex
533 533
534 534 return data, n
535 535
536 536 def byProfiles(self, data):
537 537
538 538 self.__dataReady = False
539 539 avgdata = None
540 540 # n = None
541 541 # print data
542 542 # raise
543 543 self.putData(data)
544 544
545 545 if self.__profIndex == self.n:
546 546 avgdata, n = self.pushData()
547 547 self.__dataReady = True
548 548
549 549 return avgdata
550 550
551 551 def byTime(self, data, datatime):
552 552
553 553 self.__dataReady = False
554 554 avgdata = None
555 555 n = None
556 556
557 557 self.putData(data)
558 558
559 559 if (datatime - self.__initime) >= self.__integrationtime:
560 560 avgdata, n = self.pushData()
561 561 self.n = n
562 562 self.__dataReady = True
563 563
564 564 return avgdata
565 565
566 566 def integrateByStride(self, data, datatime):
567 567 # print data
568 568 if self.__profIndex == 0:
569 569 self.__buffer = [[data.copy(), datatime]]
570 570 else:
571 571 self.__buffer.append([data.copy(),datatime])
572 572 self.__profIndex += 1
573 573 self.__dataReady = False
574 574
575 575 if self.__profIndex == self.n * self.stride :
576 576 self.__dataToPutStride = True
577 577 self.__profIndexStride = 0
578 578 self.__profIndex = 0
579 579 self.__bufferStride = []
580 580 for i in range(self.stride):
581 581 current = self.__buffer[i::self.stride]
582 582 data = numpy.sum([t[0] for t in current], axis=0)
583 583 avgdatatime = numpy.average([t[1] for t in current])
584 584 # print data
585 585 self.__bufferStride.append((data, avgdatatime))
586 586
587 587 if self.__dataToPutStride:
588 588 self.__dataReady = True
589 589 self.__profIndexStride += 1
590 590 if self.__profIndexStride == self.stride:
591 591 self.__dataToPutStride = False
592 592 # print self.__bufferStride[self.__profIndexStride - 1]
593 593 # raise
594 594 return self.__bufferStride[self.__profIndexStride - 1]
595 595
596 596
597 597 return None, None
598 598
599 599 def integrate(self, data, datatime=None):
600 600
601 601 if self.__initime == None:
602 602 self.__initime = datatime
603 603
604 604 if self.__byTime:
605 605 avgdata = self.byTime(data, datatime)
606 606 else:
607 607 avgdata = self.byProfiles(data)
608 608
609 609
610 610 self.__lastdatatime = datatime
611 611
612 612 if avgdata is None:
613 613 return None, None
614 614
615 615 avgdatatime = self.__initime
616 616
617 617 deltatime = datatime - self.__lastdatatime
618 618
619 619 if not self.__withOverlapping:
620 620 self.__initime = datatime
621 621 else:
622 622 self.__initime += deltatime
623 623
624 624 return avgdata, avgdatatime
625 625
626 626 def integrateByBlock(self, dataOut):
627 627
628 628 times = int(dataOut.data.shape[1]/self.n)
629 629 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
630 630
631 631 id_min = 0
632 632 id_max = self.n
633 633
634 634 for i in range(times):
635 635 junk = dataOut.data[:,id_min:id_max,:]
636 636 avgdata[:,i,:] = junk.sum(axis=1)
637 637 id_min += self.n
638 638 id_max += self.n
639 639
640 640 timeInterval = dataOut.ippSeconds*self.n
641 641 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
642 642 self.__dataReady = True
643 643 return avgdata, avgdatatime
644 644
645 645 def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):
646 646
647 647 if not self.isConfig:
648 648 self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
649 649 self.isConfig = True
650 650
651 651 if dataOut.flagDataAsBlock:
652 652 """
653 653 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
654 654 """
655 655 avgdata, avgdatatime = self.integrateByBlock(dataOut)
656 656 dataOut.nProfiles /= self.n
657 657 else:
658 658 if stride is None:
659 659 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
660 660 else:
661 661 avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)
662 662
663 663
664 664 # dataOut.timeInterval *= n
665 665 dataOut.flagNoData = True
666 666
667 667 if self.__dataReady:
668 668 dataOut.data = avgdata
669 669 if not dataOut.flagCohInt:
670 670 dataOut.nCohInt *= self.n
671 671 dataOut.flagCohInt = True
672 672 dataOut.utctime = avgdatatime
673 673 # print avgdata, avgdatatime
674 674 # raise
675 675 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
676 676 dataOut.flagNoData = False
677 677 return dataOut
678 678
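# Illustrative CohInt usage sketch (assumed controller calls, not taken from this file):
# in a schainpy Project the operation is typically attached to the voltage processing
# unit, e.g.
#   opObj = procUnitConfObj.addOperation(name='CohInt', optype='external')
#   opObj.addParameter(name='n', value='8', format='int')
# which sums every 8 consecutive profiles and multiplies dataOut.nCohInt by n.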
679 679 class Decoder(Operation):
680 680
681 681 isConfig = False
682 682 __profIndex = 0
683 683
684 684 code = None
685 685
686 686 nCode = None
687 687 nBaud = None
688 688
689 689 def __init__(self, **kwargs):
690 690
691 691 Operation.__init__(self, **kwargs)
692 692
693 693 self.times = None
694 694 self.osamp = None
695 695 # self.__setValues = False
696 696 self.isConfig = False
697 697 self.setupReq = False
698 698 def setup(self, code, osamp, dataOut):
699 699
700 700 self.__profIndex = 0
701 701
702 702 self.code = code
703 703
704 704 self.nCode = len(code)
705 705 self.nBaud = len(code[0])
706 706
707 707 if (osamp != None) and (osamp >1):
708 708 self.osamp = osamp
709 709 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
710 710 self.nBaud = self.nBaud*self.osamp
711 711
712 712 self.__nChannels = dataOut.nChannels
713 713 self.__nProfiles = dataOut.nProfiles
714 714 self.__nHeis = dataOut.nHeights
715 715
716 716 if self.__nHeis < self.nBaud:
717 717 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
718 718
719 719 #Frequency
720 720 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
721 721
722 722 __codeBuffer[:,0:self.nBaud] = self.code
723 723
724 724 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
725 725
726 726 if dataOut.flagDataAsBlock:
727 727
728 728 self.ndatadec = self.__nHeis #- self.nBaud + 1
729 729
730 730 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
731 731
732 732 else:
733 733
734 734 #Time
735 735 self.ndatadec = self.__nHeis #- self.nBaud + 1
736 736
737 737 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
738 738
739 739 def __convolutionInFreq(self, data):
740 740
741 741 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
742 742
743 743 fft_data = numpy.fft.fft(data, axis=1)
744 744
745 745 conv = fft_data*fft_code
746 746
747 747 data = numpy.fft.ifft(conv,axis=1)
748 748
749 749 return data
750 750
751 751 def __convolutionInFreqOpt(self, data):
752 752
753 753 raise NotImplementedError
754 754
755 755 def __convolutionInTime(self, data):
756 756
757 757 code = self.code[self.__profIndex]
758 758 for i in range(self.__nChannels):
759 759 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
760 760
761 761 return self.datadecTime
762 762
763 763 def __convolutionByBlockInTime(self, data):
764 764
765 765 repetitions = int(self.__nProfiles / self.nCode)
766 766 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
767 767 junk = junk.flatten()
768 768 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
769 769 profilesList = range(self.__nProfiles)
770 770
771 771 for i in range(self.__nChannels):
772 772 for j in profilesList:
773 773 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
774 774 return self.datadecTime
775 775
776 776 def __convolutionByBlockInFreq(self, data):
777 777
778 778 raise NotImplementedError("Decoder by frequency fro Blocks not implemented")
779 779
780 780
781 781 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
782 782
783 783 fft_data = numpy.fft.fft(data, axis=2)
784 784
785 785 conv = fft_data*fft_code
786 786
787 787 data = numpy.fft.ifft(conv,axis=2)
788 788
789 789 return data
790 790
791 791
792 792 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
793 793
794 794 if dataOut.flagDecodeData:
795 795 print("This data is already decoded, recoding again ...")
796 796
797 797 if not self.isConfig:
798 798
799 799 if code is None:
800 800 if dataOut.code is None:
801 801 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
802 802
803 803 code = dataOut.code
804 804 else:
805 805 code = numpy.array(code).reshape(nCode,nBaud)
806 806 self.setup(code, osamp, dataOut)
807 807
808 808 self.isConfig = True
809 809
810 810 if mode == 3:
811 811 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
812 812
813 813 if times != None:
814 814 sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")
815 815
816 816 if self.code is None:
817 817 print("Fail decoding: Code is not defined.")
818 818 return
819 819
820 820 self.__nProfiles = dataOut.nProfiles
821 821 datadec = None
822 822
823 823 if mode == 3:
824 824 mode = 0
825 825
826 826 if dataOut.flagDataAsBlock:
827 827 """
828 828 Decoding when data have been read as block,
829 829 """
830 830
831 831 if mode == 0:
832 832 datadec = self.__convolutionByBlockInTime(dataOut.data)
833 833 if mode == 1:
834 834 datadec = self.__convolutionByBlockInFreq(dataOut.data)
835 835 else:
836 836 """
837 837 Decoding when data have been read profile by profile
838 838 """
839 839 if mode == 0:
840 840 datadec = self.__convolutionInTime(dataOut.data)
841 841
842 842 if mode == 1:
843 843 datadec = self.__convolutionInFreq(dataOut.data)
844 844
845 845 if mode == 2:
846 846 datadec = self.__convolutionInFreqOpt(dataOut.data)
847 847
848 848 if datadec is None:
849 849 raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
850 850
851 851 dataOut.code = self.code
852 852 dataOut.nCode = self.nCode
853 853 dataOut.nBaud = self.nBaud
854 854
855 855 dataOut.data = datadec
856 856
857 857 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
858 858
859 859 dataOut.flagDecodeData = True #assume the data is now decoded
860 860
861 861 if self.__profIndex == self.nCode-1:
862 862 self.__profIndex = 0
863 863 return dataOut
864 864
865 865 self.__profIndex += 1
866 866
867 867 return dataOut
868 868 # dataOut.flagDeflipData = True #asumo q la data no esta sin flip
869 869
870 870
871 871 class ProfileConcat(Operation):
872 872
873 873 isConfig = False
874 874 buffer = None
875 875
876 876 def __init__(self, **kwargs):
877 877
878 878 Operation.__init__(self, **kwargs)
879 879 self.profileIndex = 0
880 880
881 881 def reset(self):
882 882 self.buffer = numpy.zeros_like(self.buffer)
883 883 self.start_index = 0
884 884 self.times = 1
885 885
886 886 def setup(self, data, m, n=1):
887 887 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
888 888 self.nHeights = data.shape[1]#.nHeights
889 889 self.start_index = 0
890 890 self.times = 1
891 891
892 892 def concat(self, data):
893 893
894 894 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
895 895 self.start_index = self.start_index + self.nHeights
896 896
897 897 def run(self, dataOut, m):
898 898 dataOut.flagNoData = True
899 899
900 900 if not self.isConfig:
901 901 self.setup(dataOut.data, m, 1)
902 902 self.isConfig = True
903 903
904 904 if dataOut.flagDataAsBlock:
905 905 raise ValueError("ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False")
906 906
907 907 else:
908 908 self.concat(dataOut.data)
909 909 self.times += 1
910 910 if self.times > m:
911 911 dataOut.data = self.buffer
912 912 self.reset()
913 913 dataOut.flagNoData = False
914 914 # more header and dataOut properties should be updated here, for example the heights
915 915 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
916 916 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
917 917 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
918 918 dataOut.ippSeconds *= m
919 919 return dataOut
920 920
921 921 class ProfileSelector(Operation):
922 922
923 923 profileIndex = None
924 924 # Total number of profiles
925 925 nProfiles = None
926 926
927 927 def __init__(self, **kwargs):
928 928
929 929 Operation.__init__(self, **kwargs)
930 930 self.profileIndex = 0
931 931
932 932 def incProfileIndex(self):
933 933
934 934 self.profileIndex += 1
935 935
936 936 if self.profileIndex >= self.nProfiles:
937 937 self.profileIndex = 0
938 938
939 939 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
940 940
941 941 if profileIndex < minIndex:
942 942 return False
943 943
944 944 if profileIndex > maxIndex:
945 945 return False
946 946
947 947 return True
948 948
949 949 def isThisProfileInList(self, profileIndex, profileList):
950 950
951 951 if profileIndex not in profileList:
952 952 return False
953 953
954 954 return True
955 955
956 956 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
957 957
958 958 """
959 959 ProfileSelector:
960 960
961 961 Inputs:
962 962 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
963 963
964 964 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
965 965
966 966 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
967 967
968 968 """
969 969
970 970 if rangeList is not None:
971 971 if type(rangeList[0]) not in (tuple, list):
972 972 rangeList = [rangeList]
973 973
974 974 dataOut.flagNoData = True
975 975
976 976 if dataOut.flagDataAsBlock:
977 977 """
978 978 data dimension = [nChannels, nProfiles, nHeis]
979 979 """
980 980 if profileList != None:
981 981 dataOut.data = dataOut.data[:,profileList,:]
982 982
983 983 if profileRangeList != None:
984 984 minIndex = profileRangeList[0]
985 985 maxIndex = profileRangeList[1]
986 986 profileList = list(range(minIndex, maxIndex+1))
987 987
988 988 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
989 989
990 990 if rangeList != None:
991 991
992 992 profileList = []
993 993
994 994 for thisRange in rangeList:
995 995 minIndex = thisRange[0]
996 996 maxIndex = thisRange[1]
997 997
998 998 profileList.extend(list(range(minIndex, maxIndex+1)))
999 999
1000 1000 dataOut.data = dataOut.data[:,profileList,:]
1001 1001
1002 1002 dataOut.nProfiles = len(profileList)
1003 1003 dataOut.profileIndex = dataOut.nProfiles - 1
1004 1004 dataOut.flagNoData = False
1005 1005
1006 1006 return dataOut
1007 1007
1008 1008 """
1009 1009 data dimension = [nChannels, nHeis]
1010 1010 """
1011 1011
1012 1012 if profileList != None:
1013 1013
1014 1014 if self.isThisProfileInList(dataOut.profileIndex, profileList):
1015 1015
1016 1016 self.nProfiles = len(profileList)
1017 1017 dataOut.nProfiles = self.nProfiles
1018 1018 dataOut.profileIndex = self.profileIndex
1019 1019 dataOut.flagNoData = False
1020 1020
1021 1021 self.incProfileIndex()
1022 1022 return dataOut
1023 1023
1024 1024 if profileRangeList != None:
1025 1025
1026 1026 minIndex = profileRangeList[0]
1027 1027 maxIndex = profileRangeList[1]
1028 1028
1029 1029 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1030 1030
1031 1031 self.nProfiles = maxIndex - minIndex + 1
1032 1032 dataOut.nProfiles = self.nProfiles
1033 1033 dataOut.profileIndex = self.profileIndex
1034 1034 dataOut.flagNoData = False
1035 1035
1036 1036 self.incProfileIndex()
1037 1037 return dataOut
1038 1038
1039 1039 if rangeList != None:
1040 1040
1041 1041 nProfiles = 0
1042 1042
1043 1043 for thisRange in rangeList:
1044 1044 minIndex = thisRange[0]
1045 1045 maxIndex = thisRange[1]
1046 1046
1047 1047 nProfiles += maxIndex - minIndex + 1
1048 1048
1049 1049 for thisRange in rangeList:
1050 1050
1051 1051 minIndex = thisRange[0]
1052 1052 maxIndex = thisRange[1]
1053 1053
1054 1054 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1055 1055
1056 1056 self.nProfiles = nProfiles
1057 1057 dataOut.nProfiles = self.nProfiles
1058 1058 dataOut.profileIndex = self.profileIndex
1059 1059 dataOut.flagNoData = False
1060 1060
1061 1061 self.incProfileIndex()
1062 1062
1063 1063 break
1064 1064
1065 1065 return dataOut
1066 1066
1067 1067
1068 1068 if beam != None: #beam is only for AMISR data
1069 1069 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
1070 1070 dataOut.flagNoData = False
1071 1071 dataOut.profileIndex = self.profileIndex
1072 1072
1073 1073 self.incProfileIndex()
1074 1074
1075 1075 return dataOut
1076 1076
1077 1077 raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")
1078 1078
1079 1079
1080 1080 class Reshaper(Operation):
1081 1081
1082 1082 def __init__(self, **kwargs):
1083 1083
1084 1084 Operation.__init__(self, **kwargs)
1085 1085
1086 1086 self.__buffer = None
1087 1087 self.__nitems = 0
1088 1088
1089 1089 def __appendProfile(self, dataOut, nTxs):
1090 1090
1091 1091 if self.__buffer is None:
1092 1092 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
1093 1093 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
1094 1094
1095 1095 ini = dataOut.nHeights * self.__nitems
1096 1096 end = ini + dataOut.nHeights
1097 1097
1098 1098 self.__buffer[:, ini:end] = dataOut.data
1099 1099
1100 1100 self.__nitems += 1
1101 1101
1102 1102 return int(self.__nitems*nTxs)
1103 1103
1104 1104 def __getBuffer(self):
1105 1105
1106 1106 if self.__nitems == int(1./self.__nTxs):
1107 1107
1108 1108 self.__nitems = 0
1109 1109
1110 1110 return self.__buffer.copy()
1111 1111
1112 1112 return None
1113 1113
1114 1114 def __checkInputs(self, dataOut, shape, nTxs):
1115 1115
1116 1116 if shape is None and nTxs is None:
1117 1117 raise ValueError("Reshaper: either shape or the nTxs factor should be defined")
1118 1118
1119 1119 if nTxs:
1120 1120 if nTxs < 0:
1121 1121 raise ValueError("nTxs should be greater than 0")
1122 1122
1123 1123 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1124 1124 raise ValueError("nProfiles = %d is not divisible by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
1125 1125
1126 1126 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1127 1127
1128 1128 return shape, nTxs
1129 1129
1130 1130 if len(shape) != 2 and len(shape) != 3:
1131 1131 raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1132 1132
1133 1133 if len(shape) == 2:
1134 1134 shape_tuple = [dataOut.nChannels]
1135 1135 shape_tuple.extend(shape)
1136 1136 else:
1137 1137 shape_tuple = list(shape)
1138 1138
1139 1139 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1140 1140
1141 1141 return shape_tuple, nTxs
1142 1142
1143 1143 def run(self, dataOut, shape=None, nTxs=None):
1144 1144
1145 1145 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
1146 1146
1147 1147 dataOut.flagNoData = True
1148 1148 profileIndex = None
1149 1149
1150 1150 if dataOut.flagDataAsBlock:
1151 1151
1152 1152 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1153 1153 dataOut.flagNoData = False
1154 1154
1155 1155 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1156 1156
1157 1157 else:
1158 1158
1159 1159 if self.__nTxs < 1:
1160 1160
1161 1161 self.__appendProfile(dataOut, self.__nTxs)
1162 1162 new_data = self.__getBuffer()
1163 1163
1164 1164 if new_data is not None:
1165 1165 dataOut.data = new_data
1166 1166 dataOut.flagNoData = False
1167 1167
1168 1168 profileIndex = dataOut.profileIndex*self.__nTxs
1169 1169
1170 1170 else:
1171 1171 raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
1172 1172
1173 1173 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1174 1174
1175 1175 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1176 1176
1177 1177 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1178 1178
1179 1179 dataOut.profileIndex = profileIndex
1180 1180
1181 1181 dataOut.ippSeconds /= self.__nTxs
1182 1182
1183 1183 return dataOut
1184 1184
1185 1185 class SplitProfiles(Operation):
1186 1186
1187 1187 def __init__(self, **kwargs):
1188 1188
1189 1189 Operation.__init__(self, **kwargs)
1190 1190
1191 1191 def run(self, dataOut, n):
1192 1192
1193 1193 dataOut.flagNoData = True
1194 1194 profileIndex = None
1195 1195
1196 1196 if dataOut.flagDataAsBlock:
1197 1197
1198 1198 #nchannels, nprofiles, nsamples
1199 1199 shape = dataOut.data.shape
1200 1200
1201 1201 if shape[2] % n != 0:
1202 1202 raise ValueError("Could not split the data, n=%d must evenly divide the number of samples (%d)" %(n, shape[2]))
1203 1203
1204 1204 new_shape = shape[0], shape[1]*n, int(shape[2]/n)
1205 1205
1206 1206 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1207 1207 dataOut.flagNoData = False
1208 1208
1209 1209 profileIndex = int(dataOut.nProfiles/n) - 1
1210 1210
1211 1211 else:
1212 1212
1213 1213 raise ValueError("Could not split the data when it is read profile by profile. Use VoltageReader(..., getblock=True)")
1214 1214
1215 1215 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1216 1216
1217 1217 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1218 1218
1219 1219 dataOut.nProfiles = int(dataOut.nProfiles*n)
1220 1220
1221 1221 dataOut.profileIndex = profileIndex
1222 1222
1223 1223 dataOut.ippSeconds /= n
1224 1224
1225 1225 return dataOut
1226 1226
1227 1227 class CombineProfiles(Operation):
1228 1228 def __init__(self, **kwargs):
1229 1229
1230 1230 Operation.__init__(self, **kwargs)
1231 1231
1232 1232 self.__remData = None
1233 1233 self.__profileIndex = 0
1234 1234
1235 1235 def run(self, dataOut, n):
1236 1236
1237 1237 dataOut.flagNoData = True
1238 1238 profileIndex = None
1239 1239
1240 1240 if dataOut.flagDataAsBlock:
1241 1241
1242 1242 #nchannels, nprofiles, nsamples
1243 1243 shape = dataOut.data.shape
1244 1244 new_shape = shape[0], int(shape[1]/n), shape[2]*n
1245 1245
1246 1246 if shape[1] % n != 0:
1247 1247 raise ValueError("Could not combine the profiles, n=%d must evenly divide the number of profiles (%d)" %(n, shape[1]))
1248 1248
1249 1249 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1250 1250 dataOut.flagNoData = False
1251 1251
1252 1252 profileIndex = int(dataOut.nProfiles*n) - 1
1253 1253
1254 1254 else:
1255 1255
1256 1256 #nchannels, nsamples
1257 1257 if self.__remData is None:
1258 1258 newData = dataOut.data
1259 1259 else:
1260 1260 newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)
1261 1261
1262 1262 self.__profileIndex += 1
1263 1263
1264 1264 if self.__profileIndex < n:
1265 1265 self.__remData = newData
1266 1266 #continue
1267 1267 return
1268 1268
1269 1269 self.__profileIndex = 0
1270 1270 self.__remData = None
1271 1271
1272 1272 dataOut.data = newData
1273 1273 dataOut.flagNoData = False
1274 1274
1275 1275 profileIndex = int(dataOut.profileIndex/n)
1276 1276
1277 1277
1278 1278 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1279 1279
1280 1280 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1281 1281
1282 1282 dataOut.nProfiles = int(dataOut.nProfiles/n)
1283 1283
1284 1284 dataOut.profileIndex = profileIndex
1285 1285
1286 1286 dataOut.ippSeconds *= n
1287 1287
1288 1288 return dataOut
1289 1289
1290 class PulsePairVoltage(Operation):
1290 class PulsePair(Operation):
1291 1291 '''
1292 1292 Function PulsePair(Signal Power, Velocity)
1293 1293 The real component of lag[0] provides the intensity (power) information.
1294 1294 The phase of lag[1] provides the velocity information.
1295 1295
1296 1296 Configuration Parameters:
1297 1297 nPRF = number of PRF periods (profiles) used per estimate
1298 1298 theta = azimuth angle boundaries in degrees
1299 1299
1300 1300 Input:
1301 1301 self.dataOut
1302 1302 lag[N]
1303 1303 Affected:
1304 1304 self.dataOut.spc
1305 1305 '''
1306 1306 isConfig = False
1307 1307 __profIndex = 0
1308 1308 __initime = None
1309 1309 __lastdatatime = None
1310 1310 __buffer = None
1311 1311 noise = None
1312 1312 __dataReady = False
1313 1313 n = None
1314 1314 __nch = 0
1315 1315 __nHeis = 0
1316 1316 removeDC = False
1317 1317 ipp = None
1318 1318 lambda_ = 0
1319 1319
1320 1320 def __init__(self,**kwargs):
1321 1321 Operation.__init__(self,**kwargs)
1322 1322
1323 1323 def setup(self, dataOut, n = None, removeDC=False):
1324 1324 '''
1325 1325 n = number of input PRF periods (profiles)
1326 1326 '''
1327 print("[START] PulsePair method setup")
1327 1328 self.__initime = None
1328 1329 self.__lastdatatime = 0
1329 1330 self.__dataReady = False
1330 1331 self.__buffer = 0
1331 1332 self.__profIndex = 0
1332 1333 self.noise = None
1333 1334 self.__nch = dataOut.nChannels
1334 1335 self.__nHeis = dataOut.nHeights
1335 1336 self.removeDC = removeDC
1336 1337 self.lambda_ = 3.0e8/(9345.0e6)
1337 1338 self.ippSec = dataOut.ippSeconds
1338 1339 self.nCohInt = dataOut.nCohInt
1339 1340 print("IPPseconds",dataOut.ippSeconds)
1340 1341
1341 1342 print("The value of n is:", n)
1342 1343 if n == None:
1343 1344 raise ValueError("n should be specified.")
1344 1345
1345 1346 if n != None:
1346 1347 if n<2:
1347 1348 raise ValueError("n should be greater than or equal to 2")
1348 1349
1349 1350 self.n = n
1350 1351 self.__nProf = n
1351 1352
1352 1353 self.__buffer = numpy.zeros((dataOut.nChannels,
1353 1354 n,
1354 1355 dataOut.nHeights),
1355 1356 dtype='complex')
1356 1357
1357 1358 def putData(self,data):
1358 1359 '''
1359 1360 Add a profile to the __buffer and increase the __profIndex by one.
1360 1361 '''
1361 1362 self.__buffer[:,self.__profIndex,:]= data
1362 1363 self.__profIndex += 1
1363 1364 return
1364 1365
1365 1366 def pushData(self,dataOut):
1366 1367 '''
1367 1368 Return the pulse-pair estimates and the number of profiles used in the operation.
1368 1369 Affected : self.__profIndex
1369 1370 '''
1370 1371 #----------------- Remove DC-----------------------------------
1371 1372 if self.removeDC==True:
1372 1373 mean = numpy.mean(self.__buffer,1)
1373 1374 tmp = mean.reshape(self.__nch,1,self.__nHeis)
1374 1375 dc= numpy.tile(tmp,[1,self.__nProf,1])
1375 1376 self.__buffer = self.__buffer - dc
1376 1377 #------------------ Power computation ------------------------
1377 1378 pair0 = self.__buffer*numpy.conj(self.__buffer)
1378 1379 pair0 = pair0.real
1379 1380 lag_0 = numpy.sum(pair0,1)
1380 1381 #------------------ Per-channel noise computation --------------------
1381 1382 self.noise = numpy.zeros(self.__nch)
1382 1383 for i in range(self.__nch):
1383 1384 daux = numpy.sort(pair0[i,:,:],axis= None)
1384 1385 self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
1385 1386
1386 1387 self.noise = self.noise.reshape(self.__nch,1)
1387 1388 self.noise = numpy.tile(self.noise,[1,self.__nHeis])
1388 1389 noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
1389 1390 noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
1390 1391 #------------------ Received power = P, signal power = S, noise = N --
1391 1392 #------------------ P = S + N, P = lag_0/N ---------------------------------
1392 1393 #-------------------- Power --------------------------------------------------
1393 1394 data_power = lag_0/(self.n*self.nCohInt)
1394 1395 #------------------ Signal ---------------------------------------------------
1395 1396 data_intensity = pair0 - noise_buffer
1396 1397 data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
1397 1398 #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
1398 1399 for i in range(self.__nch):
1399 1400 for j in range(self.__nHeis):
1400 1401 if data_intensity[i][j] < 0:
1401 1402 data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))
1402 1403
1403 1404 #----------------- Doppler frequency and velocity computation --------
1404 1405 pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
1405 1406 lag_1 = numpy.sum(pair1,1)
1406 1407 data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
1407 1408 data_velocity = (self.lambda_/2.0)*data_freq
1408 1409
1409 1410 #---------------- Estimated mean signal power -----------
1410 1411 lag_0 = lag_0/self.n
1411 1412 S = lag_0-self.noise
1412 1413
1413 1414 #---------------- Mean Doppler frequency ---------------------
1414 1415 lag_1 = lag_1/(self.n-1)
1415 1416 R1 = numpy.abs(lag_1)
1416 1417
1417 1418 #---------------- SNR computation ----------------------------------
1418 1419 data_snrPP = S/self.noise
1419 1420 for i in range(self.__nch):
1420 1421 for j in range(self.__nHeis):
1421 1422 if data_snrPP[i][j] < 1.e-20:
1422 1423 data_snrPP[i][j] = 1.e-20
1423 1424
1424 1425 #----------------- Spectral width computation ----------------------
1425 1426 L = S/R1
1426 1427 L = numpy.where(L<0,1,L)
1427 1428 L = numpy.log(L)
1428 1429 tmp = numpy.sqrt(numpy.absolute(L))
1429 1430 data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
1430 1431 n = self.__profIndex
1431 1432
1432 1433 self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
1433 1434 self.__profIndex = 0
1434 1435 return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,n
1435 1436
1436 1437
1437 1438 def pulsePairbyProfiles(self,dataOut):
1438 1439
1439 1440 self.__dataReady = False
1440 1441 data_power = None
1441 1442 data_intensity = None
1442 1443 data_velocity = None
1443 1444 data_specwidth = None
1444 1445 data_snrPP = None
1445 1446 self.putData(data=dataOut.data)
1446 1447 if self.__profIndex == self.n:
1447 1448 data_power,data_intensity, data_velocity,data_snrPP,data_specwidth, n = self.pushData(dataOut=dataOut)
1448 1449 self.__dataReady = True
1449 1450
1450 1451 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth
1451 1452
1452 1453
1453 1454 def pulsePairOp(self, dataOut, datatime= None):
1454 1455
1455 1456 if self.__initime == None:
1456 1457 self.__initime = datatime
1457 1458 data_power, data_intensity, data_velocity, data_snrPP, data_specwidth = self.pulsePairbyProfiles(dataOut)
1458 1459 self.__lastdatatime = datatime
1459 1460
1460 1461 if data_power is None:
1461 1462 return None, None, None,None,None,None
1462 1463
1463 1464 avgdatatime = self.__initime
1464 1465 deltatime = datatime - self.__lastdatatime
1465 1466 self.__initime = datatime
1466 1467
1467 1468 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime
1468 1469
1469 1470 def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
1470 1471
1471 1472 if not self.isConfig:
1472 1473 self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
1473 1474 self.isConfig = True
1474 1475 data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
1475 1476 dataOut.flagNoData = True
1476 1477
1477 1478 if self.__dataReady:
1478 1479 dataOut.nCohInt *= self.n
1479 1480 dataOut.dataPP_POW = data_intensity # S
1480 1481 dataOut.dataPP_POWER = data_power # P
1481 1482 dataOut.dataPP_DOP = data_velocity
1482 1483 dataOut.dataPP_SNR = data_snrPP
1483 1484 dataOut.dataPP_WIDTH = data_specwidth
1484 1485 dataOut.PRFbyAngle = self.n # number of PRFs per rotated angle, which corresponds to one time interval.
1486 dataOut.nProfiles = int(dataOut.nProfiles/n)
1485 1487 dataOut.utctime = avgdatatime
1486 1488 dataOut.flagNoData = False
1487 1489 return dataOut
1488 1490
1489 1491
1490 1492
1491 1493 # import collections
1492 1494 # from scipy.stats import mode
1493 1495 #
1494 1496 # class Synchronize(Operation):
1495 1497 #
1496 1498 # isConfig = False
1497 1499 # __profIndex = 0
1498 1500 #
1499 1501 # def __init__(self, **kwargs):
1500 1502 #
1501 1503 # Operation.__init__(self, **kwargs)
1502 1504 # # self.isConfig = False
1503 1505 # self.__powBuffer = None
1504 1506 # self.__startIndex = 0
1505 1507 # self.__pulseFound = False
1506 1508 #
1507 1509 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1508 1510 #
1509 1511 # #Read data
1510 1512 #
1511 1513 # powerdB = dataOut.getPower(channel = channel)
1512 1514 # noisedB = dataOut.getNoise(channel = channel)[0]
1513 1515 #
1514 1516 # self.__powBuffer.extend(powerdB.flatten())
1515 1517 #
1516 1518 # dataArray = numpy.array(self.__powBuffer)
1517 1519 #
1518 1520 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1519 1521 #
1520 1522 # maxValue = numpy.nanmax(filteredPower)
1521 1523 #
1522 1524 # if maxValue < noisedB + 10:
1523 1525 # # No transmission pulse was found
1524 1526 # return None
1525 1527 #
1526 1528 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1527 1529 #
1528 1530 # if len(maxValuesIndex) < 2:
1529 1531 # # Only a single one-baud transmission pulse was found, waiting for the next TX
1530 1532 # return None
1531 1533 #
1532 1534 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1533 1535 #
1534 1536 # # Keep only values spaced nSamples apart
1535 1537 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1536 1538 #
1537 1539 # if len(pulseIndex) < 2:
1538 1540 # # Only one transmission pulse with width greater than 1 was found
1539 1541 # return None
1540 1542 #
1541 1543 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1542 1544 #
1543 1545 # # Remove signals spaced less than 10 units or samples apart
1544 1546 # # (there should be no IPP shorter than 10 units)
1545 1547 #
1546 1548 # realIndex = numpy.where(spacing > 10 )[0]
1547 1549 #
1548 1550 # if len(realIndex) < 2:
1549 1551 # # Only one transmission pulse with width greater than 1 was found
1550 1552 # return None
1551 1553 #
1552 1554 # # Remove wide pulses (keep only the difference between IPPs)
1553 1555 # realPulseIndex = pulseIndex[realIndex]
1554 1556 #
1555 1557 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1556 1558 #
1557 1559 # print("IPP = %d samples" % period)
1558 1560 #
1559 1561 # self.__newNSamples = dataOut.nHeights #int(period)
1560 1562 # self.__startIndex = int(realPulseIndex[0])
1561 1563 #
1562 1564 # return 1
1563 1565 #
1564 1566 #
1565 1567 # def setup(self, nSamples, nChannels, buffer_size = 4):
1566 1568 #
1567 1569 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1568 1570 # maxlen = buffer_size*nSamples)
1569 1571 #
1570 1572 # bufferList = []
1571 1573 #
1572 1574 # for i in range(nChannels):
1573 1575 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1574 1576 # maxlen = buffer_size*nSamples)
1575 1577 #
1576 1578 # bufferList.append(bufferByChannel)
1577 1579 #
1578 1580 # self.__nSamples = nSamples
1579 1581 # self.__nChannels = nChannels
1580 1582 # self.__bufferList = bufferList
1581 1583 #
1582 1584 # def run(self, dataOut, channel = 0):
1583 1585 #
1584 1586 # if not self.isConfig:
1585 1587 # nSamples = dataOut.nHeights
1586 1588 # nChannels = dataOut.nChannels
1587 1589 # self.setup(nSamples, nChannels)
1588 1590 # self.isConfig = True
1589 1591 #
1590 1592 # #Append new data to internal buffer
1591 1593 # for thisChannel in range(self.__nChannels):
1592 1594 # bufferByChannel = self.__bufferList[thisChannel]
1593 1595 # bufferByChannel.extend(dataOut.data[thisChannel])
1594 1596 #
1595 1597 # if self.__pulseFound:
1596 1598 # self.__startIndex -= self.__nSamples
1597 1599 #
1598 1600 # #Finding Tx Pulse
1599 1601 # if not self.__pulseFound:
1600 1602 # indexFound = self.__findTxPulse(dataOut, channel)
1601 1603 #
1602 1604 # if indexFound == None:
1603 1605 # dataOut.flagNoData = True
1604 1606 # return
1605 1607 #
1606 1608 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1607 1609 # self.__pulseFound = True
1608 1610 # self.__startIndex = indexFound
1609 1611 #
1610 1612 # #If pulse was found ...
1611 1613 # for thisChannel in range(self.__nChannels):
1612 1614 # bufferByChannel = self.__bufferList[thisChannel]
1613 1615 # #print self.__startIndex
1614 1616 # x = numpy.array(bufferByChannel)
1615 1617 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1616 1618 #
1617 1619 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1618 1620 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1619 1621 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1620 1622 #
1621 1623 # dataOut.data = self.__arrayBuffer
1622 1624 #
1623 1625 # self.__startIndex += self.__newNSamples
1624 1626 #
1625 1627 # return
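
The PulsePair operation introduced above derives its products from the lag-0 and lag-1 autocorrelations: power from the real part of lag-0, radial velocity from the phase of lag-1, and spectral width from the lag-0/lag-1 magnitude ratio. The following standalone sketch (not part of the changeset) reproduces those estimators on synthetic data; the noise estimation via hildebrand_sekhon is omitted, and the IPP and wavelength values are illustrative assumptions only.

import math
import numpy

def pulse_pair_estimates(volts, ipp_sec, wavelength, n_coh=1):
    # volts: complex voltages with shape (nChannels, nProfiles, nHeights)
    n = volts.shape[1]
    lag0 = numpy.sum((volts * numpy.conj(volts)).real, axis=1)          # total power per height
    lag1 = numpy.sum(volts[:, :-1, :] * numpy.conj(volts[:, 1:, :]), axis=1)
    power = lag0 / (n * n_coh)
    freq = (-1.0 / (2.0 * math.pi * ipp_sec * n_coh)) * numpy.angle(lag1)
    velocity = (wavelength / 2.0) * freq
    ratio = (lag0 / n) / numpy.abs(lag1 / (n - 1))                      # S/R1, without noise subtraction
    width = (wavelength / (2.0 * math.sqrt(2.0) * math.pi * ipp_sec * n_coh)) \
            * numpy.sqrt(numpy.abs(numpy.log(numpy.where(ratio < 1.0, 1.0, ratio))))
    return power, velocity, width

if __name__ == '__main__':
    # Synthetic test: a single Doppler line at 600 Hz should be recovered.
    nprof, nheis = 625, 330
    ipp_sec = 4e-4                          # assumed 400 us IPP (2.5 kHz PRF)
    wavelength = 3.0e8 / 9.345e9            # 9.345 GHz radar
    f_doppler = 600.0
    t = numpy.arange(nprof) * ipp_sec
    profile = numpy.exp(2j * math.pi * f_doppler * t)[None, :, None]
    volts = numpy.tile(profile, (1, 1, nheis))
    power, velocity, width = pulse_pair_estimates(volts, ipp_sec, wavelength)
    print(velocity[0, 0], (wavelength / 2.0) * f_doppler)   # estimated vs expected radial velocity
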
@@ -1,183 +1,248
1 1 #!python
2 2 '''
3 3 '''
4 4
5 5 import os, sys
6 6 import datetime
7 7 import time
8 8
9 9 #path = os.path.dirname(os.getcwd())
10 10 #path = os.path.dirname(path)
11 11 #sys.path.insert(0, path)
12 12
13 13 from schainpy.controller import Project
14 14
15 15 desc = "USRP_test"
16 16 filename = "USRP_processing.xml"
17 17 controllerObj = Project()
18 18 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
19 19
20 20 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
21 21
22 22 #######################################################################
23 23 ###### READ, WRITE, PLOT AND WEB UPLOAD PATHS ########################
24 24 #######################################################################
25 25 #path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
26
27
28 path = '/home/soporte/data_hdf5' #### with clock 35.16 db noise
29
30 figpath = '/home/soporte/data_hdf5_imag'
26 #path = '/DATA_RM/TEST_INTEGRACION'
27 path = '/DATA_RM/TEST_ONLINE'
28 figpath = '/home/soporte/Pictures/TEST_INTEGRACION_IMG'
31 29 #remotefolder = "/home/wmaster/graficos"
32 30 #######################################################################
33 31 ################# PLOT RANGE ##########################################
34 32 #######################################################################
35 dBmin = '30'
36 dBmax = '60'
33 dBmin = '-5'
34 dBmax = '20'
37 35 xmin = '0'
38 36 xmax ='24'
39 37 ymin = '0'
40 38 ymax = '600'
41 39 #######################################################################
42 40 ######################## DATE #########################################
43 41 #######################################################################
44 42 str = datetime.date.today()
45 43 today = str.strftime("%Y/%m/%d")
46 44 str2 = str - datetime.timedelta(days=1)
47 45 yesterday = str2.strftime("%Y/%m/%d")
48 46 #######################################################################
49 47 ######################## READ UNIT ####################################
50 48 #######################################################################
51 49 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
52 50 path=path,
53 startDate="2019/01/01",#today,
54 endDate="2109/12/30",#today,
51 startDate="2021/01/01",#today,
52 endDate="2021/12/30",#today,
55 53 startTime='00:00:00',
56 54 endTime='23:59:59',
57 55 delay=0,
58 56 #set=0,
59 57 online=0,
60 58 walk=1,
61 ippKm = 1000)
59 ippKm = 60)
62 60
63 61 opObj11 = readUnitConfObj.addOperation(name='printInfo')
64 opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
62 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
65 63 #######################################################################
66 64 ################ TIME-DOMAIN OPERATIONS ###############################
67 65 #######################################################################
68 66
69 67 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
70 68 #
71 69 # codigo64='1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,'+\
72 70 # '1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1'
73 71
74 72 #opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
75 #opObj11.addParameter(name='frequency', value='30e6', format='float')
73 #opObj11.addParameter(name='frequency', value='70312500')
74
75 '''
76 opObj11 = procUnitConfObjA.addOperation(name='PulsePair', optype='other')
77 opObj11.addParameter(name='n', value='625', format='int')#10
78 opObj11.addParameter(name='removeDC', value=1, format='int')
79 '''
76 80
77 #opObj10 = procUnitConfObjA.addOperation(name='Scope', optype='external')
81 # TEST plots
82 '''
83 opObj11 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='other')
84 opObj11 = procUnitConfObjA.addOperation(name='PulsepairSignalPlot', optype='other')
85 opObj11 = procUnitConfObjA.addOperation(name='PulsepairVelocityPlot', optype='other')
86 #opObj11.addParameter(name='xmax', value=8)
87 opObj11 = procUnitConfObjA.addOperation(name='PulsepairSpecwidthPlot', optype='other')
88 '''
89 # NOTE: Scope
90 #opObj10 = procUnitConfObjA.addOperation(name='ScopePlot', optype='external')
78 91 #opObj10.addParameter(name='id', value='10', format='int')
79 92 ##opObj10.addParameter(name='xmin', value='0', format='int')
80 93 ##opObj10.addParameter(name='xmax', value='50', format='int')
81 94 #opObj10.addParameter(name='type', value='iq')
82 #opObj10.addParameter(name='ymin', value='-5000', format='int')
95 ##opObj10.addParameter(name='ymin', value='-5000', format='int')
83 96 ##opObj10.addParameter(name='ymax', value='8500', format='int')
97 #opObj11.addParameter(name='save', value=figpath, format='str')
98 #opObj11.addParameter(name='save_period', value=10, format='int')
84 99
85 100 #opObj10 = procUnitConfObjA.addOperation(name='setH0')
86 101 #opObj10.addParameter(name='h0', value='-5000', format='float')
87 102
88 103 #opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
89 104 #opObj11.addParameter(name='window', value='1', format='int')
90 105
91 106 #codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
92 107 #opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
93 108 #opObj11.addParameter(name='code', value=codigo, format='floatlist')
94 109 #opObj11.addParameter(name='nCode', value='1', format='int')
95 110 #opObj11.addParameter(name='nBaud', value='28', format='int')
96 111
97 112 #opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
98 113 #opObj11.addParameter(name='n', value='100', format='int')
99 114
100 115 #######################################################################
116 ########## ParametersProc OPERATIONS ########################
117 #######################################################################
118 ###procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
119 '''
120
121 opObj11 = procUnitConfObjA.addOperation(name='PedestalInformation')
122 opObj11.addParameter(name='path_ped', value=path_ped)
123 opObj11.addParameter(name='path_adq', value=path_adq)
124 opObj11.addParameter(name='t_Interval_p', value='0.01', format='float')
125 opObj11.addParameter(name='n_Muestras_p', value='100', format='float')
126 opObj11.addParameter(name='blocksPerfile', value='100', format='int')
127 opObj11.addParameter(name='f_a_p', value='25', format='int')
128 opObj11.addParameter(name='online', value='0', format='int')
129
130 opObj11 = procUnitConfObjA.addOperation(name='Block360')
131 opObj11.addParameter(name='n', value='40', format='int')
132
133 opObj11= procUnitConfObjA.addOperation(name='WeatherPlot',optype='other')
134 opObj11.addParameter(name='save', value=figpath)
135 opObj11.addParameter(name='save_period', value=1)
136
137
138 '''
139
140 #######################################################################
101 141 ########## FREQUENCY-DOMAIN OPERATIONS ################################
102 142 #######################################################################
103 procUnitConfObjSousySpectra = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
104 procUnitConfObjSousySpectra.addParameter(name='nFFTPoints', value='100', format='int')
105 procUnitConfObjSousySpectra.addParameter(name='nProfiles', value='100', format='int')
106 #procUnitConfObjSousySpectra.addParameter(name='pairsList', value='(0,0),(1,1),(0,1)', format='pairsList')
107 143
108 #opObj13 = procUnitConfObjSousySpectra.addOperation(name='removeDC')
144 procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
145 procUnitConfObjB.addParameter(name='nFFTPoints', value='32', format='int')
146 procUnitConfObjB.addParameter(name='nProfiles', value='32', format='int')
147
148 procUnitConfObjC = controllerObj.addProcUnit(datatype='SpectraHeisProc', inputId=procUnitConfObjA.getId())
149 #procUnitConfObjB.addParameter(name='nFFTPoints', value='64', format='int')
150 #procUnitConfObjB.addParameter(name='nProfiles', value='64', format='int')
151 opObj11 = procUnitConfObjC.addOperation(name='IncohInt4SpectraHeis', optype='other')
152 opObj11.addParameter(name='timeInterval', value='8', format='int')
153
154
155 #procUnitConfObjB.addParameter(name='pairsList', value='(0,0),(1,1),(0,1)', format='pairsList')
156
157 #opObj13 = procUnitConfObjB.addOperation(name='removeDC')
109 158 #opObj13.addParameter(name='mode', value='2', format='int')
110 159
111 #opObj11 = procUnitConfObjSousySpectra.addOperation(name='IncohInt', optype='other')
112 #opObj11.addParameter(name='n', value='60', format='float')
160 opObj11 = procUnitConfObjB.addOperation(name='IncohInt', optype='other')
161 opObj11.addParameter(name='n', value='8', format='float')
113 162 #######################################################################
114 163 ########## FREQUENCY-DOMAIN PLOTS #####################################
115 164 #######################################################################
165 #----
166
167 opObj11 = procUnitConfObjC.addOperation(name='SpectraHeisPlot')
168 opObj11.addParameter(name='id', value='10', format='int')
169 opObj11.addParameter(name='wintitle', value='Spectra_Alturas', format='str')
170 #opObj11.addParameter(name='xmin', value=-100000, format='float')
171 #opObj11.addParameter(name='xmax', value=100000, format='float')
172 #opObj11.addParameter(name='zmin', value=dBmin, format='int')
173 #opObj11.addParameter(name='zmax', value=dBmax, format='int')
174 opObj11.addParameter(name='ymin', value=-20, format='int')
175 opObj11.addParameter(name='ymax', value=50, format='int')
176 opObj11.addParameter(name='showprofile', value='1', format='int')
177 opObj11.addParameter(name='save', value=figpath, format='str')
178 opObj11.addParameter(name='save_period', value=10, format='int')
179
180
116 181 #SpectraPlot
117 182
118 opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraPlot', optype='external')
183 opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot', optype='external')
119 184 opObj11.addParameter(name='id', value='1', format='int')
120 185 opObj11.addParameter(name='wintitle', value='Spectra', format='str')
121 186 #opObj11.addParameter(name='xmin', value=-0.01, format='float')
122 187 #opObj11.addParameter(name='xmax', value=0.01, format='float')
123 #opObj11.addParameter(name='zmin', value=dBmin, format='int')
124 #opObj11.addParameter(name='zmax', value=dBmax, format='int')
188 opObj11.addParameter(name='zmin', value=dBmin, format='int')
189 opObj11.addParameter(name='zmax', value=dBmax, format='int')
125 190 #opObj11.addParameter(name='ymin', value=ymin, format='int')
126 191 #opObj11.addParameter(name='ymax', value=ymax, format='int')
127 192 opObj11.addParameter(name='showprofile', value='1', format='int')
128 193 opObj11.addParameter(name='save', value=figpath, format='str')
129 194 opObj11.addParameter(name='save_period', value=10, format='int')
130 195
131
132 196 #RTIPLOT
133 197
134 opObj11 = procUnitConfObjSousySpectra.addOperation(name='RTIPlot', optype='external')
198 opObj11 = procUnitConfObjB.addOperation(name='RTIPlot', optype='external')
135 199 opObj11.addParameter(name='id', value='2', format='int')
136 200 opObj11.addParameter(name='wintitle', value='RTIPlot', format='str')
137 #opObj11.addParameter(name='zmin', value=dBmin, format='int')
138 #opObj11.addParameter(name='zmax', value=dBmax, format='int')
201 opObj11.addParameter(name='zmin', value=dBmin, format='int')
202 opObj11.addParameter(name='zmax', value=dBmax, format='int')
139 203 #opObj11.addParameter(name='ymin', value=ymin, format='int')
140 204 #opObj11.addParameter(name='ymax', value=ymax, format='int')
141 opObj11.addParameter(name='xmin', value=0, format='int')
142 opObj11.addParameter(name='xmax', value=23, format='int')
205 #opObj11.addParameter(name='xmin', value=15, format='int')
206 #opObj11.addParameter(name='xmax', value=16, format='int')
143 207
144 208 opObj11.addParameter(name='showprofile', value='1', format='int')
145 209 opObj11.addParameter(name='save', value=figpath, format='str')
146 210 opObj11.addParameter(name='save_period', value=10, format='int')
147 211
148 212
149 # opObj11 = procUnitConfObjSousySpectra.addOperation(name='CrossSpectraPlot', optype='other')
213 # opObj11 = procUnitConfObjB.addOperation(name='CrossSpectraPlot', optype='other')
150 214 # opObj11.addParameter(name='id', value='3', format='int')
151 215 # opObj11.addParameter(name='wintitle', value='CrossSpectraPlot', format='str')
152 216 # opObj11.addParameter(name='ymin', value=ymin, format='int')
153 217 # opObj11.addParameter(name='ymax', value=ymax, format='int')
154 218 # opObj11.addParameter(name='phase_cmap', value='jet', format='str')
155 219 # opObj11.addParameter(name='zmin', value=dBmin, format='int')
156 220 # opObj11.addParameter(name='zmax', value=dBmax, format='int')
157 221 # opObj11.addParameter(name='figpath', value=figures_path, format='str')
158 222 # opObj11.addParameter(name='save', value=0, format='bool')
159 223 # opObj11.addParameter(name='pairsList', value='(0,1)', format='pairsList')
160 224 # #
161 # opObj11 = procUnitConfObjSousySpectra.addOperation(name='CoherenceMap', optype='other')
225 # opObj11 = procUnitConfObjB.addOperation(name='CoherenceMap', optype='other')
162 226 # opObj11.addParameter(name='id', value='4', format='int')
163 227 # opObj11.addParameter(name='wintitle', value='Coherence', format='str')
164 228 # opObj11.addParameter(name='phase_cmap', value='jet', format='str')
165 229 # opObj11.addParameter(name='xmin', value=xmin, format='float')
166 230 # opObj11.addParameter(name='xmax', value=xmax, format='float')
167 231 # opObj11.addParameter(name='figpath', value=figures_path, format='str')
168 232 # opObj11.addParameter(name='save', value=0, format='bool')
169 233 # opObj11.addParameter(name='pairsList', value='(0,1)', format='pairsList')
170 234 #
235
236 '''
171 237 #######################################################################
172 238 ############### WRITE UNIT ############################################
173 239 #######################################################################
174 #opObj11 = procUnitConfObjSousySpectra.addOperation(name='SpectraWriter', optype='other')
240 #opObj11 = procUnitConfObjB.addOperation(name='SpectraWriter', optype='other')
175 241 #opObj11.addParameter(name='path', value=wr_path)
176 242 #opObj11.addParameter(name='blocksPerFile', value='50', format='int')
177 243 print ("Writing the XML file")
178 244 print ("Reading the XML file")
179
245 '''
180 246
181 247
182 248 controllerObj.start()
183
@@ -1,82 +1,93
1 1 import os, sys
2 2 import datetime
3 3 import time
4 4 from schainpy.controller import Project
5 '''
6 NOTE:
7 This is a test script.
8 - Read unit 'SimulatorReader'.
9 - Processing unit VoltageProc
10 - Processing unit SpectraProc (profileIndex is not in the metadata because it stays in voltage.)
11 - Operation removeDC.
12 - Processing unit ParametersProc
13 - Operation SpectralMoments
14 - Operation SpectralMomentsPlot
15 - Write unit 'HDFWriter'.
16 '''
5 17
6 18 desc = "USRP_test"
7 19 filename = "USRP_processing.xml"
8 20 controllerObj = Project()
9 21 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
10 22
11 23 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
12 24 ###### READ, WRITE, PLOT AND WEB UPLOAD PATHS ########################
13 path = '/home/alex/Downloads/test_rawdata'
14 figpath = '/home/alex/Downloads/hdf5_test'
25 path = '/home/soporte/Downloads/RAWDATA'
26 figpath = '/home/soporte/Downloads/IMAGE'
15 27 ######################## READ UNIT ####################################
16 28 '''
17 29 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
18 30 path=path,
19 31 startDate="2020/01/01", #"2020/01/01",#today,
20 32 endDate= "2020/12/01", #"2020/12/30",#today,
21 33 startTime='00:00:00',
22 34 endTime='23:59:59',
23 35 delay=0,
24 36 #set=0,
25 37 online=0,
26 38 walk=1)
27 39
28 40 '''
29 41 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
30 42 frequency=9.345e9,
31 43 FixRCP_IPP= 60,
32 44 Tau_0 = 30,
33 45 AcqH0_0=0,
34 46 samples=330,
35 47 AcqDH_0=0.15,
36 48 FixRCP_TXA=0.15,
37 49 FixRCP_TXB=0.15,
38 50 Fdoppler=600.0,
39 51 Hdoppler=36,
40 52 Adoppler=300,#300
41 53 delay=0,
42 54 online=0,
43 55 walk=0,
44 56 profilesPerBlock=625,
45 57 dataBlocksPerFile=100)
46 58 #nTotalReadFiles=2)
47 59
48 60
49 61 #opObj11 = readUnitConfObj.addOperation(name='printInfo')
50 62
51 63 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
52 64
53 65 procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
54 66 procUnitConfObjB.addParameter(name='nFFTPoints', value=625, format='int')
55 67 procUnitConfObjB.addParameter(name='nProfiles', value=625, format='int')
56 68
57 69 opObj11 = procUnitConfObjB.addOperation(name='removeDC')
58 70 opObj11.addParameter(name='mode', value=2)
59 71 #opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot')
60 72 #opObj11 = procUnitConfObjB.addOperation(name='PowerProfilePlot')
61 73
62 74 procUnitConfObjC= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjB.getId())
63 75 procUnitConfObjC.addOperation(name='SpectralMoments')
64 76 #opObj11 = procUnitConfObjC.addOperation(name='PowerPlot')
65 77
66 '''
78
67 79 opObj11 = procUnitConfObjC.addOperation(name='SpectralMomentsPlot')
68 80 #opObj11.addParameter(name='xmin', value=14)
69 81 #opObj11.addParameter(name='xmax', value=15)
70 #opObj11.addParameter(name='save', value=figpath)
82 opObj11.addParameter(name='save', value=figpath)
71 83 opObj11.addParameter(name='showprofile', value=1)
72 #opObj11.addParameter(name='save_period', value=10)
73 '''
84 opObj11.addParameter(name='save_period', value=10)
74 85
75 opObj10 = procUnitConfObjC.addOperation(name='ParameterWriter')
76 opObj10.addParameter(name='path',value=figpath)
86 opObj10 = procUnitConfObjC.addOperation(name='HDFWriter')
87 opObj10.addParameter(name='path',value=path)
77 88 #opObj10.addParameter(name='mode',value=0)
78 89 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
79 opObj10.addParameter(name='metadataList',value='utctimeInit,timeInterval',format='list')
80 opObj10.addParameter(name='dataList',value='data_POW,data_DOP,data_WIDTH,data_SNR')#,format='list'
90 opObj10.addParameter(name='metadataList',value='utctimeInit,heightList,nIncohInt,nCohInt,nProfiles,channelList',format='list')#profileIndex
91 opObj10.addParameter(name='dataList',value='data_pow,data_dop,utctime',format='list')#,format='list'
81 92
82 93 controllerObj.start()
@@ -1,73 +1,73
1 1 import os,sys
2 2 import datetime
3 3 import time
4 4 from schainpy.controller import Project
5 path = '/home/alex/Downloads/NEW_WR2/spc16removeDC'
5 path = '/home/soporte/Downloads/RAWDATA_PP'
6 6 figpath = path
7 7 desc = "Simulator Test"
8 8
9 9 controllerObj = Project()
10 10
11 11 controllerObj.setup(id='10',name='Test Simulator',description=desc)
12 12
13 13 readUnitConfObj = controllerObj.addReadUnit(datatype='SimulatorReader',
14 14 frequency=9.345e9,
15 15 FixRCP_IPP= 60,
16 16 Tau_0 = 30,
17 17 AcqH0_0=0,
18 18 samples=330,
19 19 AcqDH_0=0.15,
20 20 FixRCP_TXA=0.15,
21 21 FixRCP_TXB=0.15,
22 22 Fdoppler=600.0,
23 23 Hdoppler=36,
24 24 Adoppler=300,#300
25 25 delay=0,
26 26 online=0,
27 27 walk=0,
28 28 profilesPerBlock=625,
29 29 dataBlocksPerFile=100)#,#nTotalReadFiles=2)
30 30 '''
31 31 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
32 32 path=path,
33 33 startDate="2020/01/01", #"2020/01/01",#today,
34 34 endDate= "2020/12/01", #"2020/12/30",#today,
35 35 startTime='00:00:00',
36 36 endTime='23:59:59',
37 37 delay=0,
38 38 #set=0,
39 39 online=0,
40 40 walk=1)
41 41 '''
42 42 opObj11 = readUnitConfObj.addOperation(name='printInfo')
43 43
44 44 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
45 45 #opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
46 46 #opObj11.addParameter(name='n', value='10', format='int')
47 47
48 48 #opObj10 = procUnitConfObjA.addOperation(name='selectChannels')
49 49 #opObj10.addParameter(name='channelList', value=[0])
50 50 opObj11 = procUnitConfObjA.addOperation(name='PulsePairVoltage', optype='other')
51 51 opObj11.addParameter(name='n', value='625', format='int')#10
52 52 opObj11.addParameter(name='removeDC', value=1, format='int')
53 53
54 54 #opObj11 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='other')
55 55 #opObj11 = procUnitConfObjA.addOperation(name='PulsepairSignalPlot', optype='other')
56 56
57 57
58 58 #opObj11 = procUnitConfObjA.addOperation(name='PulsepairVelocityPlot', optype='other')
59 59 #opObj11.addParameter(name='xmax', value=8)
60 60
61 61 #opObj11 = procUnitConfObjA.addOperation(name='PulsepairSpecwidthPlot', optype='other')
62 62
63 63 procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
64 64
65 65
66 opObj10 = procUnitConfObjB.addOperation(name='ParameterWriter')
66 opObj10 = procUnitConfObjB.addOperation(name='HDFWriter')
67 67 opObj10.addParameter(name='path',value=figpath)
68 68 #opObj10.addParameter(name='mode',value=0)
69 69 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
70 opObj10.addParameter(name='metadataList',value='utctimeInit,timeInterval',format='list')
71 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,dataPP_SNR,dataPP_WIDTH')#,format='list'
70 opObj10.addParameter(name='metadataList',value='utctimeInit,paramInterval,profileIndex,heightList,flagDataAsBlock',format='list')
71 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,dataPP_SNR,dataPP_WIDTH,utctime',format='list')#,format='list'
72 72
73 73 controllerObj.start()
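
Since these scripts now write their products with HDFWriter instead of ParameterWriter, a quick sanity check is to open one of the generated files with h5py and confirm that the fields requested in dataList and metadataList were actually stored. The sketch below is only illustrative: the Data/Metadata group names and the output directory layout are assumptions (modeled on the pedestal HDF5 files), which is why it first prints whatever structure the file really contains.

import glob
import h5py

# Hypothetical location of the HDFWriter output; adjust to the 'path' used above.
files = sorted(glob.glob('/home/soporte/Downloads/RAWDATA_PP/**/*.hdf5', recursive=True))
if files:
    with h5py.File(files[0], 'r') as fp:
        fp.visit(print)                              # list every group/dataset actually written
        if 'Data/dataPP_POW' in fp:
            print('dataPP_POW shape:', fp['Data/dataPP_POW'].shape)
        if 'Metadata/heightList' in fp:
            print('first heights:', fp['Metadata/heightList'][()][:5])
else:
    print('No HDF5 files found; check the output path of HDFWriter.')
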