@@ -0,0 +1,41
1 # Ing. AVP
2 # 01/11/2021
3 # READER SCRIPT
4 import os, sys
5 import datetime
6 import time
7 from schainpy.controller import Project
8 print("----[Setup]-RadarMeteorologico--------")
9 Vel = 6
10 modo_proc = 1
11 #-----------DATA PATHS--------------------------#
12 path = "/DATA_RM/10"
13 path_ped = "/DATA_RM/TEST_PEDESTAL/P20211111-173856"
14 print("----[OPCIONES]------------------------")
15 op_plot = 0
16 op_integration = 0
17 op_save = 0
18 op_plot_spec = 0
19
20 ########################SIGNAL CHAIN ##################################
21 desc = "USRP_test"
22 filename = "USRP_processing.xml"
23 controllerObj = Project()
24 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
25 ######################## READ UNIT ###################################
26 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
27 path=path,
28 startDate="2021/11/11",#today,
29 endDate="2021/12/30",#today,
30 startTime='17:39:25',
31 endTime='23:59:59',
32 delay=0,
33 #set=0,
34 online=0,
35 walk=1,
36 ippKm = 60)
37
38 opObj11 = readUnitConfObj.addOperation(name='printInfo')
39
40 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc',inputId=readUnitConfObj.getId())
41 controllerObj.start()
@@ -0,0 +1,72
1 import os, sys
2 import datetime
3 import time
4 from schainpy.controller import Project
5
6 desc = "USRP_test"
7 filename = "USRP_processing.xml"
8 controllerObj = Project()
9 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
10
11 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
12 ######READ, WRITE, PLOT AND WEB-UPLOAD PATHS##########################
13 path = '/home/alex/Downloads/test_rawdata'
14 path = '/DATA_RM/WR_POT_09_2'
15 figpath = '/home/alex/Downloads'
16 figpath_pp = "/home/soporte/Pictures/TEST_POT"
17 ################# PLOT RANGE ##########################################
18 dBmin = '30'
19 dBmax = '60'
20 xmin = '0'
21 xmax ='24'
22 ymin = '0'
23 ymax = '600'
24 ########################DATE###########################################
25 today_date = datetime.date.today()   # renamed to avoid shadowing the builtin 'str'
26 today = today_date.strftime("%Y/%m/%d")
27 yesterday_date = today_date - datetime.timedelta(days=1)
28 yesterday = yesterday_date.strftime("%Y/%m/%d")
29 ######################## READ UNIT ###################################
30 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
31 path=path,
32 startDate="2021/01/01", #"2020/01/01",#today,
33 endDate= "2021/12/01", #"2020/12/30",#today,
34 startTime='00:00:00',
35 endTime='23:59:59',
36 delay=0,
37 #set=0,
38 online=0,
39 walk =1,
40 ippKm = 60 )
41
42 opObj11 = readUnitConfObj.addOperation(name='printInfo')
43 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
44 #######################################################################
45 ################ TIME-DOMAIN OPERATIONS ###############################
46 #######################################################################
47
48 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
49 '''
50 opObj11 = procUnitConfObjA.addOperation(name='PulsePairVoltage', optype='other')
51 opObj11.addParameter(name='n', value='256', format='int')
52 opObj11.addParameter(name='removeDC', value=1, format='int')
53 '''
54
55 _type="iq"
56 opObj10 = procUnitConfObjA.addOperation(name='ScopePlot', optype='external')
57 #opObj10.addParameter(name='id', value='12')
58 opObj10.addParameter(name='wintitle', value=_type )
59 opObj10.addParameter(name='type', value=_type)
60
61
62 '''
63 type="WeatherPower"
64 opObj10 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='external')
65 #opObj10.addParameter(name='id', value='12')
66 opObj10.addParameter(name='wintitle', value=type )
67
68 opObj11 = procUnitConfObjA.addOperation(name='PulsepairVelocityPlot', optype='other')
69 opObj11.addParameter(name='xmax', value=8)
70 '''
71
72 controllerObj.start()
@@ -0,0 +1,118
1 # Ing. AVP
2 # 01/11/2021
3 # READER SCRIPT
4 import os, sys
5 import datetime
6 import time
7 from schainpy.controller import Project
8 print("----[Setup]-RadarMeteorologico--------")
9 Vel = 10
10 modo_proc = 0 # 0: Pulse Pair, 1: Spectra
11 #-----------DATA PATHS--------------------------#
12 #------------ALWAYS CHECK THE DATE OF THE DATA
13 path = "/DATA_RM/10"
14 path_ped = "/DATA_RM/TEST_PEDESTAL/P20211110-171003"
15 figpath_pp = "/home/soporte/Pictures/TEST_POT"
16 figpath_ppi_pp = "/home/soporte/Pictures/ppi_PP_30DIC_4"
17 #-------------------------------------------------------------------
18 print("----[OPCIONES]------------------------")
19 op_plot = 0
20 op_integration = 1
21 op_save = 1
22 op_plot_spec = 0
23 #-------------------------------------
24 ################# PLOT RANGE ##########################################
25 dBmin = '1'
26 dBmax = '85'
27 xmin = '17.1'
28 xmax = '17.25'
29 ymin = '0'
30 ymax = '600'
31 #-------------------NUMBER OF PROFILES TO PROCESS -----------------
32 V=Vel
33 IPP=400*1e-6
34 n= int(1/(V*IPP))
35 print("* n - NRO Perfiles Proc:", n )
36 time.sleep(3)
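# (editor's note) worked example with the values set above: Vel = 10 deg/s and
# IPP = 400 us give n = int(1/(10 * 400e-6)) = 250 profiles per degree of rotation.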
37
38 #------------------------SIGNAL CHAIN ------------------------------
39 desc = "USRP_test"
40 filename = "USRP_processing.xml"
41 controllerObj = Project()
42 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
43 ######################## READ UNIT ###################################
44 readUnitConfObj = controllerObj.addReadUnit(datatype = 'DigitalRFReader',
45 path = path,
46 startDate= "2021/11/10",#today,
47 endDate = "2021/12/30",#today,
48 startTime= '00:00:25',
49 endTime = '23:59:59',
50 delay = 0,
51 online = 0,
52 walk = 1,
53 ippKm = 60)
54
55 opObj11 = readUnitConfObj.addOperation(name='printInfo')
56 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc',inputId=readUnitConfObj.getId())
57
58 opObj11 = procUnitConfObjA.addOperation(name='selectHeights')
59 opObj11.addParameter(name='minIndex', value='1', format='int')
60 # opObj11.addParameter(name='maxIndex', value='10000', format='int')
61 opObj11.addParameter(name='maxIndex', value='400', format='int')
62
63 if modo_proc ==0:
64 #----------------------------------------PULSE PAIR --------------------------------------------------#
65 opObj11 = procUnitConfObjA.addOperation(name='PulsePair', optype='other')
66 opObj11.addParameter(name='n', value=int(n), format='int') # I will use 250 here since the pedestal speed is 10 deg/s
67 #opObj11.addParameter(name='removeDC', value=1, format='int')
68 #------------------------ Parameters METHOD -----------------------------------------------------------
69 procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
70 if op_plot==1:
71 opObj11 = procUnitConfObjB.addOperation(name='GenericRTIPlot',optype='external')
72 opObj11.addParameter(name='attr_data', value='dataPP_POW')
73 opObj11.addParameter(name='colormap', value='jet')
74 #opObj11.addParameter(name='xmin', value=xmin)
75 #opObj11.addParameter(name='xmax', value=xmax)
76 opObj11.addParameter(name='zmin', value=dBmin)
77 opObj11.addParameter(name='zmax', value=dBmax)
78 opObj11.addParameter(name='save', value=figpath_pp)
79 opObj11.addParameter(name='showprofile', value=0)
80 opObj11.addParameter(name='save_period', value=50)
81
82 ####################### WRITE METHOD ##########################################################################
83
84 if op_integration==1:
85 V=V
86 blocksPerfile=100
87 print("* Velocidad del Pedestal:",V)
88 tmp_blocksPerfile = 100
89 f_a_p= int(tmp_blocksPerfile/V)
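# (editor's note) worked example: with tmp_blocksPerfile = 100 and V = 10 deg/s,
# f_a_p = int(100/10) = 10.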
90
91 opObj11 = procUnitConfObjB.addOperation(name='PedestalInformation')
92 opObj11.addParameter(name='path_ped', value=path_ped)
93 #opObj11.addParameter(name='path_adq', value=path_adq)
94 opObj11.addParameter(name='t_Interval_p', value='0.01', format='float')
95 opObj11.addParameter(name='blocksPerfile', value=blocksPerfile, format='int')
96 opObj11.addParameter(name='n_Muestras_p', value='100', format='float')
97 opObj11.addParameter(name='f_a_p', value=f_a_p, format='int')
98 opObj11.addParameter(name='online', value='0', format='int')
99
100 opObj11 = procUnitConfObjB.addOperation(name='Block360')
101 opObj11.addParameter(name='n', value='10', format='int')
102 opObj11.addParameter(name='mode', value=modo_proc, format='int')
103
104 # this block works well with divisors of 360; do not forget: 0 10 20 30 40 60 90 120 180
105
106 opObj11= procUnitConfObjB.addOperation(name='WeatherPlot',optype='other')
107 opObj11.addParameter(name='save', value=figpath_ppi_pp)
108 opObj11.addParameter(name='save_period', value=1)
109
110 if op_save==1:
111 opObj10 = procUnitConfObjB.addOperation(name='HDFWriter')
112 opObj10.addParameter(name='path',value=figpath_ppi_pp)
113 #opObj10.addParameter(name='mode',value=0)
114 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
115 opObj10.addParameter(name='metadataList',value='utctimeInit,timeZone,paramInterval,profileIndex,channelList,heightList,flagDataAsBlock',format='list')
116 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,azimuth,utctime',format='list')#,format='list'
117
118 controllerObj.start()
@@ -0,0 +1,14
1 import numpy
2 a= numpy.array([0,1,2,3,4,5,10,11,12,18,19,20,21,22,23,24,25,26,27,28])
3 print(a)
4 list1=[]   # renamed from 'list' to avoid shadowing the builtin
5 list2=[]
6 for i in reversed(range(1,len(a))):
7     dif=int(a[i])-int(a[i-1])
8     print(i,a[i],dif )
9     if dif>1:
10         list1.append(i-1)
11         list2.append(dif-1)
12 print("result")
13 print(list1)
14 print(list2)
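# (editor's note) expected output for the array above: the jumps 5->10 and 12->18
# are found in reverse order, so the script prints
#   result
#   [8, 5]
#   [5, 4]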
@@ -0,0 +1,147
1 #!python
2 '''
3 '''
4
5 import os, sys
6 import datetime
7 import time
8
9 #path = os.path.dirname(os.getcwd())
10 #path = os.path.dirname(path)
11 #sys.path.insert(0, path)
12
13 from schainpy.controller import Project
14
15 desc = "USRP_test"
16 filename = "USRP_processing.xml"
17 controllerObj = Project()
18 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
19
20 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
21
22 #######################################################################
23 ######READ, WRITE, PLOT AND WEB-UPLOAD PATHS##########################
24 #######################################################################
25 #path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
26 #path = '/DATA_RM/TEST_INTEGRACION'
27 #path = '/DATA_RM/TEST_ONLINE'
28 #path = '/DATA_RM/TEST_INTEGRACION/ADQ_OFFLINE/'
29 # LAST TEST: SEPTEMBER 22
30 path = '/DATA_RM/USRP_22'
31 #path_pp = '/DATA_RM/TEST_HDF5'
32 # LAST TEST: SEPTEMBER 22
33 path_pp = '/DATA_RM/TEST_HDF5_PP_22'
34 ######################################################
35 ##### NOTE: KEEP IN MIND THE n= VALUE FOR THE Pulse Pair ###
36 ######################################################
37 ######## We look for the number of IPPs equivalent to 1° ####
38 ######## Let V be the pedestal speed in °/s           #######
39 ######## 1° is swept in a time of 1/V                 #######
40 ######## Radar IPP 400 us --> 60 km                   #######
41 ######## n = 1/(V*IPP) , NUMBER OF IPPs               #######
42 ######## n = 1/(V*IPP)                                #######
43 V=2
44 IPP=400*1e-6
45 n= 1/(V*IPP)
46 print("n numero de Perfiles a procesar con Pulse Pair: ", n)
47
48
49
50
51 figpath = '/home/soporte/Pictures/TEST_INTEGRACION_IMG'
52 #remotefolder = "/home/wmaster/graficos"
53 #######################################################################
54 ################# PLOT RANGE ##########################################
55 #######################################################################
56 dBmin = '-5'
57 dBmax = '20'
58 xmin = '0'
59 xmax ='24'
60 ymin = '0'
61 ymax = '600'
62 #######################################################################
63 ########################DATE###########################################
64 #######################################################################
65 today_date = datetime.date.today()   # renamed to avoid shadowing the builtin 'str'
66 today = today_date.strftime("%Y/%m/%d")
67 yesterday_date = today_date - datetime.timedelta(days=1)
68 yesterday = yesterday_date.strftime("%Y/%m/%d")
69 #######################################################################
70 ######################## READ UNIT ###################################
71 #######################################################################
72 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
73 path=path,
74 startDate="2021/01/01",#today,
75 endDate="2021/12/30",#today,
76 startTime='00:00:00',
77 endTime='23:59:59',
78 delay=0,
79 #set=0,
80 online=0,
81 walk=1,
82 ippKm = 60)
83
84 opObj11 = readUnitConfObj.addOperation(name='printInfo')
85 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
86 #######################################################################
87 ################ TIME-DOMAIN OPERATIONS ###############################
88 #######################################################################
89
90 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
91
92 #
93 # codigo64='1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,'+\
94 # '1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1'
95
96 #opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
97 #opObj11.addParameter(name='frequency', value='70312500')
98 opObj11 = procUnitConfObjA.addOperation(name='PulsePair', optype='other')
99 opObj11.addParameter(name='n', value=int(n), format='int') # I will use 250 since the pedestal speed is 10 degrees/s
100 opObj11.addParameter(name='removeDC', value=1, format='int')
101 # TEST plotting
102 '''
103 opObj11 = procUnitConfObjA.addOperation(name='PulsepairPowerPlot', optype='other')
104 opObj11 = procUnitConfObjA.addOperation(name='PulsepairSignalPlot', optype='other')
105 opObj11 = procUnitConfObjA.addOperation(name='PulsepairVelocityPlot', optype='other')
106 #opObj11.addParameter(name='xmax', value=8)
107 opObj11 = procUnitConfObjA.addOperation(name='PulsepairSpecwidthPlot', optype='other')
108 '''
109 # NOTE: SCOPE
110 #opObj10 = procUnitConfObjA.addOperation(name='ScopePlot', optype='external')
111 #opObj10.addParameter(name='id', value='10', format='int')
112 ##opObj10.addParameter(name='xmin', value='0', format='int')
113 ##opObj10.addParameter(name='xmax', value='50', format='int')
114 #opObj10.addParameter(name='type', value='iq')
115 ##opObj10.addParameter(name='ymin', value='-5000', format='int')
116 ##opObj10.addParameter(name='ymax', value='8500', format='int')
117 #opObj11.addParameter(name='save', value=figpath, format='str')
118 #opObj11.addParameter(name='save_period', value=10, format='int')
119
120 #opObj10 = procUnitConfObjA.addOperation(name='setH0')
121 #opObj10.addParameter(name='h0', value='-5000', format='float')
122
123 #opObj11 = procUnitConfObjA.addOperation(name='filterByHeights')
124 #opObj11.addParameter(name='window', value='1', format='int')
125
126 #codigo='1,1,-1,1,1,-1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,1,1,1,-1,-1,-1'
127 #opObj11 = procUnitConfObjSousy.addOperation(name='Decoder', optype='other')
128 #opObj11.addParameter(name='code', value=codigo, format='floatlist')
129 #opObj11.addParameter(name='nCode', value='1', format='int')
130 #opObj11.addParameter(name='nBaud', value='28', format='int')
131
132 #opObj11 = procUnitConfObjA.addOperation(name='CohInt', optype='other')
133 #opObj11.addParameter(name='n', value='100', format='int')
134
135 #######################################################################
136 ########## ParametersProc OPERATIONS ########################
137 #######################################################################
138
139 procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
140 opObj10 = procUnitConfObjB.addOperation(name='HDFWriter')
141 opObj10.addParameter(name='path',value=path_pp)
142 #opObj10.addParameter(name='mode',value=0)
143 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
144 opObj10.addParameter(name='metadataList',value='utctimeInit,timeZone,paramInterval,profileIndex,channelList,heightList,flagDataAsBlock',format='list')
145 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,utctime',format='list')#,format='list'
146
147 controllerObj.start()
@@ -0,0 +1,115
1 #!python
2 '''
3 '''
4
5 import os, sys
6 import datetime
7 import time
8
9 #path = os.path.dirname(os.getcwd())
10 #path = os.path.dirname(path)
11 #sys.path.insert(0, path)
12
13 from schainpy.controller import Project
14
15 desc = "USRP_test"
16 filename = "USRP_processing.xml"
17 controllerObj = Project()
18 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
19
20 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
21
22 #######################################################################
23 ######READ, WRITE, PLOT AND WEB-UPLOAD PATHS##########################
24 #######################################################################
25 #path = '/media/data/data/vientos/57.2063km/echoes/NCO_Woodman'
26 #path = '/DATA_RM/TEST_INTEGRACION'
27 #path = '/DATA_RM/TEST_ONLINE'
28 #path = '/DATA_RM/TEST_INTEGRACION/ADQ_OFFLINE/'
29 # LAST TEST: SEPTEMBER 22
30 path = '/DATA_RM/USRP_22'
31 #path_pp = '/DATA_RM/TEST_HDF5'
32 # LAST TEST: SEPTEMBER 22
33 path_pp = '/DATA_RM/TEST_HDF5_PP_22'
34 ######################################################
35 ##### NOTE: KEEP IN MIND THE n= VALUE FOR THE Pulse Pair ###
36 ######################################################
37 ######## We look for the number of IPPs equivalent to 1° ####
38 ######## Let V be the pedestal speed in °/s           #######
39 ######## 1° is swept in a time of 1/V                 #######
40 ######## Radar IPP 400 us --> 60 km                   #######
41 ######## n = 1/(V*IPP) , NUMBER OF IPPs               #######
42 ######## n = 1/(V*IPP)                                #######
43 V=2
44 IPP=400*1e-6
45 n= 1/(V*IPP)
46 print("n numero de Perfiles a procesar con Pulse Pair: ", n)
47
48
49
50
51 figpath = '/home/soporte/Pictures/TEST_INTEGRACION_IMG'
52 #remotefolder = "/home/wmaster/graficos"
53 #######################################################################
54 ################# PLOT RANGE ##########################################
55 #######################################################################
56 dBmin = '-5'
57 dBmax = '20'
58 xmin = '0'
59 xmax ='24'
60 ymin = '0'
61 ymax = '600'
62 #######################################################################
63 ########################DATE###########################################
64 #######################################################################
65 today_date = datetime.date.today()   # renamed to avoid shadowing the builtin 'str'
66 today = today_date.strftime("%Y/%m/%d")
67 yesterday_date = today_date - datetime.timedelta(days=1)
68 yesterday = yesterday_date.strftime("%Y/%m/%d")
69 #######################################################################
70 ######################## READ UNIT ###################################
71 #######################################################################
72 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
73 path=path,
74 startDate="2021/01/01",#today,
75 endDate="2021/12/30",#today,
76 startTime='00:00:00',
77 endTime='23:59:59',
78 delay=0,
79 #set=0,
80 online=0,
81 walk=1,
82 ippKm = 60)
83
84 opObj11 = readUnitConfObj.addOperation(name='printInfo')
85 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
86 #######################################################################
87 ################ TIME-DOMAIN OPERATIONS ###############################
88 #######################################################################
89
90 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
91
92 #
93 # codigo64='1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0,'+\
94 # '1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,0,1,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1'
95
96 #opObj11 = procUnitConfObjA.addOperation(name='setRadarFrequency')
97 #opObj11.addParameter(name='frequency', value='70312500')
98 opObj11 = procUnitConfObjA.addOperation(name='PulsePair', optype='other')
99 opObj11.addParameter(name='n', value=int(n), format='int') # I will use 250 since the pedestal speed is 10 degrees/s
100 opObj11.addParameter(name='removeDC', value=1, format='int')
101
102
103 #######################################################################
104 ########## ParametersProc OPERATIONS ########################
105 #######################################################################
106
107 procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
108 opObj10 = procUnitConfObjB.addOperation(name='HDFWriter')
109 opObj10.addParameter(name='path',value=path_pp)
110 #opObj10.addParameter(name='mode',value=0)
111 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
112 opObj10.addParameter(name='metadataList',value='utctimeInit,timeZone,paramInterval,profileIndex,channelList,heightList,flagDataAsBlock',format='list')
113 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,utctime',format='list')#,format='list'
114
115 controllerObj.start()
@@ -0,0 +1,88
1 #!python
2
3 import os, sys
4 import datetime
5 import time
6
7 from schainpy.controller import Project
8
9 desc = "USRP_test"
10 filename = "USRP_processing.xml"
11 controllerObj = Project()
12 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
13
14 ############## USED TO PLOT IQ VOLTAGE, POWER AND SPECTRA #############
15
16 #######################################################################
17 ######READ, WRITE, PLOT AND WEB-UPLOAD PATHS##########################
18 #######################################################################
19
20 path = '/DATA_RM/TEST_19OCTUBRE/10MHZ'
21 #path_pp = '/DATA_RM/TEST_HDF5'
22 path_pp = '/DATA_RM/TEST_HDF5_19OCT'
23
24 #######################################################################
25 ################# PLOT RANGE ##########################################
26 #######################################################################
27 dBmin = '-5'
28 dBmax = '20'
29 xmin = '0'
30 xmax ='24'
31 ymin = '0'
32 ymax = '600'
33 #######################################################################
34 ########################DATE###########################################
35 #######################################################################
36 today_date = datetime.date.today()   # renamed to avoid shadowing the builtin 'str'
37 today = today_date.strftime("%Y/%m/%d")
38 yesterday_date = today_date - datetime.timedelta(days=1)
39 yesterday = yesterday_date.strftime("%Y/%m/%d")
40 #######################################################################
41 ######################## READ UNIT ###################################
42 #######################################################################
43 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
44 path=path,
45 startDate="2021/01/01",#today,
46 endDate="2021/12/30",#today,
47 startTime='00:00:00',
48 endTime='23:59:59',
49 delay=0,
50 #set=0,
51 online=0,
52 walk=1,
53 ippKm = 60)
54
55 opObj11 = readUnitConfObj.addOperation(name='printInfo')
56 #opObj11 = readUnitConfObj.addOperation(name='printNumberOfBlock')
57 #######################################################################
58 ################ TIME-DOMAIN OPERATIONS ###############################
59 #######################################################################
60 V=10 # set the pedestal speed here
61 IPP=400*1e-6
62 n= int(1/(V*IPP))
63 print("n numero de Perfiles a procesar con nFFTPoints ", n)
64
65 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
66
67 procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
68 procUnitConfObjB.addParameter(name='nFFTPoints', value=n, format='int')
69 procUnitConfObjB.addParameter(name='nProfiles' , value=n, format='int')
70
71 #######################################################################
72 ########## ParametersProc OPERATIONS ########################
73 #######################################################################
74
75 procUnitConfObjC= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjB.getId())
76
77 procUnitConfObjC.addOperation(name='SpectralMoments')
78
79 opObj10 = procUnitConfObjC.addOperation(name='HDFWriter')
80 opObj10.addParameter(name='path',value=path_pp)
81 #opObj10.addParameter(name='mode',value=0)
82 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
83 #opObj10.addParameter(name='metadataList',value='utctimeInit,heightList,nIncohInt,nCohInt,nProfiles,channelList',format='list')#profileIndex
84 opObj10.addParameter(name='metadataList',value='utctimeInit,heightList,nIncohInt,nCohInt,nProfiles,channelList',format='list')#profileIndex
85
86 opObj10.addParameter(name='dataList',value='data_pow,data_dop,utctime',format='list')#,format='list'
87
88 controllerObj.start()
@@ -0,0 +1,36
1 import numpy as np
2 import matplotlib.pyplot as pl
3 import wradlib
4 import warnings
5 #export WRADLIB_DATA=/path/to/wradlib-data
6 warnings.filterwarnings('ignore')
7 '''
8 try:
9 get_ipython().magic('matplotlib inline')
10 except:
11 pl.ion()
12 '''
13 filename = wradlib.util.get_wradlib_data_file("/home/soporte/Downloads/raa00-dx_10908-0806021735-fbg---bin.gz")
14 img, meta = wradlib.io.read_dx(filename)
15 print("Shape of polar array: %r\n" % (img.shape,))
16 print("Some meta data of the DX file:")
17 print("\tdatetime: %r" % (meta["datetime"],))
18 print("\tRadar ID: %s" % (meta["radarid"],))
19
20 img[200:250,:]= np.ones([50,img.shape[1]])*np.nan
21
22 img[300:360,:]= np.ones([60,img.shape[1]])*np.nan
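# (editor's note) the two assignments above blank azimuth rows 200-249 and
# 300-359 with NaN, so those sectors appear as gaps in the PPI plot below.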
23
24 cgax, pm= wradlib.vis.plot_ppi(img)
25 txt = pl.title('Simple PPI')
26 print("coordenada angular",img[:,0],len(img[:,0]))
27 print("COORDENADA 0",img[0],len(img[0]))
28 cbar = pl.gcf().colorbar(pm, pad=0.075)
29
30 #r = np.arange(40, 80)
31 #az = np.arange(200, 250)
32 #ax, pm = wradlib.vis.plot_ppi(img[200:250, 40:80], r, az, autoext=False)
33 #ax, pm = wradlib.vis.plot_ppi(img[200:250, 40:80], r, az)
34
35 #txt = pl.title('Sector PPI')
36 pl.show()
@@ -0,0 +1,17
1 # Ing. AVP
2 # 01/11/2021
3 # READER SCRIPT
4 import os, sys
5 import datetime
6 import time
7 from schainpy.controller import Project
8 print("----[Setup]-RadarMeteorologico--------")
9 Vel = 6
10 modo_proc = 1
11 path = "/DATA_RM/10"
12 path_ped = "/DATA_RM/TEST_PEDESTAL/P20211111-173856"
13 print("----[OPCIONES]------------------------")
14 op_plot = 0
15 op_integration = 0
16 op_save = 0
17 op_plot_spec = 0
@@ -1,517 +1,605
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot, SpectraCutPlot
7 7 from schainpy.utils import log
8 8 # libreria wradlib
9 9 import wradlib as wrl
10 10
11 11 EARTH_RADIUS = 6.3710e3
12 12
13 13
14 14 def ll2xy(lat1, lon1, lat2, lon2):
15 15
16 16 p = 0.017453292519943295
17 17 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
18 18 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
19 19 r = 12742 * numpy.arcsin(numpy.sqrt(a))
20 20 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
21 21 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
22 22 theta = -theta + numpy.pi/2
23 23 return r*numpy.cos(theta), r*numpy.sin(theta)
24 24
25 25
26 26 def km2deg(km):
27 27 '''
28 28 Convert distance in km to degrees
29 29 '''
30 30
31 31 return numpy.rad2deg(km/EARTH_RADIUS)
32 32
33 33
34 34
35 35 class SpectralMomentsPlot(SpectraPlot):
36 36 '''
37 37 Plot for Spectral Moments
38 38 '''
39 39 CODE = 'spc_moments'
40 40 # colormap = 'jet'
41 41 # plot_type = 'pcolor'
42 42
43 43 class DobleGaussianPlot(SpectraPlot):
44 44 '''
45 45 Plot for Double Gaussian Plot
46 46 '''
47 47 CODE = 'gaussian_fit'
48 48 # colormap = 'jet'
49 49 # plot_type = 'pcolor'
50 50
51 51 class DoubleGaussianSpectraCutPlot(SpectraCutPlot):
52 52 '''
53 53 Plot SpectraCut with Double Gaussian Fit
54 54 '''
55 55 CODE = 'cut_gaussian_fit'
56 56
57 57 class SnrPlot(RTIPlot):
58 58 '''
59 59 Plot for SNR Data
60 60 '''
61 61
62 62 CODE = 'snr'
63 63 colormap = 'jet'
64 64
65 65 def update(self, dataOut):
66 66
67 67 data = {
68 68 'snr': 10*numpy.log10(dataOut.data_snr)
69 69 }
70 70
71 71 return data, {}
72 72
73 73 class DopplerPlot(RTIPlot):
74 74 '''
75 75 Plot for DOPPLER Data (1st moment)
76 76 '''
77 77
78 78 CODE = 'dop'
79 79 colormap = 'jet'
80 80
81 81 def update(self, dataOut):
82 82
83 83 data = {
84 84 'dop': 10*numpy.log10(dataOut.data_dop)
85 85 }
86 86
87 87 return data, {}
88 88
89 89 class PowerPlot(RTIPlot):
90 90 '''
91 91 Plot for Power Data (0 moment)
92 92 '''
93 93
94 94 CODE = 'pow'
95 95 colormap = 'jet'
96 96
97 97 def update(self, dataOut):
98 98 data = {
99 99 'pow': 10*numpy.log10(dataOut.data_pow/dataOut.normFactor)
100 100 }
101 101 return data, {}
102 102
103 103 class SpectralWidthPlot(RTIPlot):
104 104 '''
105 105 Plot for Spectral Width Data (2nd moment)
106 106 '''
107 107
108 108 CODE = 'width'
109 109 colormap = 'jet'
110 110
111 111 def update(self, dataOut):
112 112
113 113 data = {
114 114 'width': dataOut.data_width
115 115 }
116 116
117 117 return data, {}
118 118
119 119 class SkyMapPlot(Plot):
120 120 '''
121 121 Plot for meteors detection data
122 122 '''
123 123
124 124 CODE = 'param'
125 125
126 126 def setup(self):
127 127
128 128 self.ncols = 1
129 129 self.nrows = 1
130 130 self.width = 7.2
131 131 self.height = 7.2
132 132 self.nplots = 1
133 133 self.xlabel = 'Zonal Zenith Angle (deg)'
134 134 self.ylabel = 'Meridional Zenith Angle (deg)'
135 135 self.polar = True
136 136 self.ymin = -180
137 137 self.ymax = 180
138 138 self.colorbar = False
139 139
140 140 def plot(self):
141 141
142 142 arrayParameters = numpy.concatenate(self.data['param'])
143 143 error = arrayParameters[:, -1]
144 144 indValid = numpy.where(error == 0)[0]
145 145 finalMeteor = arrayParameters[indValid, :]
146 146 finalAzimuth = finalMeteor[:, 3]
147 147 finalZenith = finalMeteor[:, 4]
148 148
149 149 x = finalAzimuth * numpy.pi / 180
150 150 y = finalZenith
151 151
152 152 ax = self.axes[0]
153 153
154 154 if ax.firsttime:
155 155 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
156 156 else:
157 157 ax.plot.set_data(x, y)
158 158
159 159 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
160 160 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
161 161 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
162 162 dt2,
163 163 len(x))
164 164 self.titles[0] = title
165 165
166 166
167 167 class GenericRTIPlot(Plot):
168 168 '''
169 169 Plot for data_xxxx object
170 170 '''
171 171
172 172 CODE = 'param'
173 173 colormap = 'viridis'
174 174 plot_type = 'pcolorbuffer'
175 175
176 176 def setup(self):
177 177 self.xaxis = 'time'
178 178 self.ncols = 1
179 179 self.nrows = self.data.shape('param')[0]
180 180 self.nplots = self.nrows
181 181 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
182 182
183 183 if not self.xlabel:
184 184 self.xlabel = 'Time'
185 185
186 186 self.ylabel = 'Range [km]'
187 187 if not self.titles:
188 188 self.titles = ['Param {}'.format(x) for x in range(self.nrows)]
189 189
190 190 def update(self, dataOut):
191 191
192 192 data = {
193 193 'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
194 194 }
195 195
196 196 meta = {}
197 197
198 198 return data, meta
199 199
200 200 def plot(self):
201 201 # self.data.normalize_heights()
202 202 self.x = self.data.times
203 203 self.y = self.data.yrange
204 204 self.z = self.data['param']
205 205 self.z = 10*numpy.log10(self.z)
206 206 self.z = numpy.ma.masked_invalid(self.z)
207 207
208 208 if self.decimation is None:
209 209 x, y, z = self.fill_gaps(self.x, self.y, self.z)
210 210 else:
211 211 x, y, z = self.fill_gaps(*self.decimate())
212 212
213 213 for n, ax in enumerate(self.axes):
214 214
215 215 self.zmax = self.zmax if self.zmax is not None else numpy.max(
216 216 self.z[n])
217 217 self.zmin = self.zmin if self.zmin is not None else numpy.min(
218 218 self.z[n])
219 219
220 220 if ax.firsttime:
221 221 if self.zlimits is not None:
222 222 self.zmin, self.zmax = self.zlimits[n]
223 223
224 224 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
225 225 vmin=self.zmin,
226 226 vmax=self.zmax,
227 227 cmap=self.cmaps[n]
228 228 )
229 229 else:
230 230 if self.zlimits is not None:
231 231 self.zmin, self.zmax = self.zlimits[n]
232 232 ax.collections.remove(ax.collections[0])
233 233 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
234 234 vmin=self.zmin,
235 235 vmax=self.zmax,
236 236 cmap=self.cmaps[n]
237 237 )
238 238
239 239
240 240 class PolarMapPlot(Plot):
241 241 '''
242 242 Plot for weather radar
243 243 '''
244 244
245 245 CODE = 'param'
246 246 colormap = 'seismic'
247 247
248 248 def setup(self):
249 249 self.ncols = 1
250 250 self.nrows = 1
251 251 self.width = 9
252 252 self.height = 8
253 253 self.mode = self.data.meta['mode']
254 254 if self.channels is not None:
255 255 self.nplots = len(self.channels)
256 256 self.nrows = len(self.channels)
257 257 else:
258 258 self.nplots = self.data.shape(self.CODE)[0]
259 259 self.nrows = self.nplots
260 260 self.channels = list(range(self.nplots))
261 261 if self.mode == 'E':
262 262 self.xlabel = 'Longitude'
263 263 self.ylabel = 'Latitude'
264 264 else:
265 265 self.xlabel = 'Range (km)'
266 266 self.ylabel = 'Height (km)'
267 267 self.bgcolor = 'white'
268 268 self.cb_labels = self.data.meta['units']
269 269 self.lat = self.data.meta['latitude']
270 270 self.lon = self.data.meta['longitude']
271 271 self.xmin, self.xmax = float(
272 272 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
273 273 self.ymin, self.ymax = float(
274 274 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
275 275 # self.polar = True
276 276
277 277 def plot(self):
278 278
279 279 for n, ax in enumerate(self.axes):
280 280 data = self.data['param'][self.channels[n]]
281 281
282 282 zeniths = numpy.linspace(
283 283 0, self.data.meta['max_range'], data.shape[1])
284 284 if self.mode == 'E':
285 285 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
286 286 r, theta = numpy.meshgrid(zeniths, azimuths)
287 287 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
288 288 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
289 289 x = km2deg(x) + self.lon
290 290 y = km2deg(y) + self.lat
291 291 else:
292 292 azimuths = numpy.radians(self.data.yrange)
293 293 r, theta = numpy.meshgrid(zeniths, azimuths)
294 294 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
295 295 self.y = zeniths
296 296
297 297 if ax.firsttime:
298 298 if self.zlimits is not None:
299 299 self.zmin, self.zmax = self.zlimits[n]
300 300 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
301 301 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
302 302 vmin=self.zmin,
303 303 vmax=self.zmax,
304 304 cmap=self.cmaps[n])
305 305 else:
306 306 if self.zlimits is not None:
307 307 self.zmin, self.zmax = self.zlimits[n]
308 308 ax.collections.remove(ax.collections[0])
309 309 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
310 310 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
311 311 vmin=self.zmin,
312 312 vmax=self.zmax,
313 313 cmap=self.cmaps[n])
314 314
315 315 if self.mode == 'A':
316 316 continue
317 317
318 318 # plot district names
319 319 f = open('/data/workspace/schain_scripts/distrito.csv')
320 320 for line in f:
321 321 label, lon, lat = [s.strip() for s in line.split(',') if s]
322 322 lat = float(lat)
323 323 lon = float(lon)
324 324 # ax.plot(lon, lat, '.b', ms=2)
325 325 ax.text(lon, lat, label.decode('utf8'), ha='center',
326 326 va='bottom', size='8', color='black')
327 327
328 328 # plot limites
329 329 limites = []
330 330 tmp = []
331 331 for line in open('/data/workspace/schain_scripts/lima.csv'):
332 332 if '#' in line:
333 333 if tmp:
334 334 limites.append(tmp)
335 335 tmp = []
336 336 continue
337 337 values = line.strip().split(',')
338 338 tmp.append((float(values[0]), float(values[1])))
339 339 for points in limites:
340 340 ax.add_patch(
341 341 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
342 342
343 343 # plot Cuencas
344 344 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
345 345 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
346 346 values = [line.strip().split(',') for line in f]
347 347 points = [(float(s[0]), float(s[1])) for s in values]
348 348 ax.add_patch(Polygon(points, ec='b', fc='none'))
349 349
350 350 # plot grid
351 351 for r in (15, 30, 45, 60):
352 352 ax.add_artist(plt.Circle((self.lon, self.lat),
353 353 km2deg(r), color='0.6', fill=False, lw=0.2))
354 354 ax.text(
355 355 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
356 356 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
357 357 '{}km'.format(r),
358 358 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
359 359
360 360 if self.mode == 'E':
361 361 title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
362 362 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
363 363 else:
364 364 title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
365 365 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
366 366
367 367 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
368 368 self.titles = ['{} {}'.format(
369 369 self.data.parameters[x], title) for x in self.channels]
370 370
371 371 class WeatherPlot(Plot):
372 372 CODE = 'weather'
373 373 plot_name = 'weather'
374 374 plot_type = 'ppistyle'
375 375 buffering = False
376 376
377 377 def setup(self):
378 378 self.ncols = 1
379 379 self.nrows = 1
380 380 self.nplots= 1
381 381 self.ylabel= 'Range [Km]'
382 382 self.titles= ['Weather']
383 383 self.colorbar=False
384 384 self.width =8
385 385 self.height =8
386 386 self.ini =0
387 387 self.len_azi =0
388 388 self.buffer_ini = None
389 389 self.buffer_azi = None
390 390 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
391 391 self.flag =0
392 392 self.indicador= 0
393 self.last_data_azi = None
394 self.val_mean = None
393 395
394 396 def update(self, dataOut):
395 397
396 398 data = {}
397 399 meta = {}
398 400 if hasattr(dataOut, 'dataPP_POWER'):
399 401 factor = 1
400 402
401 403 if hasattr(dataOut, 'nFFTPoints'):
402 404 factor = dataOut.normFactor
403 405
404 406 ####print("factor",factor)
405 data['weather'] = 10*numpy.log10(dataOut.data_360[0]/(factor))
406 print("weather",data['weather'])
407 data['weather'] = 10*numpy.log10(dataOut.data_360[1]/(factor))
408 ####print("weather",data['weather'])
407 409 data['azi'] = dataOut.data_azi
408 410 return data, meta
409 411
412 def analizeDATA(self,data_azi):
413 list1 = []
414 list2 = []
415 dat = data_azi
416 for i in reversed(range(1,len(dat))):
417 if dat[i]>dat[i-1]:
418 diff = int(dat[i])-int(dat[i-1])
419 else:
420 diff = 360+int(dat[i])-int(dat[i-1])
421 if diff > 1:
422 list1.append(i-1)
423 list2.append(diff-1)
424 return list1,list2
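# (editor's note) worked example of analizeDATA, assuming a 1-degree-resolution
# azimuth sequence with a wrap-around and one gap:
#   data_azi = [358, 359, 0, 1, 5, 6]
#   the jump from 1 to 5 gives diff = 4, so the method returns
#   list1 = [3] (index just before the gap) and list2 = [3] (number of missing samples).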
425
426 def fixDATANEW(self,data_azi,data_weather):
427 list1,list2 = self.analizeDATA(data_azi)
428 if len(list1)== 0:
429 return data_azi,data_weather
430 else:
431 resize = 0
432 for i in range(len(list2)):
433 resize= resize + list2[i]
434 new_data_azi = numpy.resize(data_azi,resize)
435 new_data_weather= numpy.resize(data_weather,resize)
436
437 for i in range(len(list2)):
438 j=0
439 position=list1[i]+1
440 for j in range(list2[i]):
441 new_data_azi[position+j]=new_data_azi[position+j-1]+1
442
443 return new_data_azi
444
445 def fixDATA(self,data_azi):
446 data=data_azi
447 for i in range(len(data)):
448 if numpy.isnan(data[i]):
449 data[i]=data[i-1]+1
450 return data
451
452 def replaceNAN(self,data_weather,data_azi,val):
453 print("----------------activeNEWFUNCTION")
454 data= data_azi
455 data_T= data_weather
456 print("data_azi",data_azi)
457 print("VAL:",val)
458 print("SHAPE",data_T.shape)
459 for i in range(len(data)):
460 if numpy.isnan(data[i]):
461 print("NAN")
462 data_T[i,:]=numpy.ones(data_T.shape[1])*val
463 #data_T[i,:]=numpy.ones(data_T.shape[1])*numpy.nan
464 return data_T
465
410 466 def const_ploteo(self,data_weather,data_azi,step,res):
411 467 if self.ini==0:
412 468 #------- AZIMUTH
413 469 n = (360/res)-len(data_azi)
414 start = data_azi[-1] + res
415 end = data_azi[0] - res
470 ##### new
471 data_azi_old = data_azi
472 data_azi_new = self.fixDATA(data_azi)
473 #ata_azi_new = self.fixDATANEW(data_azi)
474
475 start = data_azi_new[-1] + res
476 end = data_azi_new[0] - res
477 ##### new
478 self.last_data_azi = end
416 479 if start>end:
417 480 end = end + 360
418 481 azi_vacia = numpy.linspace(start,end,int(n))
419 482 azi_vacia = numpy.where(azi_vacia>360,azi_vacia-360,azi_vacia)
420 data_azi = numpy.hstack((data_azi,azi_vacia))
483 data_azi = numpy.hstack((data_azi_new,azi_vacia))
421 484 # RADAR
422 val_mean = numpy.mean(data_weather[:,0])
485 val_mean = numpy.mean(data_weather[:,-1])
486 self.val_mean = val_mean
423 487 data_weather_cmp = numpy.ones([(360-data_weather.shape[0]),data_weather.shape[1]])*val_mean
488 data_weather = self.replaceNAN(data_weather=data_weather,data_azi=data_azi_old,val=self.val_mean)
424 489 data_weather = numpy.vstack((data_weather,data_weather_cmp))
425 490 else:
426 491 # azimuth
427 492 flag=0
428 493 start_azi = self.res_azi[0]
494 #### new
495 data_azi_old = data_azi
496 ### weather ###
497 data_weather = self.replaceNAN(data_weather=data_weather,data_azi=data_azi_old,val=self.val_mean)
498
499 if numpy.isnan(data_azi[0]):
500 data_azi[0]=self.last_data_azi+1
501 data_azi = self.fixDATA(data_azi)
502 ####
503
429 504 start = data_azi[0]
430 505 end = data_azi[-1]
431 print("start",start)
432 print("end",end)
506 self.last_data_azi= end
507 ####print("start",start)
508 ####print("end",end)
433 509 if start< start_azi:
434 510 start = start +360
435 511 if end <start_azi:
436 512 end = end +360
437 513
438 print("start",start)
439 print("end",end)
514 ####print("start",start)
515 ####print("end",end)
440 516 #### AQUI SERA LA MAGIA
441 517 pos_ini = int((start-start_azi)/res)
442 518 len_azi = len(data_azi)
443 519 if (360-pos_ini)<len_azi:
444 520 if pos_ini+1==360:
445 521 pos_ini=0
446 522 else:
447 523 flag=1
448 524 dif= 360-pos_ini
449 525 comp= len_azi-dif
450 526
451 print(pos_ini)
452 print(len_azi)
453 print("shape",self.res_azi.shape)
527 #-----------------
528 ####print(pos_ini)
529 ####print(len_azi)
530 ####print("shape",self.res_azi.shape)
454 531 if flag==0:
455 532 # AZIMUTH
456 533 self.res_azi[pos_ini:pos_ini+len_azi] = data_azi
457 534 # RADAR
458 535 self.res_weather[pos_ini:pos_ini+len_azi,:] = data_weather
459 536 else:
460 537 # AZIMUTH
461 538 self.res_azi[pos_ini:pos_ini+dif] = data_azi[0:dif]
462 539 self.res_azi[0:comp] = data_azi[dif:]
463 540 # RADAR
464 541 self.res_weather[pos_ini:pos_ini+dif,:] = data_weather[0:dif,:]
465 542 self.res_weather[0:comp,:] = data_weather[dif:,:]
466 543 flag=0
467 544 data_azi = self.res_azi
468 545 data_weather = self.res_weather
469 546
470 547 return data_weather,data_azi
471 548
472 549 def plot(self):
473 print("--------------------------------------",self.ini,"-----------------------------------")
550 #print("--------------------------------------",self.ini,"-----------------------------------")
474 551 #numpy.set_printoptions(suppress=True)
475 #print(self.data.times)
476 thisDatetime = datetime.datetime.utcfromtimestamp(self.data.times[-1])
552 ####print("times: ",self.data.times)
553 thisDatetime = datetime.datetime.utcfromtimestamp(self.data.times[-1]).strftime('%Y-%m-%d %H:%M:%S')
554 #print("times: ",thisDatetime)
477 555 data = self.data[-1]
478 # ALTURA altura_tmp_h
479 altura_h = (data['weather'].shape[1])/10.0
480 stoprange = float(altura_h*1.5)#stoprange = float(33*1.5) por ahora 400
481 rangestep = float(0.15)
482 r = numpy.arange(0, stoprange, rangestep)
556 ####ALTURA altura_tmp_h
557 ###print("Y RANGES",self.data.yrange,len(self.data.yrange))
558 ###altura_h = (data['weather'].shape[1])/10.0
559 ###stoprange = float(altura_h*0.3)#stoprange = float(33*1.5) por ahora 400
560 ###rangestep = float(0.03)
561 ###r = numpy.arange(0, stoprange, rangestep)
562 ###print("r",r,len(r))
563 #-----------------------------update----------------------
564 r= self.data.yrange
565 delta_height = r[1]-r[0]
566 #print("1",r)
567 r_mask= numpy.where(r>=0)[0]
568 r = numpy.arange(len(r_mask))*delta_height
569 #print("2",r)
483 570 self.y = 2*r
571 ######self.y = self.data.yrange
484 572 # RADAR
485 573 #data_weather = data['weather']
486 574 # PEDESTAL
487 575 #data_azi = data['azi']
488 576 res = 1
489 577 # STEP
490 578 step = (360/(res*data['weather'].shape[0]))
491 579 #print("shape wr_data", wr_data.shape)
492 580 #print("shape wr_azi",wr_azi.shape)
493 581 #print("step",step)
494 print("Time---->",self.data.times[-1],thisDatetime)
495 #print("alturas", len(self.y))
496 self.res_weather, self.res_azi = self.const_ploteo(data_weather=data['weather'],data_azi=data['azi'],step=step,res=res)
582 ####print("Time---->",self.data.times[-1],thisDatetime)
583 #print("alturas", len(self.y))numpy.where(r>=0)
584 self.res_weather, self.res_azi = self.const_ploteo(data_weather=data['weather'][:,r_mask],data_azi=data['azi'],step=step,res=res)
497 585 #numpy.set_printoptions(suppress=True)
498 586 #print("resultado",self.res_azi)
499 ##########################################################
587 ###########################/DATA_RM/10_tmp/ch0###############################
500 588 ################# PLOTEO ###################
501 589 ##########################################################
502 590
503 591 for i,ax in enumerate(self.axes):
504 592 if ax.firsttime:
505 593 plt.clf()
506 cgax, pm = wrl.vis.plot_ppi(self.res_weather,r=r,az=self.res_azi,fig=self.figures[0], proj='cg', vmin=1, vmax=60)
594 cgax, pm = wrl.vis.plot_ppi(self.res_weather,r=r,az=self.res_azi,fig=self.figures[0], proj='cg', vmin=8, vmax=35)
507 595 else:
508 596 plt.clf()
509 cgax, pm = wrl.vis.plot_ppi(self.res_weather,r=r,az=self.res_azi,fig=self.figures[0], proj='cg', vmin=1, vmax=60)
597 cgax, pm = wrl.vis.plot_ppi(self.res_weather,r=r,az=self.res_azi,fig=self.figures[0], proj='cg', vmin=8, vmax=35)
510 598 caax = cgax.parasites[0]
511 599 paax = cgax.parasites[1]
512 600 cbar = plt.gcf().colorbar(pm, pad=0.075)
513 601 caax.set_xlabel('x_range [km]')
514 602 caax.set_ylabel('y_range [km]')
515 plt.text(1.0, 1.05, 'azimuth '+str(thisDatetime)+"step"+str(self.ini), transform=caax.transAxes, va='bottom',ha='right')
603 plt.text(1.0, 1.05, 'azimuth '+str(thisDatetime)+" step "+str(self.ini), transform=caax.transAxes, va='bottom',ha='right')
516 604
517 605 self.ini= self.ini+1
@@ -1,798 +1,798
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 # SUBCHANNELS EN VEZ DE CHANNELS
7 7 # BENCHMARKS -> PROBLEMAS CON ARCHIVOS GRANDES -> INCONSTANTE EN EL TIEMPO
8 8 # ACTUALIZACION DE VERSION
9 9 # HEADERS
10 10 # MODULO DE ESCRITURA
11 11 # METADATA
12 12
13 13 import os
14 14 import time
15 15 import datetime
16 16 import numpy
17 17 import timeit
18 18 from fractions import Fraction
19 19 from time import time
20 20 from time import sleep
21 21
22 22 import schainpy.admin
23 23 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
24 24 from schainpy.model.data.jrodata import Voltage
25 25 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
26 26
27 27 import pickle
28 28 try:
29 29 import digital_rf
30 30 except:
31 31 pass
32 32
33 33
34 34 class DigitalRFReader(ProcessingUnit):
35 35 '''
36 36 classdocs
37 37 '''
38 38
39 39 def __init__(self):
40 40 '''
41 41 Constructor
42 42 '''
43 43
44 44 ProcessingUnit.__init__(self)
45 45
46 46 self.dataOut = Voltage()
47 47 self.__printInfo = True
48 48 self.__flagDiscontinuousBlock = False
49 49 self.__bufferIndex = 9999999
50 50 self.__codeType = 0
51 51 self.__ippKm = None
52 52 self.__nCode = None
53 53 self.__nBaud = None
54 54 self.__code = None
55 55 self.dtype = None
56 56 self.oldAverage = None
57 57 self.path = None
58 58
59 59 def close(self):
60 60 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
61 61 return
62 62
63 63 def __getCurrentSecond(self):
64 64
65 65 return self.__thisUnixSample / self.__sample_rate
66 66
67 67 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
68 68
69 69 def __setFileHeader(self):
70 70 '''
71 71 In this method will be initialized every parameter of dataOut object (header, no data)
72 72 '''
73 73 ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
74 74
75 75 nProfiles = 1.0 / ippSeconds # Number of profiles in one second
76 76
77 77 try:
78 78 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
79 79 self.__radarControllerHeader)
80 80 except:
81 81 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
82 82 txA=0,
83 83 txB=0,
84 84 nWindows=1,
85 85 nHeights=self.__nSamples,
86 86 firstHeight=self.__firstHeigth,
87 87 deltaHeight=self.__deltaHeigth,
88 88 codeType=self.__codeType,
89 89 nCode=self.__nCode, nBaud=self.__nBaud,
90 90 code=self.__code)
91 91
92 92 try:
93 93 self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
94 94 except:
95 95 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
96 96 nProfiles=nProfiles,
97 97 nChannels=len(
98 98 self.__channelList),
99 99 adcResolution=14)
100 100 self.dataOut.type = "Voltage"
101 101
102 102 self.dataOut.data = None
103 103
104 104 self.dataOut.dtype = self.dtype
105 105
106 106 # self.dataOut.nChannels = 0
107 107
108 108 # self.dataOut.nHeights = 0
109 109
110 110 self.dataOut.nProfiles = int(nProfiles)
111 111
112 112 self.dataOut.heightList = self.__firstHeigth + \
113 113 numpy.arange(self.__nSamples, dtype=numpy.float) * \
114 114 self.__deltaHeigth
115 115
116 116 #self.dataOut.channelList = list(range(self.__num_subchannels))
117 117 self.dataOut.channelList = list(range(len(self.__channelList)))
118 118 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
119 119
120 120 # self.dataOut.channelIndexList = None
121 121
122 122 self.dataOut.flagNoData = True
123 123
124 124 self.dataOut.flagDataAsBlock = False
125 125 # Set to TRUE if the data is discontinuous
126 126 self.dataOut.flagDiscontinuousBlock = False
127 127
128 128 self.dataOut.utctime = None
129 129
130 130 # timezone like jroheader, difference in minutes between UTC and localtime
131 131 self.dataOut.timeZone = self.__timezone / 60
132 132
133 133 self.dataOut.dstFlag = 0
134 134
135 135 self.dataOut.errorCount = 0
136 136
137 137 try:
138 138 self.dataOut.nCohInt = self.fixed_metadata_dict.get(
139 139 'nCohInt', self.nCohInt)
140 140
141 141 # assume the data is already decoded
142 142 self.dataOut.flagDecodeData = self.fixed_metadata_dict.get(
143 143 'flagDecodeData', self.flagDecodeData)
144 144
145 145 # assume the data is not flipped
146 146 self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']
147 147
148 148 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
149 149
150 150 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
151 151 except:
152 152 pass
153 153
154 154 self.dataOut.ippSeconds = ippSeconds
155 155
156 156 # Time interval between profiles
157 157 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
158 158
159 159 self.dataOut.frequency = self.__frequency
160 160
161 161 self.dataOut.realtime = self.__online
162 162
163 163 def findDatafiles(self, path, startDate=None, endDate=None):
164 164
165 165 if not os.path.isdir(path):
166 166 return []
167 167
168 168 try:
169 169 digitalReadObj = digital_rf.DigitalRFReader(
170 170 path, load_all_metadata=True)
171 171 except:
172 172 digitalReadObj = digital_rf.DigitalRFReader(path)
173 173
174 174 channelNameList = digitalReadObj.get_channels()
175 175
176 176 if not channelNameList:
177 177 return []
178 178
179 179 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
180 180
181 181 sample_rate = metadata_dict['sample_rate'][0]
182 182
183 183 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
184 184
185 185 try:
186 186 timezone = this_metadata_file['timezone'].value
187 187 except:
188 188 timezone = 0
189 189
190 190 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
191 191 channelNameList[0]) / sample_rate - timezone
192 192
193 193 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
194 194 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
195 195
196 196 if not startDate:
197 197 startDate = startDatetime.date()
198 198
199 199 if not endDate:
200 200 endDate = endDatatime.date()
201 201
202 202 dateList = []
203 203
204 204 thisDatetime = startDatetime
205 205
206 206 while(thisDatetime <= endDatatime):
207 207
208 208 thisDate = thisDatetime.date()
209 209
210 210 if thisDate < startDate:
211 211 continue
212 212
213 213 if thisDate > endDate:
214 214 break
215 215
216 216 dateList.append(thisDate)
217 217 thisDatetime += datetime.timedelta(1)
218 218
219 219 return dateList
220 220
221 221 def setup(self, path=None,
222 222 startDate=None,
223 223 endDate=None,
224 224 startTime=datetime.time(0, 0, 0),
225 225 endTime=datetime.time(23, 59, 59),
226 226 channelList=None,
227 227 nSamples=None,
228 228 online=False,
229 229 delay=60,
230 230 buffer_size=1024,
231 231 ippKm=None,
232 232 nCohInt=1,
233 233 nCode=1,
234 234 nBaud=1,
235 235 flagDecodeData=False,
236 236 code=numpy.ones((1, 1), dtype=numpy.int),
237 237 **kwargs):
238 238 '''
239 239 In this method we should set all initial parameters.
240 240
241 241 Inputs:
242 242 path
243 243 startDate
244 244 endDate
245 245 startTime
246 246 endTime
247 247 set
248 248 expLabel
249 249 ext
250 250 online
251 251 delay
252 252 '''
253 253 self.path = path
254 254 self.nCohInt = nCohInt
255 255 self.flagDecodeData = flagDecodeData
256 256 self.i = 0
257 257 if not os.path.isdir(path):
258 258 raise ValueError("[Reading] Directory %s does not exist" % path)
259 259
260 260 try:
261 261 self.digitalReadObj = digital_rf.DigitalRFReader(
262 262 path, load_all_metadata=True)
263 263 except:
264 264 self.digitalReadObj = digital_rf.DigitalRFReader(path)
265 265
266 266 channelNameList = self.digitalReadObj.get_channels()
267 267
268 268 if not channelNameList:
269 269 raise ValueError("[Reading] Directory %s does not have any files" % path)
270 270
271 271 if not channelList:
272 272 channelList = list(range(len(channelNameList)))
273 273
274 274 ########## Reading metadata ######################
275 275
276 276 top_properties = self.digitalReadObj.get_properties(
277 277 channelNameList[channelList[0]])
278 278
279 279 self.__num_subchannels = top_properties['num_subchannels']
280 280 self.__sample_rate = 1.0 * \
281 281 top_properties['sample_rate_numerator'] / \
282 282 top_properties['sample_rate_denominator']
283 283 # self.__samples_per_file = top_properties['samples_per_file'][0]
284 284 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # why 0.15?
285 285
286 286 this_metadata_file = self.digitalReadObj.get_digital_metadata(
287 287 channelNameList[channelList[0]])
288 288 metadata_bounds = this_metadata_file.get_bounds()
289 289 self.fixed_metadata_dict = this_metadata_file.read(
290 290 metadata_bounds[0])[metadata_bounds[0]] # GET FIRST HEADER
291 291
292 292 try:
293 293 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
294 294 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
295 295 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
296 296 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
297 297 except:
298 298 pass
299 299
300 300 self.__frequency = None
301 301
302 302 self.__frequency = self.fixed_metadata_dict.get('frequency', 1)
303 303
304 304 self.__timezone = self.fixed_metadata_dict.get('timezone', 18000)
305 305
306 306 try:
307 307 nSamples = self.fixed_metadata_dict['nSamples']
308 308 except:
309 309 nSamples = None
310 310
311 311 self.__firstHeigth = 0
312 312
313 313 try:
314 314 codeType = self.__radarControllerHeader['codeType']
315 315 except:
316 316 codeType = 0
317 317
318 318 try:
319 319 if codeType:
320 320 nCode = self.__radarControllerHeader['nCode']
321 321 nBaud = self.__radarControllerHeader['nBaud']
322 322 code = self.__radarControllerHeader['code']
323 323 except:
324 324 pass
325 325
326 326 if not ippKm:
327 327 try:
328 328 # seconds to km
329 329 ippKm = self.__radarControllerHeader['ipp']
330 330 except:
331 331 ippKm = None
332 332 ####################################################
333 333 self.__ippKm = ippKm
334 334 startUTCSecond = None
335 335 endUTCSecond = None
336 336
337 337 if startDate:
338 338 startDatetime = datetime.datetime.combine(startDate, startTime)
339 339 startUTCSecond = (
340 340 startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
341 341
342 342 if endDate:
343 343 endDatetime = datetime.datetime.combine(endDate, endTime)
344 344 endUTCSecond = (endDatetime - datetime.datetime(1970,
345 345 1, 1)).total_seconds() + self.__timezone
346 346
347 347
348 348 print(startUTCSecond,endUTCSecond)
349 349 start_index, end_index = self.digitalReadObj.get_bounds(
350 350 channelNameList[channelList[0]])
351 351
352 print("*****",start_index,end_index)
352 ##print("*****",start_index,end_index)
353 353 if not startUTCSecond:
354 354 startUTCSecond = start_index / self.__sample_rate
355 355
356 356 if start_index > startUTCSecond * self.__sample_rate:
357 357 startUTCSecond = start_index / self.__sample_rate
358 358
359 359 if not endUTCSecond:
360 360 endUTCSecond = end_index / self.__sample_rate
361 361
362 362 if end_index < endUTCSecond * self.__sample_rate:
363 363 endUTCSecond = end_index / self.__sample_rate
364 364 if not nSamples:
365 365 if not ippKm:
366 366 raise ValueError("[Reading] nSamples or ippKm should be defined")
367 367 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
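# i.e. nSamples = ippKm / deltaHeight, with deltaHeight = 1e6 * 0.15 / sample_rate being the
# range gate spacing in km (0.15 km per microsecond of round-trip delay, c/2)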
368 368 channelBoundList = []
369 369 channelNameListFiltered = []
370 370
371 371 for thisIndexChannel in channelList:
372 372 thisChannelName = channelNameList[thisIndexChannel]
373 373 start_index, end_index = self.digitalReadObj.get_bounds(
374 374 thisChannelName)
375 375 channelBoundList.append((start_index, end_index))
376 376 channelNameListFiltered.append(thisChannelName)
377 377
378 378 self.profileIndex = 0
379 379 self.i = 0
380 380 self.__delay = delay
381 381
382 382 self.__codeType = codeType
383 383 self.__nCode = nCode
384 384 self.__nBaud = nBaud
385 385 self.__code = code
386 386
387 387 self.__datapath = path
388 388 self.__online = online
389 389 self.__channelList = channelList
390 390 self.__channelNameList = channelNameListFiltered
391 391 self.__channelBoundList = channelBoundList
392 392 self.__nSamples = nSamples
393 393 self.__samples_to_read = int(nSamples) # FIXED: currently 40
394 394 self.__nChannels = len(self.__channelList)
395 395
396 396 self.__startUTCSecond = startUTCSecond
397 397 self.__endUTCSecond = endUTCSecond
398 398
399 399 self.__timeInterval = 1.0 * self.__samples_to_read / \
400 400 self.__sample_rate # Time interval
401 401
402 402 if online:
403 403 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
404 404 startUTCSecond = numpy.floor(endUTCSecond)
405 405
406 406 # because in the other method the first thing done is to add samples_to_read
407 407 self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
408 408
409 409 #self.__data_buffer = numpy.zeros(
410 410 # (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
411 411 self.__data_buffer = numpy.zeros((int(len(channelList)), self.__samples_to_read), dtype=numpy.complex)
412 412
413 413
414 414 self.__setFileHeader()
415 415 self.isConfig = True
416 416
417 417 print("[Reading] Digital RF Data was found from %s to %s " % (
418 418 datetime.datetime.utcfromtimestamp(
419 419 self.__startUTCSecond - self.__timezone),
420 420 datetime.datetime.utcfromtimestamp(
421 421 self.__endUTCSecond - self.__timezone)
422 422 ))
423 423
424 424 print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
425 425 datetime.datetime.utcfromtimestamp(
426 426 endUTCSecond - self.__timezone)
427 427 ))
428 428 self.oldAverage = None
429 429 self.count = 0
430 430 self.executionTime = 0
431 431
432 432 def __reload(self):
433 433 # print
434 434 # print "%s not in range [%s, %s]" %(
435 435 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
436 436 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
437 437 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
438 438 # )
439 439 print("[Reading] reloading metadata ...")
440 440
441 441 try:
442 442 self.digitalReadObj.reload(complete_update=True)
443 443 except:
444 444 self.digitalReadObj = digital_rf.DigitalRFReader(self.path)
445 445
446 446 start_index, end_index = self.digitalReadObj.get_bounds(
447 447 self.__channelNameList[self.__channelList[0]])
448 448
449 449 if start_index > self.__startUTCSecond * self.__sample_rate:
450 450 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
451 451
452 452 if end_index > self.__endUTCSecond * self.__sample_rate:
453 453 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
454 454 print()
455 455 print("[Reading] New timerange found [%s, %s] " % (
456 456 datetime.datetime.utcfromtimestamp(
457 457 self.__startUTCSecond - self.__timezone),
458 458 datetime.datetime.utcfromtimestamp(
459 459 self.__endUTCSecond - self.__timezone)
460 460 ))
461 461
462 462 return True
463 463
464 464 return False
465 465
466 466 def timeit(self, toExecute):
467 467 t0 = time.time()
468 468 toExecute()
469 469 self.executionTime = time.time() - t0
470 470 if self.oldAverage is None:
471 471 self.oldAverage = self.executionTime
472 472 self.oldAverage = (self.executionTime + self.count *
473 473 self.oldAverage) / (self.count + 1.0)
474 474 self.count = self.count + 1.0
475 475 return
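# Note: timeit() (and the inlined timing in __readNextBlock) keeps an incremental mean of the
# read time: oldAverage_new = (executionTime + count * oldAverage_old) / (count + 1).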
476 476
477 477 def __readNextBlock(self, seconds=30, volt_scale=1):
478 478 '''
479 479 '''
480 480
481 481 # Set the next data
482 482 self.__flagDiscontinuousBlock = False
483 483 self.__thisUnixSample += self.__samples_to_read
484 484
485 485 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
486 486 print("[Reading] There is no more data in the selected time range")
487 487 if self.__online:
488 488 sleep(3)
489 489 self.__reload()
490 490 else:
491 491 return False
492 492
493 493 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
494 494 return False
495 495 self.__thisUnixSample -= self.__samples_to_read
496 496
497 497 indexChannel = 0
498 498
499 499 dataOk = False
500 500
501 501 for thisChannelName in self.__channelNameList: # TODO: multiple channels?
502 502 for indexSubchannel in range(self.__num_subchannels):
503 503 try:
504 504 t0 = time()
505 505 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
506 506 self.__samples_to_read,
507 507 thisChannelName, sub_channel=indexSubchannel)
508 508 self.executionTime = time() - t0
509 509 if self.oldAverage is None:
510 510 self.oldAverage = self.executionTime
511 511 self.oldAverage = (
512 512 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
513 513 self.count = self.count + 1.0
514 514
515 515 except IOError as e:
516 516 # read next profile
517 517 self.__flagDiscontinuousBlock = True
518 518 print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
519 519 break
520 520
521 521 if result.shape[0] != self.__samples_to_read:
522 522 self.__flagDiscontinuousBlock = True
523 523 print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
524 524 result.shape[0],
525 525 self.__samples_to_read))
526 526 break
527 527
528 528 self.__data_buffer[indexChannel, :] = result * volt_scale
529 529 indexChannel+=1
530 530
531 531 dataOk = True
532 532
533 533 self.__utctime = self.__thisUnixSample / self.__sample_rate
534 534
535 535 if not dataOk:
536 536 return False
537 537
538 538 print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
539 539 self.__samples_to_read,
540 540 self.__timeInterval))
541 541
542 542 self.__bufferIndex = 0
543 543
544 544 return True
545 545
546 546 def __isBufferEmpty(self):
547 547 return self.__bufferIndex > self.__samples_to_read - self.__nSamples # 40960 - 40
548 548
549 549 def getData(self, seconds=30, nTries=5):
550 550 '''
551 551 This method gets the data from the files and puts it into the dataOut object
552 552 
553 553 In addition, it increases the buffer counter by one.
554 554 
555 555 Return:
556 556 data : returns one profile of voltages (heights * channels) copied from the
557 557 buffer. If there are no more files to read, it returns None.
558 558
559 559 Affected:
560 560 self.dataOut
561 561 self.profileIndex
562 562 self.flagDiscontinuousBlock
563 563 self.flagIsNewBlock
564 564 '''
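# Each call returns one profile (nSamples heights per channel) sliced from __data_buffer;
# when the buffer is exhausted (__isBufferEmpty), __readNextBlock() is called to refill it.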
565 565 #print("getdata")
566 566 err_counter = 0
567 567 self.dataOut.flagNoData = True
568 568
569 569 if self.__isBufferEmpty():
570 570 #print("hi")
571 571 self.__flagDiscontinuousBlock = False
572 572
573 573 while True:
574 574 #print ("q ha pasado")
575 575 if self.__readNextBlock():
576 576 break
577 577 if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
578 578 raise schainpy.admin.SchainError('Error')
579 579 return
580 580
581 581 if self.__flagDiscontinuousBlock:
582 582 raise schainpy.admin.SchainError('discontinuous block found')
583 583 return
584 584
585 585 if not self.__online:
586 586 raise schainpy.admin.SchainError('Online?')
587 587 return
588 588
589 589 err_counter += 1
590 590 if err_counter > nTries:
591 591 raise schainpy.admin.SchainError('Max retries reached')
592 592 return
593 593
594 594 print('[Reading] waiting %d seconds to read a new block' % seconds)
595 595 sleep(seconds)
596 596
597 597 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
598 598 self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
599 599 self.dataOut.flagNoData = False
600 600 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
601 601 self.dataOut.profileIndex = self.profileIndex
602 602
603 603 self.__bufferIndex += self.__nSamples
604 604 self.profileIndex += 1
605 605
606 606 if self.profileIndex == self.dataOut.nProfiles:
607 607 self.profileIndex = 0
608 608
609 609 return True
610 610
611 611 def printInfo(self):
612 612 '''
613 613 '''
614 614 if self.__printInfo == False:
615 615 return
616 616
617 617 # self.systemHeaderObj.printInfo()
618 618 # self.radarControllerHeaderObj.printInfo()
619 619
620 620 self.__printInfo = False
621 621
622 622 def printNumberOfBlock(self):
623 623 '''
624 624 '''
625 625 return
626 626 # print self.profileIndex
627 627
628 628 def run(self, **kwargs):
629 629 '''
630 630 This method will be called many times so here you should put all your code
631 631 '''
632 632
633 633 if not self.isConfig:
634 634 self.setup(**kwargs)
635 635 #self.i = self.i+1
636 636 self.getData(seconds=self.__delay)
637 637
638 638 return
639 639
640 640 @MPDecorator
641 641 class DigitalRFWriter(Operation):
642 642 '''
643 643 Operation that writes Voltage data in Digital RF format using digital_rf.DigitalRFWriter
644 644 '''
645 645
646 646 def __init__(self, **kwargs):
647 647 '''
648 648 Constructor
649 649 '''
650 650 Operation.__init__(self, **kwargs)
651 651 self.metadata_dict = {}
652 652 self.dataOut = None
653 653 self.dtype = None
654 654 self.oldAverage = 0
655 655
656 656 def setHeader(self):
657 657
658 658 self.metadata_dict['frequency'] = self.dataOut.frequency
659 659 self.metadata_dict['timezone'] = self.dataOut.timeZone
660 660 self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
661 661 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
662 662 self.metadata_dict['heightList'] = self.dataOut.heightList
663 663 self.metadata_dict['channelList'] = self.dataOut.channelList
664 664 self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
665 665 self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
666 666 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
667 667 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
668 668 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
669 669 self.metadata_dict['type'] = self.dataOut.type
670 670 self.metadata_dict['flagDataAsBlock']= getattr(
671 671 self.dataOut, 'flagDataAsBlock', None) # check this
672 672
673 673 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
674 674 '''
675 675 In this method we should set all initial parameters.
676 676 Input:
677 677 dataOut: Input data will also be output data
678 678 '''
679 679 self.setHeader()
680 680 self.__ippSeconds = dataOut.ippSeconds
681 681 self.__deltaH = dataOut.getDeltaH()
682 682 self.__sample_rate = 1e6 * 0.15 / self.__deltaH
683 683 self.__dtype = dataOut.dtype
684 684 if len(dataOut.dtype) == 2:
685 685 self.__dtype = dataOut.dtype[0]
686 686 self.__nSamples = dataOut.systemHeaderObj.nSamples
687 687 self.__nProfiles = dataOut.nProfiles
688 688
689 689 if self.dataOut.type != 'Voltage':
690 690 raise ValueError('Digital RF cannot be used with this data type')
691 691 self.arr_data = numpy.ones((1, dataOut.nFFTPoints * len(
692 692 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
693 693 else:
694 694 self.arr_data = numpy.ones((self.__nSamples, len(
695 695 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
696 696
697 697 file_cadence_millisecs = 1000
698 698
699 699 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
700 700 sample_rate_numerator = int(sample_rate_fraction.numerator)
701 701 sample_rate_denominator = int(sample_rate_fraction.denominator)
702 702 start_global_index = dataOut.utctime * self.__sample_rate
703 703
704 704 uuid = 'prueba'
705 705 compression_level = 0
706 706 checksum = False
707 707 is_complex = True
708 708 num_subchannels = len(dataOut.channelList)
709 709 is_continuous = True
710 710 marching_periods = False
711 711
712 712 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
713 713 fileCadence, start_global_index,
714 714 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
715 715 is_complex, num_subchannels, is_continuous, marching_periods)
716 716 metadata_dir = os.path.join(path, 'metadata')
717 717 os.system('mkdir %s' % (metadata_dir))
718 718 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, # 236, file_cadence_millisecs / 1000
719 719 sample_rate_numerator, sample_rate_denominator,
720 720 metadataFile)
721 721 self.isConfig = True
722 722 self.currentSample = 0
723 723 self.oldAverage = 0
724 724 self.count = 0
725 725 return
726 726
727 727 def writeMetadata(self):
728 728 start_idx = self.__sample_rate * self.dataOut.utctime
729 729
730 730 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
731 731 )
732 732 self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
733 733 )
734 734 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
735 735 )
736 736 self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
737 737 return
738 738
739 739 def timeit(self, toExecute):
740 740 t0 = time()
741 741 toExecute()
742 742 self.executionTime = time() - t0
743 743 if self.oldAverage is None:
744 744 self.oldAverage = self.executionTime
745 745 self.oldAverage = (self.executionTime + self.count *
746 746 self.oldAverage) / (self.count + 1.0)
747 747 self.count = self.count + 1.0
748 748 return
749 749
750 750 def writeData(self):
751 751 if self.dataOut.type != 'Voltage':
752 752 raise ValueError('Digital RF cannot be used with this data type')
753 753 for channel in self.dataOut.channelList:
754 754 for i in range(self.dataOut.nFFTPoints):
755 755 self.arr_data[1][channel * self.dataOut.nFFTPoints +
756 756 i]['r'] = self.dataOut.data[channel][i].real
757 757 self.arr_data[1][channel * self.dataOut.nFFTPoints +
758 758 i]['i'] = self.dataOut.data[channel][i].imag
759 759 else:
760 760 for i in range(self.dataOut.systemHeaderObj.nSamples):
761 761 for channel in self.dataOut.channelList:
762 762 self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
763 763 self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag
764 764
765 765 def f(): return self.digitalWriteObj.rf_write(self.arr_data)
766 766 self.timeit(f)
767 767
768 768 return
769 769
770 770 def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=1000, dirCadence=36000, metadataCadence=1, **kwargs):
771 771 '''
772 772 This method will be called many times so here you should put all your code
773 773 Inputs:
774 774 dataOut: object with the data
775 775 '''
776 776 # print dataOut.__dict__
777 777 self.dataOut = dataOut
778 778 if not self.isConfig:
779 779 self.setup(dataOut, path, frequency, fileCadence,
780 780 dirCadence, metadataCadence, **kwargs)
781 781 self.writeMetadata()
782 782
783 783 self.writeData()
784 784
785 785 ## self.currentSample += 1
786 786 # if self.dataOut.flagDataAsBlock or self.currentSample == 1:
787 787 # self.writeMetadata()
788 788 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
789 789
790 790 return dataOut # in version 2.7 this return does not appear
791 791
792 792 def close(self):
793 793 print('[Writing] - Closing files ')
794 794 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
795 795 try:
796 796 self.digitalWriteObj.close()
797 797 except:
798 798 pass
@@ -1,626 +1,648
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with the `HDFWriter` operation; these files contain
19 19 by default two groups, Data and Metadata, and all variables are saved as `dataOut`
20 attributes.
20 attributes.
21 21 It is possible to read any HDF5 file by giving its structure in the `description`
22 22 parameter; you can also add extra values to the metadata with the parameter `extras`.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : date
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be added to `dataOut`
40
40
41 41 Examples
42 42 --------
43
43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67
67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 85 self.dataOut = Parameters()
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96 self.utcoffset = 0
97 97
98 98 def setup(self, **kwargs):
99 99
100 100 self.set_kwargs(**kwargs)
101 101 if not self.ext.startswith('.'):
102 self.ext = '.{}'.format(self.ext)
102 self.ext = '.{}'.format(self.ext)
103 103
104 104 if self.online:
105 105 log.log("Searching files in online mode...", self.name)
106 106
107 107 for nTries in range(self.nTries):
108 108 fullpath = self.searchFilesOnLine(self.path, self.startDate,
109 self.endDate, self.expLabel, self.ext, self.walk,
109 self.endDate, self.expLabel, self.ext, self.walk,
110 110 self.filefmt, self.folderfmt)
111 111 try:
112 112 fullpath = next(fullpath)
113 113 except:
114 114 fullpath = None
115
115
116 116 if fullpath:
117 117 break
118 118
119 119 log.warning(
120 120 'Waiting {} sec for a valid file in {}: try {} ...'.format(
121 self.delay, self.path, nTries + 1),
121 self.delay, self.path, nTries + 1),
122 122 self.name)
123 123 time.sleep(self.delay)
124 124
125 125 if not(fullpath):
126 126 raise schainpy.admin.SchainError(
127 'There isn\'t any valid file in {}'.format(self.path))
127 'There isn\'t any valid file in {}'.format(self.path))
128 128
129 129 pathname, filename = os.path.split(fullpath)
130 130 self.year = int(filename[1:5])
131 131 self.doy = int(filename[5:8])
132 self.set = int(filename[8:11]) - 1
132 self.set = int(filename[8:11]) - 1
133 133 else:
134 134 log.log("Searching files in {}".format(self.path), self.name)
135 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
135 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
136 136 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
137
137
138 138 self.setNextFile()
139 139
140 140 return
141 141
142 142 def readFirstHeader(self):
143 143 '''Read metadata and data'''
144 144
145 self.__readMetadata()
145 self.__readMetadata()
146 146 self.__readData()
147 147 self.__setBlockList()
148
148
149 149 if 'type' in self.meta:
150 150 self.dataOut = eval(self.meta['type'])()
151
151
152 152 for attr in self.meta:
153 153 setattr(self.dataOut, attr, self.meta[attr])
154
154
155 155 self.blockIndex = 0
156
156
157 157 return
158 158
159 159 def __setBlockList(self):
160 160 '''
161 161 Selects the data within the times defined
162 162
163 163 self.fp
164 164 self.startTime
165 165 self.endTime
166 166 self.blockList
167 167 self.blocksPerFile
168 168
169 169 '''
170 170
171 171 startTime = self.startTime
172 172 endTime = self.endTime
173 173 thisUtcTime = self.data['utctime'] + self.utcoffset
174 174 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
175 175 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
176 176
177 177 thisDate = thisDatetime.date()
178 178 thisTime = thisDatetime.time()
179 179
180 180 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
181 181 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
182 182
183 183 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
184 184
185 185 self.blockList = ind
186 186 self.blocksPerFile = len(ind)
187 187 return
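# blockList holds the indices of the utctime samples that fall inside [startTime, endTime)
# for the day of the first sample; blocksPerFile is simply its length.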
188 188
189 189 def __readMetadata(self):
190 190 '''
191 191 Reads Metadata
192 192 '''
193 193
194 194 meta = {}
195 195
196 196 if self.description:
197 197 for key, value in self.description['Metadata'].items():
198 198 meta[key] = self.fp[value][()]
199 199 else:
200 200 grp = self.fp['Metadata']
201 201 for name in grp:
202 202 meta[name] = grp[name][()]
203 203
204 204 if self.extras:
205 205 for key, value in self.extras.items():
206 206 meta[key] = value
207 207 self.meta = meta
208 208
209 209 return
210 210
211 211 def __readData(self):
212 212
213 213 data = {}
214
214
215 215 if self.description:
216 216 for key, value in self.description['Data'].items():
217 217 if isinstance(value, str):
218 218 if isinstance(self.fp[value], h5py.Dataset):
219 219 data[key] = self.fp[value][()]
220 220 elif isinstance(self.fp[value], h5py.Group):
221 221 array = []
222 222 for ch in self.fp[value]:
223 223 array.append(self.fp[value][ch][()])
224 224 data[key] = numpy.array(array)
225 225 elif isinstance(value, list):
226 226 array = []
227 227 for ch in value:
228 228 array.append(self.fp[ch][()])
229 229 data[key] = numpy.array(array)
230 230 else:
231 231 grp = self.fp['Data']
232 232 for name in grp:
233 233 if isinstance(grp[name], h5py.Dataset):
234 234 array = grp[name][()]
235 235 elif isinstance(grp[name], h5py.Group):
236 236 array = []
237 237 for ch in grp[name]:
238 238 array.append(grp[name][ch][()])
239 239 array = numpy.array(array)
240 240 else:
241 241 log.warning('Unknown type: {}'.format(name))
242
242
243 243 if name in self.description:
244 244 key = self.description[name]
245 245 else:
246 246 key = name
247 247 data[key] = array
248 248
249 249 self.data = data
250 250 return
251
251
252 252 def getData(self):
253 253
254 254 for attr in self.data:
255 255 if self.data[attr].ndim == 1:
256 256 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
257 257 else:
258 258 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
259 259
260 260 self.dataOut.flagNoData = False
261 261 self.blockIndex += 1
262 262
263 263 log.log("Block No. {}/{} -> {}".format(
264 264 self.blockIndex,
265 265 self.blocksPerFile,
266 266 self.dataOut.datatime.ctime()), self.name)
267 267
268 268 return
269 269
270 270 def run(self, **kwargs):
271 271
272 272 if not(self.isConfig):
273 273 self.setup(**kwargs)
274 274 self.isConfig = True
275 275
276 276 if self.blockIndex == self.blocksPerFile:
277 277 self.setNextFile()
278 278
279 279 self.getData()
280 280
281 281 return
282 282
283 283 @MPDecorator
284 284 class HDFWriter(Operation):
285 285 """Operation to write HDF5 files.
286 286
287 287 The HDF5 file contains by default two groups Data and Metadata where
288 288 you can save any `dataOut` attribute specified by `dataList` and `metadataList`
289 289 parameters; data attributes are normally time dependent whereas the metadata
290 are not.
291 It is possible to customize the structure of the HDF5 file with the
290 are not.
291 It is possible to customize the structure of the HDF5 file with the
292 292 optional description parameter see the examples.
293 293
294 294 Parameters:
295 295 -----------
296 296 path : str
297 297 Path where files will be saved.
298 298 blocksPerFile : int
299 299 Number of blocks per file
300 300 metadataList : list
301 301 List of the dataOut attributes that will be saved as metadata
302 302 dataList : list
303 303 List of the dataOut attributes that will be saved as data
304 304 setType : bool
305 305 If True the name of the files corresponds to the timestamp of the data
306 306 description : dict, optional
307 307 Dictionary with the desired description of the HDF5 file
308
308
309 309 Examples
310 310 --------
311
311
312 312 desc = {
313 313 'data_output': {'winds': ['z', 'w', 'v']},
314 314 'utctime': 'timestamps',
315 315 'heightList': 'heights'
316 316 }
317 317 desc = {
318 318 'data_output': ['z', 'w', 'v'],
319 319 'utctime': 'timestamps',
320 320 'heightList': 'heights'
321 321 }
322 322 desc = {
323 323 'Data': {
324 324 'data_output': 'winds',
325 325 'utctime': 'timestamps'
326 326 },
327 327 'Metadata': {
328 328 'heightList': 'heights'
329 329 }
330 330 }
331
331
332 332 writer = proc_unit.addOperation(name='HDFWriter')
333 333 writer.addParameter(name='path', value='/path/to/file')
334 334 writer.addParameter(name='blocksPerFile', value='32')
335 335 writer.addParameter(name='metadataList', value='heightList,timeZone')
336 336 writer.addParameter(name='dataList',value='data_output,utctime')
337 337 # writer.addParameter(name='description',value=json.dumps(desc))
338 338
339 339 """
340 340
341 341 ext = ".hdf5"
342 342 optchar = "D"
343 343 filename = None
344 344 path = None
345 345 setFile = None
346 346 fp = None
347 347 firsttime = True
348 348 #Configurations
349 349 blocksPerFile = None
350 350 blockIndex = None
351 351 dataOut = None
352 352 #Data Arrays
353 353 dataList = None
354 354 metadataList = None
355 355 currentDay = None
356 356 lastTime = None
357 last_Azipos = None
358 last_Elepos = None
359 mode = None
360
361
357 362
358 363 def __init__(self):
359
364
360 365 Operation.__init__(self)
361 366 return
362 367
368 def generalFlag(self):
369 print("GENERALFLAG")
370 if self.mode == "weather":
371 if self.last_Azipos is None:
372 tmp = self.dataOut.azimuth
373 print("ang azimuth writer",tmp)
374 self.last_Azipos = tmp
375 flag = False
376 return flag
377 print("ang_azimuth writer",self.dataOut.azimuth)
378 result = self.dataOut.azimuth - self.last_Azipos
379 self.last_Azipos = self.dataOut.azimuth
380 if result<0:
381 flag = True
382 return flag
383
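# generalFlag() above returns True when, in 'weather' mode, the azimuth decreases between
# consecutive blocks (i.e. an antenna scan has wrapped around); putData() below uses it,
# together with blocksPerFile and timeFlag(), to decide when to close the current file.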
363 384 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None):
364 385 self.path = path
365 386 self.blocksPerFile = blocksPerFile
366 387 self.metadataList = metadataList
367 388 self.dataList = [s.strip() for s in dataList]
368 389 self.setType = setType
369 390 self.description = description
370 391
371 392 if self.metadataList is None:
372 393 self.metadataList = self.dataOut.metadata_list
373 394
374 395 tableList = []
375 396 dsList = []
376 397
377 398 for i in range(len(self.dataList)):
378 399 dsDict = {}
379 400 if hasattr(self.dataOut, self.dataList[i]):
380 401 dataAux = getattr(self.dataOut, self.dataList[i])
381 402 dsDict['variable'] = self.dataList[i]
382 403 else:
383 404 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
384 405 continue
385 406
386 407 if dataAux is None:
387 408 continue
388 409 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
389 410 dsDict['nDim'] = 0
390 411 else:
391 412 dsDict['nDim'] = len(dataAux.shape)
392 413 dsDict['shape'] = dataAux.shape
393 414 dsDict['dsNumber'] = dataAux.shape[0]
394 415 dsDict['dtype'] = dataAux.dtype
395
416
396 417 dsList.append(dsDict)
397 418
398 419 self.dsList = dsList
399 420 self.currentDay = self.dataOut.datatime.date()
400 421
401 422 def timeFlag(self):
402 423 currentTime = self.dataOut.utctime
403 424 timeTuple = time.localtime(currentTime)
404 425 dataDay = timeTuple.tm_yday
405 426
406 427 if self.lastTime is None:
407 428 self.lastTime = currentTime
408 429 self.currentDay = dataDay
409 430 return False
410
431
411 432 timeDiff = currentTime - self.lastTime
412 433
413 434 # If the day is different or if the gap between one sample and the next exceeds the limit
414 435 if dataDay != self.currentDay:
415 436 self.currentDay = dataDay
416 437 return True
417 438 elif timeDiff > 3*60*60:
418 439 self.lastTime = currentTime
419 440 return True
420 441 else:
421 442 self.lastTime = currentTime
422 443 return False
423 444
424 445 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
425 dataList=[], setType=None, description={}):
446 dataList=[], setType=None, description={},mode= None):
426 447
427 448 self.dataOut = dataOut
449 self.mode = mode
428 450 if not(self.isConfig):
429 self.setup(path=path, blocksPerFile=blocksPerFile,
451 self.setup(path=path, blocksPerFile=blocksPerFile,
430 452 metadataList=metadataList, dataList=dataList,
431 453 setType=setType, description=description)
432 454
433 455 self.isConfig = True
434 456 self.setNextFile()
435 457
436 458 self.putData()
437 459 return
438
460
439 461 def setNextFile(self):
440
462
441 463 ext = self.ext
442 464 path = self.path
443 465 setFile = self.setFile
444 466
445 467 timeTuple = time.localtime(self.dataOut.utctime)
446 468 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
447 469 fullpath = os.path.join(path, subfolder)
448 470
449 471 if os.path.exists(fullpath):
450 472 filesList = os.listdir(fullpath)
451 473 filesList = [k for k in filesList if k.startswith(self.optchar)]
452 474 if len( filesList ) > 0:
453 475 filesList = sorted(filesList, key=str.lower)
454 476 filen = filesList[-1]
455 477 # the filename should have the following format
456 478 # 0 1234 567 89A BCDE (hex)
457 479 # x YYYY DDD SSS .ext
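# e.g. in the setType=None branch below, optchar 'D', year 2021, day-of-year 123 and set 7
# produce the filename 'D2021123007.hdf5'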
458 480 if isNumber(filen[8:11]):
459 481 setFile = int(filen[8:11]) # initialize my set counter to the set of the last file
460 482 else:
461 483 setFile = -1
462 484 else:
463 485 setFile = -1 # initialize my set counter
464 486 else:
465 487 os.makedirs(fullpath)
466 488 setFile = -1 # initialize my set counter
467 489
468 490 if self.setType is None:
469 491 setFile += 1
470 492 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
471 493 timeTuple.tm_year,
472 494 timeTuple.tm_yday,
473 495 setFile,
474 496 ext )
475 497 else:
476 498 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
477 499 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
478 500 timeTuple.tm_year,
479 501 timeTuple.tm_yday,
480 502 setFile,
481 503 ext )
482 504
483 505 self.filename = os.path.join( path, subfolder, file )
484 506
485 507 #Setting HDF5 File
486 508 self.fp = h5py.File(self.filename, 'w')
487 509 #write metadata
488 510 self.writeMetadata(self.fp)
489 511 #Write data
490 512 self.writeData(self.fp)
491 513
492 514 def getLabel(self, name, x=None):
493 515
494 516 if x is None:
495 517 if 'Data' in self.description:
496 518 data = self.description['Data']
497 519 if 'Metadata' in self.description:
498 520 data.update(self.description['Metadata'])
499 521 else:
500 522 data = self.description
501 523 if name in data:
502 524 if isinstance(data[name], str):
503 525 return data[name]
504 526 elif isinstance(data[name], list):
505 527 return None
506 528 elif isinstance(data[name], dict):
507 529 for key, value in data[name].items():
508 530 return key
509 531 return name
510 532 else:
511 533 if 'Metadata' in self.description:
512 534 meta = self.description['Metadata']
513 535 else:
514 536 meta = self.description
515 537 if name in meta:
516 538 if isinstance(meta[name], list):
517 539 return meta[name][x]
518 540 elif isinstance(meta[name], dict):
519 541 for key, value in meta[name].items():
520 542 return value[x]
521 543 if 'cspc' in name:
522 544 return 'pair{:02d}'.format(x)
523 545 else:
524 546 return 'channel{:02d}'.format(x)
525
547
526 548 def writeMetadata(self, fp):
527 549
528 550 if self.description:
529 551 if 'Metadata' in self.description:
530 552 grp = fp.create_group('Metadata')
531 553 else:
532 554 grp = fp
533 555 else:
534 556 grp = fp.create_group('Metadata')
535 557
536 558 for i in range(len(self.metadataList)):
537 559 if not hasattr(self.dataOut, self.metadataList[i]):
538 560 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
539 561 continue
540 562 value = getattr(self.dataOut, self.metadataList[i])
541 563 if isinstance(value, bool):
542 564 if value is True:
543 565 value = 1
544 566 else:
545 567 value = 0
546 568 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
547 569 return
548 570
549 571 def writeData(self, fp):
550
572
551 573 if self.description:
552 574 if 'Data' in self.description:
553 575 grp = fp.create_group('Data')
554 576 else:
555 577 grp = fp
556 578 else:
557 579 grp = fp.create_group('Data')
558 580
559 581 dtsets = []
560 582 data = []
561
583
562 584 for dsInfo in self.dsList:
563 585 if dsInfo['nDim'] == 0:
564 586 ds = grp.create_dataset(
565 self.getLabel(dsInfo['variable']),
587 self.getLabel(dsInfo['variable']),
566 588 (self.blocksPerFile, ),
567 chunks=True,
589 chunks=True,
568 590 dtype=numpy.float64)
569 591 dtsets.append(ds)
570 592 data.append((dsInfo['variable'], -1))
571 593 else:
572 594 label = self.getLabel(dsInfo['variable'])
573 595 if label is not None:
574 596 sgrp = grp.create_group(label)
575 597 else:
576 598 sgrp = grp
577 599 for i in range(dsInfo['dsNumber']):
578 600 ds = sgrp.create_dataset(
579 self.getLabel(dsInfo['variable'], i),
601 self.getLabel(dsInfo['variable'], i),
580 602 (self.blocksPerFile, ) + dsInfo['shape'][1:],
581 603 chunks=True,
582 604 dtype=dsInfo['dtype'])
583 605 dtsets.append(ds)
584 606 data.append((dsInfo['variable'], i))
585 607 fp.flush()
586 608
587 609 log.log('Creating file: {}'.format(fp.filename), self.name)
588
610
589 611 self.ds = dtsets
590 612 self.data = data
591 613 self.firsttime = True
592 614 self.blockIndex = 0
593 615 return
594 616
595 617 def putData(self):
596
597 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
618 print("**************************PUT DATA***************************************************")
619 if (self.blockIndex == self.blocksPerFile) or self.timeFlag() or self.generalFlag():
598 620 self.closeFile()
599 621 self.setNextFile()
600 622
601 623 for i, ds in enumerate(self.ds):
602 624 attr, ch = self.data[i]
603 625 if ch == -1:
604 626 ds[self.blockIndex] = getattr(self.dataOut, attr)
605 627 else:
606 628 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
607 629
608 630 self.fp.flush()
609 631 self.blockIndex += 1
610 632 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
611 633
612 634 return
613 635
614 636 def closeFile(self):
615 637
616 638 if self.blockIndex != self.blocksPerFile:
617 639 for ds in self.ds:
618 640 ds.resize(self.blockIndex, axis=0)
619 641
620 642 if self.fp:
621 643 self.fp.flush()
622 644 self.fp.close()
623 645
624 646 def close(self):
625 647
626 648 self.closeFile()
@@ -1,4473 +1,4608
1 1 import numpy,os,h5py
2 2 import math
3 3 from scipy import optimize, interpolate, signal, stats, ndimage
4 4 import scipy
5 5 import re
6 6 import datetime
7 7 import copy
8 8 import sys
9 9 import importlib
10 10 import itertools
11 11 from multiprocessing import Pool, TimeoutError
12 12 from multiprocessing.pool import ThreadPool
13 13 import time
14 14
15 15 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state parameters
16 16 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
17 17 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
18 18 from scipy import asarray as ar,exp
19 19 from scipy.optimize import curve_fit
20 20 from schainpy.utils import log
21 21 import warnings
22 22 from numpy import NaN
23 23 from scipy.optimize.optimize import OptimizeWarning
24 24 warnings.filterwarnings('ignore')
25 25
26 from time import sleep
27
26 28 import matplotlib.pyplot as plt
27 29
28 30 SPEED_OF_LIGHT = 299792458
29 31
30 32 '''solving pickling issue'''
31 33
32 34 def _pickle_method(method):
33 35 func_name = method.__func__.__name__
34 36 obj = method.__self__
35 37 cls = method.__self__.__class__
36 38 return _unpickle_method, (func_name, obj, cls)
37 39
38 40 def _unpickle_method(func_name, obj, cls):
39 41 for cls in cls.mro():
40 42 try:
41 43 func = cls.__dict__[func_name]
42 44 except KeyError:
43 45 pass
44 46 else:
45 47 break
46 48 return func.__get__(obj, cls)
47 49
48 50 def isNumber(str):
49 51 try:
50 52 float(str)
51 53 return True
52 54 except:
53 55 return False
54 56
55 57 class ParametersProc(ProcessingUnit):
56 58
57 59 METHODS = {}
58 60 nSeconds = None
59 61
60 62 def __init__(self):
61 63 ProcessingUnit.__init__(self)
62 64
63 65 # self.objectDict = {}
64 66 self.buffer = None
65 67 self.firstdatatime = None
66 68 self.profIndex = 0
67 69 self.dataOut = Parameters()
68 70 self.setupReq = False # add this to every processing unit
69 71
70 72 def __updateObjFromInput(self):
71 73
72 74 self.dataOut.inputUnit = self.dataIn.type
73 75
74 76 self.dataOut.timeZone = self.dataIn.timeZone
75 77 self.dataOut.dstFlag = self.dataIn.dstFlag
76 78 self.dataOut.errorCount = self.dataIn.errorCount
77 79 self.dataOut.useLocalTime = self.dataIn.useLocalTime
78 80
79 81 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
80 82 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
81 83 self.dataOut.channelList = self.dataIn.channelList
82 84 self.dataOut.heightList = self.dataIn.heightList
83 85 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
84 86 # self.dataOut.nHeights = self.dataIn.nHeights
85 87 # self.dataOut.nChannels = self.dataIn.nChannels
86 88 # self.dataOut.nBaud = self.dataIn.nBaud
87 89 # self.dataOut.nCode = self.dataIn.nCode
88 90 # self.dataOut.code = self.dataIn.code
89 91 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
90 92 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
91 93 # self.dataOut.utctime = self.firstdatatime
92 94 self.dataOut.utctime = self.dataIn.utctime
93 95 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assume the data is already decoded
94 96 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assume the data has not been flipped
95 97 self.dataOut.nCohInt = self.dataIn.nCohInt
96 98 # self.dataOut.nIncohInt = 1
97 99 # self.dataOut.ippSeconds = self.dataIn.ippSeconds
98 100 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
99 101 self.dataOut.timeInterval1 = self.dataIn.timeInterval
100 102 self.dataOut.heightList = self.dataIn.heightList
101 103 self.dataOut.frequency = self.dataIn.frequency
102 104 # self.dataOut.noise = self.dataIn.noise
103 105
104 106 def run(self):
105 107
106 108
107 109 #print("HOLA MUNDO SOY YO")
108 110 #---------------------- Voltage Data ---------------------------
109 111
110 112 if self.dataIn.type == "Voltage":
111 113
112 114 self.__updateObjFromInput()
113 115 self.dataOut.data_pre = self.dataIn.data.copy()
114 116 self.dataOut.flagNoData = False
115 117 self.dataOut.utctimeInit = self.dataIn.utctime
116 118 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
117 119
118 120 if hasattr(self.dataIn, 'flagDataAsBlock'):
119 121 self.dataOut.flagDataAsBlock = self.dataIn.flagDataAsBlock
120 122
121 123 if hasattr(self.dataIn, 'profileIndex'):
122 124 self.dataOut.profileIndex = self.dataIn.profileIndex
123 125
124 126 if hasattr(self.dataIn, 'dataPP_POW'):
125 127 self.dataOut.dataPP_POW = self.dataIn.dataPP_POW
126 128
127 129 if hasattr(self.dataIn, 'dataPP_POWER'):
128 130 self.dataOut.dataPP_POWER = self.dataIn.dataPP_POWER
129 131
130 132 if hasattr(self.dataIn, 'dataPP_DOP'):
131 133 self.dataOut.dataPP_DOP = self.dataIn.dataPP_DOP
132 134
133 135 if hasattr(self.dataIn, 'dataPP_SNR'):
134 136 self.dataOut.dataPP_SNR = self.dataIn.dataPP_SNR
135 137
136 138 if hasattr(self.dataIn, 'dataPP_WIDTH'):
137 139 self.dataOut.dataPP_WIDTH = self.dataIn.dataPP_WIDTH
138 140 return
139 141
140 142 #---------------------- Spectra Data ---------------------------
141 143
142 144 if self.dataIn.type == "Spectra":
143 145 #print("que paso en spectra")
144 146 self.dataOut.data_pre = [self.dataIn.data_spc, self.dataIn.data_cspc]
145 147 self.dataOut.data_spc = self.dataIn.data_spc
146 148 self.dataOut.data_cspc = self.dataIn.data_cspc
147 149 self.dataOut.nProfiles = self.dataIn.nProfiles
148 150 self.dataOut.nIncohInt = self.dataIn.nIncohInt
149 151 self.dataOut.nFFTPoints = self.dataIn.nFFTPoints
150 152 self.dataOut.ippFactor = self.dataIn.ippFactor
151 153 self.dataOut.abscissaList = self.dataIn.getVelRange(1)
152 154 self.dataOut.spc_noise = self.dataIn.getNoise()
153 155 self.dataOut.spc_range = (self.dataIn.getFreqRange(1) , self.dataIn.getAcfRange(1) , self.dataIn.getVelRange(1))
154 156 # self.dataOut.normFactor = self.dataIn.normFactor
155 157 self.dataOut.pairsList = self.dataIn.pairsList
156 158 self.dataOut.groupList = self.dataIn.pairsList
157 159 self.dataOut.flagNoData = False
158 160
159 161 if hasattr(self.dataIn, 'flagDataAsBlock'):
160 162 self.dataOut.flagDataAsBlock = self.dataIn.flagDataAsBlock
161 163
162 164 if hasattr(self.dataIn, 'ChanDist'): #Distances of receiver channels
163 165 self.dataOut.ChanDist = self.dataIn.ChanDist
164 166 else: self.dataOut.ChanDist = None
165 167
166 168 #if hasattr(self.dataIn, 'VelRange'): #Velocities range
167 169 # self.dataOut.VelRange = self.dataIn.VelRange
168 170 #else: self.dataOut.VelRange = None
169 171
170 172 if hasattr(self.dataIn, 'RadarConst'): #Radar Constant
171 173 self.dataOut.RadarConst = self.dataIn.RadarConst
172 174
173 175 if hasattr(self.dataIn, 'NPW'): #NPW
174 176 self.dataOut.NPW = self.dataIn.NPW
175 177
176 178 if hasattr(self.dataIn, 'COFA'): #COFA
177 179 self.dataOut.COFA = self.dataIn.COFA
178 180
179 181
180 182
181 183 #---------------------- Correlation Data ---------------------------
182 184
183 185 if self.dataIn.type == "Correlation":
184 186 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.dataIn.splitFunctions()
185 187
186 188 self.dataOut.data_pre = (self.dataIn.data_cf[acf_ind,:], self.dataIn.data_cf[ccf_ind,:,:])
187 189 self.dataOut.normFactor = (self.dataIn.normFactor[acf_ind,:], self.dataIn.normFactor[ccf_ind,:])
188 190 self.dataOut.groupList = (acf_pairs, ccf_pairs)
189 191
190 192 self.dataOut.abscissaList = self.dataIn.lagRange
191 193 self.dataOut.noise = self.dataIn.noise
192 194 self.dataOut.data_snr = self.dataIn.SNR
193 195 self.dataOut.flagNoData = False
194 196 self.dataOut.nAvg = self.dataIn.nAvg
195 197
196 198 #---------------------- Parameters Data ---------------------------
197 199
198 200 if self.dataIn.type == "Parameters":
199 201 self.dataOut.copy(self.dataIn)
200 202 self.dataOut.flagNoData = False
201 203 #print("yo si entre")
202 204
203 205 return True
204 206
205 207 self.__updateObjFromInput()
206 208 #print("yo si entre2")
207 209
208 210 self.dataOut.utctimeInit = self.dataIn.utctime
209 211 self.dataOut.paramInterval = self.dataIn.timeInterval
210 212 #print("soy spectra ",self.dataOut.utctimeInit)
211 213 return
212 214
213 215
214 216 def target(tups):
215 217
216 218 obj, args = tups
217 219
218 220 return obj.FitGau(args)
219 221
220 222 class RemoveWideGC(Operation):
221 223 ''' This class removes wide ground clutter and replaces it with simple interpolated points
222 224 This mainly applies to the CLAIRE radar
223 225
224 226 ClutterWidth : Width to look for the clutter peak
225 227
226 228 Input:
227 229
228 230 self.dataOut.data_pre : SPC and CSPC
229 231 self.dataOut.spc_range : To select wind and rainfall velocities
230 232
231 233 Affected:
232 234
233 235 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
234 236
235 237 Written by D. Scipión 25.02.2021
236 238 '''
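# A minimal usage sketch; 'procUnitConfObj' stands for an existing Spectra processing unit
# in the project, and the ClutterWidth value below is only illustrative:
# opObj = procUnitConfObj.addOperation(name='RemoveWideGC', optype='other')
# opObj.addParameter(name='ClutterWidth', value='2.5', format='float')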
237 239 def __init__(self):
238 240 Operation.__init__(self)
239 241 self.i = 0
240 242 self.ich = 0
241 243 self.ir = 0
242 244
243 245 def run(self, dataOut, ClutterWidth=2.5):
244 246 # print ('Entering RemoveWideGC ... ')
245 247
246 248 self.spc = dataOut.data_pre[0].copy()
247 249 self.spc_out = dataOut.data_pre[0].copy()
248 250 self.Num_Chn = self.spc.shape[0]
249 251 self.Num_Hei = self.spc.shape[2]
250 252 VelRange = dataOut.spc_range[2][:-1]
251 253 dv = VelRange[1]-VelRange[0]
252 254
253 255 # Find the velocities that corresponds to zero
254 256 gc_values = numpy.squeeze(numpy.where(numpy.abs(VelRange) <= ClutterWidth))
255 257
256 258 # Removing novalid data from the spectra
257 259 for ich in range(self.Num_Chn) :
258 260 for ir in range(self.Num_Hei) :
259 261 # Estimate the noise at each range
260 262 HSn = hildebrand_sekhon(self.spc[ich,:,ir],dataOut.nIncohInt)
261 263
262 264 # Removing the noise floor at each range
263 265 novalid = numpy.where(self.spc[ich,:,ir] < HSn)
264 266 self.spc[ich,novalid,ir] = HSn
265 267
266 268 junk = numpy.append(numpy.insert(numpy.squeeze(self.spc[ich,gc_values,ir]),0,HSn),HSn)
267 269 j1index = numpy.squeeze(numpy.where(numpy.diff(junk)>0))
268 270 j2index = numpy.squeeze(numpy.where(numpy.diff(junk)<0))
269 271 if ((numpy.size(j1index)<=1) | (numpy.size(j2index)<=1)) :
270 272 continue
271 273 junk3 = numpy.squeeze(numpy.diff(j1index))
272 274 junk4 = numpy.squeeze(numpy.diff(j2index))
273 275
274 276 valleyindex = j2index[numpy.where(junk4>1)]
275 277 peakindex = j1index[numpy.where(junk3>1)]
276 278
277 279 isvalid = numpy.squeeze(numpy.where(numpy.abs(VelRange[gc_values[peakindex]]) <= 2.5*dv))
278 280 if numpy.size(isvalid) == 0 :
279 281 continue
280 282 if numpy.size(isvalid) >1 :
281 283 vindex = numpy.argmax(self.spc[ich,gc_values[peakindex[isvalid]],ir])
282 284 isvalid = isvalid[vindex]
283 285
284 286 # clutter peak
285 287 gcpeak = peakindex[isvalid]
286 288 vl = numpy.where(valleyindex < gcpeak)
287 289 if numpy.size(vl) == 0:
288 290 continue
289 291 gcvl = valleyindex[vl[0][-1]]
290 292 vr = numpy.where(valleyindex > gcpeak)
291 293 if numpy.size(vr) == 0:
292 294 continue
293 295 gcvr = valleyindex[vr[0][0]]
294 296
295 297 # Removing the clutter
296 298 interpindex = numpy.array([gc_values[gcvl], gc_values[gcvr]])
297 299 gcindex = gc_values[gcvl+1:gcvr-1]
298 300 self.spc_out[ich,gcindex,ir] = numpy.interp(VelRange[gcindex],VelRange[interpindex],self.spc[ich,interpindex,ir])
299 301
300 302 dataOut.data_pre[0] = self.spc_out
301 303 #print ('Leaving RemoveWideGC ... ')
302 304 return dataOut
303 305
304 306 class SpectralFilters(Operation):
305 307 ''' This class allows replacing the invalid (novalid) values with noise for each channel
306 308 This applies to the CLAIRE radar
307 309
308 310 PositiveLimit : RightLimit of novalid data
309 311 NegativeLimit : LeftLimit of novalid data
310 312
311 313 Input:
312 314
313 315 self.dataOut.data_pre : SPC and CSPC
314 316 self.dataOut.spc_range : To select wind and rainfall velocities
315 317
316 318 Affected:
317 319
318 320 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
319 321
320 322 Written by D. Scipión 29.01.2021
321 323 '''
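# A minimal usage sketch; 'procUnitConfObj' stands for an existing Spectra processing unit
# in the project, and the limit values below are only illustrative:
# opObj = procUnitConfObj.addOperation(name='SpectralFilters', optype='other')
# opObj.addParameter(name='PositiveLimit', value='1.5', format='float')
# opObj.addParameter(name='NegativeLimit', value='-1.5', format='float')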
322 324 def __init__(self):
323 325 Operation.__init__(self)
324 326 self.i = 0
325 327
326 328 def run(self, dataOut, PositiveLimit=1.5, NegativeLimit=-1.5):
327 329 
328 330 self.spc = dataOut.data_pre[0].copy()
329 331 self.Num_Chn = self.spc.shape[0]
330 332 VelRange = dataOut.spc_range[2]
331 333 
332 334 # novalid corresponds to the velocity bins between NegativeLimit and PositiveLimit
333 335 # (the default limits are only placeholder values; set them to the desired velocity window)
334 336 novalid = numpy.where(numpy.logical_and(VelRange[:-1] >= NegativeLimit, VelRange[:-1] <= PositiveLimit))[0]
335 337 # Removing novalid data from the spectra
336 338 for i in range(self.Num_Chn):
337 339 self.spc[i,novalid,:] = dataOut.noise[i]
338 340 dataOut.data_pre[0] = self.spc
339 341 return dataOut
340 342
341 343 class GaussianFit(Operation):
342 344
343 345 '''
344 346 Function that fits one or two generalized Gaussians (gg) based
345 347 on the PSD shape across a "power band" identified from a cumsum of
346 348 the measured spectrum minus noise.
347 349
348 350 Input:
349 351 self.dataOut.data_pre : SelfSpectra
350 352
351 353 Output:
352 354 self.dataOut.SPCparam : SPC_ch1, SPC_ch2
353 355
354 356 '''
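# A minimal usage sketch; 'procUnitConfObj' stands for an existing Spectra processing unit
# in the project, parameter names are taken from run() below, and the values are illustrative:
# opObj = procUnitConfObj.addOperation(name='GaussianFit', optype='other')
# opObj.addParameter(name='SNRdBlimit', value='-9', format='float')
# opObj.addParameter(name='method', value='generalized', format='str')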
355 357 def __init__(self):
356 358 Operation.__init__(self)
357 359 self.i=0
358 360
359 361
360 362 # def run(self, dataOut, num_intg=7, pnoise=1., SNRlimit=-9): #num_intg: Incoherent integrations, pnoise: Noise, vel_arr: range of velocities, similar to the ftt points
361 363 def run(self, dataOut, SNRdBlimit=-9, method='generalized'):
362 364 """This routine will fit a couple of generalized Gaussians to a power spectrum
363 365 methods: generalized, squared
364 366 input: spc
365 367 output:
366 368 noise, amplitude0,shift0,width0,p0,Amplitude1,shift1,width1,p1
367 369 """
368 370 print ('Entering ',method,' double Gaussian fit')
369 371 self.spc = dataOut.data_pre[0].copy()
370 372 self.Num_Hei = self.spc.shape[2]
371 373 self.Num_Bin = self.spc.shape[1]
372 374 self.Num_Chn = self.spc.shape[0]
373 375
374 376 start_time = time.time()
375 377
376 378 pool = Pool(processes=self.Num_Chn)
377 379 args = [(dataOut.spc_range[2], ich, dataOut.spc_noise[ich], dataOut.nIncohInt, SNRdBlimit) for ich in range(self.Num_Chn)]
378 380 objs = [self for __ in range(self.Num_Chn)]
379 381 attrs = list(zip(objs, args))
380 382 DGauFitParam = pool.map(target, attrs)
381 383 # Parameters:
382 384 # 0. Noise, 1. Amplitude, 2. Shift, 3. Width 4. Power
383 385 dataOut.DGauFitParams = numpy.asarray(DGauFitParam)
384 386
385 387 # Double Gaussian Curves
386 388 gau0 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
387 389 gau0[:] = numpy.NaN
388 390 gau1 = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
389 391 gau1[:] = numpy.NaN
390 392 x_mtr = numpy.transpose(numpy.tile(dataOut.getVelRange(1)[:-1], (self.Num_Hei,1)))
391 393 for iCh in range(self.Num_Chn):
392 394 N0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,0]] * self.Num_Bin))
393 395 N1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0,:,1]] * self.Num_Bin))
394 396 A0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,0]] * self.Num_Bin))
395 397 A1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1,:,1]] * self.Num_Bin))
396 398 v0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,0]] * self.Num_Bin))
397 399 v1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2,:,1]] * self.Num_Bin))
398 400 s0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,0]] * self.Num_Bin))
399 401 s1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3,:,1]] * self.Num_Bin))
400 402 if method == 'generalized':
401 403 p0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,0]] * self.Num_Bin))
402 404 p1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4,:,1]] * self.Num_Bin))
403 405 elif method == 'squared':
404 406 p0 = 2.
405 407 p1 = 2.
406 408 gau0[iCh] = A0*numpy.exp(-0.5*numpy.abs((x_mtr-v0)/s0)**p0)+N0
407 409 gau1[iCh] = A1*numpy.exp(-0.5*numpy.abs((x_mtr-v1)/s1)**p1)+N1
408 410 dataOut.GaussFit0 = gau0
409 411 dataOut.GaussFit1 = gau1
410 412
411 413 print('Leaving ',method ,' double Gaussian fit')
412 414 return dataOut
413 415
414 416 def FitGau(self, X):
415 417 # print('Entering FitGau')
416 418 # Assigning the variables
417 419 Vrange, ch, wnoise, num_intg, SNRlimit = X
418 420 # Noise Limits
419 421 noisebl = wnoise * 0.9
420 422 noisebh = wnoise * 1.1
421 423 # Radar Velocity
422 424 Va = max(Vrange)
423 425 deltav = Vrange[1] - Vrange[0]
424 426 x = numpy.arange(self.Num_Bin)
425 427
426 428 # print ('stop 0')
427 429
428 430 # 5 parameters, 2 Gaussians
429 431 DGauFitParam = numpy.zeros([5, self.Num_Hei,2])
430 432 DGauFitParam[:] = numpy.NaN
431 433
432 434 # SPCparam = []
433 435 # SPC_ch1 = numpy.zeros([self.Num_Bin,self.Num_Hei])
434 436 # SPC_ch2 = numpy.zeros([self.Num_Bin,self.Num_Hei])
435 437 # SPC_ch1[:] = 0 #numpy.NaN
436 438 # SPC_ch2[:] = 0 #numpy.NaN
437 439 # print ('stop 1')
438 440 for ht in range(self.Num_Hei):
439 441 # print (ht)
440 442 # print ('stop 2')
441 443 # Spectra at each range
442 444 spc = numpy.asarray(self.spc)[ch,:,ht]
443 445 snr = ( spc.mean() - wnoise ) / wnoise
444 446 snrdB = 10.*numpy.log10(snr)
445 447
446 448 #print ('stop 3')
447 449 if snrdB < SNRlimit :
448 450 # snr = numpy.NaN
449 451 # SPC_ch1[:,ht] = 0#numpy.NaN
450 452 # SPC_ch1[:,ht] = 0#numpy.NaN
451 453 # SPCparam = (SPC_ch1,SPC_ch2)
452 454 # print ('SNR less than SNRth')
453 455 continue
454 456 # wnoise = hildebrand_sekhon(spc,num_intg)
455 457 # print ('stop 2.01')
456 458 #############################################
457 459 # normalizing spc and noise
458 460 # This part differs from gg1
459 461 # spc_norm_max = max(spc) #commented by D. Scipión 19.03.2021
460 462 #spc = spc / spc_norm_max
461 463 # pnoise = pnoise #/ spc_norm_max #commented by D. Scipión 19.03.2021
462 464 #############################################
463 465
464 466 # print ('stop 2.1')
465 467 fatspectra=1.0
466 468 # noise per channel.... we might want to use the noise at each range
467 469
468 470 # wnoise = noise_ #/ spc_norm_max #commented by D. Scipión 19.03.2021
469 471 #wnoise,stdv,i_max,index =enoise(spc,num_intg) #noise estimate using Hildebrand Sekhon, only wnoise is used
470 472 #if wnoise>1.1*pnoise: # to be tested later
471 473 # wnoise=pnoise
472 474 # noisebl = wnoise*0.9
473 475 # noisebh = wnoise*1.1
474 476 spc = spc - wnoise # signal
475 477
476 478 # print ('stop 2.2')
477 479 minx = numpy.argmin(spc)
478 480 #spcs=spc.copy()
479 481 spcs = numpy.roll(spc,-minx)
480 482 cum = numpy.cumsum(spcs)
481 483 # tot_noise = wnoise * self.Num_Bin #64;
482 484
483 485 # print ('stop 2.3')
484 486 # snr = sum(spcs) / tot_noise
485 487 # snrdB = 10.*numpy.log10(snr)
486 488 #print ('stop 3')
487 489 # if snrdB < SNRlimit :
488 490 # snr = numpy.NaN
489 491 # SPC_ch1[:,ht] = 0#numpy.NaN
490 492 # SPC_ch1[:,ht] = 0#numpy.NaN
491 493 # SPCparam = (SPC_ch1,SPC_ch2)
492 494 # print ('SNR less than SNRth')
493 495 # continue
494 496
495 497
496 498 #if snrdB<-18 or numpy.isnan(snrdB) or num_intg<4:
497 499 # return [None,]*4,[None,]*4,None,snrdB,None,None,[None,]*5,[None,]*9,None
498 500 # print ('stop 4')
499 501 cummax = max(cum)
500 502 epsi = 0.08 * fatspectra # cumsum to narrow down the energy region
501 503 cumlo = cummax * epsi
502 504 cumhi = cummax * (1-epsi)
503 505 powerindex = numpy.array(numpy.where(numpy.logical_and(cum>cumlo, cum<cumhi))[0])
504 506
505 507 # print ('stop 5')
506 508 if len(powerindex) < 1:# case for powerindex 0
507 509 # print ('powerindex < 1')
508 510 continue
509 511 powerlo = powerindex[0]
510 512 powerhi = powerindex[-1]
511 513 powerwidth = powerhi-powerlo
512 514 if powerwidth <= 1:
513 515 # print('powerwidth <= 1')
514 516 continue
515 517
516 518 # print ('stop 6')
517 519 firstpeak = powerlo + powerwidth/10.# first gaussian energy location
518 520 secondpeak = powerhi - powerwidth/10. #second gaussian energy location
519 521 midpeak = (firstpeak + secondpeak)/2.
520 522 firstamp = spcs[int(firstpeak)]
521 523 secondamp = spcs[int(secondpeak)]
522 524 midamp = spcs[int(midpeak)]
523 525
524 526 y_data = spc + wnoise
525 527
526 528 ''' single Gaussian '''
527 529 shift0 = numpy.mod(midpeak+minx, self.Num_Bin )
528 530 width0 = powerwidth/4.# initialization: power width of the spectrum divided by 4
529 531 power0 = 2.
530 532 amplitude0 = midamp
531 533 state0 = [shift0,width0,amplitude0,power0,wnoise]
532 534 bnds = ((0,self.Num_Bin-1),(1,powerwidth),(0,None),(0.5,3.),(noisebl,noisebh))
533 535 lsq1 = fmin_l_bfgs_b(self.misfit1, state0, args=(y_data,x,num_intg), bounds=bnds, approx_grad=True)
534 536 # print ('stop 7.1')
535 537 # print (bnds)
536 538
537 539 chiSq1=lsq1[1]
538 540
539 541 # print ('stop 8')
540 542 if fatspectra<1.0 and powerwidth<4:
541 543 choice=0
542 544 Amplitude0=lsq1[0][2]
543 545 shift0=lsq1[0][0]
544 546 width0=lsq1[0][1]
545 547 p0=lsq1[0][3]
546 548 Amplitude1=0.
547 549 shift1=0.
548 550 width1=0.
549 551 p1=0.
550 552 noise=lsq1[0][4]
551 553 #return (numpy.array([shift0,width0,Amplitude0,p0]),
552 554 # numpy.array([shift1,width1,Amplitude1,p1]),noise,snrdB,chiSq1,6.,sigmas1,[None,]*9,choice)
553 555
554 556 # print ('stop 9')
555 557 ''' two Gaussians '''
556 558 #shift0=numpy.mod(firstpeak+minx,64); shift1=numpy.mod(secondpeak+minx,64)
557 559 shift0 = numpy.mod(firstpeak+minx, self.Num_Bin )
558 560 shift1 = numpy.mod(secondpeak+minx, self.Num_Bin )
559 561 width0 = powerwidth/6.
560 562 width1 = width0
561 563 power0 = 2.
562 564 power1 = power0
563 565 amplitude0 = firstamp
564 566 amplitude1 = secondamp
565 567 state0 = [shift0,width0,amplitude0,power0,shift1,width1,amplitude1,power1,wnoise]
566 568 #bnds=((0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(0,63),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
567 569 bnds=((0,self.Num_Bin-1),(1,powerwidth/2.),(0,None),(0.5,3.),(0,self.Num_Bin-1),(1,powerwidth/2.),(0,None),(0.5,3.),(noisebl,noisebh))
568 570 #bnds=(( 0,(self.Num_Bin-1) ),(1,powerwidth/2.),(0,None),(0.5,3.),( 0,(self.Num_Bin-1)),(1,powerwidth/2.),(0,None),(0.5,3.),(0.1,0.5))
569 571
570 572 # print ('stop 10')
571 573 lsq2 = fmin_l_bfgs_b( self.misfit2 , state0 , args=(y_data,x,num_intg) , bounds=bnds , approx_grad=True )
572 574
573 575 # print ('stop 11')
574 576 chiSq2 = lsq2[1]
575 577
576 578 # print ('stop 12')
577 579
578 580 oneG = (chiSq1<5 and chiSq1/chiSq2<2.0) and (abs(lsq2[0][0]-lsq2[0][4])<(lsq2[0][1]+lsq2[0][5])/3. or abs(lsq2[0][0]-lsq2[0][4])<10)
579 581
580 582 # print ('stop 13')
581 583 if snrdB>-12: # when SNR is strong pick the peak with least shift (LOS velocity) error
582 584 if oneG:
583 585 choice = 0
584 586 else:
585 587 w1 = lsq2[0][1]; w2 = lsq2[0][5]
586 588 a1 = lsq2[0][2]; a2 = lsq2[0][6]
587 589 p1 = lsq2[0][3]; p2 = lsq2[0][7]
588 590 s1 = (2**(1+1./p1))*scipy.special.gamma(1./p1)/p1
589 591 s2 = (2**(1+1./p2))*scipy.special.gamma(1./p2)/p2
590 592 gp1 = a1*w1*s1; gp2 = a2*w2*s2 # power content of each Gaussian with proper p scaling
591 593
592 594 if gp1>gp2:
593 595 if a1>0.7*a2:
594 596 choice = 1
595 597 else:
596 598 choice = 2
597 599 elif gp2>gp1:
598 600 if a2>0.7*a1:
599 601 choice = 2
600 602 else:
601 603 choice = 1
602 604 else:
603 605 choice = numpy.argmax([a1,a2])+1
604 606 #else:
605 607 #choice=argmin([std2a,std2b])+1
606 608
607 609 else: # with low SNR go to the most energetic peak
608 610 choice = numpy.argmax([lsq1[0][2]*lsq1[0][1],lsq2[0][2]*lsq2[0][1],lsq2[0][6]*lsq2[0][5]])
609 611
610 612 # print ('stop 14')
611 613 shift0 = lsq2[0][0]
612 614 vel0 = Vrange[0] + shift0 * deltav
613 615 shift1 = lsq2[0][4]
614 616 # vel1=Vrange[0] + shift1 * deltav
615 617
616 618 # max_vel = 1.0
617 619 # Va = max(Vrange)
618 620 # deltav = Vrange[1]-Vrange[0]
619 621 # print ('stop 15')
620 622 #first peak will be 0, second peak will be 1
621 623 # if vel0 > -1.0 and vel0 < max_vel : #first peak is in the correct range # Commented by D. Scipión 19.03.2021
622 624 if vel0 > -Va and vel0 < Va : #first peak is in the correct range
623 625 shift0 = lsq2[0][0]
624 626 width0 = lsq2[0][1]
625 627 Amplitude0 = lsq2[0][2]
626 628 p0 = lsq2[0][3]
627 629
628 630 shift1 = lsq2[0][4]
629 631 width1 = lsq2[0][5]
630 632 Amplitude1 = lsq2[0][6]
631 633 p1 = lsq2[0][7]
632 634 noise = lsq2[0][8]
633 635 else:
634 636 shift1 = lsq2[0][0]
635 637 width1 = lsq2[0][1]
636 638 Amplitude1 = lsq2[0][2]
637 639 p1 = lsq2[0][3]
638 640
639 641 shift0 = lsq2[0][4]
640 642 width0 = lsq2[0][5]
641 643 Amplitude0 = lsq2[0][6]
642 644 p0 = lsq2[0][7]
643 645 noise = lsq2[0][8]
644 646
645 647 if Amplitude0<0.05: # in case the peak is noise
646 648 shift0,width0,Amplitude0,p0 = 4*[numpy.NaN]
647 649 if Amplitude1<0.05:
648 650 shift1,width1,Amplitude1,p1 = 4*[numpy.NaN]
649 651
650 652 # print ('stop 16 ')
651 653 # SPC_ch1[:,ht] = noise + Amplitude0*numpy.exp(-0.5*(abs(x-shift0)/width0)**p0)
652 654 # SPC_ch2[:,ht] = noise + Amplitude1*numpy.exp(-0.5*(abs(x-shift1)/width1)**p1)
653 655 # SPCparam = (SPC_ch1,SPC_ch2)
654 656
655 657 DGauFitParam[0,ht,0] = noise
656 658 DGauFitParam[0,ht,1] = noise
657 659 DGauFitParam[1,ht,0] = Amplitude0
658 660 DGauFitParam[1,ht,1] = Amplitude1
659 661 DGauFitParam[2,ht,0] = Vrange[0] + shift0 * deltav
660 662 DGauFitParam[2,ht,1] = Vrange[0] + shift1 * deltav
661 663 DGauFitParam[3,ht,0] = width0 * deltav
662 664 DGauFitParam[3,ht,1] = width1 * deltav
663 665 DGauFitParam[4,ht,0] = p0
664 666 DGauFitParam[4,ht,1] = p1
665 667
666 668 # print (DGauFitParam.shape)
667 669 # print ('Leaving FitGau')
668 670 return DGauFitParam
669 671 # return SPCparam
670 672 # return GauSPC
671 673
672 674 def y_model1(self,x,state):
673 675 shift0, width0, amplitude0, power0, noise = state
674 676 model0 = amplitude0*numpy.exp(-0.5*abs((x - shift0)/width0)**power0)
675 677 model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
676 678 model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
677 679 return model0 + model0u + model0d + noise
678 680
679 681 def y_model2(self,x,state): #Equation for two generalized Gaussians with Nyquist
680 682 shift0, width0, amplitude0, power0, shift1, width1, amplitude1, power1, noise = state
681 683 model0 = amplitude0*numpy.exp(-0.5*abs((x-shift0)/width0)**power0)
682 684 model0u = amplitude0*numpy.exp(-0.5*abs((x - shift0 - self.Num_Bin)/width0)**power0)
683 685 model0d = amplitude0*numpy.exp(-0.5*abs((x - shift0 + self.Num_Bin)/width0)**power0)
684 686
685 687 model1 = amplitude1*numpy.exp(-0.5*abs((x - shift1)/width1)**power1)
686 688 model1u = amplitude1*numpy.exp(-0.5*abs((x - shift1 - self.Num_Bin)/width1)**power1)
687 689 model1d = amplitude1*numpy.exp(-0.5*abs((x - shift1 + self.Num_Bin)/width1)**power1)
688 690 return model0 + model0u + model0d + model1 + model1u + model1d + noise
689 691
690 692 def misfit1(self,state,y_data,x,num_intg): # This function measures how close the real data is to the model data; the closer, the better the fit.
691 693
692 694 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model1(x,state)))**2)#/(64-5.) # /(64-5.) can be commented
693 695
694 696 def misfit2(self,state,y_data,x,num_intg):
695 697 return num_intg*sum((numpy.log(y_data)-numpy.log(self.y_model2(x,state)))**2)#/(64-9.)
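# Editor's note (illustrative sketch, not part of the original source): misfit1 and
# misfit2 are the cost functions minimized by fmin_l_bfgs_b in FitGau. Both compare
# the data and the model in log space, scaled by the number of incoherent
# integrations:
#
#     cost = num_intg * sum( (ln(y_data) - ln(y_model))**2 )
#
# so multiplicative spectral fluctuations are weighted more evenly than with a plain
# least-squares difference. As a quick sanity check (hypothetical values), feeding
# the model back as data should give a cost of zero:
#
#     # state = [shift, width, amplitude, power, noise]
#     # y = self.y_model1(x, state)
#     # self.misfit1(state, y, x, num_intg)  # -> 0.0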
696 698
697 699
698 700
699 701 class PrecipitationProc(Operation):
700 702
701 703 '''
702 704 Operator that estimates the reflectivity factor (Z) and the rainfall rate (R)
703 705
704 706 Input:
705 707 self.dataOut.data_pre : SelfSpectra
706 708
707 709 Output:
708 710
709 711 self.dataOut.data_output : Reflectivity factor, rainfall Rate
710 712
711 713
712 714 Parameters affected:
713 715 '''
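# Editor's note: a minimal configuration sketch, assuming the usual schainpy
# controller setup; the proc-unit variable name and the chosen values are
# hypothetical, while the parameter names come from run() below.
#
#     opObj = procUnitConfObj.addOperation(name='PrecipitationProc', optype='other')
#     opObj.addParameter(name='SNRdBlimit', value='-30', format='float')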
714 716
715 717 def __init__(self):
716 718 Operation.__init__(self)
717 719 self.i=0
718 720
719 721 def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
720 722 tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km2 = 0.93, Altitude=3350,SNRdBlimit=-30):
721 723
722 724 # print ('Entering PrecepitationProc ... ')
723 725
724 726 if radar == "MIRA35C" :
725 727
726 728 self.spc = dataOut.data_pre[0].copy()
727 729 self.Num_Hei = self.spc.shape[2]
728 730 self.Num_Bin = self.spc.shape[1]
729 731 self.Num_Chn = self.spc.shape[0]
730 732 Ze = self.dBZeMODE2(dataOut)
731 733
732 734 else:
733 735
734 736 self.spc = dataOut.data_pre[0].copy()
735 737
736 738 # NOTE: the range gates covered by the TX pulse must be removed
737 739 self.spc[:,:,0:7]= numpy.NaN
738 740
739 741 self.Num_Hei = self.spc.shape[2]
740 742 self.Num_Bin = self.spc.shape[1]
741 743 self.Num_Chn = self.spc.shape[0]
742 744
743 745 VelRange = dataOut.spc_range[2]
744 746
745 747 ''' The radar constant is obtained '''
746 748
747 749 self.Pt = Pt
748 750 self.Gt = Gt
749 751 self.Gr = Gr
750 752 self.Lambda = Lambda
751 753 self.aL = aL
752 754 self.tauW = tauW
753 755 self.ThetaT = ThetaT
754 756 self.ThetaR = ThetaR
755 757 self.GSys = 10**(36.63/10) # LNA gain 36.63 dB
756 758 self.lt = 10**(1.67/10) # Tx cable loss 1.67 dB
757 759 self.lr = 10**(5.73/10) # Rx cable loss 5.73 dB
758 760
759 761 Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
760 762 Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR)
761 763 RadarConstant = 10e-26 * Numerator / Denominator #
762 764 ExpConstant = 10**(40/10) # Experimental constant
763 765
764 766 SignalPower = numpy.zeros([self.Num_Chn,self.Num_Bin,self.Num_Hei])
765 767 for i in range(self.Num_Chn):
766 768 SignalPower[i,:,:] = self.spc[i,:,:] - dataOut.noise[i]
767 769 SignalPower[numpy.where(SignalPower < 0)] = 1e-20
768 770
769 771 SPCmean = numpy.mean(SignalPower, 0)
770 772 Pr = SPCmean[:,:]/dataOut.normFactor
771 773
772 774 # Declaring auxiliary variables
773 775 Range = dataOut.heightList*1000. #Range in m
774 776 # replicate the heightlist to obtain a matrix [Num_Bin,Num_Hei]
775 777 rMtrx = numpy.transpose(numpy.transpose([dataOut.heightList*1000.] * self.Num_Bin))
776 778 zMtrx = rMtrx+Altitude
777 779 # replicate the VelRange to obtain a matrix [Num_Bin,Num_Hei]
778 780 VelMtrx = numpy.transpose(numpy.tile(VelRange[:-1], (self.Num_Hei,1)))
779 781
780 782 # height dependence to air density Foote and Du Toit (1969)
781 783 delv_z = 1 + 3.68e-5 * zMtrx + 1.71e-9 * zMtrx**2
782 784 VMtrx = VelMtrx / delv_z #Normalized velocity
783 785 VMtrx[numpy.where(VMtrx> 9.6)] = numpy.NaN
784 786 # Diameter is related to the fall speed of falling drops
785 787 D_Vz = -1.667 * numpy.log( 0.9369 - 0.097087 * VMtrx ) # D in [mm]
786 788 # Only valid for D>= 0.16 mm
787 789 D_Vz[numpy.where(D_Vz < 0.16)] = numpy.NaN
788 790
789 791 #Calculate Radar Reflectivity ETAn
790 792 ETAn = (RadarConstant *ExpConstant) * Pr * rMtrx**2 #Reflectivity (ETA)
791 793 ETAd = ETAn * 6.18 * exp( -0.6 * D_Vz ) * delv_z
792 794 # Radar Cross Section
793 795 sigmaD = Km2 * (D_Vz * 1e-3 )**6 * numpy.pi**5 / Lambda**4
794 796 # Drop Size Distribution
795 797 DSD = ETAn / sigmaD
796 798 # Equivalent reflectivity
797 799 Ze_eqn = numpy.nansum( DSD * D_Vz**6 ,axis=0)
798 800 Ze_org = numpy.nansum(ETAn * Lambda**4, axis=0) / (1e-18*numpy.pi**5 * Km2) # [mm^6 /m^3]
799 801 # RainFall Rate
800 802 RR = 0.0006*numpy.pi * numpy.nansum( D_Vz**3 * DSD * VelMtrx ,0) #mm/hr
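# Editor's note (sketch of the relations used above): with the drop size distribution
# DSD(D) retrieved from the Doppler spectrum, the code follows the standard
# definitions
#
#     Ze ~ sum( DSD * D**6 )                        [mm^6 / m^3]
#     R  = (pi/6) * sum( DSD * D**3 * v(D) )        scaled to mm/hr
#
# the 0.0006*pi factor being pi/6 combined with the unit conversion (x 3.6e-3) from
# (mm^3 m^-3)(m s^-1) to mm/hr.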
801 803
802 804 # Censoring the data
803 805 # Removing data with SNRth < 0 dB; the SNR should be considered per channel
804 806 SNRth = 10**(SNRdBlimit/10) #-30dB
805 807 novalid = numpy.where((dataOut.data_snr[0,:] <SNRth) | (dataOut.data_snr[1,:] <SNRth) | (dataOut.data_snr[2,:] <SNRth)) # AND condition. Maybe OR condition better
806 808 W = numpy.nanmean(dataOut.data_dop,0)
807 809 W[novalid] = numpy.NaN
808 810 Ze_org[novalid] = numpy.NaN
809 811 RR[novalid] = numpy.NaN
810 812
811 813 dataOut.data_output = RR[8]
812 814 dataOut.data_param = numpy.ones([3,self.Num_Hei])
813 815 dataOut.channelList = [0,1,2]
814 816
815 817 dataOut.data_param[0]=10*numpy.log10(Ze_org)
816 818 dataOut.data_param[1]=-W
817 819 dataOut.data_param[2]=RR
818 820
819 821 # print ('Leaving PrecepitationProc ... ')
820 822 return dataOut
821 823
822 824 def dBZeMODE2(self, dataOut): # Processing for MIRA35C
823 825
824 826 NPW = dataOut.NPW
825 827 COFA = dataOut.COFA
826 828
827 829 SNR = numpy.array([self.spc[0,:,:] / NPW[0]]) #, self.spc[1,:,:] / NPW[1]])
828 830 RadarConst = dataOut.RadarConst
829 831 #frequency = 34.85*10**9
830 832
831 833 ETA = numpy.zeros(([self.Num_Chn ,self.Num_Hei]))
832 834 data_output = numpy.ones([self.Num_Chn , self.Num_Hei])*numpy.NaN
833 835
834 836 ETA = numpy.sum(SNR,1)
835 837
836 838 ETA = numpy.where(ETA != 0. , ETA, numpy.NaN)
837 839
838 840 Ze = numpy.ones([self.Num_Chn, self.Num_Hei] )
839 841
840 842 for r in range(self.Num_Hei):
841 843
842 844 Ze[0,r] = ( ETA[0,r] ) * COFA[0,r][0] * RadarConst * ((r/5000.)**2)
843 845 #Ze[1,r] = ( ETA[1,r] ) * COFA[1,r][0] * RadarConst * ((r/5000.)**2)
844 846
845 847 return Ze
846 848
847 849 # def GetRadarConstant(self):
848 850 #
849 851 # """
850 852 # Constants:
851 853 #
852 854 # Pt: Transmission Power dB 5kW 5000
853 855 # Gt: Transmission Gain dB 24.7 dB 295.1209
854 856 # Gr: Reception Gain dB 18.5 dB 70.7945
855 857 # Lambda: Wavelenght m 0.6741 m 0.6741
856 858 # aL: Attenuation loses dB 4dB 2.5118
857 859 # tauW: Width of transmission pulse s 4us 4e-6
858 860 # ThetaT: Transmission antenna beam angle rad 0.1656317 rad 0.1656317
859 861 # ThetaR: Reception antenna beam angle rad 0.36774087 rad 0.36774087
860 862 #
861 863 # """
862 864 #
863 865 # Numerator = ( (4*numpy.pi)**3 * aL**2 * 16 * numpy.log(2) )
864 866 # Denominator = ( Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * TauW * numpy.pi * ThetaT * TheraR)
865 867 # RadarConstant = Numerator / Denominator
866 868 #
867 869 # return RadarConstant
868 870
869 871
870 872
871 873 class FullSpectralAnalysis(Operation):
872 874
873 875 """
874 876 Function that implements the Full Spectral Analysis technique.
875 877
876 878 Input:
877 879 self.dataOut.data_pre : SelfSpectra and CrossSpectra data
878 880 self.dataOut.groupList : Pairlist of channels
879 881 self.dataOut.ChanDist : Physical distance between receivers
880 882
881 883
882 884 Output:
883 885
884 886 self.dataOut.data_output : Zonal wind, Meridional wind, and Vertical wind
885 887
886 888
887 889 Parameters affected: Winds, height range, SNR
888 890
889 891 """
890 892 def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRdBlimit=-30,
891 893 minheight=None, maxheight=None, NegativeLimit=None, PositiveLimit=None):
892 894
893 895 spc = dataOut.data_pre[0].copy()
894 896 cspc = dataOut.data_pre[1]
895 897 nHeights = spc.shape[2]
896 898
897 899 # first_height = 0.75 #km (ref: data header 20170822)
898 900 # resolution_height = 0.075 #km
899 901 '''
900 902 finding height range. check this when radar parameters are changed!
901 903 '''
902 904 if maxheight is not None:
903 905 # range_max = math.ceil((maxheight - first_height) / resolution_height) # theoretical
904 906 range_max = math.ceil(13.26 * maxheight - 3) # empirical, works better
905 907 else:
906 908 range_max = nHeights
907 909 if minheight is not None:
908 910 # range_min = int((minheight - first_height) / resolution_height) # theoretical
909 911 range_min = int(13.26 * minheight - 5) # empirical, works better
910 912 if range_min < 0:
911 913 range_min = 0
912 914 else:
913 915 range_min = 0
914 916
915 917 pairsList = dataOut.groupList
916 918 if dataOut.ChanDist is not None :
917 919 ChanDist = dataOut.ChanDist
918 920 else:
919 921 ChanDist = numpy.array([[Xi01, Eta01],[Xi02,Eta02],[Xi12,Eta12]])
920 922
921 923 # 4 variables: zonal, meridional, vertical, and average SNR
922 924 data_param = numpy.zeros([4,nHeights]) * numpy.NaN
923 925 velocityX = numpy.zeros([nHeights]) * numpy.NaN
924 926 velocityY = numpy.zeros([nHeights]) * numpy.NaN
925 927 velocityZ = numpy.zeros([nHeights]) * numpy.NaN
926 928
927 929 dbSNR = 10*numpy.log10(numpy.average(dataOut.data_snr,0))
928 930
929 931 '''***********************************************WIND ESTIMATION**************************************'''
930 932 for Height in range(nHeights):
931 933
932 934 if Height >= range_min and Height < range_max:
933 935 # error_code will be useful in future analysis
934 936 [Vzon,Vmer,Vver, error_code] = self.WindEstimation(spc[:,:,Height], cspc[:,:,Height], pairsList,
935 937 ChanDist, Height, dataOut.noise, dataOut.spc_range, dbSNR[Height], SNRdBlimit, NegativeLimit, PositiveLimit,dataOut.frequency)
936 938
937 939 if abs(Vzon) < 100. and abs(Vmer) < 100.:
938 940 velocityX[Height] = Vzon
939 941 velocityY[Height] = -Vmer
940 942 velocityZ[Height] = Vver
941 943
942 944 # Censoring data with SNR threshold
943 945 dbSNR [dbSNR < SNRdBlimit] = numpy.NaN
944 946
945 947 data_param[0] = velocityX
946 948 data_param[1] = velocityY
947 949 data_param[2] = velocityZ
948 950 data_param[3] = dbSNR
949 951 dataOut.data_param = data_param
950 952 return dataOut
951 953
952 954 def moving_average(self,x, N=2):
953 955 """ convolution for smoothenig data. note that last N-1 values are convolution with zeroes """
954 956 return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):]
955 957
956 958 def gaus(self,xSamples,Amp,Mu,Sigma):
957 959 return Amp * numpy.exp(-0.5*((xSamples - Mu)/Sigma)**2)
958 960
959 961 def Moments(self, ySamples, xSamples):
960 962 Power = numpy.nanmean(ySamples) # Power, 0th Moment
961 963 yNorm = ySamples / numpy.nansum(ySamples)
962 964 RadVel = numpy.nansum(xSamples * yNorm) # Radial Velocity, 1st Moment
963 965 Sigma2 = numpy.nansum(yNorm * (xSamples - RadVel)**2) # Spectral Width, 2nd Moment
964 966 StdDev = numpy.sqrt(numpy.abs(Sigma2)) # Standard deviation, spectral width
965 967 return numpy.array([Power,RadVel,StdDev])
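# Editor's note, a small illustrative example (hypothetical numbers, not from the
# original source): for a well-sampled Gaussian spectrum centred at +2 with width 1.5,
#
#     xs = numpy.linspace(-10., 10., 128)
#     ys = numpy.exp(-0.5*((xs - 2.)/1.5)**2)
#     # self.Moments(ys, xs) -> approximately [mean power, 2.0, 1.5]
#
# i.e. the returned triplet is the 0th moment (power), the 1st moment (radial
# velocity) and the square root of the 2nd central moment (spectral width).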
966 968
967 969 def StopWindEstimation(self, error_code):
968 970 Vzon = numpy.NaN
969 971 Vmer = numpy.NaN
970 972 Vver = numpy.NaN
971 973 return Vzon, Vmer, Vver, error_code
972 974
973 975 def AntiAliasing(self, interval, maxstep):
974 976 """
975 977 function to prevent errors from aliased values when computing phaseslope
976 978 """
977 979 antialiased = numpy.zeros(len(interval))
978 980 copyinterval = interval.copy()
979 981
980 982 antialiased[0] = copyinterval[0]
981 983
982 984 for i in range(1,len(antialiased)):
983 985 step = interval[i] - interval[i-1]
984 986 if step > maxstep:
985 987 copyinterval -= 2*numpy.pi
986 988 antialiased[i] = copyinterval[i]
987 989 elif step < maxstep*(-1):
988 990 copyinterval += 2*numpy.pi
989 991 antialiased[i] = copyinterval[i]
990 992 else:
991 993 antialiased[i] = copyinterval[i].copy()
992 994
993 995 return antialiased
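# Editor's note, illustrative only: AntiAliasing removes 2*pi jumps from a phase
# sequence so a straight line can be fitted to it. For a hypothetical wrapped input
#
#     wrapped = numpy.array([2.9, 3.1, -3.0, -2.8])   # step of about -6.1 < -maxstep
#     # self.AntiAliasing(wrapped, 4.5) -> [2.9, 3.1, 3.28, 3.48]
#
# the last two samples are shifted up by 2*pi once the negative step exceeds maxstep
# in magnitude.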
994 996
995 997 def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit, NegativeLimit, PositiveLimit, radfreq):
996 998 """
997 999 Function that calculates zonal, meridional and vertical wind velocities.
998 1000 Initial version by E. Bocanegra, updated by J. Zibell until Nov. 2019.
999 1001
1000 1002 Input:
1001 1003 spc, cspc : self spectra and cross spectra data. In Briggs notation something like S_i*(S_i)_conj, (S_j)_conj respectively.
1002 1004 pairsList : Pairlist of channels
1003 1005 ChanDist : array of xi_ij and eta_ij
1004 1006 Height : height at which data is processed
1005 1007 noise : noise in [channels] format for specific height
1006 1008 AbbsisaRange : range of the frequencies or velocities
1007 1009 dbSNR, SNRlimit : signal to noise ratio in db, lower limit
1008 1010
1009 1011 Output:
1010 1012 Vzon, Vmer, Vver : wind velocities
1011 1013 error_code : int that states where code is terminated
1012 1014
1013 1015 0 : no error detected
1014 1016 1 : Gaussian of mean spc exceeds widthlimit
1015 1017 2 : no Gaussian of mean spc found
1016 1018 3 : SNR too low or velocity too high -> e.g. precipitation
1017 1019 4 : at least one Gaussian of cspc exceeds widthlimit
1018 1020 5 : zero out of three cspc Gaussian fits converged
1019 1021 6 : phase slope fit could not be found
1020 1022 7 : arrays used to fit phase have different length
1021 1023 8 : frequency range is either too short (len <= 5) or very long (> 30% of cspc)
1022 1024
1023 1025 """
1024 1026
1025 1027 error_code = 0
1026 1028
1027 1029 nChan = spc.shape[0]
1028 1030 nProf = spc.shape[1]
1029 1031 nPair = cspc.shape[0]
1030 1032
1031 1033 SPC_Samples = numpy.zeros([nChan, nProf]) # for normalized spc values for one height
1032 1034 CSPC_Samples = numpy.zeros([nPair, nProf], dtype=numpy.complex_) # for normalized cspc values
1033 1035 phase = numpy.zeros([nPair, nProf]) # phase between channels
1034 1036 PhaseSlope = numpy.zeros(nPair) # slope of the phases, channelwise
1035 1037 PhaseInter = numpy.zeros(nPair) # intercept to the slope of the phases, channelwise
1036 1038 xFrec = AbbsisaRange[0][:-1] # frequency range
1037 1039 xVel = AbbsisaRange[2][:-1] # velocity range
1038 1040 xSamples = xFrec # the frequency range is taken
1039 1041 delta_x = xSamples[1] - xSamples[0] # delta_f or delta_x
1040 1042
1041 1043 # only consider velocities within NegativeLimit and PositiveLimit
1042 1044 if (NegativeLimit is None):
1043 1045 NegativeLimit = numpy.min(xVel)
1044 1046 if (PositiveLimit is None):
1045 1047 PositiveLimit = numpy.max(xVel)
1046 1048 xvalid = numpy.where((xVel > NegativeLimit) & (xVel < PositiveLimit))
1047 1049 xSamples_zoom = xSamples[xvalid]
1048 1050
1049 1051 '''Getting Eij and Nij'''
1050 1052 Xi01, Xi02, Xi12 = ChanDist[:,0]
1051 1053 Eta01, Eta02, Eta12 = ChanDist[:,1]
1052 1054
1053 1055 # spwd limit - updated by D. Scipión 30.03.2021
1054 1056 widthlimit = 10
1055 1057 '''************************* SPC is normalized ********************************'''
1056 1058 spc_norm = spc.copy()
1057 1059 # For each channel
1058 1060 for i in range(nChan):
1059 1061 spc_sub = spc_norm[i,:] - noise[i] # only the signal power
1060 1062 SPC_Samples[i] = spc_sub / (numpy.nansum(spc_sub) * delta_x)
1061 1063
1062 1064 '''********************** FITTING MEAN SPC GAUSSIAN **********************'''
1063 1065
1064 1066 """ the gaussian of the mean: first subtract noise, then normalize. this is legal because
1065 1067 you only fit the curve and don't need the absolute value of height for calculation,
1066 1068 only for estimation of width. for normalization of cross spectra, you need initial,
1067 1069 unnormalized self-spectra With noise.
1068 1070
1069 1071 Technically, you don't even need to normalize the self-spectra, as you only need the
1070 1072 width of the peak. However, it was left this way. Note that the normalization has a flaw:
1071 1073 due to subtraction of the noise, some values are below zero. Raw "spc" values should be
1072 1074 >= 0, as it is the modulus squared of the signals (complex * it's conjugate)
1073 1075 """
1074 1076 # initial conditions
1075 1077 popt = [1e-10,0,1e-10]
1076 1078 # Spectra average
1077 1079 SPCMean = numpy.average(SPC_Samples,0)
1078 1080 # Moments in frequency
1079 1081 SPCMoments = self.Moments(SPCMean[xvalid], xSamples_zoom)
1080 1082
1081 1083 # Gauss Fit SPC in frequency domain
1082 1084 if dbSNR > SNRlimit: # only if SNR > SNRth
1083 1085 try:
1084 1086 popt,pcov = curve_fit(self.gaus,xSamples_zoom,SPCMean[xvalid],p0=SPCMoments)
1085 1087 if popt[2] <= 0 or popt[2] > widthlimit: # CONDITION
1086 1088 return self.StopWindEstimation(error_code = 1)
1087 1089 FitGauss = self.gaus(xSamples_zoom,*popt)
1088 1090 except :#RuntimeError:
1089 1091 return self.StopWindEstimation(error_code = 2)
1090 1092 else:
1091 1093 return self.StopWindEstimation(error_code = 3)
1092 1094
1093 1095 '''***************************** CSPC Normalization *************************
1094 1096 The self-spectra (spc) are used to normalize the cross-spectra. Peaks from precipitation
1095 1097 influence the norm, which is not desired. First, a range is identified where the
1096 1098 wind peak is estimated -> sum_wind is the sum of those frequencies. Next, the area
1097 1099 around it gets cut off and the values are replaced by a mean determined by the boundary
1098 1100 data -> sum_noise (spc is not normalized here, that's why the noise is important).
1099 1101
1100 1102 The sums are then added and multiplied by range/datapoints, because you need
1101 1103 an integral and not a sum for the normalization.
1102 1104
1103 1105 A norm is found according to Briggs 92.
1104 1106 '''
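# Editor's note (sketch of the normalization implemented below): each cross spectrum
# is divided by the geometric mean of the integrated self spectra of the two channels
# in the pair,
#
#     CSPC_ij(f) = S_ij(f) / ( sqrt( sum(S_ii) * sum(S_jj) ) * delta_x )
#
# and the phase between channels is then taken with arctan2 on the normalized cross
# spectrum.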
1105 1107 # for each pair
1106 1108 for i in range(nPair):
1107 1109 cspc_norm = cspc[i,:].copy()
1108 1110 chan_index0 = pairsList[i][0]
1109 1111 chan_index1 = pairsList[i][1]
1110 1112 CSPC_Samples[i] = cspc_norm / (numpy.sqrt(numpy.nansum(spc_norm[chan_index0])*numpy.nansum(spc_norm[chan_index1])) * delta_x)
1111 1113 phase[i] = numpy.arctan2(CSPC_Samples[i].imag, CSPC_Samples[i].real)
1112 1114
1113 1115 CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPC_Samples[0,xvalid]), xSamples_zoom),
1114 1116 self.Moments(numpy.abs(CSPC_Samples[1,xvalid]), xSamples_zoom),
1115 1117 self.Moments(numpy.abs(CSPC_Samples[2,xvalid]), xSamples_zoom)])
1116 1118
1117 1119 popt01, popt02, popt12 = [1e-10,0,1e-10], [1e-10,0,1e-10] ,[1e-10,0,1e-10]
1118 1120 FitGauss01, FitGauss02, FitGauss12 = numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples))
1119 1121
1120 1122 '''*******************************FIT GAUSS CSPC************************************'''
1121 1123 try:
1122 1124 popt01,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[0][xvalid]),p0=CSPCmoments[0])
1123 1125 if popt01[2] > widthlimit: # CONDITION
1124 1126 return self.StopWindEstimation(error_code = 4)
1125 1127 popt02,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[1][xvalid]),p0=CSPCmoments[1])
1126 1128 if popt02[2] > widthlimit: # CONDITION
1127 1129 return self.StopWindEstimation(error_code = 4)
1128 1130 popt12,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[2][xvalid]),p0=CSPCmoments[2])
1129 1131 if popt12[2] > widthlimit: # CONDITION
1130 1132 return self.StopWindEstimation(error_code = 4)
1131 1133
1132 1134 FitGauss01 = self.gaus(xSamples_zoom, *popt01)
1133 1135 FitGauss02 = self.gaus(xSamples_zoom, *popt02)
1134 1136 FitGauss12 = self.gaus(xSamples_zoom, *popt12)
1135 1137 except:
1136 1138 return self.StopWindEstimation(error_code = 5)
1137 1139
1138 1140
1139 1141 '''************* Getting Fij ***************'''
1140 1142 # x-axis point of the gaussian where the center is located from GaussFit of spectra
1141 1143 GaussCenter = popt[1]
1142 1144 ClosestCenter = xSamples_zoom[numpy.abs(xSamples_zoom-GaussCenter).argmin()]
1143 1145 PointGauCenter = numpy.where(xSamples_zoom==ClosestCenter)[0][0]
1144 1146
1145 1147 # Point where e^-1 is located in the gaussian
1146 1148 PeMinus1 = numpy.max(FitGauss) * numpy.exp(-1)
1147 1149 FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # The closest point to"Peminus1" in "FitGauss"
1148 1150 PointFij = numpy.where(FitGauss==FijClosest)[0][0]
1149 1151 Fij = numpy.abs(xSamples_zoom[PointFij] - xSamples_zoom[PointGauCenter])
1150 1152
1151 1153 '''********** Taking frequency ranges from mean SPCs **********'''
1152 1154 GauWidth = popt[2] * 3/2 # Bandwidth of Gau01
1153 1155 Range = numpy.empty(2)
1154 1156 Range[0] = GaussCenter - GauWidth
1155 1157 Range[1] = GaussCenter + GauWidth
1156 1158 # Point in x-axis where the bandwidth is located (min:max)
1157 1159 ClosRangeMin = xSamples_zoom[numpy.abs(xSamples_zoom-Range[0]).argmin()]
1158 1160 ClosRangeMax = xSamples_zoom[numpy.abs(xSamples_zoom-Range[1]).argmin()]
1159 1161 PointRangeMin = numpy.where(xSamples_zoom==ClosRangeMin)[0][0]
1160 1162 PointRangeMax = numpy.where(xSamples_zoom==ClosRangeMax)[0][0]
1161 1163 Range = numpy.array([ PointRangeMin, PointRangeMax ])
1162 1164 FrecRange = xSamples_zoom[ Range[0] : Range[1] ]
1163 1165
1164 1166 '''************************** Getting Phase Slope ***************************'''
1165 1167 for i in range(nPair):
1166 1168 if len(FrecRange) > 5:
1167 1169 PhaseRange = phase[i, xvalid[0][Range[0]:Range[1]]].copy()
1168 1170 mask = ~numpy.isnan(FrecRange) & ~numpy.isnan(PhaseRange)
1169 1171 if len(FrecRange) == len(PhaseRange):
1170 1172 try:
1171 1173 slope, intercept, _, _, _ = stats.linregress(FrecRange[mask], self.AntiAliasing(PhaseRange[mask], 4.5))
1172 1174 PhaseSlope[i] = slope
1173 1175 PhaseInter[i] = intercept
1174 1176 except:
1175 1177 return self.StopWindEstimation(error_code = 6)
1176 1178 else:
1177 1179 return self.StopWindEstimation(error_code = 7)
1178 1180 else:
1179 1181 return self.StopWindEstimation(error_code = 8)
1180 1182
1181 1183 '''*** Constants A-H correspond to the convention as in Briggs and Vincent 1992 ***'''
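# Editor's note, a hedged summary of the solve below: C comes from the e^-1 width Fij
# of the mean self-spectrum; F and G are obtained from a 2x2 linear system built with
# the cross-spectral phase slopes; A, B and H from a 3x3 system built with the
# cross-spectral peak amplitudes; the horizontal winds then follow from
# [[A, H], [H, B]] . [Vmer, Vzon] = [-F, -G], and the vertical wind from the Doppler
# shift of the mean spectrum.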
1182 1184
1183 1185 '''Getting constant C'''
1184 1186 cC=(Fij*numpy.pi)**2
1185 1187
1186 1188 '''****** Getting constants F and G ******'''
1187 1189 MijEijNij = numpy.array([[Xi02,Eta02], [Xi12,Eta12]])
1188 1190 # MijEijNij = numpy.array([[Xi01,Eta01], [Xi02,Eta02], [Xi12,Eta12]])
1189 1191 # MijResult0 = (-PhaseSlope[0] * cC) / (2*numpy.pi)
1190 1192 MijResult1 = (-PhaseSlope[1] * cC) / (2*numpy.pi)
1191 1193 MijResult2 = (-PhaseSlope[2] * cC) / (2*numpy.pi)
1192 1194 # MijResults = numpy.array([MijResult0, MijResult1, MijResult2])
1193 1195 MijResults = numpy.array([MijResult1, MijResult2])
1194 1196 (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
1195 1197
1196 1198 '''****** Getting constants A, B and H ******'''
1197 1199 W01 = numpy.nanmax( FitGauss01 )
1198 1200 W02 = numpy.nanmax( FitGauss02 )
1199 1201 W12 = numpy.nanmax( FitGauss12 )
1200 1202
1201 1203 WijResult01 = ((cF * Xi01 + cG * Eta01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi / cC))
1202 1204 WijResult02 = ((cF * Xi02 + cG * Eta02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi / cC))
1203 1205 WijResult12 = ((cF * Xi12 + cG * Eta12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi / cC))
1204 1206 WijResults = numpy.array([WijResult01, WijResult02, WijResult12])
1205 1207
1206 1208 WijEijNij = numpy.array([ [Xi01**2, Eta01**2, 2*Xi01*Eta01] , [Xi02**2, Eta02**2, 2*Xi02*Eta02] , [Xi12**2, Eta12**2, 2*Xi12*Eta12] ])
1207 1209 (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
1208 1210
1209 1211 VxVy = numpy.array([[cA,cH],[cH,cB]])
1210 1212 VxVyResults = numpy.array([-cF,-cG])
1211 1213 (Vmer,Vzon) = numpy.linalg.solve(VxVy, VxVyResults)
1212 1214 Vver = -SPCMoments[1]*SPEED_OF_LIGHT/(2*radfreq)
1213 1215 error_code = 0
1214 1216
1215 1217 return Vzon, Vmer, Vver, error_code
1216 1218
1217 1219 class SpectralMoments(Operation):
1218 1220
1219 1221 '''
1220 1222 Function SpectralMoments()
1221 1223
1222 1224 Calculates moments (power, mean, standard deviation) and SNR of the signal
1223 1225
1224 1226 Type of dataIn: Spectra
1225 1227
1226 1228 Configuration Parameters:
1227 1229
1228 1230 dirCosx : Direction cosine in the X axis
1229 1231 dirCosy : Direction cosine in the Y axis
1230 1232
1231 1233 elevation :
1232 1234 azimuth :
1233 1235
1234 1236 Input:
1235 1237 channelList : simple channel list to select e.g. [2,3,7]
1236 1238 self.dataOut.data_pre : Spectral data
1237 1239 self.dataOut.abscissaList : List of frequencies
1238 1240 self.dataOut.noise : Noise level per channel
1239 1241
1240 1242 Affected:
1241 1243 self.dataOut.moments : Parameters per channel
1242 1244 self.dataOut.data_snr : SNR per channel
1243 1245
1244 1246 '''
1245 1247
1246 1248 def run(self, dataOut):
1247 1249
1248 1250 data = dataOut.data_pre[0]
1249 1251 absc = dataOut.abscissaList[:-1]
1250 1252 noise = dataOut.noise
1251 1253 nChannel = data.shape[0]
1252 1254 data_param = numpy.zeros((nChannel, 4, data.shape[2]))
1253 1255
1254 1256 for ind in range(nChannel):
1255 1257 data_param[ind,:,:] = self.__calculateMoments( data[ind,:,:] , absc , noise[ind] )
1256 1258
1257 1259 dataOut.moments = data_param[:,1:,:]
1258 1260 dataOut.data_snr = data_param[:,0]
1259 1261 dataOut.data_pow = data_param[:,1]
1260 1262 dataOut.data_dop = data_param[:,2]
1261 1263 dataOut.data_width = data_param[:,3]
1262 1264 return dataOut
1263 1265
1264 1266 def __calculateMoments(self, oldspec, oldfreq, n0,
1265 1267 nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
1266 1268
1267 1269 if (nicoh is None): nicoh = 1
1268 1270 if (graph is None): graph = 0
1269 1271 if (smooth is None): smooth = 0
1270 1272 elif (smooth < 3): smooth = 0
1271 1273
1272 1274 if (type1 is None): type1 = 0
1273 1275 if (fwindow is None): fwindow = numpy.zeros(oldfreq.size) + 1
1274 1276 if (snrth is None): snrth = -3
1275 1277 if (dc is None): dc = 0
1276 1278 if (aliasing is None): aliasing = 0
1277 1279 if (oldfd is None): oldfd = 0
1278 1280 if (wwauto is None): wwauto = 0
1279 1281
1280 1282 if (n0 < 1.e-20): n0 = 1.e-20
1281 1283
1282 1284 freq = oldfreq
1283 1285 vec_power = numpy.zeros(oldspec.shape[1])
1284 1286 vec_fd = numpy.zeros(oldspec.shape[1])
1285 1287 vec_w = numpy.zeros(oldspec.shape[1])
1286 1288 vec_snr = numpy.zeros(oldspec.shape[1])
1287 1289
1288 1290 # oldspec = numpy.ma.masked_invalid(oldspec)
1289 1291 for ind in range(oldspec.shape[1]):
1290 1292
1291 1293 spec = oldspec[:,ind]
1292 1294 aux = spec*fwindow
1293 1295 max_spec = aux.max()
1294 1296 m = aux.tolist().index(max_spec)
1295 1297
1296 1298 # Smooth
1297 1299 if (smooth == 0):
1298 1300 spec2 = spec
1299 1301 else:
1300 1302 spec2 = scipy.ndimage.filters.uniform_filter1d(spec,size=smooth)
1301 1303
1302 1304 # Moments Estimation
1303 1305 bb = spec2[numpy.arange(m,spec2.size)]
1304 1306 bb = (bb<n0).nonzero()
1305 1307 bb = bb[0]
1306 1308
1307 1309 ss = spec2[numpy.arange(0,m + 1)]
1308 1310 ss = (ss<n0).nonzero()
1309 1311 ss = ss[0]
1310 1312
1311 1313 if (bb.size == 0):
1312 1314 bb0 = spec.size - 1 - m
1313 1315 else:
1314 1316 bb0 = bb[0] - 1
1315 1317 if (bb0 < 0):
1316 1318 bb0 = 0
1317 1319
1318 1320 if (ss.size == 0):
1319 1321 ss1 = 1
1320 1322 else:
1321 1323 ss1 = max(ss) + 1
1322 1324
1323 1325 if (ss1 > m):
1324 1326 ss1 = m
1325 1327
1326 valid = numpy.arange(int(m + bb0 - ss1 + 1)) + ss1
1327 #valid = numpy.arange(1,oldspec.shape[0])# valid: full profile, same as pulse pair
1328 #valid = numpy.arange(int(m + bb0 - ss1 + 1)) + ss1
1329 valid = numpy.arange(1,oldspec.shape[0])# valid: full profile, same as pulse pair
1328 1330 signal_power = ((spec2[valid] - n0) * fwindow[valid]).mean() # D. Scipión added with correct definition
1329 1331 total_power = (spec2[valid] * fwindow[valid]).mean() # D. Scipión added with correct definition
1330 1332 power = ((spec2[valid] - n0) * fwindow[valid]).sum()
1331 1333 fd = ((spec2[valid]- n0)*freq[valid] * fwindow[valid]).sum() / power
1332 1334 w = numpy.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum() / power)
1333 1335 snr = (spec2.mean()-n0)/n0
1334 1336 if (snr < 1.e-20) :
1335 1337 snr = 1.e-20
1336 1338
1337 1339 # vec_power[ind] = power #D. Scipión replaced with the line below
1338 1340 vec_power[ind] = total_power
1339 1341 vec_fd[ind] = fd
1340 1342 vec_w[ind] = w
1341 1343 vec_snr[ind] = snr
1342 1344
1343 1345 return numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
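# Editor's note (recap of the estimators above, no new functionality): for each range
# gate, over the valid window and with the window function fwindow applied, the loop
# computes roughly
#
#     power = mean( S * fwindow )                                  (noise kept)
#     fd    = sum( (S - n0) * f ) / sum( S - n0 )                  first moment
#     w     = sqrt( sum( (S - n0) * (f - fd)**2 ) / sum(S - n0) )  spectral width
#     snr   = ( mean(S) - n0 ) / n0
#
# and the result is stacked as [snr, power, fd, w].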
1344 1346
1345 1347 #------------------ Get SA Parameters --------------------------
1346 1348
1347 1349 def GetSAParameters(self):
1348 1350 # SA in frequency
1349 1351 pairslist = self.dataOut.groupList
1350 1352 num_pairs = len(pairslist)
1351 1353
1352 1354 vel = self.dataOut.abscissaList
1353 1355 spectra = self.dataOut.data_pre
1354 1356 cspectra = self.dataIn.data_cspc
1355 1357 delta_v = vel[1] - vel[0]
1356 1358
1357 1359 #Calculating the power spectrum
1358 1360 spc_pow = numpy.sum(spectra, 3)*delta_v
1359 1361 #Normalizing Spectra
1360 1362 norm_spectra = spectra/spc_pow
1361 1363 #Calculating the norm_spectra at peak
1362 1364 max_spectra = numpy.max(norm_spectra, 3)
1363 1365
1364 1366 #Normalizing Cross Spectra
1365 1367 norm_cspectra = numpy.zeros(cspectra.shape)
1366 1368
1367 1369 for i in range(num_pairs):
1368 1370 norm_cspectra[i,:,:] = cspectra[i,:,:]/numpy.sqrt(spc_pow[pairslist[i][0],:]*spc_pow[pairslist[i][1],:])
1369 1371
1370 1372 max_cspectra = numpy.max(norm_cspectra,2)
1371 1373 max_cspectra_index = numpy.argmax(norm_cspectra, 2)
1372 1374
1373 1375 for i in range(num_pairs):
1374 1376 cspc_par[i,:,:] = __calculateMoments(norm_cspectra)
1375 1377 #------------------- Get Lags ----------------------------------
1376 1378
1377 1379 class SALags(Operation):
1378 1380 '''
1379 1381 Function SALags()
1380 1382
1381 1383 Input:
1382 1384 self.dataOut.data_pre
1383 1385 self.dataOut.abscissaList
1384 1386 self.dataOut.noise
1385 1387 self.dataOut.normFactor
1386 1388 self.dataOut.data_snr
1387 1389 self.dataOut.groupList
1388 1390 self.dataOut.nChannels
1389 1391
1390 1392 Affected:
1391 1393 self.dataOut.data_param
1392 1394
1393 1395 '''
1394 1396 def run(self, dataOut):
1395 1397 data_acf = dataOut.data_pre[0]
1396 1398 data_ccf = dataOut.data_pre[1]
1397 1399 normFactor_acf = dataOut.normFactor[0]
1398 1400 normFactor_ccf = dataOut.normFactor[1]
1399 1401 pairs_acf = dataOut.groupList[0]
1400 1402 pairs_ccf = dataOut.groupList[1]
1401 1403
1402 1404 nHeights = dataOut.nHeights
1403 1405 absc = dataOut.abscissaList
1404 1406 noise = dataOut.noise
1405 1407 SNR = dataOut.data_snr
1406 1408 nChannels = dataOut.nChannels
1407 1409 # pairsList = dataOut.groupList
1408 1410 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairsList, nChannels)
1409 1411
1410 1412 for l in range(len(pairs_acf)):
1411 1413 data_acf[l,:,:] = data_acf[l,:,:]/normFactor_acf[l,:]
1412 1414
1413 1415 for l in range(len(pairs_ccf)):
1414 1416 data_ccf[l,:,:] = data_ccf[l,:,:]/normFactor_ccf[l,:]
1415 1417
1416 1418 dataOut.data_param = numpy.zeros((len(pairs_ccf)*2 + 1, nHeights))
1417 1419 dataOut.data_param[:-1,:] = self.__calculateTaus(data_acf, data_ccf, absc)
1418 1420 dataOut.data_param[-1,:] = self.__calculateLag1Phase(data_acf, absc)
1419 1421 return
1420 1422
1421 1423 # def __getPairsAutoCorr(self, pairsList, nChannels):
1422 1424 #
1423 1425 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1424 1426 #
1425 1427 # for l in range(len(pairsList)):
1426 1428 # firstChannel = pairsList[l][0]
1427 1429 # secondChannel = pairsList[l][1]
1428 1430 #
1429 1431 # #Obteniendo pares de Autocorrelacion
1430 1432 # if firstChannel == secondChannel:
1431 1433 # pairsAutoCorr[firstChannel] = int(l)
1432 1434 #
1433 1435 # pairsAutoCorr = pairsAutoCorr.astype(int)
1434 1436 #
1435 1437 # pairsCrossCorr = range(len(pairsList))
1436 1438 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1437 1439 #
1438 1440 # return pairsAutoCorr, pairsCrossCorr
1439 1441
1440 1442 def __calculateTaus(self, data_acf, data_ccf, lagRange):
1441 1443
1442 1444 lag0 = data_acf.shape[1]//2
1443 1445 # Autocorrelation function
1444 1446 mean_acf = stats.nanmean(data_acf, axis = 0)
1445 1447
1446 1448 # Obtaining the TauCross index
1447 1449 ind_ccf = data_ccf.argmax(axis = 1)
1448 1450 # Obtaining the TauAuto index
1449 1451 ind_acf = numpy.zeros(ind_ccf.shape,dtype = 'int')
1450 1452 ccf_lag0 = data_ccf[:,lag0,:]
1451 1453
1452 1454 for i in range(ccf_lag0.shape[0]):
1453 1455 ind_acf[i,:] = numpy.abs(mean_acf - ccf_lag0[i,:]).argmin(axis = 0)
1454 1456
1455 1457 # Obtaining TauCross and TauAuto
1456 1458 tau_ccf = lagRange[ind_ccf]
1457 1459 tau_acf = lagRange[ind_acf]
1458 1460
1459 1461 Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0])
1460 1462
1461 1463 tau_ccf[Nan1,Nan2] = numpy.nan
1462 1464 tau_acf[Nan1,Nan2] = numpy.nan
1463 1465 tau = numpy.vstack((tau_ccf,tau_acf))
1464 1466
1465 1467 return tau
1466 1468
1467 1469 def __calculateLag1Phase(self, data, lagTRange):
1468 1470 data1 = stats.nanmean(data, axis = 0)
1469 1471 lag1 = numpy.where(lagTRange == 0)[0][0] + 1
1470 1472
1471 1473 phase = numpy.angle(data1[lag1,:])
1472 1474
1473 1475 return phase
1474 1476
1475 1477 class SpectralFitting(Operation):
1476 1478 '''
1477 1479 Function SpectralFitting()
1478 1480
1479 1481 Input:
1480 1482 Output:
1481 1483 Variables modified:
1482 1484 '''
1483 1485
1484 1486 def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
1485 1487
1486 1488
1487 1489 if path != None:
1488 1490 sys.path.append(path)
1489 1491 self.dataOut.library = importlib.import_module(file)
1490 1492
1491 1493 #To be inserted as a parameter
1492 1494 groupArray = numpy.array(groupList)
1493 1495 # groupArray = numpy.array([[0,1],[2,3]])
1494 1496 self.dataOut.groupList = groupArray
1495 1497
1496 1498 nGroups = groupArray.shape[0]
1497 1499 nChannels = self.dataIn.nChannels
1498 1500 nHeights=self.dataIn.heightList.size
1499 1501
1500 1502 #Parameters Array
1501 1503 self.dataOut.data_param = None
1502 1504
1503 1505 #Set constants
1504 1506 constants = self.dataOut.library.setConstants(self.dataIn)
1505 1507 self.dataOut.constants = constants
1506 1508 M = self.dataIn.normFactor
1507 1509 N = self.dataIn.nFFTPoints
1508 1510 ippSeconds = self.dataIn.ippSeconds
1509 1511 K = self.dataIn.nIncohInt
1510 1512 pairsArray = numpy.array(self.dataIn.pairsList)
1511 1513
1512 1514 #List of possible combinations
1513 1515 listComb = list(itertools.combinations(numpy.arange(groupArray.shape[1]),2))
1514 1516 indCross = numpy.zeros(len(listComb), dtype = 'int')
1515 1517
1516 1518 if getSNR:
1517 1519 listChannels = groupArray.reshape((groupArray.size))
1518 1520 listChannels.sort()
1519 1521 noise = self.dataIn.getNoise()
1520 1522 self.dataOut.data_snr = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels])
1521 1523
1522 1524 for i in range(nGroups):
1523 1525 coord = groupArray[i,:]
1524 1526
1525 1527 #Input data array
1526 1528 data = self.dataIn.data_spc[coord,:,:]/(M*N)
1527 1529 data = data.reshape((data.shape[0]*data.shape[1],data.shape[2]))
1528 1530
1529 1531 #Cross Spectra data array for Covariance Matrixes
1530 1532 ind = 0
1531 1533 for pairs in listComb:
1532 1534 pairsSel = numpy.array([coord[pairs[0]],coord[pairs[1]]])
1533 1535 indCross[ind] = int(numpy.where(numpy.all(pairsArray == pairsSel, axis = 1))[0][0])
1534 1536 ind += 1
1535 1537 dataCross = self.dataIn.data_cspc[indCross,:,:]/(M*N)
1536 1538 dataCross = dataCross**2/K
1537 1539
1538 1540 for h in range(nHeights):
1539 1541
1540 1542 #Input
1541 1543 d = data[:,h]
1542 1544
1543 1545 #Covariance Matrix
1544 1546 D = numpy.diag(d**2/K)
1545 1547 ind = 0
1546 1548 for pairs in listComb:
1547 1549 #Coordinates in Covariance Matrix
1548 1550 x = pairs[0]
1549 1551 y = pairs[1]
1550 1552 #Channel Index
1551 1553 S12 = dataCross[ind,:,h]
1552 1554 D12 = numpy.diag(S12)
1553 1555 #Completing Covariance Matrix with Cross Spectras
1554 1556 D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
1555 1557 D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
1556 1558 ind += 1
1557 1559 Dinv=numpy.linalg.inv(D)
1558 1560 L=numpy.linalg.cholesky(Dinv)
1559 1561 LT=L.T
1560 1562
1561 1563 dp = numpy.dot(LT,d)
1562 1564
1563 1565 #Initial values
1564 1566 data_spc = self.dataIn.data_spc[coord,:,h]
1565 1567
1566 1568 if (h>0)and(error1[3]<5):
1567 1569 p0 = self.dataOut.data_param[i,:,h-1]
1568 1570 else:
1569 1571 p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i))
1570 1572
1571 1573 try:
1572 1574 #Least Squares
1573 1575 minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
1574 1576 # minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
1575 1577 #Chi square error
1576 1578 error0 = numpy.sum(infodict['fvec']**2)/(2*N)
1577 1579 #Error with Jacobian
1578 1580 error1 = self.dataOut.library.errorFunction(minp,constants,LT)
1579 1581 except:
1580 1582 minp = p0*numpy.nan
1581 1583 error0 = numpy.nan
1582 1584 error1 = p0*numpy.nan
1583 1585
1584 1586 #Save
1585 1587 if self.dataOut.data_param is None:
1586 1588 self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
1587 1589 self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
1588 1590
1589 1591 self.dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
1590 1592 self.dataOut.data_param[i,:,h] = minp
1591 1593 return
1592 1594
1593 1595 def __residFunction(self, p, dp, LT, constants):
1594 1596
1595 1597 fm = self.dataOut.library.modelFunction(p, constants)
1596 1598 fmp=numpy.dot(LT,fm)
1597 1599
1598 1600 return dp-fmp
1599 1601
1600 1602 def __getSNR(self, z, noise):
1601 1603
1602 1604 avg = numpy.average(z, axis=1)
1603 1605 SNR = (avg.T-noise)/noise
1604 1606 SNR = SNR.T
1605 1607 return SNR
1606 1608
1607 1609 def __chisq(p,chindex,hindex):
1608 1610 #similar to Resid but calculates CHI**2
1609 1611 [LT,d,fm]=setupLTdfm(p,chindex,hindex)
1610 1612 dp=numpy.dot(LT,d)
1611 1613 fmp=numpy.dot(LT,fm)
1612 1614 chisq=numpy.dot((dp-fmp).T,(dp-fmp))
1613 1615 return chisq
1614 1616
1615 1617 class WindProfiler(Operation):
1616 1618
1617 1619 __isConfig = False
1618 1620
1619 1621 __initime = None
1620 1622 __lastdatatime = None
1621 1623 __integrationtime = None
1622 1624
1623 1625 __buffer = None
1624 1626
1625 1627 __dataReady = False
1626 1628
1627 1629 __firstdata = None
1628 1630
1629 1631 n = None
1630 1632
1631 1633 def __init__(self):
1632 1634 Operation.__init__(self)
1633 1635
1634 1636 def __calculateCosDir(self, elev, azim):
1635 1637 zen = (90 - elev)*numpy.pi/180
1636 1638 azim = azim*numpy.pi/180
1637 1639 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
1638 1640 cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2)
1639 1641
1640 1642 signX = numpy.sign(numpy.cos(azim))
1641 1643 signY = numpy.sign(numpy.sin(azim))
1642 1644
1643 1645 cosDirX = numpy.copysign(cosDirX, signX)
1644 1646 cosDirY = numpy.copysign(cosDirY, signY)
1645 1647 return cosDirX, cosDirY
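# Editor's note: with zen = (90 - elev) in radians, the expressions above reduce
# algebraically to
#
#     cosDirX = sin(zen) * cos(azim)
#     cosDirY = sin(zen) * sin(azim)
#
# the numpy.copysign calls only restore the signs lost in the intermediate square
# roots.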
1646 1648
1647 1649 def __calculateAngles(self, theta_x, theta_y, azimuth):
1648 1650
1649 1651 dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2)
1650 1652 zenith_arr = numpy.arccos(dir_cosw)
1651 1653 azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180
1652 1654
1653 1655 dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr)
1654 1656 dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr)
1655 1657
1656 1658 return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw
1657 1659
1658 1660 def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly):
1659 1661
1660 1662 #
1661 1663 if horOnly:
1662 1664 A = numpy.c_[dir_cosu,dir_cosv]
1663 1665 else:
1664 1666 A = numpy.c_[dir_cosu,dir_cosv,dir_cosw]
1665 1667 A = numpy.asmatrix(A)
1666 1668 A1 = numpy.linalg.inv(A.transpose()*A)*A.transpose()
1667 1669
1668 1670 return A1
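# Editor's note: A1 is the least-squares pseudo-inverse (A^T A)^-1 A^T of the
# direction-cosine matrix, so the measured radial velocities are inverted as
# [u, v(, w)] = A1 . velRadial in __calculateVelUVW below; with horOnly only the two
# horizontal components are solved for.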
1669 1671
1670 1672 def __correctValues(self, heiRang, phi, velRadial, SNR):
1671 1673 listPhi = phi.tolist()
1672 1674 maxid = listPhi.index(max(listPhi))
1673 1675 minid = listPhi.index(min(listPhi))
1674 1676
1675 1677 rango = list(range(len(phi)))
1676 1678 # rango = numpy.delete(rango,maxid)
1677 1679
1678 1680 heiRang1 = heiRang*math.cos(phi[maxid])
1679 1681 heiRangAux = heiRang*math.cos(phi[minid])
1680 1682 indOut = (heiRang1 < heiRangAux[0]).nonzero()
1681 1683 heiRang1 = numpy.delete(heiRang1,indOut)
1682 1684
1683 1685 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
1684 1686 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
1685 1687
1686 1688 for i in rango:
1687 1689 x = heiRang*math.cos(phi[i])
1688 1690 y1 = velRadial[i,:]
1689 1691 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
1690 1692
1691 1693 x1 = heiRang1
1692 1694 y11 = f1(x1)
1693 1695
1694 1696 y2 = SNR[i,:]
1695 1697 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
1696 1698 y21 = f2(x1)
1697 1699
1698 1700 velRadial1[i,:] = y11
1699 1701 SNR1[i,:] = y21
1700 1702
1701 1703 return heiRang1, velRadial1, SNR1
1702 1704
1703 1705 def __calculateVelUVW(self, A, velRadial):
1704 1706
1705 1707 # Matrix operation
1706 1708 # velUVW = numpy.zeros((velRadial.shape[1],3))
1707 1709 # for ind in range(velRadial.shape[1]):
1708 1710 # velUVW[ind,:] = numpy.dot(A,velRadial[:,ind])
1709 1711 # velUVW = velUVW.transpose()
1710 1712 velUVW = numpy.zeros((A.shape[0],velRadial.shape[1]))
1711 1713 velUVW[:,:] = numpy.dot(A,velRadial)
1712 1714
1713 1715
1714 1716 return velUVW
1715 1717
1716 1718 # def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0):
1717 1719
1718 1720 def techniqueDBS(self, kwargs):
1719 1721 """
1720 1722 Function that implements Doppler Beam Swinging (DBS) technique.
1721 1723
1722 1724 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1723 1725 Direction correction (if necessary), Ranges and SNR
1724 1726
1725 1727 Output: Winds estimation (Zonal, Meridional and Vertical)
1726 1728
1727 1729 Parameters affected: Winds, height range, SNR
1728 1730 """
1729 1731 velRadial0 = kwargs['velRadial']
1730 1732 heiRang = kwargs['heightList']
1731 1733 SNR0 = kwargs['SNR']
1732 1734
1733 1735 if 'dirCosx' in kwargs and 'dirCosy' in kwargs:
1734 1736 theta_x = numpy.array(kwargs['dirCosx'])
1735 1737 theta_y = numpy.array(kwargs['dirCosy'])
1736 1738 else:
1737 1739 elev = numpy.array(kwargs['elevation'])
1738 1740 azim = numpy.array(kwargs['azimuth'])
1739 1741 theta_x, theta_y = self.__calculateCosDir(elev, azim)
1740 1742 azimuth = kwargs['correctAzimuth']
1741 1743 if 'horizontalOnly' in kwargs:
1742 1744 horizontalOnly = kwargs['horizontalOnly']
1743 1745 else: horizontalOnly = False
1744 1746 if 'correctFactor' in kwargs:
1745 1747 correctFactor = kwargs['correctFactor']
1746 1748 else: correctFactor = 1
1747 1749 if 'channelList' in kwargs:
1748 1750 channelList = kwargs['channelList']
1749 1751 if len(channelList) == 2:
1750 1752 horizontalOnly = True
1751 1753 arrayChannel = numpy.array(channelList)
1752 1754 param = param[arrayChannel,:,:]
1753 1755 theta_x = theta_x[arrayChannel]
1754 1756 theta_y = theta_y[arrayChannel]
1755 1757
1756 1758 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
1757 1759 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
1758 1760 A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly)
1759 1761
1760 1762 # Computing the velocity components with DBS
1761 1763 winds = self.__calculateVelUVW(A,velRadial1)
1762 1764
1763 1765 return winds, heiRang1, SNR1
1764 1766
1765 1767 def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None):
1766 1768
1767 1769 nPairs = len(pairs_ccf)
1768 1770 posx = numpy.asarray(posx)
1769 1771 posy = numpy.asarray(posy)
1770 1772
1771 1773 # Inverse rotation to align with the azimuth
1772 1774 if azimuth!= None:
1773 1775 azimuth = azimuth*math.pi/180
1774 1776 posx1 = posx*math.cos(azimuth) + posy*math.sin(azimuth)
1775 1777 posy1 = -posx*math.sin(azimuth) + posy*math.cos(azimuth)
1776 1778 else:
1777 1779 posx1 = posx
1778 1780 posy1 = posy
1779 1781
1780 1782 # Distance calculation
1781 1783 distx = numpy.zeros(nPairs)
1782 1784 disty = numpy.zeros(nPairs)
1783 1785 dist = numpy.zeros(nPairs)
1784 1786 ang = numpy.zeros(nPairs)
1785 1787
1786 1788 for i in range(nPairs):
1787 1789 distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]]
1788 1790 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
1789 1791 dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2)
1790 1792 ang[i] = numpy.arctan2(disty[i],distx[i])
1791 1793
1792 1794 return distx, disty, dist, ang
1793 1795 # Matrix calculation
1794 1796 # nPairs = len(pairs)
1795 1797 # ang1 = numpy.zeros((nPairs, 2, 1))
1796 1798 # dist1 = numpy.zeros((nPairs, 2, 1))
1797 1799 #
1798 1800 # for j in range(nPairs):
1799 1801 # dist1[j,0,0] = dist[pairs[j][0]]
1800 1802 # dist1[j,1,0] = dist[pairs[j][1]]
1801 1803 # ang1[j,0,0] = ang[pairs[j][0]]
1802 1804 # ang1[j,1,0] = ang[pairs[j][1]]
1803 1805 #
1804 1806 # return distx,disty, dist1,ang1
1805 1807
1806 1808
1807 1809 def __calculateVelVer(self, phase, lagTRange, _lambda):
1808 1810
1809 1811 Ts = lagTRange[1] - lagTRange[0]
1810 1812 velW = -_lambda*phase/(4*math.pi*Ts)
1811 1813
1812 1814 return velW
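# Editor's note, a short derivation sketch of the line above: the lag-1 phase of the
# autocorrelation is phase = 2*pi*fd*Ts and, with the Doppler convention
# fd = -2*velW/_lambda,
#
#     velW = -_lambda * phase / (4 * pi * Ts)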
1813 1815
1814 1816 def __calculateVelHorDir(self, dist, tau1, tau2, ang):
1815 1817 nPairs = tau1.shape[0]
1816 1818 nHeights = tau1.shape[1]
1817 1819 vel = numpy.zeros((nPairs,3,nHeights))
1818 1820 dist1 = numpy.reshape(dist, (dist.size,1))
1819 1821
1820 1822 angCos = numpy.cos(ang)
1821 1823 angSin = numpy.sin(ang)
1822 1824
1823 1825 vel0 = dist1*tau1/(2*tau2**2)
1824 1826 vel[:,0,:] = (vel0*angCos).sum(axis = 1)
1825 1827 vel[:,1,:] = (vel0*angSin).sum(axis = 1)
1826 1828
1827 1829 ind = numpy.where(numpy.isinf(vel))
1828 1830 vel[ind] = numpy.nan
1829 1831
1830 1832 return vel
1831 1833
1832 1834 # def __getPairsAutoCorr(self, pairsList, nChannels):
1833 1835 #
1834 1836 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1835 1837 #
1836 1838 # for l in range(len(pairsList)):
1837 1839 # firstChannel = pairsList[l][0]
1838 1840 # secondChannel = pairsList[l][1]
1839 1841 #
1840 1842 # #Obteniendo pares de Autocorrelacion
1841 1843 # if firstChannel == secondChannel:
1842 1844 # pairsAutoCorr[firstChannel] = int(l)
1843 1845 #
1844 1846 # pairsAutoCorr = pairsAutoCorr.astype(int)
1845 1847 #
1846 1848 # pairsCrossCorr = range(len(pairsList))
1847 1849 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1848 1850 #
1849 1851 # return pairsAutoCorr, pairsCrossCorr
1850 1852
1851 1853 # def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor):
1852 1854 def techniqueSA(self, kwargs):
1853 1855
1854 1856 """
1855 1857 Function that implements Spaced Antenna (SA) technique.
1856 1858
1857 1859 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1858 1860 Direction correction (if necessary), Ranges and SNR
1859 1861
1860 1862 Output: Winds estimation (Zonal, Meridional and Vertical)
1861 1863
1862 1864 Parameters affected: Winds
1863 1865 """
1864 1866 position_x = kwargs['positionX']
1865 1867 position_y = kwargs['positionY']
1866 1868 azimuth = kwargs['azimuth']
1867 1869
1868 1870 if 'correctFactor' in kwargs:
1869 1871 correctFactor = kwargs['correctFactor']
1870 1872 else:
1871 1873 correctFactor = 1
1872 1874
1873 1875 groupList = kwargs['groupList']
1874 1876 pairs_ccf = groupList[1]
1875 1877 tau = kwargs['tau']
1876 1878 _lambda = kwargs['_lambda']
1877 1879
1878 1880 #Cross Correlation pairs obtained
1879 1881 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels)
1880 1882 # pairsArray = numpy.array(pairsList)[pairsCrossCorr]
1881 1883 # pairsSelArray = numpy.array(pairsSelected)
1882 1884 # pairs = []
1883 1885 #
1884 1886 # #Wind estimation pairs obtained
1885 1887 # for i in range(pairsSelArray.shape[0]/2):
1886 1888 # ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0]
1887 1889 # ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0]
1888 1890 # pairs.append((ind1,ind2))
1889 1891
1890 1892 indtau = tau.shape[0]//2
1891 1893 tau1 = tau[:indtau,:]
1892 1894 tau2 = tau[indtau:-1,:]
1893 1895 # tau1 = tau1[pairs,:]
1894 1896 # tau2 = tau2[pairs,:]
1895 1897 phase1 = tau[-1,:]
1896 1898
1897 1899 #---------------------------------------------------------------------
1898 1900 # Direct method
1899 1901 distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth)
1900 1902 winds = self.__calculateVelHorDir(dist, tau1, tau2, ang)
1901 1903 winds = stats.nanmean(winds, axis=0)
1902 1904 #---------------------------------------------------------------------
1903 1905 # General method
1904 1906 # distx, disty, dist = self.calculateDistance(position_x,position_y,pairsCrossCorr, pairsList, azimuth)
1905 1907 # # Correlation function coefficient calculation
1906 1908 # F,G,A,B,H = self.calculateCoef(tau1,tau2,distx,disty,n)
1907 1909 # # Velocity calculation
1908 1910 # winds = self.calculateVelUV(F,G,A,B,H)
1909 1911
1910 1912 #---------------------------------------------------------------------
1911 1913 winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda)
1912 1914 winds = correctFactor*winds
1913 1915 return winds
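# --- Editor's sketch (illustrative, not part of the processing chain) ---
# techniqueSA projects per-baseline velocity estimates onto the baseline angles
# and averages them. As a simplified, self-contained illustration of that
# projection step only (not the full SA estimator), the snippet below recovers
# zonal (u) and meridional (v) components from two velocities measured along
# baselines at known angles. All numbers are hypothetical.
import numpy
ang = numpy.radians([0.0, 90.0])          # assumed baseline angles w.r.t. East
v_baseline = numpy.array([10.0, -3.0])    # velocity estimate along each baseline (m/s)
A = numpy.column_stack((numpy.cos(ang), numpy.sin(ang)))
u, v = numpy.linalg.solve(A, v_baseline)  # zonal and meridional components
# -------------------------------------------------------------------------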
1914 1916
1915 1917 def __checkTime(self, currentTime, paramInterval, outputInterval):
1916 1918
1917 1919 dataTime = currentTime + paramInterval
1918 1920 deltaTime = dataTime - self.__initime
1919 1921
1920 1922 if deltaTime >= outputInterval or deltaTime < 0:
1921 1923 self.__dataReady = True
1922 1924 return
1923 1925
1924 1926 def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax):
1925 1927 '''
1926 1928 Function that implements the wind estimation technique using detected meteors.
1927 1929
1928 1930 Input: Detected meteors, minimum meteor quantity for wind estimation
1929 1931
1930 1932 Output: Winds estimation (Zonal and Meridional)
1931 1933
1932 1934 Parameters affected: Winds
1933 1935 '''
1934 1936 #Settings
1935 1937 nInt = (heightMax - heightMin)/2
1936 1938 nInt = int(nInt)
1937 1939 winds = numpy.zeros((2,nInt))*numpy.nan
1938 1940
1939 1941 #Filter errors
1940 1942 error = numpy.where(arrayMeteor[:,-1] == 0)[0]
1941 1943 finalMeteor = arrayMeteor[error,:]
1942 1944
1943 1945 #Meteor Histogram
1944 1946 finalHeights = finalMeteor[:,2]
1945 1947 hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax))
1946 1948 nMeteorsPerI = hist[0]
1947 1949 heightPerI = hist[1]
1948 1950
1949 1951 #Sort of meteors
1950 1952 indSort = finalHeights.argsort()
1951 1953 finalMeteor2 = finalMeteor[indSort,:]
1952 1954
1953 1955 # Calculating winds
1954 1956 ind1 = 0
1955 1957 ind2 = 0
1956 1958
1957 1959 for i in range(nInt):
1958 1960 nMet = nMeteorsPerI[i]
1959 1961 ind1 = ind2
1960 1962 ind2 = ind1 + nMet
1961 1963
1962 1964 meteorAux = finalMeteor2[ind1:ind2,:]
1963 1965
1964 1966 if meteorAux.shape[0] >= meteorThresh:
1965 1967 vel = meteorAux[:, 6]
1966 1968 zen = meteorAux[:, 4]*numpy.pi/180
1967 1969 azim = meteorAux[:, 3]*numpy.pi/180
1968 1970
1969 1971 n = numpy.cos(zen)
1970 1972 # m = (1 - n**2)/(1 - numpy.tan(azim)**2)
1971 1973 # l = m*numpy.tan(azim)
1972 1974 l = numpy.sin(zen)*numpy.sin(azim)
1973 1975 m = numpy.sin(zen)*numpy.cos(azim)
1974 1976
1975 1977 A = numpy.vstack((l, m)).transpose()
1976 1978 A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose())
1977 1979 windsAux = numpy.dot(A1, vel)
1978 1980
1979 1981 winds[0,i] = windsAux[0]
1980 1982 winds[1,i] = windsAux[1]
1981 1983
1982 1984 return winds, heightPerI[:-1]
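# --- Editor's sketch (illustrative, not part of the processing chain) ---
# techniqueMeteors solves vel ~= l*u + m*v in the least-squares sense for each
# height bin, with l, m the direction cosines of every meteor echo. A minimal
# standalone version of that fit, with hypothetical angles and winds:
import numpy
zen = numpy.radians([10.0, 25.0, 40.0, 30.0])    # zenith angles (hypothetical)
azim = numpy.radians([0.0, 90.0, 180.0, 270.0])  # azimuth angles (hypothetical)
l = numpy.sin(zen)*numpy.sin(azim)
m = numpy.sin(zen)*numpy.cos(azim)
true_u, true_v = 20.0, -5.0
vel = l*true_u + m*true_v                        # noiseless radial projections
A = numpy.vstack((l, m)).T
u_est, v_est = numpy.linalg.lstsq(A, vel, rcond=None)[0]  # recovers true_u, true_v
# -------------------------------------------------------------------------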
1983 1985
1984 1986 def techniqueNSM_SA(self, **kwargs):
1985 1987 metArray = kwargs['metArray']
1986 1988 heightList = kwargs['heightList']
1987 1989 timeList = kwargs['timeList']
1988 1990
1989 1991 rx_location = kwargs['rx_location']
1990 1992 groupList = kwargs['groupList']
1991 1993 azimuth = kwargs['azimuth']
1992 1994 dfactor = kwargs['dfactor']
1993 1995 k = kwargs['k']
1994 1996
1995 1997 azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth)
1996 1998 d = dist*dfactor
1997 1999 #Phase calculation
1998 2000 metArray1 = self.__getPhaseSlope(metArray, heightList, timeList)
1999 2001
2000 2002 metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities
2001 2003
2002 2004 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2003 2005 azimuth1 = azimuth1*numpy.pi/180
2004 2006
2005 2007 for i in range(heightList.size):
2006 2008 h = heightList[i]
2007 2009 indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0]
2008 2010 metHeight = metArray1[indH,:]
2009 2011 if metHeight.shape[0] >= 2:
2010 2012 velAux = numpy.asmatrix(metHeight[:,-2]).T #Radial Velocities
2011 2013 iazim = metHeight[:,1].astype(int)
2012 2014 azimAux = numpy.asmatrix(azimuth1[iazim]).T #Azimuths
2013 2015 A = numpy.hstack((numpy.cos(azimAux),numpy.sin(azimAux)))
2014 2016 A = numpy.asmatrix(A)
2015 2017 A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose()
2016 2018 velHor = numpy.dot(A1,velAux)
2017 2019
2018 2020 velEst[i,:] = numpy.squeeze(velHor)
2019 2021 return velEst
2020 2022
2021 2023 def __getPhaseSlope(self, metArray, heightList, timeList):
2022 2024 meteorList = []
2023 2025 #utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2
2024 2026 #Putting back together the meteor matrix
2025 2027 utctime = metArray[:,0]
2026 2028 uniqueTime = numpy.unique(utctime)
2027 2029
2028 2030 phaseDerThresh = 0.5
2029 2031 ippSeconds = timeList[1] - timeList[0]
2030 2032 sec = numpy.where(timeList>1)[0][0]
2031 2033 nPairs = metArray.shape[1] - 6
2032 2034 nHeights = len(heightList)
2033 2035
2034 2036 for t in uniqueTime:
2035 2037 metArray1 = metArray[utctime==t,:]
2036 2038 # phaseDerThresh = numpy.pi/4 #reducir Phase thresh
2037 2039 tmet = metArray1[:,1].astype(int)
2038 2040 hmet = metArray1[:,2].astype(int)
2039 2041
2040 2042 metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1))
2041 2043 metPhase[:,:] = numpy.nan
2042 2044 metPhase[:,hmet,tmet] = metArray1[:,6:].T
2043 2045
2044 2046 #Delete short trails
2045 2047 metBool = ~numpy.isnan(metPhase[0,:,:])
2046 2048 heightVect = numpy.sum(metBool, axis = 1)
2047 2049 metBool[heightVect<sec,:] = False
2048 2050 metPhase[:,heightVect<sec,:] = numpy.nan
2049 2051
2050 2052 #Derivative
2051 2053 metDer = numpy.abs(metPhase[:,:,1:] - metPhase[:,:,:-1])
2052 2054 phDerAux = numpy.dstack((numpy.full((nPairs,nHeights,1), False, dtype=bool),metDer > phaseDerThresh))
2053 2055 metPhase[phDerAux] = numpy.nan
2054 2056
2055 2057 #--------------------------METEOR DETECTION -----------------------------------------
2056 2058 indMet = numpy.where(numpy.any(metBool,axis=1))[0]
2057 2059
2058 2060 for p in numpy.arange(nPairs):
2059 2061 phase = metPhase[p,:,:]
2060 2062 phDer = metDer[p,:,:]
2061 2063
2062 2064 for h in indMet:
2063 2065 height = heightList[h]
2064 2066 phase1 = phase[h,:] #82
2065 2067 phDer1 = phDer[h,:]
2066 2068
2067 2069 phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap
2068 2070
2069 2071 indValid = numpy.where(~numpy.isnan(phase1))[0]
2070 2072 initMet = indValid[0]
2071 2073 endMet = 0
2072 2074
2073 2075 for i in range(len(indValid)-1):
2074 2076
2075 2077 #Time difference
2076 2078 inow = indValid[i]
2077 2079 inext = indValid[i+1]
2078 2080 idiff = inext - inow
2079 2081 #Phase difference
2080 2082 phDiff = numpy.abs(phase1[inext] - phase1[inow])
2081 2083
2082 2084 if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor
2083 2085 sizeTrail = inow - initMet + 1
2084 2086 if sizeTrail>3*sec: #Too short meteors
2085 2087 x = numpy.arange(initMet,inow+1)*ippSeconds
2086 2088 y = phase1[initMet:inow+1]
2087 2089 ynnan = ~numpy.isnan(y)
2088 2090 x = x[ynnan]
2089 2091 y = y[ynnan]
2090 2092 slope, intercept, r_value, p_value, std_err = stats.linregress(x,y)
2091 2093 ylin = x*slope + intercept
2092 2094 rsq = r_value**2
2093 2095 if rsq > 0.5:
2094 2096 vel = slope#*height*1000/(k*d)
2095 2097 estAux = numpy.array([t, p, height, vel, rsq]) #utc time of this trail, pair, height, velocity, fit quality
2096 2098 meteorList.append(estAux)
2097 2099 initMet = inext
2098 2100 metArray2 = numpy.array(meteorList)
2099 2101
2100 2102 return metArray2
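# --- Editor's sketch (illustrative, not part of the processing chain) ---
# __getPhaseSlope unwraps the phase of each meteor trail and fits a straight
# line; the slope (rad/s) is later scaled into a radial velocity. A compact,
# standalone version of the unwrap-and-fit step on synthetic data:
import numpy
from scipy import stats
ippSeconds = 0.01                                  # assumed sampling period (s)
t = numpy.arange(50)*ippSeconds
true_slope = 9.0                                   # rad/s (hypothetical)
phase = numpy.angle(numpy.exp(1j*true_slope*t))    # wrapped phase samples
phase = numpy.unwrap(phase)
slope, intercept, r_value, p_value, std_err = stats.linregress(t, phase)
rsq = r_value**2                                   # fit quality, as used above
# -------------------------------------------------------------------------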
2101 2103
2102 2104 def __calculateAzimuth1(self, rx_location, pairslist, azimuth0):
2103 2105
2104 2106 azimuth1 = numpy.zeros(len(pairslist))
2105 2107 dist = numpy.zeros(len(pairslist))
2106 2108
2107 2109 for i in range(len(rx_location)):
2108 2110 ch0 = pairslist[i][0]
2109 2111 ch1 = pairslist[i][1]
2110 2112
2111 2113 diffX = rx_location[ch0][0] - rx_location[ch1][0]
2112 2114 diffY = rx_location[ch0][1] - rx_location[ch1][1]
2113 2115 azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi
2114 2116 dist[i] = numpy.sqrt(diffX**2 + diffY**2)
2115 2117
2116 2118 azimuth1 -= azimuth0
2117 2119 return azimuth1, dist
2118 2120
2119 2121 def techniqueNSM_DBS(self, **kwargs):
2120 2122 metArray = kwargs['metArray']
2121 2123 heightList = kwargs['heightList']
2122 2124 timeList = kwargs['timeList']
2123 2125 azimuth = kwargs['azimuth']
2124 2126 theta_x = numpy.array(kwargs['theta_x'])
2125 2127 theta_y = numpy.array(kwargs['theta_y'])
2126 2128
2127 2129 utctime = metArray[:,0]
2128 2130 cmet = metArray[:,1].astype(int)
2129 2131 hmet = metArray[:,3].astype(int)
2130 2132 SNRmet = metArray[:,4]
2131 2133 vmet = metArray[:,5]
2132 2134 spcmet = metArray[:,6]
2133 2135
2134 2136 nChan = numpy.max(cmet) + 1
2135 2137 nHeights = len(heightList)
2136 2138
2137 2139 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
2138 2140 hmet = heightList[hmet]
2139 2141 h1met = hmet*numpy.cos(zenith_arr[cmet]) #Corrected heights
2140 2142
2141 2143 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2142 2144
2143 2145 for i in range(nHeights - 1):
2144 2146 hmin = heightList[i]
2145 2147 hmax = heightList[i + 1]
2146 2148
2147 2149 thisH = (h1met>=hmin) & (h1met<hmax) & (cmet!=2) & (SNRmet>8) & (vmet<50) & (spcmet<10)
2148 2150 indthisH = numpy.where(thisH)
2149 2151
2150 2152 if numpy.size(indthisH) > 3:
2151 2153
2152 2154 vel_aux = vmet[thisH]
2153 2155 chan_aux = cmet[thisH]
2154 2156 cosu_aux = dir_cosu[chan_aux]
2155 2157 cosv_aux = dir_cosv[chan_aux]
2156 2158 cosw_aux = dir_cosw[chan_aux]
2157 2159
2158 2160 nch = numpy.size(numpy.unique(chan_aux))
2159 2161 if nch > 1:
2160 2162 A = self.__calculateMatA(cosu_aux, cosv_aux, cosw_aux, True)
2161 2163 velEst[i,:] = numpy.dot(A,vel_aux)
2162 2164
2163 2165 return velEst
2164 2166
2165 2167 def run(self, dataOut, technique, nHours=1, hmin=70, hmax=110, **kwargs):
2166 2168
2167 2169 param = dataOut.data_param
2168 2170 if dataOut.abscissaList is not None:
2169 2171 absc = dataOut.abscissaList[:-1]
2170 2172 # noise = dataOut.noise
2171 2173 heightList = dataOut.heightList
2172 2174 SNR = dataOut.data_snr
2173 2175
2174 2176 if technique == 'DBS':
2175 2177
2176 2178 kwargs['velRadial'] = param[:,1,:] #Radial velocity
2177 2179 kwargs['heightList'] = heightList
2178 2180 kwargs['SNR'] = SNR
2179 2181
2180 2182 dataOut.data_output, dataOut.heightList, dataOut.data_snr = self.techniqueDBS(kwargs) #DBS Function
2181 2183 dataOut.utctimeInit = dataOut.utctime
2182 2184 dataOut.outputInterval = dataOut.paramInterval
2183 2185
2184 2186 elif technique == 'SA':
2185 2187
2186 2188 #Parameters
2187 2189 # position_x = kwargs['positionX']
2188 2190 # position_y = kwargs['positionY']
2189 2191 # azimuth = kwargs['azimuth']
2190 2192 #
2191 2193 # if kwargs.has_key('crosspairsList'):
2192 2194 # pairs = kwargs['crosspairsList']
2193 2195 # else:
2194 2196 # pairs = None
2195 2197 #
2196 2198 # if kwargs.has_key('correctFactor'):
2197 2199 # correctFactor = kwargs['correctFactor']
2198 2200 # else:
2199 2201 # correctFactor = 1
2200 2202
2201 2203 # tau = dataOut.data_param
2202 2204 # _lambda = dataOut.C/dataOut.frequency
2203 2205 # pairsList = dataOut.groupList
2204 2206 # nChannels = dataOut.nChannels
2205 2207
2206 2208 kwargs['groupList'] = dataOut.groupList
2207 2209 kwargs['tau'] = dataOut.data_param
2208 2210 kwargs['_lambda'] = dataOut.C/dataOut.frequency
2209 2211 # dataOut.data_output = self.techniqueSA(pairs, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, absc, correctFactor)
2210 2212 dataOut.data_output = self.techniqueSA(kwargs)
2211 2213 dataOut.utctimeInit = dataOut.utctime
2212 2214 dataOut.outputInterval = dataOut.timeInterval
2213 2215
2214 2216 elif technique == 'Meteors':
2215 2217 dataOut.flagNoData = True
2216 2218 self.__dataReady = False
2217 2219
2218 2220 if 'nHours' in kwargs:
2219 2221 nHours = kwargs['nHours']
2220 2222 else:
2221 2223 nHours = 1
2222 2224
2223 2225 if 'meteorsPerBin' in kwargs:
2224 2226 meteorThresh = kwargs['meteorsPerBin']
2225 2227 else:
2226 2228 meteorThresh = 6
2227 2229
2228 2230 if 'hmin' in kwargs:
2229 2231 hmin = kwargs['hmin']
2230 2232 else: hmin = 70
2231 2233 if 'hmax' in kwargs:
2232 2234 hmax = kwargs['hmax']
2233 2235 else: hmax = 110
2234 2236
2235 2237 dataOut.outputInterval = nHours*3600
2236 2238
2237 2239 if self.__isConfig == False:
2238 2240 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2239 2241 #Get Initial LTC time
2240 2242 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2241 2243 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2242 2244
2243 2245 self.__isConfig = True
2244 2246
2245 2247 if self.__buffer is None:
2246 2248 self.__buffer = dataOut.data_param
2247 2249 self.__firstdata = copy.copy(dataOut)
2248 2250
2249 2251 else:
2250 2252 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2251 2253
2252 2254 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2253 2255
2254 2256 if self.__dataReady:
2255 2257 dataOut.utctimeInit = self.__initime
2256 2258
2257 2259 self.__initime += dataOut.outputInterval #to erase time offset
2258 2260
2259 2261 dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax)
2260 2262 dataOut.flagNoData = False
2261 2263 self.__buffer = None
2262 2264
2263 2265 elif technique == 'Meteors1':
2264 2266 dataOut.flagNoData = True
2265 2267 self.__dataReady = False
2266 2268
2267 2269 if 'nMins' in kwargs:
2268 2270 nMins = kwargs['nMins']
2269 2271 else: nMins = 20
2270 2272 if 'rx_location' in kwargs:
2271 2273 rx_location = kwargs['rx_location']
2272 2274 else: rx_location = [(0,1),(1,1),(1,0)]
2273 2275 if 'azimuth' in kwargs:
2274 2276 azimuth = kwargs['azimuth']
2275 2277 else: azimuth = 51.06
2276 2278 if 'dfactor' in kwargs:
2277 2279 dfactor = kwargs['dfactor']
2278 2280 if 'mode' in kwargs:
2279 2281 mode = kwargs['mode']
2280 2282 else: mode = 'SA' #default mode when 'mode' is not given
2281 2283 if 'theta_x' in kwargs:
2282 2284 theta_x = kwargs['theta_x']
2283 2285 if 'theta_y' in kwargs:
2284 2286 theta_y = kwargs['theta_y']
2285 2287
2286 2288 #Borrar luego esto
2287 2289 if dataOut.groupList is None:
2288 2290 dataOut.groupList = [(0,1),(0,2),(1,2)]
2289 2291 groupList = dataOut.groupList
2290 2292 C = 3e8
2291 2293 freq = 50e6
2292 2294 lamb = C/freq
2293 2295 k = 2*numpy.pi/lamb
2294 2296
2295 2297 timeList = dataOut.abscissaList
2296 2298 heightList = dataOut.heightList
2297 2299
2298 2300 if self.__isConfig == False:
2299 2301 dataOut.outputInterval = nMins*60
2300 2302 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2301 2303 #Get Initial LTC time
2302 2304 initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2303 2305 minuteAux = initime.minute
2304 2306 minuteNew = int(numpy.floor(minuteAux/nMins)*nMins)
2305 2307 self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
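# --- Editor's sketch (illustrative, not part of the processing chain) ---
# The lines above floor the acquisition time to the start of the current
# nMins-minute window and express it as seconds since the Unix epoch, so
# buffered meteors are grouped into fixed windows. Standalone example with a
# hypothetical timestamp:
import datetime, numpy
nMins = 20                                          # assumed window length (minutes)
t = datetime.datetime(2021, 11, 11, 17, 47, 33)     # hypothetical UTC time
minuteNew = int(numpy.floor(t.minute/nMins)*nMins)  # 40 for this example
t0 = (t.replace(minute=minuteNew, second=0, microsecond=0)
      - datetime.datetime(1970, 1, 1)).total_seconds()
# -------------------------------------------------------------------------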
2306 2308
2307 2309 self.__isConfig = True
2308 2310
2309 2311 if self.__buffer is None:
2310 2312 self.__buffer = dataOut.data_param
2311 2313 self.__firstdata = copy.copy(dataOut)
2312 2314
2313 2315 else:
2314 2316 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2315 2317
2316 2318 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2317 2319
2318 2320 if self.__dataReady:
2319 2321 dataOut.utctimeInit = self.__initime
2320 2322 self.__initime += dataOut.outputInterval #to erase time offset
2321 2323
2322 2324 metArray = self.__buffer
2323 2325 if mode == 'SA':
2324 2326 dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList)
2325 2327 elif mode == 'DBS':
2326 2328 dataOut.data_output = self.techniqueNSM_DBS(metArray=metArray,heightList=heightList,timeList=timeList, azimuth=azimuth, theta_x=theta_x, theta_y=theta_y)
2327 2329 dataOut.data_output = dataOut.data_output.T
2328 2330 dataOut.flagNoData = False
2329 2331 self.__buffer = None
2330 2332
2331 2333 return
2332 2334
2333 2335 class EWDriftsEstimation(Operation):
2334 2336
2335 2337 def __init__(self):
2336 2338 Operation.__init__(self)
2337 2339
2338 2340 def __correctValues(self, heiRang, phi, velRadial, SNR):
2339 2341 listPhi = phi.tolist()
2340 2342 maxid = listPhi.index(max(listPhi))
2341 2343 minid = listPhi.index(min(listPhi))
2342 2344
2343 2345 rango = list(range(len(phi)))
2344 2346 # rango = numpy.delete(rango,maxid)
2345 2347
2346 2348 heiRang1 = heiRang*math.cos(phi[maxid])
2347 2349 heiRangAux = heiRang*math.cos(phi[minid])
2348 2350 indOut = (heiRang1 < heiRangAux[0]).nonzero()
2349 2351 heiRang1 = numpy.delete(heiRang1,indOut)
2350 2352
2351 2353 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
2352 2354 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
2353 2355
2354 2356 for i in rango:
2355 2357 x = heiRang*math.cos(phi[i])
2356 2358 y1 = velRadial[i,:]
2357 2359 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
2358 2360
2359 2361 x1 = heiRang1
2360 2362 y11 = f1(x1)
2361 2363
2362 2364 y2 = SNR[i,:]
2363 2365 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
2364 2366 y21 = f2(x1)
2365 2367
2366 2368 velRadial1[i,:] = y11
2367 2369 SNR1[i,:] = y21
2368 2370
2369 2371 return heiRang1, velRadial1, SNR1
2370 2372
2371 2373 def run(self, dataOut, zenith, zenithCorrection):
2372 2374 heiRang = dataOut.heightList
2373 2375 velRadial = dataOut.data_param[:,3,:]
2374 2376 SNR = dataOut.data_snr
2375 2377
2376 2378 zenith = numpy.array(zenith)
2377 2379 zenith -= zenithCorrection
2378 2380 zenith *= numpy.pi/180
2379 2381
2380 2382 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
2381 2383
2382 2384 alp = zenith[0]
2383 2385 bet = zenith[1]
2384 2386
2385 2387 w_w = velRadial1[0,:]
2386 2388 w_e = velRadial1[1,:]
2387 2389
2388 2390 w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
2389 2391 u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
2390 2392
2391 2393 winds = numpy.vstack((u,w))
2392 2394
2393 2395 dataOut.heightList = heiRang1
2394 2396 dataOut.data_output = winds
2395 2397 dataOut.data_snr = SNR1
2396 2398
2397 2399 dataOut.utctimeInit = dataOut.utctime
2398 2400 dataOut.outputInterval = dataOut.timeInterval
2399 2401 return
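# --- Editor's sketch (illustrative, not part of the processing chain) ---
# The u/w expressions in run() above are the algebraic inverse of the 2x2
# system w_w = u*sin(alp) + w*cos(alp), w_e = u*sin(bet) + w*cos(bet), i.e.
# the radial velocities seen by the two oblique beams (sign conventions as
# implied by the code). A standalone numerical check with hypothetical values:
import numpy
alp, bet = numpy.radians(-2.5), numpy.radians(2.5)    # assumed beam zenith angles
u_true, w_true = 30.0, 0.5                            # hypothetical zonal/vertical wind
w_w = u_true*numpy.sin(alp) + w_true*numpy.cos(alp)
w_e = u_true*numpy.sin(bet) + w_true*numpy.cos(bet)
w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
# u and w recover u_true and w_true up to rounding
# -------------------------------------------------------------------------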
2400 2402
2401 2403 #--------------- Non Specular Meteor ----------------
2402 2404
2403 2405 class NonSpecularMeteorDetection(Operation):
2404 2406
2405 2407 def run(self, dataOut, mode, SNRthresh=8, phaseDerThresh=0.5, cohThresh=0.8, allData = False):
2406 2408 data_acf = dataOut.data_pre[0]
2407 2409 data_ccf = dataOut.data_pre[1]
2408 2410 pairsList = dataOut.groupList[1]
2409 2411
2410 2412 lamb = dataOut.C/dataOut.frequency
2411 2413 tSamp = dataOut.ippSeconds*dataOut.nCohInt
2412 2414 paramInterval = dataOut.paramInterval
2413 2415
2414 2416 nChannels = data_acf.shape[0]
2415 2417 nLags = data_acf.shape[1]
2416 2418 nProfiles = data_acf.shape[2]
2417 2419 nHeights = dataOut.nHeights
2418 2420 nCohInt = dataOut.nCohInt
2419 2421 sec = numpy.round(nProfiles/dataOut.paramInterval)
2420 2422 heightList = dataOut.heightList
2421 2423 ippSeconds = dataOut.ippSeconds*dataOut.nCohInt*dataOut.nAvg
2422 2424 utctime = dataOut.utctime
2423 2425
2424 2426 dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds)
2425 2427
2426 2428 #------------------------ SNR --------------------------------------
2427 2429 power = data_acf[:,0,:,:].real
2428 2430 noise = numpy.zeros(nChannels)
2429 2431 SNR = numpy.zeros(power.shape)
2430 2432 for i in range(nChannels):
2431 2433 noise[i] = hildebrand_sekhon(power[i,:], nCohInt)
2432 2434 SNR[i] = (power[i]-noise[i])/noise[i]
2433 2435 SNRm = numpy.nanmean(SNR, axis = 0)
2434 2436 SNRdB = 10*numpy.log10(SNR)
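# --- Editor's sketch (illustrative; the schainpy hildebrand_sekhon() is not reproduced here) ---
# hildebrand_sekhon() estimates the noise floor from the power samples using the
# Hildebrand & Sekhon (1974) criterion. A minimal standalone version of the idea:
# keep the largest set of sorted samples whose spread is still consistent with
# pure noise averaged over navg looks (variance <= mean**2/navg).
import numpy
def hs_noise_sketch(power_samples, navg):
    s = numpy.sort(power_samples.ravel())
    for k in range(s.size, 1, -1):
        sub = s[:k]
        if navg*numpy.var(sub) <= numpy.mean(sub)**2:
            return numpy.mean(sub)          # noise floor estimate
    return s[0]
# -------------------------------------------------------------------------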
2435 2437
2436 2438 if mode == 'SA':
2437 2439 dataOut.groupList = dataOut.groupList[1]
2438 2440 nPairs = data_ccf.shape[0]
2439 2441 #---------------------- Coherence and Phase --------------------------
2440 2442 phase = numpy.zeros(data_ccf[:,0,:,:].shape)
2441 2443 # phase1 = numpy.copy(phase)
2442 2444 coh1 = numpy.zeros(data_ccf[:,0,:,:].shape)
2443 2445
2444 2446 for p in range(nPairs):
2445 2447 ch0 = pairsList[p][0]
2446 2448 ch1 = pairsList[p][1]
2447 2449 ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:])
2448 2450 phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
2449 2451 # phase1[p,:,:] = numpy.angle(ccf) #median filter
2450 2452 coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
2451 2453 # coh1[p,:,:] = numpy.abs(ccf) #median filter
2452 2454 coh = numpy.nanmax(coh1, axis = 0)
2453 2455 # struc = numpy.ones((5,1))
2454 2456 # coh = ndimage.morphology.grey_dilation(coh, size=(10,1))
2455 2457 #---------------------- Radial Velocity ----------------------------
2456 2458 phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0)
2457 2459 velRad = phaseAux*lamb/(4*numpy.pi*tSamp)
2458 2460
2459 2461 if allData:
2460 2462 boolMetFin = ~numpy.isnan(SNRm)
2461 2463 # coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2462 2464 else:
2463 2465 #------------------------ Meteor mask ---------------------------------
2464 2466 # #SNR mask
2465 2467 # boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB))
2466 2468 #
2467 2469 # #Erase small objects
2468 2470 # boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
2469 2471 #
2470 2472 # auxEEJ = numpy.sum(boolMet1,axis=0)
2471 2473 # indOver = auxEEJ>nProfiles*0.8 #Use this later
2472 2474 # indEEJ = numpy.where(indOver)[0]
2473 2475 # indNEEJ = numpy.where(~indOver)[0]
2474 2476 #
2475 2477 # boolMetFin = boolMet1
2476 2478 #
2477 2479 # if indEEJ.size > 0:
2478 2480 # boolMet1[:,indEEJ] = False #Erase heights with EEJ
2479 2481 #
2480 2482 # boolMet2 = coh > cohThresh
2481 2483 # boolMet2 = self.__erase_small(boolMet2, 2*sec,5)
2482 2484 #
2483 2485 # #Final Meteor mask
2484 2486 # boolMetFin = boolMet1|boolMet2
2485 2487
2486 2488 #Coherence mask
2487 2489 boolMet1 = coh > 0.75
2488 2490 struc = numpy.ones((30,1))
2489 2491 boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc)
2490 2492
2491 2493 #Derivative mask
2492 2494 derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
2493 2495 boolMet2 = derPhase < 0.2
2494 2496 # boolMet2 = ndimage.morphology.binary_opening(boolMet2)
2495 2497 # boolMet2 = ndimage.morphology.binary_closing(boolMet2, structure = numpy.ones((10,1)))
2496 2498 boolMet2 = ndimage.median_filter(boolMet2,size=5)
2497 2499 boolMet2 = numpy.vstack((boolMet2,numpy.full((1,nHeights), True, dtype=bool)))
2498 2500 # #Final mask
2499 2501 # boolMetFin = boolMet2
2500 2502 boolMetFin = boolMet1&boolMet2
2501 2503 # boolMetFin = ndimage.morphology.binary_dilation(boolMetFin)
2502 2504 #Creating data_param
2503 2505 coordMet = numpy.where(boolMetFin)
2504 2506
2505 2507 tmet = coordMet[0]
2506 2508 hmet = coordMet[1]
2507 2509
2508 2510 data_param = numpy.zeros((tmet.size, 6 + nPairs))
2509 2511 data_param[:,0] = utctime
2510 2512 data_param[:,1] = tmet
2511 2513 data_param[:,2] = hmet
2512 2514 data_param[:,3] = SNRm[tmet,hmet]
2513 2515 data_param[:,4] = velRad[tmet,hmet]
2514 2516 data_param[:,5] = coh[tmet,hmet]
2515 2517 data_param[:,6:] = phase[:,tmet,hmet].T
2516 2518
2517 2519 elif mode == 'DBS':
2518 2520 dataOut.groupList = numpy.arange(nChannels)
2519 2521
2520 2522 #Radial Velocities
2521 2523 phase = numpy.angle(data_acf[:,1,:,:])
2522 2524 # phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1))
2523 2525 velRad = phase*lamb/(4*numpy.pi*tSamp)
2524 2526
2525 2527 #Spectral width
2526 2528 # acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1))
2527 2529 # acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1))
2528 2530 acf1 = data_acf[:,1,:,:]
2529 2531 acf2 = data_acf[:,2,:,:]
2530 2532
2531 2533 spcWidth = (lamb/(2*numpy.sqrt(6)*numpy.pi*tSamp))*numpy.sqrt(numpy.log(acf1/acf2))
2532 2534 # velRad = ndimage.median_filter(velRad, size = (1,5,1))
2533 2535 if allData:
2534 2536 boolMetFin = ~numpy.isnan(SNRdB)
2535 2537 else:
2536 2538 #SNR
2537 2539 boolMet1 = (SNRdB>SNRthresh) #SNR mask
2538 2540 boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5))
2539 2541
2540 2542 #Radial velocity
2541 2543 boolMet2 = numpy.abs(velRad) < 20
2542 2544 boolMet2 = ndimage.median_filter(boolMet2, (1,5,5))
2543 2545
2544 2546 #Spectral Width
2545 2547 boolMet3 = spcWidth < 30
2546 2548 boolMet3 = ndimage.median_filter(boolMet3, (1,5,5))
2547 2549 # boolMetFin = self.__erase_small(boolMet1, 10,5)
2548 2550 boolMetFin = boolMet1&boolMet2&boolMet3
2549 2551
2550 2552 #Creating data_param
2551 2553 coordMet = numpy.where(boolMetFin)
2552 2554
2553 2555 cmet = coordMet[0]
2554 2556 tmet = coordMet[1]
2555 2557 hmet = coordMet[2]
2556 2558
2557 2559 data_param = numpy.zeros((tmet.size, 7))
2558 2560 data_param[:,0] = utctime
2559 2561 data_param[:,1] = cmet
2560 2562 data_param[:,2] = tmet
2561 2563 data_param[:,3] = hmet
2562 2564 data_param[:,4] = SNR[cmet,tmet,hmet].T
2563 2565 data_param[:,5] = velRad[cmet,tmet,hmet].T
2564 2566 data_param[:,6] = spcWidth[cmet,tmet,hmet].T
2565 2567
2566 2568 # self.dataOut.data_param = data_int
2567 2569 if len(data_param) == 0:
2568 2570 dataOut.flagNoData = True
2569 2571 else:
2570 2572 dataOut.data_param = data_param
2571 2573
2572 2574 def __erase_small(self, binArray, threshX, threshY):
2573 2575 labarray, numfeat = ndimage.measurements.label(binArray)
2574 2576 binArray1 = numpy.copy(binArray)
2575 2577
2576 2578 for i in range(1,numfeat + 1):
2577 2579 auxBin = (labarray==i)
2578 2580 auxSize = auxBin.sum()
2579 2581
2580 2582 x,y = numpy.where(auxBin)
2581 2583 widthX = x.max() - x.min()
2582 2584 widthY = y.max() - y.min()
2583 2585
2584 2586 #width X: 3 seg -> 12.5*3
2585 2587 #width Y:
2586 2588
2587 2589 if (auxSize < 50) or (widthX < threshX) or (widthY < threshY):
2588 2590 binArray1[auxBin] = False
2589 2591
2590 2592 return binArray1
2591 2593
2592 2594 #--------------- Specular Meteor ----------------
2593 2595
2594 2596 class SMDetection(Operation):
2595 2597 '''
2596 2598 Function DetectMeteors()
2597 2599 Project developed with paper:
2598 2600 HOLDSWORTH ET AL. 2004
2599 2601
2600 2602 Input:
2601 2603 self.dataOut.data_pre
2602 2604
2603 2605 centerReceiverIndex: Index of the channel that corresponds to the center receiver
2604 2606
2605 2607 hei_ref: Height reference for the Beacon signal extraction
2606 2608 tauindex:
2607 2609 predefinedPhaseShifts: Predefined phase offset for the voltage signals
2608 2610
2609 2611 cohDetection: Whether to use Coherent detection or not
2610 2612 cohDet_timeStep: Coherent Detection calculation time step
2611 2613 cohDet_thresh: Coherent Detection phase threshold to correct phases
2612 2614
2613 2615 noise_timeStep: Noise calculation time step
2614 2616 noise_multiple: Noise multiple to define signal threshold
2615 2617
2616 2618 multDet_timeLimit: Multiple Detection Removal time limit in seconds
2617 2619 multDet_rangeLimit: Multiple Detection Removal range limit in km
2618 2620
2619 2621 phaseThresh: Maximum phase difference between receivers for an echo to be considered a meteor
2620 2622 SNRThresh: Minimum SNR of the meteor signal for it to be considered a meteor
2621 2623
2622 2624 hmin: Minimum Height of the meteor to use it in the further wind estimations
2623 2625 hmax: Maximum Height of the meteor to use it in the further wind estimations
2624 2626 azimuth: Azimuth angle correction
2625 2627
2626 2628 Affected:
2627 2629 self.dataOut.data_param
2628 2630
2629 2631 Rejection Criteria (Errors):
2630 2632 0: No error; analysis OK
2631 2633 1: SNR < SNR threshold
2632 2634 2: angle of arrival (AOA) ambiguously determined
2633 2635 3: AOA estimate not feasible
2634 2636 4: Large difference in AOAs obtained from different antenna baselines
2635 2637 5: echo at start or end of time series
2636 2638 6: echo less than 5 samples long; too short for analysis
2637 2639 7: echo rise exceeds 0.3s
2638 2640 8: echo decay time less than twice rise time
2639 2641 9: large power level before echo
2640 2642 10: large power level after echo
2641 2643 11: poor fit to amplitude for estimation of decay time
2642 2644 12: poor fit to CCF phase variation for estimation of radial drift velocity
2643 2645 13: height unresolvable echo: no valid height within 70 to 110 km
2644 2646 14: height ambiguous echo: more than one possible height within 70 to 110 km
2645 2647 15: radial drift velocity or projected horizontal velocity exceeds 200 m/s
2646 2648 16: oscillatory echo, indicating the event is most likely not an underdense echo
2647 2649
2648 2650 17: phase difference in meteor Reestimation
2649 2651
2650 2652 Data Storage:
2651 2653 Meteors for Wind Estimation (8):
2652 2654 Utc Time | Range Height
2653 2655 Azimuth Zenith errorCosDir
2654 2656 VelRad errorVelRad
2655 2657 Phase0 Phase1 Phase2 Phase3
2656 2658 TypeError
2657 2659
2658 2660 '''
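# --- Editor's sketch (illustrative, not part of the processing chain) ---
# The rejection criteria listed in the docstring travel in the last column of
# data_param as an integer code. A hypothetical helper for turning those codes
# into readable labels when inspecting results (labels taken from the list above):
SM_ERROR_LABELS = {
    0: "no error; analysis OK",
    1: "SNR below threshold",
    2: "AOA ambiguously determined",
    3: "AOA estimate not feasible",
    4: "large difference in AOAs between antenna baselines",
    5: "echo at start or end of time series",
    6: "echo shorter than 5 samples",
    7: "echo rise exceeds 0.3 s",
    8: "decay time less than twice rise time",
    9: "large power level before echo",
    10: "large power level after echo",
    11: "poor amplitude fit for decay time",
    12: "poor CCF phase fit for radial drift velocity",
    13: "no valid height within 70 to 110 km",
    14: "more than one possible height within 70 to 110 km",
    15: "radial or projected horizontal velocity exceeds 200 m/s",
    16: "oscillatory echo (likely not underdense)",
    17: "phase difference in meteor reestimation",
}
# -------------------------------------------------------------------------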
2659 2661
2660 2662 def run(self, dataOut, hei_ref = None, tauindex = 0,
2661 2663 phaseOffsets = None,
2662 2664 cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
2663 2665 noise_timeStep = 4, noise_multiple = 4,
2664 2666 multDet_timeLimit = 1, multDet_rangeLimit = 3,
2665 2667 phaseThresh = 20, SNRThresh = 5,
2666 2668 hmin = 50, hmax=150, azimuth = 0,
2667 2669 channelPositions = None) :
2668 2670
2669 2671
2670 2672 #Getting Pairslist
2671 2673 if channelPositions is None:
2672 2674 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
2673 2675 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
2674 2676 meteorOps = SMOperations()
2675 2677 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
2676 2678 heiRang = dataOut.heightList
2677 2679 #Get Beacon signal - No Beacon signal anymore
2678 2680 # newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
2679 2681 #
2680 2682 # if hei_ref != None:
2681 2683 # newheis = numpy.where(self.dataOut.heightList>hei_ref)
2682 2684 #
2683 2685
2684 2686
2685 2687 #****************REMOVING HARDWARE PHASE DIFFERENCES***************
2686 2688 # see if the user put in pre defined phase shifts
2687 2689 voltsPShift = dataOut.data_pre.copy()
2688 2690
2689 2691 # if predefinedPhaseShifts != None:
2690 2692 # hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180
2691 2693 #
2692 2694 # # elif beaconPhaseShifts:
2693 2695 # # #get hardware phase shifts using beacon signal
2694 2696 # # hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10)
2695 2697 # # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0)
2696 2698 #
2697 2699 # else:
2698 2700 # hardwarePhaseShifts = numpy.zeros(5)
2699 2701 #
2700 2702 # voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex')
2701 2703 # for i in range(self.dataOut.data_pre.shape[0]):
2702 2704 # voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i])
2703 2705
2704 2706 #******************END OF REMOVING HARDWARE PHASE DIFFERENCES*********
2705 2707
2706 2708 #Remove DC
2707 2709 voltsDC = numpy.mean(voltsPShift,1)
2708 2710 voltsDC = numpy.mean(voltsDC,1)
2709 2711 for i in range(voltsDC.shape[0]):
2710 2712 voltsPShift[i] = voltsPShift[i] - voltsDC[i]
2711 2713
2712 2714 #Don't consider the last heights; they're used to calculate the Hardware Phase Shift
2713 2715 # voltsPShift = voltsPShift[:,:,:newheis[0][0]]
2714 2716
2715 2717 #************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) **********
2716 2718 #Coherent Detection
2717 2719 if cohDetection:
2718 2720 #use coherent detection to get the net power
2719 2721 cohDet_thresh = cohDet_thresh*numpy.pi/180
2720 2722 voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh)
2721 2723
2722 2724 #Non-coherent detection!
2723 2725 powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0)
2724 2726 #********** END OF COH/NON-COH POWER CALCULATION**********************
2725 2727
2726 2728 #********** FIND THE NOISE LEVEL AND POSSIBLE METEORS ****************
2727 2729 #Get noise
2728 2730 noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval)
2729 2731 # noise = self.getNoise1(powerNet, noise_timeStep, self.dataOut.timeInterval)
2730 2732 #Get signal threshold
2731 2733 signalThresh = noise_multiple*noise
2732 2734 #Meteor echoes detection
2733 2735 listMeteors = self.__findMeteors(powerNet, signalThresh)
2734 2736 #******* END OF NOISE LEVEL AND POSSIBLE METEORS CACULATION **********
2735 2737
2736 2738 #************** REMOVE MULTIPLE DETECTIONS (3.5) ***************************
2737 2739 #Parameters
2738 2740 heiRange = dataOut.heightList
2739 2741 rangeInterval = heiRange[1] - heiRange[0]
2740 2742 rangeLimit = multDet_rangeLimit/rangeInterval
2741 2743 timeLimit = multDet_timeLimit/dataOut.timeInterval
2742 2744 #Multiple detection removals
2743 2745 listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit)
2744 2746 #************ END OF REMOVE MULTIPLE DETECTIONS **********************
2745 2747
2746 2748 #********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ********************
2747 2749 #Parameters
2748 2750 phaseThresh = phaseThresh*numpy.pi/180
2749 2751 thresh = [phaseThresh, noise_multiple, SNRThresh]
2750 2752 #Meteor reestimation (Errors N 1, 6, 12, 17)
2751 2753 listMeteors2, listMeteorsPower, listMeteorsVolts = self.__meteorReestimation(listMeteors1, voltsPShift, pairslist0, thresh, noise, dataOut.timeInterval, dataOut.frequency)
2752 2754 # listMeteors2, listMeteorsPower, listMeteorsVolts = self.meteorReestimation3(listMeteors2, listMeteorsPower, listMeteorsVolts, voltsPShift, pairslist, thresh, noise)
2753 2755 #Estimation of decay times (Errors N 7, 8, 11)
2754 2756 listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency)
2755 2757 #******************* END OF METEOR REESTIMATION *******************
2756 2758
2757 2759 #********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) **************************
2758 2760 #Calculating Radial Velocity (Error N 15)
2759 2761 radialStdThresh = 10
2760 2762 listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
2761 2763
2762 2764 if len(listMeteors4) > 0:
2763 2765 #Setting New Array
2764 2766 date = dataOut.utctime
2765 2767 arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang)
2766 2768
2767 2769 #Correcting phase offset
2768 2770 if phaseOffsets is not None:
2769 2771 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
2770 2772 arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
2771 2773
2772 2774 #Second Pairslist
2773 2775 pairsList = []
2774 2776 pairx = (0,1)
2775 2777 pairy = (2,3)
2776 2778 pairsList.append(pairx)
2777 2779 pairsList.append(pairy)
2778 2780
2779 2781 jph = numpy.array([0,0,0,0])
2780 2782 h = (hmin,hmax)
2781 2783 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
2782 2784
2783 2785 # #Calculate AOA (Error N 3, 4)
2784 2786 # #JONES ET AL. 1998
2785 2787 # error = arrayParameters[:,-1]
2786 2788 # AOAthresh = numpy.pi/8
2787 2789 # phases = -arrayParameters[:,9:13]
2788 2790 # arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth)
2789 2791 #
2790 2792 # #Calculate Heights (Error N 13 and 14)
2791 2793 # error = arrayParameters[:,-1]
2792 2794 # Ranges = arrayParameters[:,2]
2793 2795 # zenith = arrayParameters[:,5]
2794 2796 # arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax)
2795 2797 # error = arrayParameters[:,-1]
2796 2798 #********************* END OF PARAMETERS CALCULATION **************************
2797 2799
2798 2800 #***************************+ PASS DATA TO NEXT STEP **********************
2799 2801 # arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1]))
2800 2802 dataOut.data_param = arrayParameters
2801 2803
2802 2804 if arrayParameters is None:
2803 2805 dataOut.flagNoData = True
2804 2806 else:
2805 2807 dataOut.flagNoData = False
2806 2808
2807 2809 return
2808 2810
2809 2811 def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n):
2810 2812
2811 2813 minIndex = min(newheis[0])
2812 2814 maxIndex = max(newheis[0])
2813 2815
2814 2816 voltage = voltage0[:,:,minIndex:maxIndex+1]
2815 2817 nLength = voltage.shape[1]//n #integer block length so nMin:nMax can be used as slice indices
2816 2818 nMin = 0
2817 2819 nMax = 0
2818 2820 phaseOffset = numpy.zeros((len(pairslist),n))
2819 2821
2820 2822 for i in range(n):
2821 2823 nMax += nLength
2822 2824 phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:,nMin:nMax,:], pairslist, [0]))
2823 2825 phaseCCF = numpy.mean(phaseCCF, axis = 2)
2824 2826 phaseOffset[:,i] = phaseCCF.transpose()
2825 2827 nMin = nMax
2826 2828 # phaseDiff, phaseArrival = self.estimatePhaseDifference(voltage, pairslist)
2827 2829
2828 2830 #Remove Outliers
2829 2831 factor = 2
2830 2832 wt = phaseOffset - signal.medfilt(phaseOffset,(1,5))
2831 2833 dw = numpy.std(wt,axis = 1)
2832 2834 dw = dw.reshape((dw.size,1))
2833 2835 ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
2834 2836 phaseOffset[ind] = numpy.nan
2835 2837 phaseOffset = stats.nanmean(phaseOffset, axis=1)
2836 2838
2837 2839 return phaseOffset
2838 2840
2839 2841 def __shiftPhase(self, data, phaseShift):
2840 2842 #this will shift the phase of a complex number
2841 2843 dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
2842 2844 return dataShifted
2843 2845
2844 2846 def __estimatePhaseDifference(self, array, pairslist):
2845 2847 nChannel = array.shape[0]
2846 2848 nHeights = array.shape[2]
2847 2849 numPairs = len(pairslist)
2848 2850 # phaseCCF = numpy.zeros((nChannel, 5, nHeights))
2849 2851 phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2]))
2850 2852
2851 2853 #Correct phases
2852 2854 derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:]
2853 2855 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
2854 2856
2855 2857 if indDer[0].shape[0] > 0:
2856 2858 for i in range(indDer[0].shape[0]):
2857 2859 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]])
2858 2860 phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi
2859 2861
2860 2862 # for j in range(numSides):
2861 2863 # phaseCCFAux = self.calculateCCF(arrayCenter, arraySides[j,:,:], [-2,1,0,1,2])
2862 2864 # phaseCCF[j,:,:] = numpy.angle(phaseCCFAux)
2863 2865 #
2864 2866 #Linear
2865 2867 phaseInt = numpy.zeros((numPairs,1))
2866 2868 angAllCCF = phaseCCF[:,[0,1,3,4],0]
2867 2869 for j in range(numPairs):
2868 2870 fit = stats.linregress([-2,-1,1,2],angAllCCF[j,:])
2869 2871 phaseInt[j] = fit[1]
2870 2872 #Phase Differences
2871 2873 phaseDiff = phaseInt - phaseCCF[:,2,:]
2872 2874 phaseArrival = phaseInt.reshape(phaseInt.size)
2873 2875
2874 2876 #Dealias
2875 2877 phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival))
2876 2878 # indAlias = numpy.where(phaseArrival > numpy.pi)
2877 2879 # phaseArrival[indAlias] -= 2*numpy.pi
2878 2880 # indAlias = numpy.where(phaseArrival < -numpy.pi)
2879 2881 # phaseArrival[indAlias] += 2*numpy.pi
2880 2882
2881 2883 return phaseDiff, phaseArrival
2882 2884
2883 2885 def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh):
2884 2886 #this function will run the coherent detection used in Holdsworth et al. 2004 and return the net power
2885 2887 #find the phase shifts of each channel over 1 second intervals
2886 2888 #only look at ranges below the beacon signal
2887 2889 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2888 2890 numBlocks = int(volts.shape[1]/numProfPerBlock)
2889 2891 numHeights = volts.shape[2]
2890 2892 nChannel = volts.shape[0]
2891 2893 voltsCohDet = volts.copy()
2892 2894
2893 2895 pairsarray = numpy.array(pairslist)
2894 2896 indSides = pairsarray[:,1]
2895 2897 # indSides = numpy.array(range(nChannel))
2896 2898 # indSides = numpy.delete(indSides, indCenter)
2897 2899 #
2898 2900 # listCenter = numpy.array_split(volts[indCenter,:,:], numBlocks, 0)
2899 2901 listBlocks = numpy.array_split(volts, numBlocks, 1)
2900 2902
2901 2903 startInd = 0
2902 2904 endInd = 0
2903 2905
2904 2906 for i in range(numBlocks):
2905 2907 startInd = endInd
2906 2908 endInd = endInd + listBlocks[i].shape[1]
2907 2909
2908 2910 arrayBlock = listBlocks[i]
2909 2911 # arrayBlockCenter = listCenter[i]
2910 2912
2911 2913 #Estimate the Phase Difference
2912 2914 phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist)
2913 2915 #Phase Difference RMS
2914 2916 arrayPhaseRMS = numpy.abs(phaseDiff)
2915 2917 phaseRMSaux = numpy.sum(arrayPhaseRMS < thresh,0)
2916 2918 indPhase = numpy.where(phaseRMSaux==4)
2917 2919 #Shifting
2918 2920 if indPhase[0].shape[0] > 0:
2919 2921 for j in range(indSides.size):
2920 2922 arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose())
2921 2923 voltsCohDet[:,startInd:endInd,:] = arrayBlock
2922 2924
2923 2925 return voltsCohDet
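# --- Editor's sketch (illustrative, not part of the processing chain) ---
# __coherentDetection aligns the side receivers in phase (via __shiftPhase)
# before the channel voltages are combined, so a common meteor echo adds
# coherently. A standalone toy example of the align-then-sum idea, with
# hypothetical signals and phase offsets:
import numpy
signal = numpy.exp(1j*2*numpy.pi*0.05*numpy.arange(100))   # common echo (hypothetical)
offsets = numpy.array([0.0, 1.2, -0.7])                    # per-channel phase offsets (rad)
volts = numpy.array([signal*numpy.exp(1j*o) for o in offsets])
aligned = numpy.abs(volts)*numpy.exp(1j*(numpy.angle(volts) - offsets[:, None]))
power_noncoh = numpy.sum(numpy.abs(volts)**2, axis=0)      # non-coherent sum, as in powerNet
power_coh = numpy.abs(numpy.sum(aligned, axis=0))**2       # coherent sum after alignment
# -------------------------------------------------------------------------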
2924 2926
2925 2927 def __calculateCCF(self, volts, pairslist ,laglist):
2926 2928
2927 2929 nHeights = volts.shape[2]
2928 2930 nPoints = volts.shape[1]
2929 2931 voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights),dtype = 'complex')
2930 2932
2931 2933 for i in range(len(pairslist)):
2932 2934 volts1 = volts[pairslist[i][0]]
2933 2935 volts2 = volts[pairslist[i][1]]
2934 2936
2935 2937 for t in range(len(laglist)):
2936 2938 idxT = laglist[t]
2937 2939 if idxT >= 0:
2938 2940 vStacked = numpy.vstack((volts2[idxT:,:],
2939 2941 numpy.zeros((idxT, nHeights),dtype='complex')))
2940 2942 else:
2941 2943 vStacked = numpy.vstack((numpy.zeros((-idxT, nHeights),dtype='complex'),
2942 2944 volts2[:(nPoints + idxT),:]))
2943 2945 voltsCCF[i,t,:] = numpy.sum((numpy.conjugate(volts1)*vStacked),axis=0)
2944 2946
2945 2947 vStacked = None
2946 2948 return voltsCCF
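# --- Editor's sketch (illustrative, not part of the processing chain) ---
# For lag 0, the loop in __calculateCCF reduces to a plain inner product over
# profiles: sum(conj(volts1)*volts2) for every height. A standalone example of
# that case with random data (hypothetical shapes, profiles x heights):
import numpy
rng = numpy.random.default_rng(1)
volts1 = rng.standard_normal((200, 50)) + 1j*rng.standard_normal((200, 50))
volts2 = rng.standard_normal((200, 50)) + 1j*rng.standard_normal((200, 50))
ccf_lag0 = numpy.sum(numpy.conjugate(volts1)*volts2, axis=0)   # one value per height
# -------------------------------------------------------------------------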
2947 2949
2948 2950 def __getNoise(self, power, timeSegment, timeInterval):
2949 2951 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
2950 2952 numBlocks = int(power.shape[0]/numProfPerBlock)
2951 2953 numHeights = power.shape[1]
2952 2954
2953 2955 listPower = numpy.array_split(power, numBlocks, 0)
2954 2956 noise = numpy.zeros((power.shape[0], power.shape[1]))
2955 2957 noise1 = numpy.zeros((power.shape[0], power.shape[1]))
2956 2958
2957 2959 startInd = 0
2958 2960 endInd = 0
2959 2961
2960 2962 for i in range(numBlocks): #split por canal
2961 2963 startInd = endInd
2962 2964 endInd = endInd + listPower[i].shape[0]
2963 2965
2964 2966 arrayBlock = listPower[i]
2965 2967 noiseAux = numpy.mean(arrayBlock, 0)
2966 2968 # noiseAux = numpy.median(noiseAux)
2967 2969 # noiseAux = numpy.mean(arrayBlock)
2968 2970 noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
2969 2971
2970 2972 noiseAux1 = numpy.mean(arrayBlock)
2971 2973 noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
2972 2974
2973 2975 return noise, noise1
2974 2976
2975 2977 def __findMeteors(self, power, thresh):
2976 2978 nProf = power.shape[0]
2977 2979 nHeights = power.shape[1]
2978 2980 listMeteors = []
2979 2981
2980 2982 for i in range(nHeights):
2981 2983 powerAux = power[:,i]
2982 2984 threshAux = thresh[:,i]
2983 2985
2984 2986 indUPthresh = numpy.where(powerAux > threshAux)[0]
2985 2987 indDNthresh = numpy.where(powerAux <= threshAux)[0]
2986 2988
2987 2989 j = 0
2988 2990
2989 2991 while (j < indUPthresh.size - 2):
2990 2992 if (indUPthresh[j + 2] == indUPthresh[j] + 2):
2991 2993 indDNAux = numpy.where(indDNthresh > indUPthresh[j])
2992 2994 indDNthresh = indDNthresh[indDNAux]
2993 2995
2994 2996 if (indDNthresh.size > 0):
2995 2997 indEnd = indDNthresh[0] - 1
2996 2998 indInit = indUPthresh[j]
2997 2999
2998 3000 meteor = powerAux[indInit:indEnd + 1]
2999 3001 indPeak = meteor.argmax() + indInit
3000 3002 FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0)))
3001 3003
3002 3004 listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #CHEQUEAR!!!!!
3003 3005 j = numpy.where(indUPthresh == indEnd)[0] + 1
3004 3006 else: j+=1
3005 3007 else: j+=1
3006 3008
3007 3009 return listMeteors
3008 3010
3009 3011 def __removeMultipleDetections(self,listMeteors, rangeLimit, timeLimit):
3010 3012
3011 3013 arrayMeteors = numpy.asarray(listMeteors)
3012 3014 listMeteors1 = []
3013 3015
3014 3016 while arrayMeteors.shape[0] > 0:
3015 3017 FLAs = arrayMeteors[:,4]
3016 3018 maxFLA = FLAs.argmax()
3017 3019 listMeteors1.append(arrayMeteors[maxFLA,:])
3018 3020
3019 3021 MeteorInitTime = arrayMeteors[maxFLA,1]
3020 3022 MeteorEndTime = arrayMeteors[maxFLA,3]
3021 3023 MeteorHeight = arrayMeteors[maxFLA,0]
3022 3024
3023 3025 #Check neighborhood
3024 3026 maxHeightIndex = MeteorHeight + rangeLimit
3025 3027 minHeightIndex = MeteorHeight - rangeLimit
3026 3028 minTimeIndex = MeteorInitTime - timeLimit
3027 3029 maxTimeIndex = MeteorEndTime + timeLimit
3028 3030
3029 3031 #Check Heights
3030 3032 indHeight = numpy.logical_and(arrayMeteors[:,0] >= minHeightIndex, arrayMeteors[:,0] <= maxHeightIndex)
3031 3033 indTime = numpy.logical_and(arrayMeteors[:,3] >= minTimeIndex, arrayMeteors[:,1] <= maxTimeIndex)
3032 3034 indBoth = numpy.where(numpy.logical_and(indTime,indHeight))
3033 3035
3034 3036 arrayMeteors = numpy.delete(arrayMeteors, indBoth, axis = 0)
3035 3037
3036 3038 return listMeteors1
3037 3039
3038 3040 def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval,frequency):
3039 3041 numHeights = volts.shape[2]
3040 3042 nChannel = volts.shape[0]
3041 3043
3042 3044 thresholdPhase = thresh[0]
3043 3045 thresholdNoise = thresh[1]
3044 3046 thresholdDB = float(thresh[2])
3045 3047
3046 3048 thresholdDB1 = 10**(thresholdDB/10)
3047 3049 pairsarray = numpy.array(pairslist)
3048 3050 indSides = pairsarray[:,1]
3049 3051
3050 3052 pairslist1 = list(pairslist)
3051 3053 pairslist1.append((0,1))
3052 3054 pairslist1.append((3,4))
3053 3055
3054 3056 listMeteors1 = []
3055 3057 listPowerSeries = []
3056 3058 listVoltageSeries = []
3057 3059 #volts has the raw data
3058 3060
3059 3061 if frequency == 30e6:
3060 3062 timeLag = 45*10**-3
3061 3063 else:
3062 3064 timeLag = 15*10**-3
3063 3065 lag = int(numpy.ceil(timeLag/timeInterval)) #integer lag so it can be used in slices
3064 3066
3065 3067 for i in range(len(listMeteors)):
3066 3068
3067 3069 ###################### 3.6 - 3.7 PARAMETERS REESTIMATION #########################
3068 3070 meteorAux = numpy.zeros(16)
3069 3071
3070 3072 #Loading meteor Data (mHeight, mStart, mPeak, mEnd)
3071 3073 mHeight = listMeteors[i][0]
3072 3074 mStart = listMeteors[i][1]
3073 3075 mPeak = listMeteors[i][2]
3074 3076 mEnd = listMeteors[i][3]
3075 3077
3076 3078 #get the volt data between the start and end times of the meteor
3077 3079 meteorVolts = volts[:,mStart:mEnd+1,mHeight]
3078 3080 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3079 3081
3080 3082 #3.6. Phase Difference estimation
3081 3083 phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist)
3082 3084
3083 3085 #3.7. Phase difference removal & meteor start, peak and end times reestimated
3084 3086 #meteorVolts0.- all Channels, all Profiles
3085 3087 meteorVolts0 = volts[:,:,mHeight]
3086 3088 meteorThresh = noise[:,mHeight]*thresholdNoise
3087 3089 meteorNoise = noise[:,mHeight]
3088 3090 meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting
3089 3091 powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power
3090 3092
3091 3093 #Times reestimation
3092 3094 mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0]
3093 3095 if mStart1.size > 0:
3094 3096 mStart1 = mStart1[-1] + 1
3095 3097
3096 3098 else:
3097 3099 mStart1 = mPeak
3098 3100
3099 3101 mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1
3100 3102 mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0]
3101 3103 if mEndDecayTime1.size == 0:
3102 3104 mEndDecayTime1 = powerNet0.size
3103 3105 else:
3104 3106 mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1
3105 3107 # mPeak1 = meteorVolts0[mStart1:mEnd1 + 1].argmax()
3106 3108
3107 3109 #meteorVolts1.- all Channels, from start to end
3108 3110 meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1]
3109 3111 meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1]
3110 3112 if meteorVolts2.shape[1] == 0:
3111 3113 meteorVolts2 = meteorVolts0[:,mPeak:mEnd1 + 1]
3112 3114 meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1)
3113 3115 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1)
3114 3116 ##################### END PARAMETERS REESTIMATION #########################
3115 3117
3116 3118 ##################### 3.8 PHASE DIFFERENCE REESTIMATION ########################
3117 3119 # if mEnd1 - mStart1 > 4: #Error Number 6: echo less than 5 samples long; too short for analysis
3118 3120 if meteorVolts2.shape[1] > 0:
3119 3121 #Phase Difference re-estimation
3120 3122 phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation
3121 3123 # phaseDiff1, phaseDiffint = self.estimatePhaseDifference(meteorVolts2, pairslist)
3122 3124 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1])
3123 3125 phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1))
3124 3126 meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting
3125 3127
3126 3128 #Phase Difference RMS
3127 3129 phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1)))
3128 3130 powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0)
3129 3131 #Data from Meteor
3130 3132 mPeak1 = powerNet1.argmax() + mStart1
3131 3133 mPeakPower1 = powerNet1.max()
3132 3134 noiseAux = sum(noise[mStart1:mEnd1 + 1,mHeight])
3133 3135 mSNR1 = (sum(powerNet1)-noiseAux)/noiseAux
3134 3136 Meteor1 = numpy.array([mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1])
3135 3137 Meteor1 = numpy.hstack((Meteor1,phaseDiffint))
3136 3138 PowerSeries = powerNet0[mStart1:mEndDecayTime1 + 1]
3137 3139 #Vectorize
3138 3140 meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1]
3139 3141 meteorAux[7:11] = phaseDiffint[0:4]
3140 3142
3141 3143 #Rejection Criterions
3142 3144 if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation
3143 3145 meteorAux[-1] = 17
3144 3146 elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB
3145 3147 meteorAux[-1] = 1
3146 3148
3147 3149
3148 3150 else:
3149 3151 meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd]
3150 3152 meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis
3151 3153 PowerSeries = 0
3152 3154
3153 3155 listMeteors1.append(meteorAux)
3154 3156 listPowerSeries.append(PowerSeries)
3155 3157 listVoltageSeries.append(meteorVolts1)
3156 3158
3157 3159 return listMeteors1, listPowerSeries, listVoltageSeries
3158 3160
3159 3161 def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency):
3160 3162
3161 3163 threshError = 10
3162 3164 #Depending if it is 30 or 50 MHz
3163 3165 if frequency == 30e6:
3164 3166 timeLag = 45*10**-3
3165 3167 else:
3166 3168 timeLag = 15*10**-3
3167 3169 lag = int(numpy.ceil(timeLag/timeInterval)) #integer lag so it can be used in slices
3168 3170
3169 3171 listMeteors1 = []
3170 3172
3171 3173 for i in range(len(listMeteors)):
3172 3174 meteorPower = listPower[i]
3173 3175 meteorAux = listMeteors[i]
3174 3176
3175 3177 if meteorAux[-1] == 0:
3176 3178
3177 3179 try:
3178 3180 indmax = meteorPower.argmax()
3179 3181 indlag = indmax + lag
3180 3182
3181 3183 y = meteorPower[indlag:]
3182 3184 x = numpy.arange(0, y.size)*timeLag
3183 3185
3184 3186 #first guess
3185 3187 a = y[0]
3186 3188 tau = timeLag
3187 3189 #exponential fit
3188 3190 popt, pcov = optimize.curve_fit(self.__exponential_function, x, y, p0 = [a, tau])
3189 3191 y1 = self.__exponential_function(x, *popt)
3190 3192 #error estimation
3191 3193 error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size))
3192 3194
3193 3195 decayTime = popt[1]
3194 3196 riseTime = indmax*timeInterval
3195 3197 meteorAux[11:13] = [decayTime, error]
3196 3198
3197 3199 #Table items 7, 8 and 11
3198 3200 if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s
3199 3201 meteorAux[-1] = 7
3200 3202 elif (decayTime < 2*riseTime): #Number 8: Echo decay time less than twice rise time
3201 3203 meteorAux[-1] = 8
3202 3204 if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time
3203 3205 meteorAux[-1] = 11
3204 3206
3205 3207
3206 3208 except:
3207 3209 meteorAux[-1] = 11
3208 3210
3209 3211
3210 3212 listMeteors1.append(meteorAux)
3211 3213
3212 3214 return listMeteors1
3213 3215
3214 3216 #Exponential Function
3215 3217
3216 3218 def __exponential_function(self, x, a, tau):
3217 3219 y = a*numpy.exp(-x/tau)
3218 3220 return y
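# --- Editor's sketch (illustrative, not part of the processing chain) ---
# __estimateDecayTime fits a*exp(-x/tau) to the meteor power after its peak and
# keeps tau as the decay time. A standalone fit on synthetic data using the
# same model form (all values hypothetical):
import numpy
from scipy import optimize
def expo(x, a, tau):
    return a*numpy.exp(-x/tau)
x = numpy.arange(30)*0.045                        # seconds, assumed 45 ms sampling
y = expo(x, 5.0, 0.12) + 0.01*numpy.sin(37*x)     # decay plus a small perturbation
popt, pcov = optimize.curve_fit(expo, x, y, p0=[y[0], 0.045])
decayTime = popt[1]                                # close to 0.12 s for this example
# -------------------------------------------------------------------------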
3219 3221
3220 3222 def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval):
3221 3223
3222 3224 pairslist1 = list(pairslist)
3223 3225 pairslist1.append((0,1))
3224 3226 pairslist1.append((3,4))
3225 3227 numPairs = len(pairslist1)
3226 3228 #Time Lag
3227 3229 timeLag = 45*10**-3
3228 3230 c = 3e8
3229 3231 lag = int(numpy.ceil(timeLag/timeInterval)) #integer lag, consistent with the other methods
3230 3232 freq = 30e6
3231 3233
3232 3234 listMeteors1 = []
3233 3235
3234 3236 for i in range(len(listMeteors)):
3235 3237 meteorAux = listMeteors[i]
3236 3238 if meteorAux[-1] == 0:
3237 3239 mStart = listMeteors[i][1]
3238 3240 mPeak = listMeteors[i][2]
3239 3241 mLag = mPeak - mStart + lag
3240 3242
3241 3243 #get the volt data between the start and end times of the meteor
3242 3244 meteorVolts = listVolts[i]
3243 3245 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
3244 3246
3245 3247 #Get CCF
3246 3248 allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2])
3247 3249
3248 3250 #Method 2
3249 3251 slopes = numpy.zeros(numPairs)
3250 3252 time = numpy.array([-2,-1,1,2])*timeInterval
3251 3253 angAllCCF = numpy.angle(allCCFs[:,[0,1,3,4],0])
3252 3254
3253 3255 #Correct phases
3254 3256 derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1]
3255 3257 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
3256 3258
3257 3259 if indDer[0].shape[0] > 0:
3258 3260 for i in range(indDer[0].shape[0]):
3259 3261 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i]])
3260 3262 angAllCCF[indDer[0][i],indDer[1][i]+1:] += signo*2*numpy.pi
3261 3263
3262 3264 # fit = scipy.stats.linregress(numpy.array([-2,-1,1,2])*timeInterval, numpy.array([phaseLagN2s[i],phaseLagN1s[i],phaseLag1s[i],phaseLag2s[i]]))
3263 3265 for j in range(numPairs):
3264 3266 fit = stats.linregress(time, angAllCCF[j,:])
3265 3267 slopes[j] = fit[0]
3266 3268
3267 3269 #Remove Outlier
3268 3270 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3269 3271 # slopes = numpy.delete(slopes,indOut)
3270 3272 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
3271 3273 # slopes = numpy.delete(slopes,indOut)
3272 3274
3273 3275 radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq)
3274 3276 radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq)
3275 3277 meteorAux[-2] = radialError
3276 3278 meteorAux[-3] = radialVelocity
3277 3279
3278 3280 #Setting Error
3279 3281 #Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s
3280 3282 if numpy.abs(radialVelocity) > 200:
3281 3283 meteorAux[-1] = 15
3282 3284 #Number 12: Poor fit to CCF variation for estimation of radial drift velocity
3283 3285 elif radialError > radialStdThresh:
3284 3286 meteorAux[-1] = 12
3285 3287
3286 3288 listMeteors1.append(meteorAux)
3287 3289 return listMeteors1
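# --- Editor's sketch (illustrative, not part of the processing chain) ---
# The radial velocity above comes from the mean slope of the CCF phase versus
# lag: v_r = -slope*(lambda/(4*pi)) with lambda = c/freq. Standalone numbers
# with a hypothetical slope:
import numpy
c, freq = 3e8, 30e6
slope = 25.0                                        # rad/s, mean CCF phase slope
radialVelocity = -slope*(0.25/numpy.pi)*(c/freq)    # about -19.9 m/s
# -------------------------------------------------------------------------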
3288 3290
3289 3291 def __setNewArrays(self, listMeteors, date, heiRang):
3290 3292
3291 3293 #New arrays
3292 3294 arrayMeteors = numpy.array(listMeteors)
3293 3295 arrayParameters = numpy.zeros((len(listMeteors), 13))
3294 3296
3295 3297 #Date inclusion
3296 3298 # date = re.findall(r'\((.*?)\)', date)
3297 3299 # date = date[0].split(',')
3298 3300 # date = map(int, date)
3299 3301 #
3300 3302 # if len(date)<6:
3301 3303 # date.append(0)
3302 3304 #
3303 3305 # date = [date[0]*10000 + date[1]*100 + date[2], date[3]*10000 + date[4]*100 + date[5]]
3304 3306 # arrayDate = numpy.tile(date, (len(listMeteors), 1))
3305 3307 arrayDate = numpy.tile(date, (len(listMeteors)))
3306 3308
3307 3309 #Meteor array
3308 3310 # arrayMeteors[:,0] = heiRang[arrayMeteors[:,0].astype(int)]
3309 3311 # arrayMeteors = numpy.hstack((arrayDate, arrayMeteors))
3310 3312
3311 3313 #Parameters Array
3312 3314 arrayParameters[:,0] = arrayDate #Date
3313 3315 arrayParameters[:,1] = heiRang[arrayMeteors[:,0].astype(int)] #Range
3314 3316 arrayParameters[:,6:8] = arrayMeteors[:,-3:-1] #Radial velocity and its error
3315 3317 arrayParameters[:,8:12] = arrayMeteors[:,7:11] #Phases
3316 3318 arrayParameters[:,-1] = arrayMeteors[:,-1] #Error
3317 3319
3318 3320
3319 3321 return arrayParameters
3320 3322
3321 3323 class CorrectSMPhases(Operation):
3322 3324
3323 3325 def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None):
3324 3326
3325 3327 arrayParameters = dataOut.data_param
3326 3328 pairsList = []
3327 3329 pairx = (0,1)
3328 3330 pairy = (2,3)
3329 3331 pairsList.append(pairx)
3330 3332 pairsList.append(pairy)
3331 3333 jph = numpy.zeros(4)
3332 3334
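#Convert the offsets to radians, add them to the measured phases and re-wrap the
#result to (-pi, pi] via angle(exp(1j*phase)).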
3333 3335 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
3334 3336 # arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
3335 3337 arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + phaseOffsets)))
3336 3338
3337 3339 meteorOps = SMOperations()
3338 3340 if channelPositions is None:
3339 3341 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3340 3342 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3341 3343
3342 3344 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3343 3345 h = (hmin,hmax)
3344 3346
3345 3347 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
3346 3348
3347 3349 dataOut.data_param = arrayParameters
3348 3350 return
3349 3351
3350 3352 class SMPhaseCalibration(Operation):
3351 3353
3352 3354 __buffer = None
3353 3355
3354 3356 __initime = None
3355 3357
3356 3358 __dataReady = False
3357 3359
3358 3360 __isConfig = False
3359 3361
3360 3362 def __checkTime(self, currentTime, initTime, paramInterval, outputInterval):
3361 3363
3362 3364 dataTime = currentTime + paramInterval
3363 3365 deltaTime = dataTime - initTime
3364 3366
3365 3367 if deltaTime >= outputInterval or deltaTime < 0:
3366 3368 return True
3367 3369
3368 3370 return False
3369 3371
3370 3372 def __getGammas(self, pairs, d, phases):
3371 3373 gammas = numpy.zeros(2)
3372 3374
3373 3375 for i in range(len(pairs)):
3374 3376
3375 3377 pairi = pairs[i]
3376 3378
3377 3379 phip3 = phases[:,pairi[0]]
3378 3380 d3 = d[pairi[0]]
3379 3381 phip2 = phases[:,pairi[1]]
3380 3382 d2 = d[pairi[1]]
3381 3383 #Calculating gamma
3382 3384 # jdcos = alp1/(k*d1)
3383 3385 # jgamma = numpy.angle(numpy.exp(1j*(d0*alp1/d1 - alp0)))
3384 3386 jgamma = -phip2*d3/d2 - phip3
3385 3387 jgamma = numpy.angle(numpy.exp(1j*jgamma))
3386 3388 # jgamma[jgamma>numpy.pi] -= 2*numpy.pi
3387 3389 # jgamma[jgamma<-numpy.pi] += 2*numpy.pi
3388 3390
3389 3391 #Revised distribution
3390 3392 jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi))
3391 3393
3392 3394 #Histogram
3393 3395 nBins = 64
3394 3396 rmin = -0.5*numpy.pi
3395 3397 rmax = 0.5*numpy.pi
3396 3398 phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax))
3397 3399
3398 3400 meteorsY = phaseHisto[0]
3399 3401 phasesX = phaseHisto[1][:-1]
3400 3402 width = phasesX[1] - phasesX[0]
3401 3403 phasesX += width/2
3402 3404
3403 3405 #Gaussian approximation
3404 3406 bpeak = meteorsY.argmax()
3405 3407 peak = meteorsY.max()
3406 3408 jmin = bpeak - 5
3407 3409 jmax = bpeak + 5 + 1
3408 3410
3409 3411 if jmin<0:
3410 3412 jmin = 0
3411 3413 jmax = 6
3412 3414 elif jmax > meteorsY.size:
3413 3415 jmin = meteorsY.size - 6
3414 3416 jmax = meteorsY.size
3415 3417
3416 3418 x0 = numpy.array([peak,bpeak,50])
3417 3419 coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax]))
3418 3420
3419 3421 #Gammas
3420 3422 gammas[i] = coeff[0][1]
3421 3423
3422 3424 return gammas
3423 3425
3424 3426 def __residualFunction(self, coeffs, y, t):
3425 3427
3426 3428 return y - self.__gauss_function(t, coeffs)
3427 3429
3428 3430 def __gauss_function(self, t, coeffs):
3429 3431
3430 3432 return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2)
3431 3433
3432 3434 def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray):
3433 3435 meteorOps = SMOperations()
3434 3436 nchan = 4
3435 3437 pairx = pairsList[0] #x es 0
3436 3438 pairy = pairsList[1] #y es 1
3437 3439 center_xangle = 0
3438 3440 center_yangle = 0
3439 3441 range_angle = numpy.array([10*numpy.pi,numpy.pi,numpy.pi/2,numpy.pi/4])
3440 3442 ntimes = len(range_angle)
3441 3443
3442 3444 nstepsx = 20
3443 3445 nstepsy = 20
3444 3446
3445 3447 for iz in range(ntimes):
3446 3448 min_xangle = -range_angle[iz]/2 + center_xangle
3447 3449 max_xangle = range_angle[iz]/2 + center_xangle
3448 3450 min_yangle = -range_angle[iz]/2 + center_yangle
3449 3451 max_yangle = range_angle[iz]/2 + center_yangle
3450 3452
3451 3453 inc_x = (max_xangle-min_xangle)/nstepsx
3452 3454 inc_y = (max_yangle-min_yangle)/nstepsy
3453 3455
3454 3456 alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle
3455 3457 alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle
3456 3458 penalty = numpy.zeros((nstepsx,nstepsy))
3457 3459 jph_array = numpy.zeros((nchan,nstepsx,nstepsy))
3458 3460 jph = numpy.zeros(nchan)
3459 3461
3460 3462 # Iterations looking for the offset
3461 3463 for iy in range(int(nstepsy)):
3462 3464 for ix in range(int(nstepsx)):
3463 3465 d3 = d[pairsList[1][0]]
3464 3466 d2 = d[pairsList[1][1]]
3465 3467 d5 = d[pairsList[0][0]]
3466 3468 d4 = d[pairsList[0][1]]
3467 3469
3468 3470 alp2 = alpha_y[iy] #gamma 1
3469 3471 alp4 = alpha_x[ix] #gamma 0
3470 3472
3471 3473 alp3 = -alp2*d3/d2 - gammas[1]
3472 3474 alp5 = -alp4*d5/d4 - gammas[0]
3473 3475 # jph[pairy[1]] = alpha_y[iy]
3474 3476 # jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
3475 3477
3476 3478 # jph[pairx[1]] = alpha_x[ix]
3477 3479 # jph[pairx[0]] = -gammas[0] - alpha_x[ix]*d[pairx[1]]/d[pairx[0]]
3478 3480 jph[pairsList[0][1]] = alp4
3479 3481 jph[pairsList[0][0]] = alp5
3480 3482 jph[pairsList[1][0]] = alp3
3481 3483 jph[pairsList[1][1]] = alp2
3482 3484 jph_array[:,ix,iy] = jph
3483 3485 # d = [2.0,2.5,2.5,2.0]
3484 3486 #still need to check whether the meteors are read correctly
3485 3487 meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph)
3486 3488 error = meteorsArray1[:,-1]
3487 3489 ind1 = numpy.where(error==0)[0]
3488 3490 penalty[ix,iy] = ind1.size
3489 3491
3490 3492 i,j = numpy.unravel_index(penalty.argmax(), penalty.shape)
3491 3493 phOffset = jph_array[:,i,j]
3492 3494
3493 3495 center_xangle = phOffset[pairx[1]]
3494 3496 center_yangle = phOffset[pairy[1]]
3495 3497
3496 3498 phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j]))
3497 3499 phOffset = phOffset*180/numpy.pi
3498 3500 return phOffset
3499 3501
3500 3502
3501 3503 def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1):
3502 3504
3503 3505 dataOut.flagNoData = True
3504 3506 self.__dataReady = False
3505 3507 dataOut.outputInterval = nHours*3600
3506 3508
3507 3509 if self.__isConfig == False:
3508 3510 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
3509 3511 #Get Initial LTC time
3510 3512 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
3511 3513 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
3512 3514
3513 3515 self.__isConfig = True
3514 3516
3515 3517 if self.__buffer is None:
3516 3518 self.__buffer = dataOut.data_param.copy()
3517 3519
3518 3520 else:
3519 3521 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
3520 3522
3521 3523 self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
3522 3524
3523 3525 if self.__dataReady:
3524 3526 dataOut.utctimeInit = self.__initime
3525 3527 self.__initime += dataOut.outputInterval #to erase time offset
3526 3528
3527 3529 freq = dataOut.frequency
3528 3530 c = dataOut.C #m/s
3529 3531 lamb = c/freq
3530 3532 k = 2*numpy.pi/lamb
3531 3533 azimuth = 0
3532 3534 h = (hmin, hmax)
3533 3535 # pairs = ((0,1),(2,3)) #Estrella
3534 3536 # pairs = ((1,0),(2,3)) #T
3535 3537
3536 3538 if channelPositions is None:
3537 3539 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
3538 3540 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
3539 3541 meteorOps = SMOperations()
3540 3542 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
3541 3543
3542 3544 #Checking correct order of pairs
3543 3545 pairs = []
3544 3546 if distances[1] > distances[0]:
3545 3547 pairs.append((1,0))
3546 3548 else:
3547 3549 pairs.append((0,1))
3548 3550
3549 3551 if distances[3] > distances[2]:
3550 3552 pairs.append((3,2))
3551 3553 else:
3552 3554 pairs.append((2,3))
3553 3555 # distances1 = [-distances[0]*lamb, distances[1]*lamb, -distances[2]*lamb, distances[3]*lamb]
3554 3556
3555 3557 meteorsArray = self.__buffer
3556 3558 error = meteorsArray[:,-1]
3557 3559 boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14)
3558 3560 ind1 = numpy.where(boolError)[0]
3559 3561 meteorsArray = meteorsArray[ind1,:]
3560 3562 meteorsArray[:,-1] = 0
3561 3563 phases = meteorsArray[:,8:12]
3562 3564
3563 3565 #Calculate Gammas
3564 3566 gammas = self.__getGammas(pairs, distances, phases)
3565 3567 # gammas = numpy.array([-21.70409463,45.76935864])*numpy.pi/180
3566 3568 #Calculate Phases
3567 3569 phasesOff = self.__getPhases(azimuth, h, pairs, distances, gammas, meteorsArray)
3568 3570 phasesOff = phasesOff.reshape((1,phasesOff.size))
3569 3571 dataOut.data_output = -phasesOff
3570 3572 dataOut.flagNoData = False
3571 3573 self.__buffer = None
3572 3574
3573 3575
3574 3576 return
3575 3577
3576 3578 class SMOperations():
3577 3579
3578 3580 def __init__(self):
3579 3581
3580 3582 return
3581 3583
3582 3584 def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph):
3583 3585
3584 3586 arrayParameters = arrayParameters0.copy()
3585 3587 hmin = h[0]
3586 3588 hmax = h[1]
3587 3589
3588 3590 #Calculate AOA (Error N 3, 4)
3589 3591 #JONES ET AL. 1998
3590 3592 AOAthresh = numpy.pi/8
3591 3593 error = arrayParameters[:,-1]
3592 3594 phases = -arrayParameters[:,8:12] + jph
3593 3595 # phases = numpy.unwrap(phases)
3594 3596 arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth)
3595 3597
3596 3598 #Calculate Heights (Error N 13 and 14)
3597 3599 error = arrayParameters[:,-1]
3598 3600 Ranges = arrayParameters[:,1]
3599 3601 zenith = arrayParameters[:,4]
3600 3602 arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax)
3601 3603
3602 3604 #----------------------- Get Final data ------------------------------------
3603 3605 # error = arrayParameters[:,-1]
3604 3606 # ind1 = numpy.where(error==0)[0]
3605 3607 # arrayParameters = arrayParameters[ind1,:]
3606 3608
3607 3609 return arrayParameters
3608 3610
3609 3611 def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth):
3610 3612
3611 3613 arrayAOA = numpy.zeros((phases.shape[0],3))
3612 3614 cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList,directions)
3613 3615
3614 3616 arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3615 3617 cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3616 3618 arrayAOA[:,2] = cosDirError
3617 3619
3618 3620 azimuthAngle = arrayAOA[:,0]
3619 3621 zenithAngle = arrayAOA[:,1]
3620 3622
3621 3623 #Setting Error
3622 3624 indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0]
3623 3625 error[indError] = 0
3624 3626 #Number 3: AOA not feasible
3625 3627 indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3626 3628 error[indInvalid] = 3
3627 3629 #Number 4: Large difference in AOAs obtained from different antenna baselines
3628 3630 indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3629 3631 error[indInvalid] = 4
3630 3632 return arrayAOA, error
3631 3633
3632 3634 def __getDirectionCosines(self, arrayPhase, pairsList, distances):
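#Two-step interferometric estimate: a coarse direction cosine from the phase sum over
#the short effective baseline (d0 - d1), then a refined value from the phase difference
#over the long baseline (d0 + d1), picking the 2*pi alias closest to the coarse estimate.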
3633 3635
3634 3636 #Initializing some variables
3635 3637 ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3636 3638 ang_aux = ang_aux.reshape(1,ang_aux.size)
3637 3639
3638 3640 cosdir = numpy.zeros((arrayPhase.shape[0],2))
3639 3641 cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3640 3642
3641 3643
3642 3644 for i in range(2):
3643 3645 ph0 = arrayPhase[:,pairsList[i][0]]
3644 3646 ph1 = arrayPhase[:,pairsList[i][1]]
3645 3647 d0 = distances[pairsList[i][0]]
3646 3648 d1 = distances[pairsList[i][1]]
3647 3649
3648 3650 ph0_aux = ph0 + ph1
3649 3651 ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux))
3650 3652 # ph0_aux[ph0_aux > numpy.pi] -= 2*numpy.pi
3651 3653 # ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
3652 3654 #First Estimation
3653 3655 cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1))
3654 3656
3655 3657 #Most-Accurate Second Estimation
3656 3658 phi1_aux = ph0 - ph1
3657 3659 phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3658 3660 #Direction Cosine 1
3659 3661 cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1))
3660 3662
3661 3663 #Searching the correct Direction Cosine
3662 3664 cosdir0_aux = cosdir0[:,i]
3663 3665 cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3664 3666 #Minimum Distance
3665 3667 cosDiff = (cosdir1 - cosdir0_aux)**2
3666 3668 indcos = cosDiff.argmin(axis = 1)
3667 3669 #Saving Value obtained
3668 3670 cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3669 3671
3670 3672 return cosdir0, cosdir
3671 3673
3672 3674 def __calculateAOA(self, cosdir, azimuth):
3673 3675 cosdirX = cosdir[:,0]
3674 3676 cosdirY = cosdir[:,1]
3675 3677
3676 3678 zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3677 3679 azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth#0 deg north, 90 deg east
3678 3680 angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3679 3681
3680 3682 return angles
3681 3683
3682 3684 def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3683 3685
3684 3686 Ramb = 375 #Ramb = c/(2*PRF)
3685 3687 Re = 6371 #Earth Radius
3686 3688 heights = numpy.zeros(Ranges.shape)
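#Resolve the range ambiguity: test the measured range plus 0, 1 and 2 ambiguity
#intervals (Ramb) and convert slant range to height with the spherical-Earth law of
#cosines: h = sqrt(Re**2 + Ri**2 + 2*Re*Ri*cos(zenith)) - Re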
3687 3689
3688 3690 R_aux = numpy.array([0,1,2])*Ramb
3689 3691 R_aux = R_aux.reshape(1,R_aux.size)
3690 3692
3691 3693 Ranges = Ranges.reshape(Ranges.size,1)
3692 3694
3693 3695 Ri = Ranges + R_aux
3694 3696 hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3695 3697
3696 3698 #Check if there is a height between 70 and 110 km
3697 3699 h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3698 3700 ind_h = numpy.where(h_bool == 1)[0]
3699 3701
3700 3702 hCorr = hi[ind_h, :]
3701 3703 ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3702 3704
3703 3705 hCorr = hi[ind_hCorr][:len(ind_h)]
3704 3706 heights[ind_h] = hCorr
3705 3707
3706 3708 #Setting Error
3707 3709 #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
3708 3710 #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3709 3711 indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0]
3710 3712 error[indError] = 0
3711 3713 indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3712 3714 error[indInvalid2] = 14
3713 3715 indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3714 3716 error[indInvalid1] = 13
3715 3717
3716 3718 return heights, error
3717 3719
3718 3720 def getPhasePairs(self, channelPositions):
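#From the five antenna positions, find the pairs aligned along the X and Y axes,
#identify the common (center) channel of each axis and order the two side channels by
#distance, returning the four (center, side) pairs and their baseline lengths.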
3719 3721 chanPos = numpy.array(channelPositions)
3720 3722 listOper = list(itertools.combinations(list(range(5)),2))
3721 3723
3722 3724 distances = numpy.zeros(4)
3723 3725 axisX = []
3724 3726 axisY = []
3725 3727 distX = numpy.zeros(3)
3726 3728 distY = numpy.zeros(3)
3727 3729 ix = 0
3728 3730 iy = 0
3729 3731
3730 3732 pairX = numpy.zeros((2,2))
3731 3733 pairY = numpy.zeros((2,2))
3732 3734
3733 3735 for i in range(len(listOper)):
3734 3736 pairi = listOper[i]
3735 3737
3736 3738 posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:])
3737 3739
3738 3740 if posDif[0] == 0:
3739 3741 axisY.append(pairi)
3740 3742 distY[iy] = posDif[1]
3741 3743 iy += 1
3742 3744 elif posDif[1] == 0:
3743 3745 axisX.append(pairi)
3744 3746 distX[ix] = posDif[0]
3745 3747 ix += 1
3746 3748
3747 3749 for i in range(2):
3748 3750 if i==0:
3749 3751 dist0 = distX
3750 3752 axis0 = axisX
3751 3753 else:
3752 3754 dist0 = distY
3753 3755 axis0 = axisY
3754 3756
3755 3757 side = numpy.argsort(dist0)[:-1]
3756 3758 axis0 = numpy.array(axis0)[side,:]
3757 3759 chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0])
3758 3760 axis1 = numpy.unique(numpy.reshape(axis0,4))
3759 3761 side = axis1[axis1 != chanC]
3760 3762 diff1 = chanPos[chanC,i] - chanPos[side[0],i]
3761 3763 diff2 = chanPos[chanC,i] - chanPos[side[1],i]
3762 3764 if diff1<0:
3763 3765 chan2 = side[0]
3764 3766 d2 = numpy.abs(diff1)
3765 3767 chan1 = side[1]
3766 3768 d1 = numpy.abs(diff2)
3767 3769 else:
3768 3770 chan2 = side[1]
3769 3771 d2 = numpy.abs(diff2)
3770 3772 chan1 = side[0]
3771 3773 d1 = numpy.abs(diff1)
3772 3774
3773 3775 if i==0:
3774 3776 chanCX = chanC
3775 3777 chan1X = chan1
3776 3778 chan2X = chan2
3777 3779 distances[0:2] = numpy.array([d1,d2])
3778 3780 else:
3779 3781 chanCY = chanC
3780 3782 chan1Y = chan1
3781 3783 chan2Y = chan2
3782 3784 distances[2:4] = numpy.array([d1,d2])
3783 3785 # axisXsides = numpy.reshape(axisX[ix,:],4)
3784 3786 #
3785 3787 # channelCentX = int(numpy.intersect1d(pairX[0,:], pairX[1,:])[0])
3786 3788 # channelCentY = int(numpy.intersect1d(pairY[0,:], pairY[1,:])[0])
3787 3789 #
3788 3790 # ind25X = numpy.where(pairX[0,:] != channelCentX)[0][0]
3789 3791 # ind20X = numpy.where(pairX[1,:] != channelCentX)[0][0]
3790 3792 # channel25X = int(pairX[0,ind25X])
3791 3793 # channel20X = int(pairX[1,ind20X])
3792 3794 # ind25Y = numpy.where(pairY[0,:] != channelCentY)[0][0]
3793 3795 # ind20Y = numpy.where(pairY[1,:] != channelCentY)[0][0]
3794 3796 # channel25Y = int(pairY[0,ind25Y])
3795 3797 # channel20Y = int(pairY[1,ind20Y])
3796 3798
3797 3799 # pairslist = [(channelCentX, channel25X),(channelCentX, channel20X),(channelCentY,channel25Y),(channelCentY, channel20Y)]
3798 3800 pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
3799 3801
3800 3802 return pairslist, distances
3801 3803 # def __getAOA(self, phases, pairsList, error, AOAthresh, azimuth):
3802 3804 #
3803 3805 # arrayAOA = numpy.zeros((phases.shape[0],3))
3804 3806 # cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList)
3805 3807 #
3806 3808 # arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
3807 3809 # cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
3808 3810 # arrayAOA[:,2] = cosDirError
3809 3811 #
3810 3812 # azimuthAngle = arrayAOA[:,0]
3811 3813 # zenithAngle = arrayAOA[:,1]
3812 3814 #
3813 3815 # #Setting Error
3814 3816 # #Number 3: AOA not feasible
3815 3817 # indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
3816 3818 # error[indInvalid] = 3
3817 3819 # #Number 4: Large difference in AOAs obtained from different antenna baselines
3818 3820 # indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
3819 3821 # error[indInvalid] = 4
3820 3822 # return arrayAOA, error
3821 3823 #
3822 3824 # def __getDirectionCosines(self, arrayPhase, pairsList):
3823 3825 #
3824 3826 # #Initializing some variables
3825 3827 # ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
3826 3828 # ang_aux = ang_aux.reshape(1,ang_aux.size)
3827 3829 #
3828 3830 # cosdir = numpy.zeros((arrayPhase.shape[0],2))
3829 3831 # cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
3830 3832 #
3831 3833 #
3832 3834 # for i in range(2):
3833 3835 # #First Estimation
3834 3836 # phi0_aux = arrayPhase[:,pairsList[i][0]] + arrayPhase[:,pairsList[i][1]]
3835 3837 # #Dealias
3836 3838 # indcsi = numpy.where(phi0_aux > numpy.pi)
3837 3839 # phi0_aux[indcsi] -= 2*numpy.pi
3838 3840 # indcsi = numpy.where(phi0_aux < -numpy.pi)
3839 3841 # phi0_aux[indcsi] += 2*numpy.pi
3840 3842 # #Direction Cosine 0
3841 3843 # cosdir0[:,i] = -(phi0_aux)/(2*numpy.pi*0.5)
3842 3844 #
3843 3845 # #Most-Accurate Second Estimation
3844 3846 # phi1_aux = arrayPhase[:,pairsList[i][0]] - arrayPhase[:,pairsList[i][1]]
3845 3847 # phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
3846 3848 # #Direction Cosine 1
3847 3849 # cosdir1 = -(phi1_aux + ang_aux)/(2*numpy.pi*4.5)
3848 3850 #
3849 3851 # #Searching the correct Direction Cosine
3850 3852 # cosdir0_aux = cosdir0[:,i]
3851 3853 # cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
3852 3854 # #Minimum Distance
3853 3855 # cosDiff = (cosdir1 - cosdir0_aux)**2
3854 3856 # indcos = cosDiff.argmin(axis = 1)
3855 3857 # #Saving Value obtained
3856 3858 # cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
3857 3859 #
3858 3860 # return cosdir0, cosdir
3859 3861 #
3860 3862 # def __calculateAOA(self, cosdir, azimuth):
3861 3863 # cosdirX = cosdir[:,0]
3862 3864 # cosdirY = cosdir[:,1]
3863 3865 #
3864 3866 # zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
3865 3867 # azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east
3866 3868 # angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
3867 3869 #
3868 3870 # return angles
3869 3871 #
3870 3872 # def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
3871 3873 #
3872 3874 # Ramb = 375 #Ramb = c/(2*PRF)
3873 3875 # Re = 6371 #Earth Radius
3874 3876 # heights = numpy.zeros(Ranges.shape)
3875 3877 #
3876 3878 # R_aux = numpy.array([0,1,2])*Ramb
3877 3879 # R_aux = R_aux.reshape(1,R_aux.size)
3878 3880 #
3879 3881 # Ranges = Ranges.reshape(Ranges.size,1)
3880 3882 #
3881 3883 # Ri = Ranges + R_aux
3882 3884 # hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
3883 3885 #
3884 3886 # #Check if there is a height between 70 and 110 km
3885 3887 # h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
3886 3888 # ind_h = numpy.where(h_bool == 1)[0]
3887 3889 #
3888 3890 # hCorr = hi[ind_h, :]
3889 3891 # ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
3890 3892 #
3891 3893 # hCorr = hi[ind_hCorr]
3892 3894 # heights[ind_h] = hCorr
3893 3895 #
3894 3896 # #Setting Error
3895 3897 # #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
3896 3898 # #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
3897 3899 #
3898 3900 # indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
3899 3901 # error[indInvalid2] = 14
3900 3902 # indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
3901 3903 # error[indInvalid1] = 13
3902 3904 #
3903 3905 # return heights, error
3904 3906
3905 3907
3906 3908 class WeatherRadar(Operation):
3907 3909 '''
3908 3910 Function that implements Weather Radar operations.
3909 3911 Input:
3910 3912 Output:
3911 3913 Parameters affected:
3912 3914 '''
3913 3915 isConfig = False
3914 3916
3915 3917 def __init__(self):
3916 3918 Operation.__init__(self)
3917 3919
3918 3920 def setup(self,dataOut,Pt=0,Gt=0,Gr=0,lambda_=0, aL=0,
3919 3921 tauW= 0,thetaT=0,thetaR=0,Km =0):
3920 3922 self.nCh = dataOut.nChannels
3921 3923 self.nHeis = dataOut.nHeights
3922 3924 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
3923 3925 self.Range = numpy.arange(dataOut.nHeights)*deltaHeight + dataOut.heightList[0]
3924 3926 self.Range = self.Range.reshape(1,self.nHeis)
3925 3927 self.Range = numpy.tile(self.Range,[self.nCh,1])
3926 3928 '''-----------1 Radar Constant----------'''
3927 3929 self.Pt = Pt
3928 3930 self.Gt = Gt
3929 3931 self.Gr = Gr
3930 3932 self.lambda_ = lambda_
3931 3933 self.aL = aL
3932 3934 self.tauW = tauW
3933 3935 self.thetaT = thetaT
3934 3936 self.thetaR = thetaR
3935 3937 self.Km = Km
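#Radar constant of the weather-radar (Probert-Jones type) equation:
#RadarConstant = (4*pi)**3 * aL**2 * 16*ln(2) / (Pt*Gt*Gr*lambda_**2*c*tauW*pi*thetaT*thetaR),
#so that the reflectivity is n_radar = RadarConstant*Pr*R**2 (see run()).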
3936 3938 Numerator = ((4*numpy.pi)**3 * aL**2 * 16 *numpy.log(2))
3937 3939 Denominator = (Pt * Gt * Gr * lambda_**2 * SPEED_OF_LIGHT * tauW * numpy.pi*thetaT*thetaR)
3938 3940 self.RadarConstant = Numerator/Denominator
3939 3941 '''-----------2 Radar Reflectivity and Reflectivity Factor------'''
3940 3942 self.n_radar = numpy.zeros((self.nCh,self.nHeis))
3941 3943 self.Z_radar = numpy.zeros((self.nCh,self.nHeis))
3942 3944
3943 3945 def setMoments(self,dataOut,i):
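#Moment index: 0 = power, 1 = Doppler velocity, 2 = spectral width, 3 = SNR, taken
#from pulse-pair (Voltage) or spectral (Spectra) estimates.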
3944 3946
3945 3947 type = dataOut.inputUnit
3946 3948 nCh = dataOut.nChannels
3947 3949 nHeis= dataOut.nHeights
3948 3950 data_param = numpy.zeros((nCh,4,nHeis))
3949 3951 if type == "Voltage":
3950 3952 data_param[:,0,:] = dataOut.dataPP_POW/(dataOut.nCohInt**2)
3951 3953 data_param[:,1,:] = dataOut.dataPP_DOP
3952 3954 data_param[:,2,:] = dataOut.dataPP_WIDTH
3953 3955 data_param[:,3,:] = dataOut.dataPP_SNR
3954 3956 if type == "Spectra":
3955 3957 data_param[:,0,:] = dataOut.data_POW
3956 3958 data_param[:,1,:] = dataOut.data_DOP
3957 3959 data_param[:,2,:] = dataOut.data_WIDTH
3959 3961 data_param[:,3,:] = dataOut.data_SNR
3960 3962
3961 3963 return data_param[:,i,:]
3962 3964
3963 3965
3964 3966 def run(self,dataOut,Pt=25,Gt=200.0,Gr=50.0,lambda_=0.32, aL=2.5118,
3965 3967 tauW= 4.0e-6,thetaT=0.165,thetaR=0.367,Km =0.93):
3966 3968
3967 3969 if not self.isConfig:
3968 3970 self.setup(dataOut= dataOut,Pt=25,Gt=200.0,Gr=50.0,lambda_=0.32, aL=2.5118,
3969 3971 tauW= 4.0e-6,thetaT=0.165,thetaR=0.367,Km =0.93)
3970 3972 self.isConfig = True
3971 3973 '''-----------------------------Radar Power - Signal S-----------------------------'''
3972 3974 Pr = self.setMoments(dataOut,0)
3973 3975
3974 3976 for R in range(self.nHeis):
3975 3977 self.n_radar[:,R] = self.RadarConstant*Pr[:,R]* (self.Range[:,R])**2
3976 3978
3977 3979 self.Z_radar[:,R] = self.n_radar[:,R]* self.lambda_**4/( numpy.pi**5 * self.Km**2)
3978 3980
3979 3981 '''----------- Equivalent Reflectivity Factor lambda_ < 10 cm, lambda_ = 3.2 cm-------'''
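#Equivalent reflectivity factor in logarithmic units: dBZ = 10*log10(Zeh)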
3980 3982 Zeh = self.Z_radar
3981 3983 dBZeh = 10*numpy.log10(Zeh)
3982 3984 dataOut.factor_Zeh= dBZeh
3983 3985 self.n_radar = numpy.zeros((self.nCh,self.nHeis))
3984 3986 self.Z_radar = numpy.zeros((self.nCh,self.nHeis))
3985 3987
3986 3988 return dataOut
3987 3989
3988 3990 class PedestalInformation(Operation):
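'''
Attaches the pedestal (antenna positioner) azimuth angle to each acquired data block
by matching acquisition timestamps against the pedestal HDF5 files in path_ped.
'''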
3989 3991 path_ped = None
3990 3992 path_adq = None
3991 3993 t_Interval_p = None
3992 3994 n_Muestras_p = None
3993 3995 isConfig = False
3994 3996 blocksPerfile= None
3995 3997 f_a_p = None
3996 3998 online = None
3997 3999 angulo_adq = None
3998 4000 nro_file = None
3999 4001 nro_key_p = None
4000 4002 tmp = None
4001 4003
4002 4004
4003 4005 def __init__(self):
4004 4006 Operation.__init__(self)
4005 4007
4008
4009 def getAnguloProfile(self,utc_adq,list_pedestal):
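#For a given acquisition epoch, locate the pedestal file that contains it and index
#its 'azi_pos' array (sampled every t_Interval_p seconds) to return the matching
#azimuth; NaN is returned when no pedestal sample is available.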
4010 ##print("NEW-METHOD")
4011 utc_adq = utc_adq
4012 list_pedestal = list_pedestal
4013 utc_ped_list = []
4014
4015 for i in range(len(list_pedestal)):
4016 #print(i)# NOTE: synchronization identifier
4017 utc_ped_list.append(self.gettimeutcfromDirFilename(path=self.path_ped,file=list_pedestal[i]))
4018
4019 nro_file,utc_ped,utc_ped_1 =self.getNROFile(utc_adq,utc_ped_list)
4020 #print("utc_adq",utc_adq)
4021 print("utc_ped",utc_ped)
4022 print("DIFF",utc_adq-utc_ped)
4023 print("nro_file",nro_file)
4024 if nro_file < 0:
4025 #print("-----------------------------------------------------------------")
4026 #print("INSERTING NaN ANGLE")
4027 return numpy.NaN
4028 else:
4029 nro_key_p = int((utc_adq-utc_ped)/self.t_Interval_p)-1 # careful with the -1, estimated (alex)
4030 ff_pedestal = list_pedestal[nro_file]
4031 #angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azimuth")
4032 angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azi_pos")
4033 if 99>=nro_key_p>0:
4034 ##print("angulo_array :",angulo[nro_key_p])
4035 return angulo[nro_key_p]
4036 else:
4037 #print("-----------------------------------------------------------------")
4038 return numpy.NaN
4039
4040
4006 4041 def getfirstFilefromPath(self,path,meta,ext):
4007 4042 validFilelist = []
4008 4043 #print("SEARH",path)
4009 4044 try:
4010 4045 fileList = os.listdir(path)
4011 4046 except:
4012 4047 print("check path - fileList")
4013 4048 if len(fileList)<1:
4014 4049 return None
4015 4050 # meta 1234 567 8-18 BCDE
4016 4051 # H,D,PE YYYY DDD EPOC .ext
4017 4052
4018 4053 for thisFile in fileList:
4019 4054 #print("HI",thisFile)
4020 4055 if meta =="PE":
4021 4056 try:
4022 4057 number= int(thisFile[len(meta)+7:len(meta)+17])
4023 4058 except:
4024 4059 print("There is a file or folder with different format")
4025 4060 if meta == "D":
4026 4061 try:
4027 4062 number= int(thisFile[8:11])
4028 4063 except:
4029 4064 print("There is a file or folder with different format")
4030 4065
4031 4066 if not isNumber(str=number):
4032 4067 continue
4033 4068 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
4034 4069 continue
4035 4070 validFilelist.sort()
4036 4071 validFilelist.append(thisFile)
4037 4072 if len(validFilelist)>0:
4038 4073 validFilelist = sorted(validFilelist,key=str.lower)
4039 4074 return validFilelist
4040 4075 return None
4041 4076
4042 4077 def gettimeutcfromDirFilename(self,path,file):
4043 4078 dir_file= path+"/"+file
4044 4079 fp = h5py.File(dir_file,'r')
4045 4080 #epoc = fp['Metadata'].get('utctimeInit')[()]
4046 4081 epoc = fp['Data'].get('utc')[()]
4047 4082 fp.close()
4048 4083 return epoc
4049 4084
4050 4085 def gettimeutcadqfromDirFilename(self,path,file):
4051 4086 dir_file= path+"/"+file
4052 4087 fp = h5py.File(dir_file,'r')
4053 4088 epoc = fp['Metadata'].get('utctimeInit')[()]
4054 4089 #epoc = fp['Data'].get('utc')[()]
4055 4090 fp.close()
4056 4091 return epoc
4057 4092
4058 4093 def getDatavaluefromDirFilename(self,path,file,value):
4059 4094 dir_file= path+"/"+file
4060 4095 fp = h5py.File(dir_file,'r')
4061 4096 array = fp['Data'].get(value)[()]
4062 4097 fp.close()
4063 4098 return array
4064 4099
4065 4100 def getFile_KeyP(self,list_pedestal,list_adq):
4066 4101 print(list_pedestal)
4067 4102 print(list_adq)
4068 4103
4069 4104 def getNROFile(self,utc_adq,utc_ped_list):
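#Return the index of the last pedestal file whose start epoch precedes the acquisition
#epoch, together with that file's epoch and the epoch of the following file.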
4070 4105 c=0
4071 print("insidegetNROFile")
4072 print(utc_adq)
4073 print(len(utc_ped_list))
4106 #print("insidegetNROFile")
4107 #print(utc_adq)
4108 #print(len(utc_ped_list))
4074 4109 for i in range(len(utc_ped_list)):
4075 4110 if utc_adq>utc_ped_list[i]:
4076 4111 #print("mayor")
4077 4112 #print("utc_ped_list",utc_ped_list[i])
4078 4113 c +=1
4079 4114
4080 4115 return c-1,utc_ped_list[c-1],utc_ped_list[c]
4081 4116
4082 4117 def verificarNROFILE(self,dataOut,utc_ped,f_a_p,n_Muestras_p):
4083 4118 var =int(f_a_p/n_Muestras_p)
4084 4119 flag=0
4085 4120 for i in range(var):
4086 4121 if dataOut.utctime+i==utc_ped:
4087 4122 flag=1
4088 4123 break
4089 4124 return flag
4090 4125
4091 4126 #def setup_offline(self,dataOut,list_pedestal,list_adq):
4092 4127 def setup_offline(self,dataOut,list_pedestal):
4093 4128
4094 4129 print("SETUP OFFLINE")
4095 4130 print(self.path_ped)
4096 4131 #print(self.path_adq)
4097 4132 print(len(self.list_pedestal))
4098 4133 #print(len(self.list_adq))
4099 4134 utc_ped_list=[]
4100 4135 for i in range(len(self.list_pedestal)):
4101 4136 #print(i)# NOTE: synchronization identifier
4102 4137 utc_ped_list.append(self.gettimeutcfromDirFilename(path=self.path_ped,file=self.list_pedestal[i]))
4103 4138
4104 4139 #utc_ped_list= utc_ped_list
4105 4140 ###utc_adq = self.gettimeutcadqfromDirFilename(path=self.path_adq,file=self.list_adq[0])
4106 4141 print("dios existe donde esta")
4107 4142
4108 #print("utc_ped_list",utc_ped_list)
4109 ###print("utc_adq",utc_adq)
4143 print("utc_ped_list",utc_ped_list)
4144 #print("utc_adq",utc_adq)
4110 4145 # utc_adq_dataOut
4111 4146 utc_adq_dataOut =dataOut.utctime
4112 print("Offline-utc_adq_dataout",utc_adq_dataOut)
4147 print("OFFLINE-UTC_ADQ_dataout",utc_adq_dataOut)
4113 4148
4114 4149 nro_file,utc_ped,utc_ped_1 = self.getNROFile(utc_adq=utc_adq_dataOut, utc_ped_list= utc_ped_list)
4115 4150
4116 4151 print("nro_file",nro_file,"utc_ped",utc_ped)
4117 4152 print("nro_file",i)
4118 4153 nro_key_p = int((utc_adq_dataOut-utc_ped)/self.t_Interval_p)-1 # careful with the -1, estimated (alex)
4119 4154 print("nro_key_p",nro_key_p)
4120 4155
4121 ff_pedestal = self.list_pedestal[nro_file]
4156 ff_pedestal = self.list_pedestal[nro_file]
4122 4158 #angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azimuth")
4123 4159 angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azi_pos")
4124 4160
4125 4161 print("utc_pedestal_init :",utc_ped+nro_key_p*self.t_Interval_p)
4126 print("angulo_array :",angulo[nro_key_p])
4162 if nro_key_p>0:
4163 print("angulo_array :",angulo[nro_key_p])
4127 4164 self.nro_file = nro_file
4128 4165 self.nro_key_p = nro_key_p
4129 4166
4130 4167 def setup_online(self,dataOut):
4131 4168 utc_adq =dataOut.utctime
4132 4169 print("Online-utc_adq",utc_adq)
4133 4170 print(len(self.list_pedestal))
4134 4171 utc_ped_list=[]
4135 4172 for i in range(len(self.list_pedestal)):
4136 4173 utc_ped_list.append(self.gettimeutcfromDirFilename(path=self.path_ped,file=self.list_pedestal[i]))
4137 4174 print(utc_ped_list[:20])
4138 4175 #print(utc_ped_list[488:498])
4139 4176 print("ultimo UTC-PEDESTAL",utc_ped_list[-1])
4140 4177 nro_file,utc_ped,utc_ped_1 = self.getNROFile(utc_adq=utc_adq, utc_ped_list= utc_ped_list)
4141 4178 print("nro_file",nro_file,"utc_ped",utc_ped,"utc_ped_1",utc_ped_1)
4142 4179 print("name_PEDESTAL",self.list_pedestal[nro_file])
4143 4180 nro_key_p = int((utc_adq-utc_ped)/self.t_Interval_p)-1
4144 4181 print("nro_key_p",nro_key_p)
4145 4182 ff_pedestal = self.list_pedestal[nro_file]
4146 4183 #angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azimuth")
4147 4184 angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azi_pos")
4148 4185
4149 4186 print("utc_pedestal_init :",utc_ped+nro_key_p*self.t_Interval_p)
4150 4187 print("angulo_array :",angulo[nro_key_p])
4151 4188 self.nro_file = nro_file
4152 4189 self.nro_key_p = nro_key_p
4153 4190
4154 4191 #def setup(self,dataOut,path_ped,path_adq,t_Interval_p,n_Muestras_p,blocksPerfile,f_a_p,online):
4155 4192 def setup(self,dataOut,path_ped,t_Interval_p,n_Muestras_p,blocksPerfile,f_a_p,online):
4156 4193 print("SETUP PEDESTAL")
4157 4194 self.__dataReady = False
4158 4195 self.path_ped = path_ped
4159 4196 #self.path_adq = path_adq
4160 4197 self.t_Interval_p = t_Interval_p
4161 4198 self.n_Muestras_p = n_Muestras_p
4162 4199 self.blocksPerfile= blocksPerfile
4200 #print("dasdadasdasda",self.blocksPerfile)
4163 4201 self.f_a_p = f_a_p
4164 4202 self.online = online
4165 4203 self.angulo_adq = numpy.zeros(self.blocksPerfile)
4166 4204 self.__profIndex = 0
4167 4205 self.tmp = 0
4168 4206 self.c_ped = 0
4169 4207 print(self.path_ped)
4170 4208 #print(self.path_adq)
4171 4209 self.list_pedestal = self.getfirstFilefromPath(path=self.path_ped,meta="PE",ext=".hdf5")
4172 4210 print("LIST NEW", self.list_pedestal[:20])
4173 4211 #self.list_adq = self.getfirstFilefromPath(path=self.path_adq,meta="D",ext=".hdf5")
4174 4212 print("*************Longitud list pedestal****************",len(self.list_pedestal))
4175
4213 '''
4176 4214 if self.online:
4177 4215 print("Enable Online")
4178 4216 self.setup_online(dataOut)
4179 4217 else:
4180 4218 #self.setup_offline(dataOut,list_pedestal=self.list_pedestal,list_adq=self.list_adq)
4181 4219 self.setup_offline(dataOut,list_pedestal=self.list_pedestal)
4182
4220 '''
4183 4221
4184 4222 def setNextFileP(self,dataOut):
4185 4223 if self.online:
4186 4224 data_pedestal = self.setNextFileonline()
4187 4225 else:
4188 4226 data_pedestal = self.setNextFileoffline(dataOut)
4189 4227
4190 4228 return data_pedestal
4191 4229
4192 4230
4231
4232 def checkPedFile(self,path,nro_file):
4233 try:
4234 utc_pedestal_now = self.gettimeutcfromDirFilename(path=path,file=self.list_pedestal[nro_file])
4235 utc_pedestal_past = self.gettimeutcfromDirFilename(path=path,file=self.list_pedestal[nro_file-1])
4236 diff_utc = utc_pedestal_now - utc_pedestal_past
4237 except:
4238 diff_utc = -1
4239 return diff_utc
4240
4193 4241 def setNextFileoffline(self,dataOut):
4194 ##tmp=0
4242 print("error")
4243 print("no entiendo")
4244 flag_NOPedfile = False
4245 cont_NOPedFile = 0
4246 if self.nro_file<0:
4247 print("adq empieza antes")
4248 return numpy.ones(self.blocksPerfile)*numpy.nan
4249 print("INICIO-----------------------------------------")
4250 sleep(3)
4195 4251 for j in range(self.blocksPerfile):
4196 ###print("NUMERO DEL BLOQUE---->",j)
4197 ###print("nro_key_p",self.nro_key_p)
4252 print(j)
4253 iterador = self.nro_key_p + self.f_a_p*self.c_ped
4254 self.c_ped = self.c_ped + 1
4255 print("Iterador-->", iterador)
4256 if iterador < self.n_Muestras_p:
4257 self.nro_file = self.nro_file
4258 else:
4259 if flag_NOPedfile==False:
4260 ###########################
4261 self.nro_file = self.nro_file +1
4262 ###########################
4263 print("nro_file",self.nro_file)
4264 diff_utc = self.checkPedFile(path=self.path_ped,nro_file=self.nro_file)
4265 print("diff_utc",diff_utc)
4266 if diff_utc==1:
4267 utc_ped_setnext=self.gettimeutcfromDirFilename(path=self.path_ped,file=self.list_pedestal[self.nro_file])
4268 utc_adq_setnext=dataOut.utctime
4269 print("utc_pedestal",utc_ped_setnext)
4270 print("utc_adq",utc_adq_setnext)
4271 print("self.c_ped",self.c_ped)
4272 #dif = self.blocksPerfile-(self.nro_key_p+self.f_a_p*(self.c_ped-2))
4273 dif = self.n_Muestras_p-(self.nro_key_p+self.f_a_p*(self.c_ped-2))
4274 self.c_ped = 1
4275 ##tmp = j
4276 ##print("tmp else",tmp)
4277 self.nro_key_p= self.f_a_p-dif
4278 iterador = self.nro_key_p
4279 print("iterador else",iterador)
4280 if diff_utc >1:
4281 print("FALTAN DATOS DEL PEDESTAL AMIGO WAIT")
4282 sleep(1)
4283 #aqui no hay DATA pero creo el nro_key_p "como si existiera" y reinicio el c_ped
4284 print("continua bro")
4285 print("self.c_ped",self.c_ped)
4286 dif = self.n_Muestras_p-(self.nro_key_p+self.f_a_p*(self.c_ped-2))
4287 self.c_ped = 1
4288 self.nro_key_p = self.f_a_p-dif
4289 #
4290 iterador = self.nro_key_p
4291 print("iterador else",iterador)
4292 #
4293 utc_ped_nofile = self.gettimeutcfromDirFilename(path=self.path_ped,file=self.list_pedestal[self.nro_file-1])+1
4294 #iterador = self.nro_key_p #no hay iterador pero importa el nro_key_p
4295 flag_NOPedfile= True
4296 print("q paso")
4297
4298 if diff_utc <0:
4299 print("No hay mas archivos")
4300 self.angulo_adq = numpy.ones(self.blocksPerfile)*numpy.nan
4301 else:
4302 print("NO EXISTE DATA - IMAGINA")
4303 dif = self.n_Muestras_p-(self.nro_key_p+self.f_a_p*(self.c_ped-2))
4304 self.c_ped = 1
4305 ##tmp = j
4306 ##print("tmp else",tmp)
4307 self.nro_key_p= self.f_a_p-dif
4198 4308
4199 #iterador = self.nro_key_p +self.f_a_p*(j-tmp)
4200 iterador = self.nro_key_p +self.f_a_p*self.c_ped
4201 self.c_ped = self.c_ped +1
4202 4309
4203 print("iterador------------->",iterador)
4204 if iterador < self.n_Muestras_p:
4205 self.nro_file = self.nro_file
4206 else:
4207 self.nro_file = self.nro_file+1
4208 print("PRUEBA-------------")
4209 utc_ped_setnext=self.gettimeutcfromDirFilename(path=self.path_ped,file=self.list_pedestal[self.nro_file])
4210 utc_adq_setnext=dataOut.utctime
4211 print("utc_pedestal",utc_ped_setnext)
4212 print("utc_adq",utc_adq_setnext)
4213
4214 print("self.c_ped",self.c_ped)
4215 #dif = self.blocksPerfile-(self.nro_key_p+self.f_a_p*(self.c_ped-2))
4216 dif = self.n_Muestras_p-(self.nro_key_p+self.f_a_p*(self.c_ped-2))
4217
4218 self.c_ped = 1
4219 ##tmp = j
4220 ##print("tmp else",tmp)
4221 self.nro_key_p= self.f_a_p-dif
4222 iterador = self.nro_key_p
4223 print("iterador else",iterador)
4224 #self.c_ped = self.c_ped +1
4225 4310
4226 print("nro_file",self.nro_file)
4227 #print("tmp",tmp)
4228 try:
4311
4312 if flag_NOPedfile == False:
4313 # AQUI NECESITO DOS COSAS "nro_file" y el "iterador"
4229 4314 ff_pedestal = self.list_pedestal[self.nro_file]
4230 4315 print("ff_pedestal",ff_pedestal)
4231 except:
4232 print("############# EXCEPCION ######################")
4233 return numpy.ones(self.blocksPerfile)*numpy.nan
4316 angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azi_pos")
4317 save_angulo = angulo[iterador]
4318 self.angulo_adq[j]= angulo[iterador]
4319 else:
4320 print(" DATA SIMULADA------------------------------------",j)
4321 self.angulo_adq[j]= numpy.nan
4322 cont_NOPedFile = cont_NOPedFile + 1
4323 print("contador:",cont_NOPedFile)
4324 sleep(2)
4325 if cont_NOPedFile==self.f_a_p:
4326 cont_NOPedFile = 0
4327 utc_ped_nofile = utc_ped_nofile+1
4328 utc_ped_setnext= self.gettimeutcfromDirFilename(path=self.path_ped,file=self.list_pedestal[self.nro_file])
4329 if utc_ped_setnext-utc_ped_nofile==1:
4330 flag_NOPedfile=False
4331
4234 4332
4235 #angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azimuth")
4236 angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azi_pos")
4237 4333
4238 self.angulo_adq[j]= angulo[iterador]
4239 4334
4240 4335 return self.angulo_adq
4241 4336
4242 4337 def setNextFileonline(self):
4243 4338 tmp = 0
4244 4339 self.nTries_p = 3
4245 4340 self.delay = 3
4246 4341 ready = 1
4247 4342 for j in range(self.blocksPerfile):
4248 4343 iterador = self.nro_key_p +self.f_a_p*(j-tmp)
4249 4344 if iterador < self.n_Muestras_p:
4250 4345 self.nro_file = self.nro_file
4251 4346 else:
4252 4347 self.nro_file = self.nro_file+1
4253 4348 dif = self.blocksPerfile-(self.nro_key_p+self.f_a_p*(j-tmp-1))
4254 4349 tmp = j
4255 4350 self.nro_key_p= self.f_a_p-dif
4256 4351 iterador = self.nro_key_p
4257 4352 #print("nro_file---------------- :",self.nro_file)
4258 4353 try:
4259 4354 # update list_pedestal
4260 4355 self.list_pedestal = self.getfirstFilefromPath(path=self.path_ped,meta="PE",ext=".hdf5")
4261 4356 ff_pedestal = self.list_pedestal[self.nro_file]
4262 4357 except:
4263 4358 ff_pedestal = None
4264 4359 ready = 0
4265 4360 for nTries_p in range(self.nTries_p):
4266 4361 try:
4267 4362 # update list_pedestal
4268 4363 self.list_pedestal = self.getfirstFilefromPath(path=self.path_ped,meta="PE",ext=".hdf5")
4269 4364 ff_pedestal = self.list_pedestal[self.nro_file]
4270 4365 except:
4271 4366 ff_pedestal = None
4272 4367 if ff_pedestal is not None:
4273 4368 ready=1
4274 4369 break
4275 4370 log.warning("Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, self.nro_file, nTries_p + 1))
4276 4371 time.sleep(self.delay)
4277 4372 continue
4278 4373 #return numpy.ones(self.blocksPerfile)*numpy.nan
4279 4374
4280 4375 if ready == 1:
4281 4376 #angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azimuth")
4282 4377 angulo = self.getDatavaluefromDirFilename(path=self.path_ped,file=ff_pedestal,value="azi_pos")
4283 4378
4284 4379 else:
4285 4380 print("there is no pedestal file")
4286 4381 angulo = numpy.ones(self.n_Muestras_p)*numpy.nan
4287 4382 self.angulo_adq[j]= angulo[iterador]
4288 4383 ####print("Angulo",self.angulo_adq)
4289 4384 ####print("Angulo",len(self.angulo_adq))
4290 4385 #self.nro_key_p=iterador + self.f_a_p
4291 4386 #if self.nro_key_p< self.n_Muestras_p:
4292 4387 # self.nro_file = self.nro_file
4293 4388 #else:
4294 4389 # self.nro_file = self.nro_file+1
4295 4390 # self.nro_key_p= self.nro_key_p
4296 4391 return self.angulo_adq
4297 4392
4298 4393
4299 4394 #def run(self, dataOut,path_ped,path_adq,t_Interval_p,n_Muestras_p,blocksPerfile,f_a_p,online):
4300 4395 def run(self, dataOut,path_ped,t_Interval_p,n_Muestras_p,blocksPerfile,f_a_p,online):
4396 if not self.isConfig:
4397 self.setup(dataOut, path_ped,t_Interval_p,n_Muestras_p,blocksPerfile,f_a_p,online)
4398 self.__dataReady = True
4399 self.isConfig = True
4400 #print("config TRUE")
4401
4402 utc_adq = dataOut.utctime
4403 print("utc_adq---------------",utc_adq)
4404 list_pedestal = self.list_pedestal
4405 angulo = self.getAnguloProfile(utc_adq=utc_adq,list_pedestal=list_pedestal)
4406 print("angulo**********",angulo)
4407 dataOut.flagNoData = False
4408 if numpy.isnan(angulo):
4409 dataOut.flagNoData = True
4410 return dataOut
4411 dataOut.azimuth = angulo
4412 return dataOut
4301 4413
4414 '''
4302 4415 if not self.isConfig:
4303 4416 print("######################SETUP#########################################")
4417 print("CONFIG dataOut.utctime---",dataOut.utctime)
4304 4418 #self.setup( dataOut, path_ped,path_adq,t_Interval_p,n_Muestras_p,blocksPerfile,f_a_p,online)
4305 4419 self.setup( dataOut, path_ped,t_Interval_p,n_Muestras_p,blocksPerfile,f_a_p,online)
4306 self.isConfig = True
4420 print("aqui bro")
4421 if self.nro_file>=0:
4422 self.isConfig = True
4423 else:
4424 self.isConfig = False
4425 print("RUN---dataOut.utctime---",dataOut.utctime)
4307 4426
4308 4427 dataOut.flagNoData = True
4309 ###print("profIndex",self.__profIndex)
4310
4428 #sleep(0.5)
4429 #print("--------------------------------------------------------------------------------")
4430 #print("profIndex",self.__profIndex)
4431 '''
4432 '''
4311 4433 if self.__profIndex==0:
4312 4434 angulo_adq = self.setNextFileP(dataOut)
4313 4435 dataOut.azimuth = angulo_adq
4314 ######print("TIEMPO:",dataOut.utctime)
4315 ##print("####################################################################")
4316 ######print("angulos",dataOut.azimuth,len(dataOut.azimuth))
4436 print("TIEMPO--------:",dataOut.utctime)
4437 print("####################################################################")
4438 ####print("angulos",dataOut.azimuth,len(dataOut.azimuth))
4317 4439 self.__dataReady = True
4440 '''
4441 '''
4318 4442 self.__profIndex += 1
4319 4443 ####print("TIEMPO_bucle:",dataOut.utctime)
4320 ####print("profIndex",self.__profIndex)
4444 #print("profIndex",self.__profIndex)
4321 4445 if self.__profIndex== blocksPerfile:
4446 self.__dataReady = True
4322 4447 self.__profIndex = 0
4323 4448 if self.__dataReady:
4324 4449 #print(self.__profIndex,dataOut.azimuth[:10])
4450 angulo_adq = self.setNextFileP(dataOut)
4451 dataOut.azimuth = angulo_adq
4452 print("TIEMPO--------:",dataOut.utctime)
4453 print("####################################################################")
4454 ####print("angulos",dataOut.azimuth,len(dataOut.azimuth))
4325 4455 dataOut.flagNoData = False
4326 return dataOut
4456 '''
4457 #return dataOut
4327 4458
4328 4459
4329 4460 class Block360(Operation):
4330 4461 '''
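Accumulates n consecutive profiles (power and azimuth) into a single 360-degree scan
block, returned as data_360 together with its azimuth buffer data_azi.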
4331 4462 '''
4332 4463 isConfig = False
4333 4464 __profIndex = 0
4334 4465 __initime = None
4335 4466 __lastdatatime = None
4336 4467 __buffer = None
4337 4468 __dataReady = False
4338 4469 n = None
4339 4470 __nch = 0
4340 4471 __nHeis = 0
4341 4472 index = 0
4342 4473 mode = 0
4343 4474
4344 4475 def __init__(self,**kwargs):
4345 4476 Operation.__init__(self,**kwargs)
4346 4477
4347 4478 def setup(self, dataOut, n = None, mode = None):
4348 4479 '''
4349 4480 n = number of input PRFs
4350 4481 '''
4351 4482 self.__initime = None
4352 4483 self.__lastdatatime = 0
4353 4484 self.__dataReady = False
4354 4485 self.__buffer = 0
4355 4486 self.__buffer_1D = 0
4356 4487 self.__profIndex = 0
4357 4488 self.index = 0
4358 4489 self.__nch = dataOut.nChannels
4359 4490 self.__nHeis = dataOut.nHeights
4360 4491 ##print("ELVALOR DE n es:", n)
4361 4492 if n == None:
4362 4493 raise ValueError("n should be specified.")
4363 4494
4364 4495 if mode == None:
4365 4496 raise ValueError("mode should be specified.")
4366 4497
4367 4498 if n != None:
4368 4499 if n<1:
4369 4500 print("n should be greater than 2")
4370 4501 raise ValueError("n should be greater than 2")
4371 4502
4372 4503 self.n = n
4373 4504 self.mode = mode
4374 4505 print("self.mode",self.mode)
4375 4506 #print("nHeights")
4376 4507 self.__buffer = numpy.zeros(( dataOut.nChannels,n, dataOut.nHeights))
4377 4508 self.__buffer2= numpy.zeros(n)
4378 4509
4379 4510 def putData(self,data,mode):
4380 4511 '''
4381 4512 Add a profile to the __buffer and increase the __profIndex by one
4382 4513 '''
4383 4514 #print("line 4049",data.dataPP_POW.shape,data.dataPP_POW[:10])
4384 4515 #print("line 4049",data.azimuth.shape,data.azimuth)
4385 4516 if self.mode==0:
4386 4517 self.__buffer[:,self.__profIndex,:]= data.dataPP_POWER# FIRST MOMENT
4387 4518 if self.mode==1:
4388 4519 self.__buffer[:,self.__profIndex,:]= data.data_pow
4389 4520 #print("me casi",self.index,data.azimuth[self.index])
4390 4521 #print(self.__profIndex, self.index , data.azimuth[self.index] )
4391 4522 #print("magic",data.profileIndex)
4392 4523 #print(data.azimuth[self.index])
4393 4524 #print("index",self.index)
4394 4525
4395 self.__buffer2[self.__profIndex] = data.azimuth[self.index]
4526 #####self.__buffer2[self.__profIndex] = data.azimuth[self.index]
4527 self.__buffer2[self.__profIndex] = data.azimuth
4396 4528 #print("q pasa")
4397 self.index+=1
4529 #####self.index+=1
4398 4530 #print("index",self.index,data.azimuth[:10])
4399 4531 self.__profIndex += 1
4400 4532 return
4401 4533
4402 4534 def pushData(self,data):
4403 4535 '''
4404 4536 Return the accumulated data block, the number of profiles used and the azimuth buffer
4405 4537 Affected : self.__profIndex
4406 4538 '''
4407 4539 #print("pushData")
4408 4540
4409 4541 data_360 = self.__buffer
4410 4542 data_p = self.__buffer2
4411 4543 n = self.__profIndex
4412 4544
4413 4545 self.__buffer = numpy.zeros((self.__nch, self.n,self.__nHeis))
4414 4546 self.__buffer2 = numpy.zeros(self.n)
4415 4547 self.__profIndex = 0
4416 4548 #print("pushData")
4417 4549 return data_360,n,data_p
4418 4550
4419 4551
4420 4552 def byProfiles(self,dataOut):
4421 4553
4422 4554 self.__dataReady = False
4423 4555 data_360 = None
4424 4556 data_p = None
4425 4557 #print("dataOu",dataOut.dataPP_POW)
4426 4558 self.putData(data=dataOut,mode = self.mode)
4427 #print("profIndex",self.__profIndex)
4559 ##### print("profIndex",self.__profIndex)
4428 4560 if self.__profIndex == self.n:
4429 4561 data_360,n,data_p = self.pushData(data=dataOut)
4430 4562 self.__dataReady = True
4431 4563
4432 4564 return data_360,data_p
4433 4565
4434 4566
4435 4567 def blockOp(self, dataOut, datatime= None):
4436 4568 if self.__initime == None:
4437 4569 self.__initime = datatime
4438 4570 data_360,data_p = self.byProfiles(dataOut)
4439 4571 self.__lastdatatime = datatime
4440 4572
4441 4573 if data_360 is None:
4442 4574 return None, None,None
4443 4575
4576
4444 4577 avgdatatime = self.__initime
4578 if self.n==1:
4579 avgdatatime = datatime
4445 4580 deltatime = datatime - self.__lastdatatime
4446 4581 self.__initime = datatime
4447 4582 #print(data_360.shape,avgdatatime,data_p.shape)
4448 4583 return data_360,avgdatatime,data_p
4449 4584
4450 4585 def run(self, dataOut,n = None,mode=None,**kwargs):
4451 ####print("BLOCK 360 HERE WE GO MOMENTOS")
4586 #print("BLOCK 360 HERE WE GO MOMENTOS")
4452 4587 if not self.isConfig:
4453 4588 self.setup(dataOut = dataOut, n = n ,mode= mode ,**kwargs)
4454 self.index = 0
4589 ####self.index = 0
4455 4590 #print("comova",self.isConfig)
4456 4591 self.isConfig = True
4457 if self.index==dataOut.azimuth.shape[0]:
4458 self.index=0
4592 ####if self.index==dataOut.azimuth.shape[0]:
4593 #### self.index=0
4459 4594 data_360, avgdatatime,data_p = self.blockOp(dataOut, dataOut.utctime)
4460 4595 dataOut.flagNoData = True
4461 4596
4462 4597 if self.__dataReady:
4463 4598 dataOut.data_360 = data_360 # S
4464 4599 ##print("---------------------------------------------------------------------------------")
4465 ##print("---------------------------DATAREADY---------------------------------------------")
4600 #####print("---------------------------DATAREADY---------------------------------------------")
4466 4601 ##print("---------------------------------------------------------------------------------")
4467 4602 ##print("data_360",dataOut.data_360.shape)
4468 4603 dataOut.data_azi = data_p
4469 ##print("azi: ",dataOut.data_azi)
4604 #####print("azi: ",dataOut.data_azi)
4470 4605 #print("jroproc_parameters",data_p[0],data_p[-1])#,data_360.shape,avgdatatime)
4471 4606 dataOut.utctime = avgdatatime
4472 4607 dataOut.flagNoData = False
4473 4608 return dataOut
@@ -1,464 +1,464
1 1 # scp.py
2 2 # Copyright (C) 2008 James Bardin <j.bardin@gmail.com>
3 3
4 4 """
5 5 Utilities for sending files over ssh using the scp1 protocol.
6 6 """
7 7
8 8 __version__ = '0.10.0'
9 9
10 10 import locale
11 11 import os
12 12 import re
13 13 from socket import timeout as SocketTimeout
14 14
15 15
16 16 # this is quote from the shlex module, added in py3.3
17 17 _find_unsafe = re.compile(br'[^\w@%+=:,./~-]').search
18 18
19 19
20 20 def _sh_quote(s):
21 21 """Return a shell-escaped version of the string `s`."""
22 22 if not s:
23 23 return b""
24 24 if _find_unsafe(s) is None:
25 25 return s
26 26
27 27 # use single quotes, and put single quotes into double quotes
28 28 # the string $'b is then quoted as '$'"'"'b'
29 29 return b"'" + s.replace(b"'", b"'\"'\"'") + b"'"
30 30
31 31
32 32 # Unicode conversion functions; assume UTF-8
33 33
34 34 def asbytes(s):
35 35 """Turns unicode into bytes, if needed.
36 36 Assumes UTF-8.
37 37 """
38 38 if isinstance(s, bytes):
39 39 return s
40 40 else:
41 41 return s.encode('utf-8')
42 42
43 43
44 44 def asunicode(s):
45 45 """Turns bytes into unicode, if needed.
46 46 Uses UTF-8.
47 47 """
48 48 if isinstance(s, bytes):
49 49 return s.decode('utf-8', 'replace')
50 50 else:
51 51 return s
52 52
53 53
54 54 # os.path.sep is unicode on Python 3, no matter the platform
55 55 bytes_sep = asbytes(os.path.sep)
56 56
57 57
58 58 # Unicode conversion function for Windows
59 59 # Used to convert local paths if the local machine is Windows
60 60
61 61 def asunicode_win(s):
62 62 """Turns bytes into unicode, if needed.
63 63 """
64 64 if isinstance(s, bytes):
65 65 return s.decode(locale.getpreferredencoding())
66 66 else:
67 67 return s
68 68
69 69
70 70 class SCPClient(object):
71 71 """
72 72 An scp1 implementation, compatible with openssh scp.
73 73 Raises SCPException for all transport related errors. Local filesystem
74 74 and OS errors pass through.
75 75 Main public methods are .put and .get
76 76 The get method is controlled by the remote scp instance, and behaves
77 77 accordingly. This means that symlinks are resolved, and the transfer is
78 78 halted after too many levels of symlinks are detected.
79 79 The put method uses os.walk for recursion, and sends files accordingly.
80 80 Since scp doesn't support symlinks, we send file symlinks as the file
81 81 (matching scp behaviour), but we make no attempt at symlinked directories.
82 82 """
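# Usage sketch (assumes an already-connected paramiko SSHClient `ssh`):
#   scp = SCPClient(ssh.get_transport())
#   scp.put('local.txt', remote_path='/tmp/')
#   scp.get('/tmp/remote.txt', local_path='.')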
83 83 def __init__(self, transport, buff_size=16384, socket_timeout=5.0,
84 84 progress=None, sanitize=_sh_quote):
85 85 """
86 86 Create an scp1 client.
87 87 @param transport: an existing paramiko L{Transport}
88 88 @type transport: L{Transport}
89 89 @param buff_size: size of the scp send buffer.
90 90 @type buff_size: int
91 91 @param socket_timeout: channel socket timeout in seconds
92 92 @type socket_timeout: float
93 93 @param progress: callback - called with (filename, size, sent) during
94 94 transfers
95 95 @param sanitize: function - called with filename, should return
96 96 safe or escaped string. Uses _sh_quote by default.
97 97 @type progress: function(string, int, int)
98 98 """
99 99 self.transport = transport
100 100 self.buff_size = buff_size
101 101 self.socket_timeout = socket_timeout
102 102 self.channel = None
103 103 self.preserve_times = False
104 104 self._progress = progress
105 105 self._recv_dir = b''
106 106 self._rename = False
107 107 self._utime = None
108 108 self.sanitize = sanitize
109 109 self._dirtimes = {}
110 110
111 111 def __enter__(self):
112 112 self.channel = self._open()
113 113 return self
114 114
115 115 def __exit__(self, type, value, traceback):
116 116 self.close()
117 117
118 118 def put(self, files, remote_path=b'.',
119 119 recursive=False, preserve_times=False):
120 120 """
121 121 Transfer files to remote host.
122 122 @param files: A single path, or a list of paths to be transferred.
123 123 recursive must be True to transfer directories.
124 124 @type files: string OR list of strings
125 125 @param remote_path: path in which to receive the files on the remote
126 126 host. defaults to '.'
127 127 @type remote_path: str
128 128 @param recursive: transfer files and directories recursively
129 129 @type recursive: bool
130 130 @param preserve_times: preserve mtime and atime of transferred files
131 131 and directories.
132 132 @type preserve_times: bool
133 133 """
134 134 self.preserve_times = preserve_times
135 135 self.channel = self._open()
136 136 self._pushed = 0
137 137 self.channel.settimeout(self.socket_timeout)
138 138 scp_command = (b'scp -t ', b'scp -r -t ')[recursive]
139 139 self.channel.exec_command(scp_command +
140 140 self.sanitize(asbytes(remote_path)))
141 141 self._recv_confirm()
142 142
143 143 if not isinstance(files, (list, tuple)):
144 144 files = [files]
145 145
146 146 if recursive:
147 147 self._send_recursive(files)
148 148 else:
149 149 self._send_files(files)
150 150
151 151 self.close()
152 152
153 153 def get(self, remote_path, local_path='',
154 154 recursive=False, preserve_times=False):
155 155 """
156 156 Transfer files from remote host to localhost
157 157 @param remote_path: path to retrieve from remote host. since this is
158 158 evaluated by scp on the remote host, shell wildcards and
159 159 environment variables may be used.
160 160 @type remote_path: str
161 161 @param local_path: path in which to receive files locally
162 162 @type local_path: str
163 163 @param recursive: transfer files and directories recursively
164 164 @type recursive: bool
165 165 @param preserve_times: preserve mtime and atime of transferred files
166 166 and directories.
167 167 @type preserve_times: bool
168 168 """
169 169 if not isinstance(remote_path, (list, tuple)):
170 170 remote_path = [remote_path]
171 171 remote_path = [self.sanitize(asbytes(r)) for r in remote_path]
172 172 self._recv_dir = local_path or os.getcwd()
173 173 self._rename = (len(remote_path) == 1 and
174 174 not os.path.isdir(os.path.abspath(local_path)))
175 175 if len(remote_path) > 1:
176 176 if not os.path.exists(self._recv_dir):
177 177 raise SCPException("Local path '%s' does not exist" %
178 178 asunicode(self._recv_dir))
179 179 elif not os.path.isdir(self._recv_dir):
180 180 raise SCPException("Local path '%s' is not a directory" %
181 181 asunicode(self._recv_dir))
182 182 rcsv = (b'', b' -r')[recursive]
183 183 prsv = (b'', b' -p')[preserve_times]
184 184 self.channel = self._open()
185 185 self._pushed = 0
186 186 self.channel.settimeout(self.socket_timeout)
187 187 self.channel.exec_command(b"scp" +
188 188 rcsv +
189 189 prsv +
190 190 b" -f " +
191 191 b' '.join(remote_path))
192 192 self._recv_all()
193 193 self.close()
194 194
195 195 def _open(self):
196 196 """open a scp channel"""
197 197 if self.channel is None:
198 198 self.channel = self.transport.open_session()
199 199
200 200 return self.channel
201 201
202 202 def close(self):
203 203 """close scp channel"""
204 204 if self.channel is not None:
205 205 self.channel.close()
206 206 self.channel = None
207 207
208 208 def _read_stats(self, name):
209 209 """return just the file stats needed for scp"""
210 210 if os.name == 'nt':
211 211 name = asunicode(name)
212 212 stats = os.stat(name)
213 213 mode = oct(stats.st_mode)[-4:]
214 214 size = stats.st_size
215 215 atime = int(stats.st_atime)
216 216 mtime = int(stats.st_mtime)
217 217 return (mode, size, mtime, atime)
218 218
219 219 def _send_files(self, files):
220 220 for name in files:
221 221 basename = asbytes(os.path.basename(name))
222 222 (mode, size, mtime, atime) = self._read_stats(name)
223 223 if self.preserve_times:
224 224 self._send_time(mtime, atime)
225 225 file_hdl = open(name, 'rb')
226 226
227 227 # The protocol can't handle \n in the filename.
228 228 # Quote them as the control sequence \^J for now,
229 229 # which is how openssh handles it.
230 230 self.channel.sendall(("C%s %d " % (mode, size)).encode('ascii') +
231 231 basename.replace(b'\n', b'\\^J') + b"\n")
232 232 self._recv_confirm()
233 233 file_pos = 0
234 234 if self._progress:
235 235 if size == 0:
236 236 # avoid divide-by-zero
237 237 self._progress(basename, 1, 1)
238 238 else:
239 239 self._progress(basename, size, 0)
240 240 buff_size = self.buff_size
241 241 chan = self.channel
242 242 while file_pos < size:
243 243 chan.sendall(file_hdl.read(buff_size))
244 244 file_pos = file_hdl.tell()
245 245 if self._progress:
246 246 self._progress(basename, size, file_pos)
247 247 chan.sendall(b'\x00')
248 248 file_hdl.close()
249 249 self._recv_confirm()
250 250
251 251 def _chdir(self, from_dir, to_dir):
252 252 # Pop until we're one level up from our next push.
253 253 # Push *once* into to_dir.
254 254 # This is dependent on the depth-first traversal from os.walk
255 255
256 256 # add path.sep to each when checking the prefix, so we can use
257 257 # path.dirname after
258 258 common = os.path.commonprefix([from_dir + bytes_sep,
259 259 to_dir + bytes_sep])
260 260 # now take the dirname, since commonprefix is character based,
261 261 # and we either have a separator, or a partial name
262 262 common = os.path.dirname(common)
263 263 cur_dir = from_dir.rstrip(bytes_sep)
264 264 while cur_dir != common:
265 265 cur_dir = os.path.split(cur_dir)[0]
266 266 self._send_popd()
267 267 # now we're in our common base directory, so on
268 268 self._send_pushd(to_dir)
269 269
270 270 def _send_recursive(self, files):
271 271 for base in files:
272 272 if not os.path.isdir(base):
273 273 # filename mixed into the bunch
274 274 self._send_files([base])
275 275 continue
276 276 last_dir = asbytes(base)
277 277 for root, dirs, fls in os.walk(base):
278 278 self._chdir(last_dir, asbytes(root))
279 279 self._send_files([os.path.join(root, f) for f in fls])
280 280 last_dir = asbytes(root)
281 281 # back out of the directory
282 282 while self._pushed > 0:
283 283 self._send_popd()
284 284
285 285 def _send_pushd(self, directory):
286 286 (mode, size, mtime, atime) = self._read_stats(directory)
287 287 basename = asbytes(os.path.basename(directory))
288 288 if self.preserve_times:
289 289 self._send_time(mtime, atime)
290 290 self.channel.sendall(('D%s 0 ' % mode).encode('ascii') +
291 291 basename.replace(b'\n', b'\\^J') + b'\n')
292 292 self._recv_confirm()
293 293 self._pushed += 1
294 294
295 295 def _send_popd(self):
296 296 self.channel.sendall(b'E\n')
297 297 self._recv_confirm()
298 298 self._pushed -= 1
299 299
300 300 def _send_time(self, mtime, atime):
301 301 self.channel.sendall(('T%d 0 %d 0\n' % (mtime, atime)).encode('ascii'))
302 302 self._recv_confirm()
303 303
304 304 def _recv_confirm(self):
305 305 # read scp response
306 306 msg = b''
307 307 try:
308 308 msg = self.channel.recv(512)
309 309 except SocketTimeout:
310 310 raise SCPException('Timeout waiting for scp response')
311 311 # slice off the first byte, so this compare will work in py2 and py3
312 312 if msg and msg[0:1] == b'\x00':
313 313 return
314 314 elif msg and msg[0:1] == b'\x01':
315 315 raise SCPException(asunicode(msg[1:]))
316 316 elif self.channel.recv_stderr_ready():
317 317 msg = self.channel.recv_stderr(512)
318 318 raise SCPException(asunicode(msg))
319 319 elif not msg:
320 320 raise SCPException('No response from server')
321 321 else:
322 322 raise SCPException('Invalid response from server', msg)
323 323
324 324 def _recv_all(self):
325 325 # loop over scp commands, and receive as necessary
326 326 command = {b'C': self._recv_file,
327 327 b'T': self._set_time,
328 328 b'D': self._recv_pushd,
329 329 b'E': self._recv_popd}
330 330 while not self.channel.closed:
331 331 # wait for command as long as we're open
332 332 self.channel.sendall(b'\x00')
333 333 msg = self.channel.recv(1024)
334 334 if not msg: # chan closed while recving
335 335 break
336 336 assert msg[-1:] == b'\n'
337 337 msg = msg[:-1]
338 338 code = msg[0:1]
339 339 try:
340 340 command[code](msg[1:])
341 341 except KeyError:
342 342 raise SCPException(asunicode(msg[1:]))
343 343 # directory times can't be set until we're done writing files
344 344 self._set_dirtimes()
345 345
346 346 def _set_time(self, cmd):
347 347 try:
348 348 times = cmd.split(b' ')
349 349 mtime = int(times[0])
350 350 atime = int(times[2]) or mtime
351 351 except:
352 352 self.channel.send(b'\x01')
353 353 raise SCPException('Bad time format')
354 354 # save for later
355 355 self._utime = (atime, mtime)
356 356
357 357 def _recv_file(self, cmd):
358 358 chan = self.channel
359 359 parts = cmd.strip().split(b' ', 2)
360 360
361 361 try:
362 362 mode = int(parts[0], 8)
363 363 size = int(parts[1])
364 364 if self._rename:
365 365 path = self._recv_dir
366 366 self._rename = False
367 367 elif os.name == 'nt':
368 368 path = os.path.join(asunicode_win(self._recv_dir),
369 369 parts[2].decode('utf-8'))
370 370 else:
371 371 path = os.path.join(asbytes(self._recv_dir),
372 372 parts[2])
373 373 except:
374 374 chan.send(b'\x01')
375 375 chan.close()
376 376 raise SCPException('Bad file format')
377 377
378 378 try:
379 379 file_hdl = open(path, 'wb')
380 380 except IOError as e:
381 381 chan.send(b'\x01' + str(e).encode('utf-8'))
382 382 chan.close()
383 383 raise
384 384
385 385 if self._progress:
386 386 if size == 0:
387 387 # avoid divide-by-zero
388 388 self._progress(path, 1, 1)
389 389 else:
390 390 self._progress(path, size, 0)
391 391 buff_size = self.buff_size
392 392 pos = 0
393 393 chan.send(b'\x00')
394 394 try:
395 395 while pos < size:
396 396 # we have to make sure we don't read the final byte
397 397 if size - pos <= buff_size:
398 398 buff_size = size - pos
399 399 file_hdl.write(chan.recv(buff_size))
400 400 pos = file_hdl.tell()
401 401 if self._progress:
402 402 self._progress(path, size, pos)
403 403
404 404 msg = chan.recv(512)
405 405 if msg and msg[0:1] != b'\x00':
406 406 raise SCPException(asunicode(msg[1:]))
407 407 except SocketTimeout:
408 408 chan.close()
409 409 raise SCPException('Error receiving, socket.timeout')
410 410
411 411 file_hdl.truncate()
412 412 try:
413 413 os.utime(path, self._utime)
414 414 self._utime = None
415 415 os.chmod(path, mode)
416 416 # should we notify the other end?
417 417 finally:
418 418 file_hdl.close()
419 419 # '\x00' confirmation sent in _recv_all
420 420
421 421 def _recv_pushd(self, cmd):
422 422 parts = cmd.split(b' ', 2)
423 423 try:
424 424 mode = int(parts[0], 8)
425 425 if self._rename:
426 426 path = self._recv_dir
427 427 self._rename = False
428 428 elif os.name == 'nt':
429 429 path = os.path.join(asunicode_win(self._recv_dir),
430 430 parts[2].decode('utf-8'))
431 431 else:
432 432 path = os.path.join(asbytes(self._recv_dir),
433 433 parts[2])
434 434 except:
435 435 self.channel.send(b'\x01')
436 436 raise SCPException('Bad directory format')
437 437 try:
438 438 if not os.path.exists(path):
439 439 os.mkdir(path, mode)
440 440 elif os.path.isdir(path):
441 441 os.chmod(path, mode)
442 442 else:
443 443 raise SCPException('%s: Not a directory' % path)
444 444 self._dirtimes[path] = (self._utime)
445 445 self._utime = None
446 446 self._recv_dir = path
447 447 except (OSError, SCPException) as e:
448 448 self.channel.send(b'\x01' + asbytes(str(e)))
449 449 raise
450 450
451 451 def _recv_popd(self, *cmd):
452 452 self._recv_dir = os.path.split(self._recv_dir)[0]
453 453
454 454 def _set_dirtimes(self):
455 455 try:
456 456 for d in self._dirtimes:
457 457 os.utime(d, self._dirtimes[d])
458 458 finally:
459 459 self._dirtimes = {}
460 460
461 461
462 462 class SCPException(Exception):
463 463 """SCP exception class"""
464 pass
\ No newline at end of file
464 pass
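Below, for reference, is a minimal usage sketch of the SCPClient defined above, driven through paramiko; the host name, user and file names are placeholders rather than values taken from this changeset, and the module is assumed to be importable as "scp".

import paramiko
from scp import SCPClient, SCPException   # assumes this file is installed/importable as "scp"

ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.connect('radar-host.example', username='soporte')   # placeholder host and user

try:
    with SCPClient(ssh.get_transport()) as client:
        client.put('USRP_processing.xml', remote_path='/tmp/')    # upload one file
        client.get('/tmp/USRP_processing.xml', 'USRP_copy.xml')   # fetch it back
except SCPException as err:
    print('scp transfer failed:', err)
finally:
    ssh.close()

The context-manager form relies on the __enter__/__exit__ methods shown above, so the channel is closed even if a transfer raises SCPException.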
@@ -1,216 +1,282
1 1 # Ing. AVP
2 2 # 06/10/2021
3 3 # ARCHIVO DE LECTURA
4 4 import os, sys
5 5 import datetime
6 6 import time
7 7 from schainpy.controller import Project
8 8 #### NOTE ##########################################
9 9 # INPUT :
10 10 # VELOCITY PARAMETER : V = 2°/s
11 11 # PULSE PAIR OR MOMENTS MODE: 0 : Pulse Pair , 1 : Moments
12 12 ######################################################
13 13 ##### PROCESSING #####################################
14 14 ##### NOTE: keep the n= value in mind for Pulse Pair ##
15 15 ##### or the n= nFFTPoints value ###
16 16 ######################################################
17 17 ######## We look for the number of IPPs equivalent to 1° #####
18 18 ######## Let V be the pedestal speed in °/s #####
19 19 ######## 1° is swept in a time of 1/V ######
20 20 ######## Radar IPP 400 us --> 60 km ############
21 21 ######## n = 1/(V(°/s)*IPP(s)) , NUMBER OF IPPs ##
22 22 ######## n = 1/(V*IPP) #############################
23 23 ######## PEDESTAL SPEED ######################
24 24 print("SETUP- RADAR METEOROLOGICO")
25 V = 10
25 V = 6
26 26 mode = 1
27 #path = '/DATA_RM/23/6v'
28 ####path = '/DATA_RM/TEST_INTEGRACION_2M'
29 #path = '/DATA_RM/TEST_19OCTUBRE/10MHZ'
30 path = '/DATA_RM/WR_20_OCT'
27 #--------------------------PATH -----------------------------
28 # path = '/DATA_RM/23/6v'
29 # path = '/DATA_RM/TEST_INTEGRACION_2M'
30 # path = '/DATA_RM/TEST_19OCTUBRE/10MHZ'
31 # path = '/DATA_RM/WR_20_OCT'
31 32 #### path_ped='/DATA_RM/TEST_PEDESTAL/P20211012-082745'
32 33 ####path_ped='/DATA_RM/TEST_PEDESTAL/P20211019-192244'
33 figpath_pp = "/home/soporte/Pictures/TEST_PP"
34 figpath_spec = "/home/soporte/Pictures/TEST_MOM"
34 #path = '/DATA_RM/WR_POT_09_1'
35 #path = '/DATA_RM/WR_POT_09_2'
36 #path ="/DATA_RM/10"
37 #path ="/DATA_RM/11"
38 #path ="/DATA_RM/DRONE/2MHZ_6V"
39 path ="/DATA_RM/DRONE/2MHZ_10V_ELEVACION"
40 path ="/DATA_RM/DRONE/2MHZ_6V_AZIMUTH"
41 #-------------------------PLOT PATHS------------------------------------
42 #figpath_pp = "/home/soporte/Pictures/TEST_PP"
43 #figpath_spec = "/home/soporte/Pictures/TEST_MOM_15"
44 #figpath_spec = "/home/soporte/Pictures/11_"
45 figpath_spec = "/home/soporte/Pictures/23NOV"
46 figpath_spec = "/home/soporte/Pictures/23NOV_TEST_AZI"
47 figpath_pp = "/home/soporte/Pictures/TEST_POT"
48 figpath_pp = "/home/soporte/Pictures/TEST_POT_10_"
49
50
51 #--------------------------OPTIONS-----------------------------------
35 52 plot = 0
36 integration = 1
53 integration = 0
37 54 save = 0
55 plot_spec = 1
56 #-------------------------------------------------------------------------
38 57 if save == 1:
39 58 if mode==0:
40 59 path_save = '/DATA_RM/TEST_HDF5_PP_23/6v'
41 60 path_save = '/DATA_RM/TEST_HDF5_PP'
42 61 path_save = '/DATA_RM/TEST_HDF5_PP_100'
43 62 else:
44 63 path_save = '/DATA_RM/TEST_HDF5_SPEC_23_V2/6v'
45 64
46 65 print("* PATH data ADQ :", path)
47 66 print("* Velocidad Pedestal :",V,"°/seg")
48 67 ############################ NUMBER OF PROFILES FOR PROCESSING ###################
49 68 V=V
50 69 IPP=400*1e-6
51 70 n= int(1/(V*IPP))
52 71 print("* n - NRO Perfiles Proc:", n )
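# Sanity check of the formula above (not in the original script): with V = 6 °/s and
# IPP = 400e-6 s, n = int(1/(6*4e-4)) = 416 profiles span roughly 1 degree of rotation.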
53 72 ################################## MODE ###################################
54 73 print("* Modo de Operacion :",mode)
55 74 if mode ==0:
56 75 print("* Met. Seleccionado : Pulse Pair")
57 76 else:
58 77 print("* Met. Momentos : Momentos")
59 78
60 79 ################################## MODE ###################################
61 80 print("* Grabado de datos :",save)
62 81 if save ==1:
63 82 if mode==0:
64 83 ope= "Pulse Pair"
65 84 else:
66 85 ope= "Momentos"
67 86 print("* Path-Save Data -", ope , path_save)
68 87
69 88 print("* Integracion de datos :",integration)
70 89
71 time.sleep(15)
90 time.sleep(3)
72 91 #remotefolder = "/home/wmaster/graficos"
73 92 #######################################################################
74 93 ################# PLOT RANGE ##########################################
75 dBmin = '1'
76 dBmax = '65'
77 xmin = '13.2'
78 xmax = '13.5'
79 ymin = '0'
80 ymax = '60'
94 dBmin = '8'
95 dBmax = '35'
96 xmin = '12.1'#17.1,17.5
97 xmax = '12.2'#17.2,17.8
98 ymin = '0'#### SET TO 0
99 ymax = '8'#### SET TO 8
81 100 #######################################################################
82 101 ######################## DATE #########################################
83 102 str = datetime.date.today()
84 103 today = str.strftime("%Y/%m/%d")
85 104 str2 = str - datetime.timedelta(days=1)
86 105 yesterday = str2.strftime("%Y/%m/%d")
87 106 #######################################################################
88 107 ########################SIGNAL CHAIN ##################################
89 108 #######################################################################
90 109 desc = "USRP_test"
91 110 filename = "USRP_processing.xml"
92 111 controllerObj = Project()
93 112 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
94 113 #######################################################################
95 114 ######################## READ UNIT ####################################
96 115 #######################################################################
97 116 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
98 117 path=path,
99 startDate="2021/01/01",#today,
100 endDate="2021/12/30",#today,
101 startTime='00:00:00',
102 endTime='23:59:59',
118 startDate="2021/11/23",#today,
119 endDate="2021/12/23",#today,
120 startTime='15:00:00',#'17:39:25',
121 endTime='16:00:59',#23:59:59',
103 122 delay=0,
104 123 #set=0,
105 online=0,
124 online=1,
106 125 walk=1,
107 126 ippKm = 60)
108 127
109 128 opObj11 = readUnitConfObj.addOperation(name='printInfo')
110 129
111 130 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
112 131
132 opObj11 = procUnitConfObjA.addOperation(name='setH0')
133 opObj11.addParameter(name='h0', value='-3.3', format='float')
134 #-2.8 5MHZ
135 #-3.3 2MHZ
136
137 ###opObj11 = procUnitConfObjA.addOperation(name='selectChannels')
138 ###opObj11.addParameter(name='channelList', value='[1]',format='intList')
139
140
141 opObj11 = procUnitConfObjA.addOperation(name='selectHeights')
142 opObj11.addParameter(name='minIndex', value='1', format='int')
143 ###opObj11.addParameter(name='maxIndex', value='10000', format='int')
144 opObj11.addParameter(name='maxIndex', value='160', format='int')
145 ## 400 FOR 5MHZ 12KM 2000
146 ## 160 FOR 2MHZ 12KM 800
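## Derived from the note above (assumption, not stated explicitly in the script):
## 12 km / 160 samples is roughly 75 m per height sample at 2 MHz, and 12 km / 400 = 30 m at 5 MHz.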
147
113 148 if mode ==0:
114 149 ####################### PULSE PAIR METHOD ####################################################################
115 150 opObj11 = procUnitConfObjA.addOperation(name='PulsePair', optype='other')
116 151 opObj11.addParameter(name='n', value=int(n), format='int')#10 I will use 250 since the speed is 10 degrees
117 152 #opObj11.addParameter(name='removeDC', value=1, format='int')
118 153 ####################### PARAMETERS METHOD ####################################################################
119 154 procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
120 155 if plot==1:
121 156 opObj11 = procUnitConfObjB.addOperation(name='GenericRTIPlot',optype='external')
122 157 opObj11.addParameter(name='attr_data', value='dataPP_POWER')
123 158 opObj11.addParameter(name='colormap', value='jet')
124 159 opObj11.addParameter(name='xmin', value=xmin)
125 160 opObj11.addParameter(name='xmax', value=xmax)
161 #opObj11.addParameter(name='ymin', value=ymin)
162 #opObj11.addParameter(name='ymax', value=ymax)
126 163 opObj11.addParameter(name='zmin', value=dBmin)
127 164 opObj11.addParameter(name='zmax', value=dBmax)
128 165 opObj11.addParameter(name='save', value=figpath_pp)
129 166 opObj11.addParameter(name='showprofile', value=0)
130 167 opObj11.addParameter(name='save_period', value=10)
131 168
132 169 ####################### WRITE (SAVE) METHOD ###################################################################
133 170 if save==1:
134 171 opObj10 = procUnitConfObjB.addOperation(name='HDFWriter')
135 172 opObj10.addParameter(name='path',value=path_save)
136 173 #opObj10.addParameter(name='mode',value=0)
137 174 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
138 175 opObj10.addParameter(name='metadataList',value='utctimeInit,timeZone,paramInterval,profileIndex,channelList,heightList,flagDataAsBlock',format='list')
139 176 opObj10.addParameter(name='dataList',value='dataPP_POWER,dataPP_DOP,utctime',format='list')#,format='list'
140 177 if integration==1:
141 178 V=10
142 179 blocksPerfile=360
143 180 print("* Velocidad del Pedestal:",V)
144 181 tmp_blocksPerfile = 100
145 182 f_a_p= int(tmp_blocksPerfile/V)
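# With tmp_blocksPerfile = 100 and V = 10, f_a_p = 10 here; it is assumed (not stated in
# this script) to be the number of pedestal samples consumed per acquisition block.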
146 183
147 184 opObj11 = procUnitConfObjB.addOperation(name='PedestalInformation')
148 185 opObj11.addParameter(name='path_ped', value=path_ped)
149 186 #opObj11.addParameter(name='path_adq', value=path_adq)
150 187 opObj11.addParameter(name='t_Interval_p', value='0.01', format='float')
151 188 opObj11.addParameter(name='blocksPerfile', value=blocksPerfile, format='int')
152 189 opObj11.addParameter(name='n_Muestras_p', value='100', format='float')
153 190 opObj11.addParameter(name='f_a_p', value=f_a_p, format='int')
154 191 opObj11.addParameter(name='online', value='0', format='int')
155 192
156 193 opObj11 = procUnitConfObjB.addOperation(name='Block360')
157 194 opObj11.addParameter(name='n', value='10', format='int')
158 195 opObj11.addParameter(name='mode', value=mode, format='int')
159 196
160 197 # this block works well with divisors of 360; do not forget 0 10 20 30 40 60 90 120 180
161 198
162 199 opObj11= procUnitConfObjB.addOperation(name='WeatherPlot',optype='other')
163 200
164 201
165 202 else:
166 203 ####################### SPECTRA METHOD #######################################################################
167 204 procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
168 205 procUnitConfObjB.addParameter(name='nFFTPoints', value=n, format='int')
169 206 procUnitConfObjB.addParameter(name='nProfiles' , value=n, format='int')
207 if plot_spec==1:
208 '''
209 opObj11 = procUnitConfObjB.addOperation(name='SpectraPlot', optype='external')
210 #.addParameter(name='id', value='2', format='int')
211 opObj11.addParameter(name='wintitle', value='SpectraPlot', format='str')
212 opObj11.addParameter(name='channelList', value='1', format='intlist')
213 #opObj11.addParameter(name='xmin', value=xmin)
214 #opObj11.addParameter(name='xmax', value=xmax)
215 opObj11.addParameter(name='ymin', value=ymin)
216 opObj11.addParameter(name='ymax', value=ymax)
217 opObj11.addParameter(name='zmin', value=dBmin, format='int')
218 opObj11.addParameter(name='zmax', value=dBmax, format='int')
219 opObj11.addParameter(name='save', value=figpath_spec)
220 opObj11.addParameter(name='showprofile', value=0)
221 opObj11.addParameter(name='save_period', value=1)
222 '''
223 opObj11 = procUnitConfObjB.addOperation(name='RTIPlot', optype='external')
224 #.addParameter(name='id', value='2', format='int')
225 opObj11.addParameter(name='wintitle', value='RTIPlot', format='str')
226 opObj11.addParameter(name='channelList', value='1', format='intlist')
227 opObj11.addParameter(name='xmin', value=xmin)
228 opObj11.addParameter(name='xmax', value=xmax)
229 opObj11.addParameter(name='ymin', value=ymin)
230 opObj11.addParameter(name='ymax', value=ymax)
231 opObj11.addParameter(name='zmin', value=dBmin, format='int')
232 opObj11.addParameter(name='zmax', value=dBmax, format='int')
233 opObj11.addParameter(name='save', value=figpath_spec)
234 opObj11.addParameter(name='showprofile', value=0)
235 opObj11.addParameter(name='save_period', value=1)
170 236
171 237 procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjB.getId())
172 238 procUnitConfObjC.addOperation(name='SpectralMoments')
173 239 if plot==1:
174 dBmin = '1'
175 dBmax = '65'
176 240 opObj11 = procUnitConfObjC.addOperation(name='PowerPlot',optype='external')
177 241 opObj11.addParameter(name='xmin', value=xmin)
178 242 opObj11.addParameter(name='xmax', value=xmax)
243 opObj11.addParameter(name='ymin', value=ymin)
244 opObj11.addParameter(name='ymax', value=ymax)
179 245 opObj11.addParameter(name='zmin', value=dBmin)
180 246 opObj11.addParameter(name='zmax', value=dBmax)
181 247 opObj11.addParameter(name='save', value=figpath_spec)
182 248 opObj11.addParameter(name='showprofile', value=0)
183 opObj11.addParameter(name='save_period', value=10)
249 opObj11.addParameter(name='save_period', value=1)
184 250
185 251 if save==1:
186 252 opObj10 = procUnitConfObjC.addOperation(name='HDFWriter')
187 253 opObj10.addParameter(name='path',value=path_save)
188 254 #opObj10.addParameter(name='mode',value=0)
189 255 opObj10.addParameter(name='blocksPerFile',value='360',format='int')
190 256 #opObj10.addParameter(name='metadataList',value='utctimeInit,heightList,nIncohInt,nCohInt,nProfiles,channelList',format='list')#profileIndex
191 257 opObj10.addParameter(name='metadataList',value='utctimeInit,heightList,nIncohInt,nCohInt,nProfiles,channelList',format='list')#profileIndex
192 258 opObj10.addParameter(name='dataList',value='data_pow,data_dop,utctime',format='list')#,format='list'
193 259
194 260 if integration==1:
195 261 V=10
196 262 blocksPerfile=360
197 263 print("* Velocidad del Pedestal:",V)
198 264 tmp_blocksPerfile = 100
199 265 f_a_p= int(tmp_blocksPerfile/V)
200 266
201 267 opObj11 = procUnitConfObjC.addOperation(name='PedestalInformation')
202 268 opObj11.addParameter(name='path_ped', value=path_ped)
203 269 #opObj11.addParameter(name='path_adq', value=path_adq)
204 270 opObj11.addParameter(name='t_Interval_p', value='0.01', format='float')
205 271 opObj11.addParameter(name='blocksPerfile', value=blocksPerfile, format='int')
206 272 opObj11.addParameter(name='n_Muestras_p', value='100', format='float')
207 273 opObj11.addParameter(name='f_a_p', value=f_a_p, format='int')
208 274 opObj11.addParameter(name='online', value='0', format='int')
209 275
210 276 opObj11 = procUnitConfObjC.addOperation(name='Block360')
211 277 opObj11.addParameter(name='n', value='10', format='int')
212 278 opObj11.addParameter(name='mode', value=mode, format='int')
213 279
214 280 # this block works well with divisors of 360; do not forget 0 10 20 30 40 60 90 120 180
215 281 opObj11= procUnitConfObjC.addOperation(name='WeatherPlot',optype='other')
216 282 controllerObj.start()
@@ -1,217 +1,269
1 1 # Ing. AVP
2 2 # 06/10/2021
3 3 # ARCHIVO DE LECTURA
4 4 import os, sys
5 5 import datetime
6 6 import time
7 7 from schainpy.controller import Project
8 8 #### NOTE ##########################################
9 9 # INPUT :
10 10 # VELOCITY PARAMETER : V = 2°/s
11 11 # PULSE PAIR OR MOMENTS MODE: 0 : Pulse Pair , 1 : Moments
12 12 ######################################################
13 13 ##### PROCESSING #####################################
14 14 ##### NOTE: keep the n= value in mind for Pulse Pair ##
15 15 ##### or the n= nFFTPoints value ###
16 16 ######################################################
17 17 ######## We look for the number of IPPs equivalent to 1° #####
18 18 ######## Let V be the pedestal speed in °/s #####
19 19 ######## 1° is swept in a time of 1/V ######
20 20 ######## Radar IPP 400 us --> 60 km ############
21 21 ######## n = 1/(V(°/s)*IPP(s)) , NUMBER OF IPPs ##
22 22 ######## n = 1/(V*IPP) #############################
23 23 ######## PEDESTAL SPEED ######################
24 24 print("SETUP- RADAR METEOROLOGICO")
25 V = 6
25 26 V = 10
26 mode = 0
27 #path = '/DATA_RM/23/6v'
28 #path = '/DATA_RM/TEST_INTEGRACION_2M'
29 path = '/DATA_RM/WR_20_OCT'
27 mode = 1
28 #--------------------------PATH -----------------------------
29 # path = '/DATA_RM/23/6v'
30 # path = '/DATA_RM/TEST_INTEGRACION_2M'
31 # path = '/DATA_RM/TEST_19OCTUBRE/10MHZ'
32 # path = '/DATA_RM/WR_20_OCT'
33 #### path_ped='/DATA_RM/TEST_PEDESTAL/P20211012-082745'
34 ####path_ped='/DATA_RM/TEST_PEDESTAL/P20211019-192244'
35 path ="/DATA_RM/10"
36 # path = '/DATA_RM/WR_POT_09_1'
37 #path ="/DATA_RM/11"
38 #path ="/DATA_RM/11"
39 #-------------------------PLOT PATHS------------------------------------
40 #figpath_pp = "/home/soporte/Pictures/TEST_PP"
41 #figpath_spec = "/home/soporte/Pictures/TEST_MOM"
42 figpath_spec = "/home/soporte/Pictures/ppi"
43 figpath_ppi = "/home/soporte/Pictures/ppi_SPEC_10DIC"
44 figpath_ppi_pp = "/home/soporte/Pictures/ppi_2_10"
45 figpath_pp = "/home/soporte/Pictures/TEST_POT"
46 figpath_pp = "/home/soporte/Pictures/TEST_POT2"
30 47
31 48 #path_ped='/DATA_RM/TEST_PEDESTAL/P20211012-082745'
32 path_ped='/DATA_RM/TEST_PEDESTAL/P20211020-131248'
49 #path_ped='/DATA_RM/TEST_PEDESTAL/P20211020-131248'
50 #path_ped='/DATA_RM/TEST_PEDESTAL/P20211110-171003'
51 #path_ped='/DATA_RM/TEST_PEDESTAL/P20211111-173856'
52 path_ped = "/DATA_RM/TEST_PEDESTAL/P20211110-171003"
33 53
34 figpath_pp = "/home/soporte/Pictures/TEST_PP"
54
55 figpath_pp = "/home/soporte/Pictures/25TEST_PP"
35 56 figpath_mom = "/home/soporte/Pictures/TEST_MOM"
57 #--------------------------OPTIONS-----------------------------------
36 58 plot = 0
37 59 integration = 1
38 save = 0
60 save = 1
39 61 if save == 1:
40 62 if mode==0:
41 63 path_save = '/DATA_RM/TEST_HDF5_PP_23/6v'
42 64 path_save = '/DATA_RM/TEST_HDF5_PP'
43 65 path_save = '/DATA_RM/TEST_HDF5_PP_100'
66 path_save = '/DATA_RM/TEST_EMPTHY'
44 67 else:
45 68 path_save = '/DATA_RM/TEST_HDF5_SPEC_23_V2/6v'
69 path_save = '/DATA_RM/TEST_EMPTHY_SPEC'
70 path_save = '/DATA_RM/LAST_TEST_16_VACIO3'
71 path_save = '/DATA_RM/LAST_TEST_30_360'
46 72
47 73 print("* PATH data ADQ :", path)
48 74 print("* Velocidad Pedestal :",V,"°/seg")
49 75 ############################ NUMBER OF PROFILES FOR PROCESSING ###################
50 76 V=V
51 77 IPP=400*1e-6
52 78 n= int(1/(V*IPP))
53 79 print("* n - NRO Perfiles Proc:", n )
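# Hedged check of the formula above: with V = 10 °/s and IPP = 400e-6 s,
# n = int(1/(10*4e-4)) = 250, matching the "250" remark in the Pulse Pair branch below.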
54 80 ################################## MODE ###################################
55 81 print("* Modo de Operacion :",mode)
56 82 if mode ==0:
57 83 print("* Met. Seleccionado : Pulse Pair")
58 84 else:
59 85 print("* Met. Momentos : Momentos")
60 86
61 87 ################################## MODE ###################################
62 88 print("* Grabado de datos :",save)
63 89 if save ==1:
64 90 if mode==0:
65 91 ope= "Pulse Pair"
66 92 else:
67 93 ope= "Momentos"
68 94 print("* Path-Save Data -", ope , path_save)
69 95
70 96 print("* Integracion de datos :",integration)
71 97
72 time.sleep(5)
98 time.sleep(3)
73 99 #remotefolder = "/home/wmaster/graficos"
74 100 #######################################################################
75 101 ################# PLOT RANGE ##########################################
76 102 dBmin = '1'
77 103 dBmax = '85'
78 xmin = '15'
79 xmax = '15.25'
104 xmin = '17'
105 xmax = '17.25'
80 106 ymin = '0'
81 107 ymax = '600'
82 108 #######################################################################
83 109 ######################## DATE #########################################
84 110 str = datetime.date.today()
85 111 today = str.strftime("%Y/%m/%d")
86 112 str2 = str - datetime.timedelta(days=1)
87 113 yesterday = str2.strftime("%Y/%m/%d")
88 114 #######################################################################
89 115 ########################SIGNAL CHAIN ##################################
90 116 #######################################################################
91 117 desc = "USRP_test"
92 118 filename = "USRP_processing.xml"
93 119 controllerObj = Project()
94 120 controllerObj.setup(id = '191', name='Test_USRP', description=desc)
95 121 #######################################################################
96 122 ######################## READ UNIT ####################################
97 123 #######################################################################
98 124 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRFReader',
99 125 path=path,
100 startDate="2021/01/01",#today,
126 startDate="2021/11/10",#today,
101 127 endDate="2021/12/30",#today,
102 startTime='00:00:00',
128 startTime='17:10:25',
103 129 endTime='23:59:59',
104 130 delay=0,
105 131 #set=0,
106 132 online=0,
107 133 walk=1,
108 134 ippKm = 60)
109 135
110 136 opObj11 = readUnitConfObj.addOperation(name='printInfo')
111 137
112 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc', inputId=readUnitConfObj.getId())
138 procUnitConfObjA = controllerObj.addProcUnit(datatype='VoltageProc',inputId=readUnitConfObj.getId())
139
140
141 #opObj11 = procUnitConfObjA.addOperation(name='setH0')
142 #opObj11.addParameter(name='h0', value='-2.8', format='float')
143
144 opObj11 = procUnitConfObjA.addOperation(name='selectHeights')
145 opObj11.addParameter(name='minIndex', value='1', format='int')
146 # opObj11.addParameter(name='maxIndex', value='10000', format='int')
147 opObj11.addParameter(name='maxIndex', value='400', format='int')
113 148
114 149 if mode ==0:
115 150 ####################### METODO PULSE PAIR ######################################################################
116 151 opObj11 = procUnitConfObjA.addOperation(name='PulsePair', optype='other')
117 152 opObj11.addParameter(name='n', value=int(n), format='int')#10 VOY A USAR 250 DADO QUE LA VELOCIDAD ES 10 GRADOS
118 153 #opObj11.addParameter(name='removeDC', value=1, format='int')
119 154 ####################### PARAMETERS METHOD ####################################################################
120 155 procUnitConfObjB= controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjA.getId())
121 156 if plot==1:
122 157 opObj11 = procUnitConfObjB.addOperation(name='GenericRTIPlot',optype='external')
123 158 opObj11.addParameter(name='attr_data', value='dataPP_POW')
124 159 opObj11.addParameter(name='colormap', value='jet')
125 160 opObj11.addParameter(name='xmin', value=xmin)
126 161 opObj11.addParameter(name='xmax', value=xmax)
127 162 opObj11.addParameter(name='zmin', value=dBmin)
128 163 opObj11.addParameter(name='zmax', value=dBmax)
129 164 opObj11.addParameter(name='save', value=figpath_pp)
130 165 opObj11.addParameter(name='showprofile', value=0)
131 166 opObj11.addParameter(name='save_period', value=50)
132 167
133 168 ####################### WRITE (SAVE) METHOD ###################################################################
134 if save==1:
135 opObj10 = procUnitConfObjB.addOperation(name='HDFWriter')
136 opObj10.addParameter(name='path',value=path_save)
137 #opObj10.addParameter(name='mode',value=0)
138 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
139 opObj10.addParameter(name='metadataList',value='utctimeInit,timeZone,paramInterval,profileIndex,channelList,heightList,flagDataAsBlock',format='list')
140 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,utctime',format='list')#,format='list'
169
141 170 if integration==1:
142 V=10
143 blocksPerfile=360
171 V=V
172 blocksPerfile=100
144 173 print("* Velocidad del Pedestal:",V)
145 174 tmp_blocksPerfile = 100
146 175 f_a_p= int(tmp_blocksPerfile/V)
147 176
148 177 opObj11 = procUnitConfObjB.addOperation(name='PedestalInformation')
149 178 opObj11.addParameter(name='path_ped', value=path_ped)
150 179 #opObj11.addParameter(name='path_adq', value=path_adq)
151 180 opObj11.addParameter(name='t_Interval_p', value='0.01', format='float')
152 181 opObj11.addParameter(name='blocksPerfile', value=blocksPerfile, format='int')
153 182 opObj11.addParameter(name='n_Muestras_p', value='100', format='float')
154 183 opObj11.addParameter(name='f_a_p', value=f_a_p, format='int')
155 184 opObj11.addParameter(name='online', value='0', format='int')
156
185 '''
157 186 opObj11 = procUnitConfObjB.addOperation(name='Block360')
158 187 opObj11.addParameter(name='n', value='10', format='int')
159 188 opObj11.addParameter(name='mode', value=mode, format='int')
160
189 '''
161 190 # this block works well with divisors of 360; do not forget 0 10 20 30 40 60 90 120 180
162
191 '''
163 192 opObj11= procUnitConfObjB.addOperation(name='WeatherPlot',optype='other')
164
193 opObj11.addParameter(name='save', value=figpath_ppi_pp)
194 opObj11.addParameter(name='save_period', value=1)
195 '''
196 if save==1:
197 opObj10 = procUnitConfObjB.addOperation(name='HDFWriter')
198 opObj10.addParameter(name='path',value=path_save)
199 #opObj10.addParameter(name='mode',value=0)
200 opObj10.addParameter(name='blocksPerFile',value='100',format='int')
201 opObj10.addParameter(name='metadataList',value='utctimeInit,timeZone,paramInterval,profileIndex,channelList,heightList,flagDataAsBlock',format='list')
202 opObj10.addParameter(name='dataList',value='dataPP_POW,dataPP_DOP,azimuth,utctime',format='list')#,format='list'
165 203
166 204 else:
167 205 ####################### SPECTRA METHOD #######################################################################
168 206 procUnitConfObjB = controllerObj.addProcUnit(datatype='SpectraProc', inputId=procUnitConfObjA.getId())
169 207 procUnitConfObjB.addParameter(name='nFFTPoints', value=n, format='int')
170 208 procUnitConfObjB.addParameter(name='nProfiles' , value=n, format='int')
171 209
172 210 procUnitConfObjC = controllerObj.addProcUnit(datatype='ParametersProc',inputId=procUnitConfObjB.getId())
173 211 procUnitConfObjC.addOperation(name='SpectralMoments')
174 212 if plot==1:
175 213 dBmin = '1'
176 214 dBmax = '65'
177 215 opObj11 = procUnitConfObjC.addOperation(name='PowerPlot',optype='external')
178 216 opObj11.addParameter(name='xmin', value=xmin)
179 217 opObj11.addParameter(name='xmax', value=xmax)
180 218 opObj11.addParameter(name='zmin', value=dBmin)
181 219 opObj11.addParameter(name='zmax', value=dBmax)
182 220 opObj11.addParameter(name='save', value=figpath_mom)
183 221 opObj11.addParameter(name='showprofile', value=0)
184 222 opObj11.addParameter(name='save_period', value=100)
185 223
186 if save==1:
187 opObj10 = procUnitConfObjC.addOperation(name='HDFWriter')
188 opObj10.addParameter(name='path',value=path_save)
189 #opObj10.addParameter(name='mode',value=0)
190 opObj10.addParameter(name='blocksPerFile',value='360',format='int')
191 #opObj10.addParameter(name='metadataList',value='utctimeInit,heightList,nIncohInt,nCohInt,nProfiles,channelList',format='list')#profileIndex
192 opObj10.addParameter(name='metadataList',value='utctimeInit,heightList,nIncohInt,nCohInt,nProfiles,channelList',format='list')#profileIndex
193 opObj10.addParameter(name='dataList',value='data_pow,data_dop,utctime',format='list')#,format='list'
194 224
195 225 if integration==1:
196 V=10
197 blocksPerfile=360
226 V=V
227 blocksPerfile=100
198 228 print("* Velocidad del Pedestal:",V)
199 229 tmp_blocksPerfile = 100
200 230 f_a_p= int(tmp_blocksPerfile/V)
201 231
202 232 opObj11 = procUnitConfObjC.addOperation(name='PedestalInformation')
203 233 opObj11.addParameter(name='path_ped', value=path_ped)
204 234 #opObj11.addParameter(name='path_adq', value=path_adq)
205 235 opObj11.addParameter(name='t_Interval_p', value='0.01', format='float')
206 236 opObj11.addParameter(name='blocksPerfile', value=blocksPerfile, format='int')
207 237 opObj11.addParameter(name='n_Muestras_p', value='100', format='float')
208 238 opObj11.addParameter(name='f_a_p', value=f_a_p, format='int')
209 239 opObj11.addParameter(name='online', value='0', format='int')
210 240
211 241 opObj11 = procUnitConfObjC.addOperation(name='Block360')
212 opObj11.addParameter(name='n', value='10', format='int')
242 opObj11.addParameter(name='n', value='1', format='int')
213 243 opObj11.addParameter(name='mode', value=mode, format='int')
244 '''
245 # this block works well with divisors of 360; do not forget 0 10 20 30 40 60 90 120 180
246 opObj11= procUnitConfObjC.addOperation(name='WeatherPlot',optype='other')
247 opObj11.addParameter(name='save', value=figpath_ppi)
248 opObj11.addParameter(name='save_period', value=1)
249 '''
250
251 if save==1:
252 opObj10 = procUnitConfObjC.addOperation(name='HDFWriter')
253 opObj10.addParameter(name='path',value=path_save)
254 opObj10.addParameter(name='mode',value='weather')
255 opObj10.addParameter(name='blocksPerFile',value='360',format='int')
256 #opObj10.addParameter(name='metadataList',value='utctimeInit,heightList,nIncohInt,nCohInt,nProfiles,channelList',format='list')#profileIndex
257 opObj10.addParameter(name='metadataList',value='utctimeInit,heightList,nIncohInt,nCohInt,nProfiles,channelList',format='list')#profileIndex
258 #opObj10.addParameter(name='dataList',value='data_pow,data_dop,azimuth,utctime',format='list')#,format='list'
259 opObj10.addParameter(name='dataList',value='data_360,data_azi,utctime',format='list')#,format='list'
260
261
214 262
263 '''
215 264 # this block works well with divisors of 360; do not forget 0 10 20 30 40 60 90 120 180
216 265 opObj11= procUnitConfObjC.addOperation(name='WeatherPlot',optype='other')
266 opObj11.addParameter(name='save', value=figpath_ppi)
267 opObj11.addParameter(name='save_period', value=1)
268 '''
217 269 controllerObj.start()
@@ -1,119 +1,122
1 1 # Ing-AlexanderValdez
2 2 # Monitoreo de Pedestal
3 3
4 4 ############## IMPORT LIBRARIES ###################
5 5 import os,numpy,h5py
6 6 import sys,time
7 7 import matplotlib.pyplot as plt
8 8 ####################################################
9 9 #################################################################
10 10 # THE DATE 21-10-20 CORRESPONDS TO THE TESTS RUN ON WEDNESDAY
11 11 # 1:15:51 pm to 3:49:32 pm
12 12 #################################################################
13 13
14 14 #path_ped = '/DATA_RM/TEST_PEDESTAL/P20211012-082745'
15 15 path_ped = '/DATA_RM/TEST_PEDESTAL/P20211020-131248'
16 path_ped = '/DATA_RM/TEST_PEDESTAL/P20211110-171003'
17 path_ped = '/DATA_RM/TEST_PEDESTAL/P20211111-173856'
18 #path_ped = '/DATA_RM/TEST_PEDESTAL/P20211111-173409'
16 19 # Method to check whether a string is a number
17 20 def isNumber(str):
18 21 try:
19 22 float(str)
20 23 return True
21 24 except:
22 25 return False
23 26 # Method to extract an array from an HDF5 file
24 27 def getDatavaluefromDirFilename(path,file,value):
25 28 dir_file= path+"/"+file
26 29 fp = h5py.File(dir_file,'r')
27 30 array = fp['Data'].get(value)[()]
28 31 fp.close()
29 32 return array
30 33
31 34 # COMPLETE LIST OF Pedestal HDF5 FILES
32 35 LIST= sorted(os.listdir(path_ped))
33 36 m=len(LIST)
34 37 print("TOTAL DE ARCHIVOS DE PEDESTAL:",m)
35 38 # Contadores temporales
36 39 k= 0
37 40 l= 0
38 41 t= 0
39 42 # Marca de tiempo temporal
40 43 time_ = numpy.zeros([m])
41 44 # creacion de
42 45 for i in range(m):
43 46 print("order:",i)
44 47 tmp_azi_pos = getDatavaluefromDirFilename(path=path_ped,file=LIST[i],value="azi_pos")
45 48 tmp_ele_pos = getDatavaluefromDirFilename(path=path_ped,file=LIST[i],value="ele_pos")
46 49 tmp_azi_vel = getDatavaluefromDirFilename(path=path_ped,file=LIST[i],value="azi_vel")
47 50 tmp_ele_vel = getDatavaluefromDirFilename(path=path_ped,file=LIST[i],value="ele_vel")# new ("ele_vel" assumed intended; the original fetched "azi_vel" here)
48 51
49 52 time_[i] = getDatavaluefromDirFilename(path=path_ped,file=LIST[i],value="utc")
50 53
51 54 k=k +tmp_azi_pos.shape[0]
52 55 l=l +tmp_ele_pos.shape[0]
53 56 t=t +tmp_azi_vel.shape[0]
54 57
55 58 print("TOTAL DE MUESTRAS, ARCHIVOS X100:",k)
56 59 time.sleep(5)
57 60 ###### CREATE ARRAYS: NUMBER OF VALUES PER SAMPLE #################
58 61 azi_pos = numpy.zeros([k])
59 62 ele_pos = numpy.zeros([l])
60 63 time_azi_pos= numpy.zeros([k])
61 64 # Temporary counters
62 65 p=0
63 66 r=0
64 67 z=0
65 68 # TMP VARIABLES to store azimuth, elevation and time
66 69
67 70 #for filename in sorted(os.listdir(path_ped)):
68 71 # CONDITION FOR READING IN REAL TIME, NOT OFFLINE
69 72
70 73 for filename in LIST:
71 74 tmp_azi_pos = getDatavaluefromDirFilename(path=path_ped,file=filename,value="azi_pos")
72 75 tmp_ele_pos = getDatavaluefromDirFilename(path=path_ped,file=filename,value="ele_pos")
73 76 # CONDITION FOR READING IN REAL TIME, NOT OFFLINE
74 77
75 78 if z==(m-1):
76 79 tmp_azi_time=numpy.arange(time_[z],time_[z]+1,1/(tmp_azi_pos.shape[0]))
77 80 else:
78 81 tmp_azi_time=numpy.arange(time_[z],time_[z+1],(time_[z+1]-time_[z])/(tmp_azi_pos.shape[0]))
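# Reading of the branch above: each file's samples are spread evenly between consecutive
# file timestamps; for the last file a 1-second span is assumed.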
79 82
80 83 print(filename,time_[z])
81 84 print(z,tmp_azi_pos.shape[0])
82 85
83 86 i=0
84 87 for i in range(tmp_azi_pos.shape[0]):
85 88 index=p+i
86 89 azi_pos[index]=tmp_azi_pos[i]
87 90 time_azi_pos[index]=tmp_azi_time[i]
88 91 p=p+tmp_azi_pos.shape[0]
89 92 i=0
90 93 for i in range(tmp_ele_pos.shape[0]):
91 94 index=r+i
92 95 ele_pos[index]=tmp_ele_pos[i]
93 96 r=r+tmp_ele_pos.shape[0]
94 97
95 98
96 99 z+=1
97 100
98 101
99 102 ######## PLOT AND INSPECT THE Pedestal DATA
100 103 fig, ax = plt.subplots(figsize=(16,8))
101 104 print(time_azi_pos.shape)
102 105 print(azi_pos.shape)
103 106 t=numpy.arange(time_azi_pos.shape[0])*0.01/(60.0)
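# 0.01 s per pedestal sample (t_Interval_p in the processing scripts), converted to minutes
# for the x axis; the 100 Hz sample rate is an assumption carried over from n_Muestras_p = 100.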
104 107 plt.plot(t,azi_pos,label='AZIMUTH_POS',color='blue')
105 108
106 109 # HERE THE ELEVATION POSITION IS ADDED TO THE PLOT
107 110 t=numpy.arange(len(ele_pos))*0.01/60.0
108 111 plt.plot(t,ele_pos,label='ELEVATION_POS',color='red')#*10
109 112
110 113 #ax.set_xlim(0, 9)
111 ax.set_ylim(-5, 400)
114 ax.set_ylim(-5, 20)
112 115 plt.ylabel("Azimuth Position")
113 116 plt.xlabel("Sample")
114 117 plt.title('Azimuth Position vs Sample ', fontsize=20)
115 118 axes = plt.gca()
116 119 axes.yaxis.grid()
117 120 plt.xticks(fontsize=16)
118 121 plt.yticks(fontsize=16)
119 122 plt.show()