Before the branch to ind_plt_chs
J Gomez - r962:d347d1acfad0 merge
@@ -0,0 +1,34
1 from schainpy.controller import Project
2
3 desc = "A schain project"
4
5 controller = Project()
6 controller.setup(id='191', name="project", description=desc)
7
8 readUnitConf = controller.addReadUnit(datatype='VoltageReader',
9 path="/home/nanosat/schain",
10 startDate="1970/01/01",
11 endDate="2017/12/31",
12 startTime="00:00:00",
13 endTime="23:59:59",
14 online=0,
15 verbose=1,
16 walk=1,
17 )
18
19 procUnitConf1 = controller.addProcUnit(datatype='VoltageProc', inputId=readUnitConf.getId())
20
21 opObj11 = procUnitConf1.addOperation(name='ProfileSelector', optype='other')
22 opObj11.addParameter(name='profileRangeList', value='120,183', format='intlist')
23
24 opObj11 = procUnitConf1.addOperation(name='RTIPlot', optype='other')
25 opObj11.addParameter(name='wintitle', value='Jicamarca Radio Observatory', format='str')
26 opObj11.addParameter(name='showprofile', value='0', format='int')
27 opObj11.addParameter(name='xmin', value='0', format='int')
28 opObj11.addParameter(name='xmax', value='24', format='int')
29 opObj11.addParameter(name='figpath', value="/home/nanosat/schain/figs", format='str')
30 opObj11.addParameter(name='wr_period', value='5', format='int')
31 opObj11.addParameter(name='exp_code', value='22', format='int')
32
33
34 controller.start()
@@ -0,0 +1,1
1 <Project description="A schain project" id="191" name="project"><ReadUnit datatype="Voltage" id="1911" inputId="0" name="VoltageReader"><Operation id="19111" name="run" priority="1" type="self"><Parameter format="str" id="191111" name="datatype" value="VoltageReader" /><Parameter format="str" id="191112" name="path" value="/home/nanosat/schain" /><Parameter format="date" id="191113" name="startDate" value="1970/01/01" /><Parameter format="date" id="191114" name="endDate" value="2017/12/31" /><Parameter format="time" id="191115" name="startTime" value="00:00:00" /><Parameter format="time" id="191116" name="endTime" value="23:59:59" /><Parameter format="int" id="191118" name="walk" value="1" /><Parameter format="int" id="191119" name="verbose" value="1" /><Parameter format="int" id="191120" name="online" value="0" /></Operation></ReadUnit><ProcUnit datatype="Voltage" id="1912" inputId="1911" name="VoltageProc"><Operation id="19121" name="run" priority="1" type="self" /><Operation id="19122" name="ProfileSelector" priority="2" type="other"><Parameter format="intlist" id="191221" name="profileRangeList" value="120,183" /></Operation><Operation id="19123" name="RTIPlot" priority="3" type="plotter"><Parameter format="str" id="191231" name="wintitle" value="Jicamarca Radio Observatory" /><Parameter format="int" id="191232" name="showprofile" value="0" /><Parameter format="int" id="191233" name="xmin" value="0" /><Parameter format="int" id="191234" name="xmax" value="24" /><Parameter format="str" id="191235" name="figpath" value="/home/nanosat/schain/figs" /><Parameter format="int" id="191236" name="wr_period" value="5" /><Parameter format="int" id="191237" name="exp_code" value="22" /></Operation></ProcUnit></Project> No newline at end of file
@@ -0,0 +1,9
1 # schain-cli
2
3 Command Line Interface for SIGNAL CHAIN - JRO (Jicamarca Radio Observatory)
4
5 # Usage
6
7 To use it:
8
9 $ schain-cli --help
1 NO CONTENT: new file 100644
@@ -0,0 +1,34
1 from schainpy.controller import Project
2
3 desc = "asdasddsad"
4
5 controller = Project()
6 controller.setup(id='191', name="asdasd", description=desc)
7
8 readUnitConf = controller.addReadUnit(datatype='VoltageReader',
9 path="/home/nanosat/schain/schain-cli",
10 startDate="1970/01/01",
11 endDate="2017/12/31",
12 startTime="00:00:00",
13 endTime="23:59:59",
14 online=0,
15 verbose=1,
16 walk=1,
17 )
18
19 procUnitConf1 = controller.addProcUnit(datatype='VoltageProc', inputId=readUnitConf.getId())
20
21 opObj11 = procUnitConf1.addOperation(name='ProfileSelector', optype='other')
22 opObj11.addParameter(name='profileRangeList', value='120,183', format='intlist')
23
24 opObj11 = procUnitConf1.addOperation(name='RTIPlot', optype='other')
25 opObj11.addParameter(name='wintitle', value='Jicamarca Radio Observatory', format='str')
26 opObj11.addParameter(name='showprofile', value='0', format='int')
27 opObj11.addParameter(name='xmin', value='0', format='int')
28 opObj11.addParameter(name='xmax', value='24', format='int')
29 opObj11.addParameter(name='figpath', value="/home/nanosat/schain/schain-cli/figs", format='str')
30 opObj11.addParameter(name='wr_period', value='5', format='int')
31 opObj11.addParameter(name='exp_code', value='22', format='int')
32
33
34 controller.start()
@@ -0,0 +1,188
1 import click
2 import schainpy
3 import subprocess
4 import os
5 import sys
6 import glob
7 save_stdout = sys.stdout
8 sys.stdout = open('trash', 'w')
9 from multiprocessing import cpu_count
10 from schaincli import templates
11 from schainpy import controller_api
12 from schainpy.model import Operation, ProcessingUnit
13 from schainpy.utils import log
14 from importlib import import_module
15 from pydoc import locate
16 from fuzzywuzzy import process
17 sys.stdout = save_stdout
18
19
20 def print_version(ctx, param, value):
21 if not value or ctx.resilient_parsing:
22 return
23 click.echo(schainpy.__version__)
24 ctx.exit()
25
26
27 cliLogger = log.makelogger('schain cli')
28 PREFIX = 'experiment'
29
30
31 @click.command()
32 @click.option('--version', '-v', is_flag=True, callback=print_version, help='SChain version', type=str)
33 @click.option('--xml', '-x', default=None, help='run an XML file', type=click.Path(exists=True, resolve_path=True))
34 @click.argument('command', default='run', required=True)
35 @click.argument('nextcommand', default=None, required=False, type=str)
36 def main(command, nextcommand, version, xml):
37 """COMMAND LINE INTERFACE FOR SIGNAL CHAIN - JICAMARCA RADIO OBSERVATORY \n
38 Available commands.\n
39 --xml: runs a schain-generated XML file\n
40 run: runs any python script whose name starts with 'experiment_'\n
41 generate: generates a template schain script\n
42 search: returns available operations, procs or arguments of the given operation/proc\n"""
43 if xml is not None:
44 runFromXML(xml)
45 elif command == 'generate':
46 generate()
47 elif command == 'test':
48 test()
49 elif command == 'run':
50 runschain(nextcommand)
51 elif command == 'search':
52 search(nextcommand)
53 else:
54 log.error('Command {} is not defined'.format(command))
55
56 def check_module(possible, instance):
57 def check(x):
58 try:
59 instancia = locate('schainpy.model.{}'.format(x))
60 return isinstance(instancia(), instance)
61 except Exception as e:
62 return False
63 clean = clean_modules(possible)
64 return [x for x in clean if check(x)]
65
66
67 def clean_modules(module):
68 noEndsUnder = [x for x in module if not x.endswith('__')]
69 noStartUnder = [x for x in noEndsUnder if not x.startswith('__')]
70 noFullUpper = [x for x in noStartUnder if not x.isupper()]
71 return noFullUpper
72
73
74 def search(nextcommand):
75 if nextcommand is None:
76 log.error('There is no Operation/ProcessingUnit to search')
77 elif nextcommand == 'procs':
78 module = dir(import_module('schainpy.model'))
79 procs = check_module(module, ProcessingUnit)
80 try:
81 procs.remove('ProcessingUnit')
82 except Exception as e:
83 pass
84 log.success('Current ProcessingUnits are:\n\033[1m{}\033[0m'.format('\n'.join(procs)))
85
86 elif nextcommand == 'operations':
87 module = dir(import_module('schainpy.model'))
88 noProcs = [x for x in module if not x.endswith('Proc')]
89 operations = check_module(noProcs, Operation)
90 try:
91 operations.remove('Operation')
92 except Exception as e:
93 pass
94 log.success('Current Operations are:\n\033[1m{}\033[0m'.format('\n'.join(operations)))
95 else:
96 try:
97 module = locate('schainpy.model.{}'.format(nextcommand))
98 args = module().getAllowedArgs()
99 log.warning('Use this feature with caution. It may not return all the allowed arguments')
100 try:
101 args.remove('self')
102 except Exception as e:
103 pass
104 try:
105 args.remove('dataOut')
106 except Exception as e:
107 pass
108 if len(args) == 0:
109 log.success('{} has no arguments'.format(nextcommand))
110 else:
111 log.success('Arguments of {}:\n\033[1m{}\033[0m'.format(nextcommand, '\n'.join(args)))
112 except Exception as e:
113 log.error('Module {} does not exist'.format(nextcommand))
114 allModules = dir(import_module('schainpy.model'))
115 module = check_module(allModules, Operation)
116 module.extend(check_module(allModules, ProcessingUnit))
117 similar = process.extractOne(nextcommand, module)[0]
118 log.success('Searching {} instead'.format(similar))
119 search(similar)
120
121
122 def runschain(nextcommand):
123 if nextcommand is None:
124 currentfiles = glob.glob('./{}_*.py'.format(PREFIX))
125 numberfiles = len(currentfiles)
126 if numberfiles > 1:
127 log.error('There is more than one file to run')
128 elif numberfiles == 1:
129 subprocess.call(['python ' + currentfiles[0]], shell=True)
130 else:
131 log.error('There is no file to run')
132 else:
133 try:
134 subprocess.call(['python ' + nextcommand], shell=True)
135 except Exception as e:
136 log.error("I cannot run the file. Does it exists?")
137
138
139 def basicInputs():
140 inputs = {}
141 inputs['desc'] = click.prompt('Enter a description', default="A schain project", type=str)
142 inputs['name'] = click.prompt('Name of the project', default="project", type=str)
143 inputs['path'] = click.prompt('Data path', default=os.getcwd(), type=click.Path(exists=True, resolve_path=True))
144 inputs['startDate'] = click.prompt('Start date', default='1970/01/01', type=str)
145 inputs['endDate'] = click.prompt('End date', default='2017/12/31', type=str)
146 inputs['startHour'] = click.prompt('Start hour', default='00:00:00', type=str)
147 inputs['endHour'] = click.prompt('End hour', default='23:59:59', type=str)
148 inputs['figpath'] = inputs['path'] + '/figs'
149 return inputs
150
151
152 def generate():
153 inputs = basicInputs()
154 inputs['multiprocess'] = click.confirm('Is this a multiprocess script?')
155 if inputs['multiprocess']:
156 inputs['nProcess'] = click.prompt('How many processes?', default=cpu_count(), type=int)
157 current = templates.multiprocess.format(**inputs)
158 else:
159 current = templates.basic.format(**inputs)
160 scriptname = '{}_{}.py'.format(PREFIX, inputs['name'])
161 script = open(scriptname, 'w')
162 try:
163 script.write(current)
164 log.success('Script {} generated'.format(scriptname))
165 except Exception as e:
166 log.error('I cannot create the file. Do you have write permission?')
167
168
169 def test():
170 log.warning('testing')
171
172
173 def runFromXML(filename):
174 controller = controller_api.ControllerThread()
175 if not controller.readXml(filename):
176 return
177
178 plotterObj = controller.useExternalPlotter()
179
180 controller.start()
181 plotterObj.start()
182
183 cliLogger("Finishing all processes")
184
185 controller.join(5)
186
187 cliLogger("End of script")
188 return
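A minimal sketch of exercising this entry point in-process with click's test runner (the same mechanism the test file later in this diff uses); 'search procs' and 'search operations' are the subcommands documented in main()'s docstring:

    from click.testing import CliRunner
    from schaincli import cli

    runner = CliRunner()
    # list available ProcessingUnits and Operations via the 'search' command
    result = runner.invoke(cli.main, ['search', 'procs'])
    print(result.output)
    result = runner.invoke(cli.main, ['search', 'operations'])
    print(result.exit_code)   # 0 when the command ran without raising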
@@ -0,0 +1,75
1 basic = '''from schainpy.controller import Project
2
3 desc = "{desc}"
4
5 controller = Project()
6 controller.setup(id='191', name="{name}", description=desc)
7
8 readUnitConf = controller.addReadUnit(datatype='VoltageReader',
9 path="{path}",
10 startDate="{startDate}",
11 endDate="{endDate}",
12 startTime="{startHour}",
13 endTime="{endHour}",
14 online=0,
15 verbose=1,
16 walk=1,
17 )
18
19 procUnitConf1 = controller.addProcUnit(datatype='VoltageProc', inputId=readUnitConf.getId())
20
21 opObj11 = procUnitConf1.addOperation(name='ProfileSelector', optype='other')
22 opObj11.addParameter(name='profileRangeList', value='120,183', format='intlist')
23
24 opObj11 = procUnitConf1.addOperation(name='RTIPlot', optype='other')
25 opObj11.addParameter(name='wintitle', value='Jicamarca Radio Observatory', format='str')
26 opObj11.addParameter(name='showprofile', value='0', format='int')
27 opObj11.addParameter(name='xmin', value='0', format='int')
28 opObj11.addParameter(name='xmax', value='24', format='int')
29 opObj11.addParameter(name='figpath', value="{figpath}", format='str')
30 opObj11.addParameter(name='wr_period', value='5', format='int')
31 opObj11.addParameter(name='exp_code', value='22', format='int')
32
33
34 controller.start()
35 '''
36
37 multiprocess = '''from schainpy.controller import Project, multiSchain
38
39 desc = "{desc}"
40
41 def fiber(cursor, skip, q, day):
42 controller = Project()
43 controller.setup(id='191', name="{name}", description=desc)
44
45 readUnitConf = controller.addReadUnit(datatype='SpectraReader',
46 path="{path}",
47 startDate=day,
48 endDate=day,
49 startTime="{startHour}",
50 endTime="{endHour}",
51 online=0,
52 queue=q,
53 cursor=cursor,
54 skip=skip,
55 verbose=1,
56 walk=1,
57 )
58
59 procUnitConf1 = controller.addProcUnit(datatype='Spectra', inputId=readUnitConf.getId())
60
61 procUnitConf2 = controller.addProcUnit(datatype='ParametersProc', inputId=readUnitConf.getId())
62 opObj11 = procUnitConf2.addOperation(name='SpectralMoments', optype='other')
63
64 opObj12 = procUnitConf2.addOperation(name='PublishData', optype='other')
65 opObj12.addParameter(name='zeromq', value=1, format='int')
66 opObj12.addParameter(name='verbose', value=0, format='bool')
67
68 controller.start()
69
70
71 if __name__ == '__main__':
72 multiSchain(fiber, nProcess={nProcess}, startDate="{startDate}", endDate="{endDate}")
73
74
75 '''
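A minimal sketch of rendering the 'basic' template exactly as generate() in cli.py does, with a hypothetical set of prompt answers (the dictionary keys match the placeholders in the template string above):

    from schaincli import templates

    inputs = {
        'desc': 'A schain project',
        'name': 'project',
        'path': '/data/experiment',            # hypothetical data path
        'startDate': '1970/01/01',
        'endDate': '2017/12/31',
        'startHour': '00:00:00',
        'endHour': '23:59:59',
        'figpath': '/data/experiment/figs',
    }

    script = templates.basic.format(**inputs)  # fill in the {placeholders}
    with open('experiment_project.py', 'w') as fp:
        fp.write(script)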
@@ -0,0 +1,1
1
@@ -0,0 +1,29
1 import pytest
2 from click.testing import CliRunner
3 from schaincli import cli
4
5
6 @pytest.fixture
7 def runner():
8 return CliRunner()
9
10
11 def test_cli(runner):
12 result = runner.invoke(cli.main)
13 assert result.exit_code == 0
14 assert not result.exception
15 assert result.output.strip() == 'Hello, world.'
16
17
18 def test_cli_with_option(runner):
19 result = runner.invoke(cli.main, ['--as-cowboy'])
20 assert not result.exception
21 assert result.exit_code == 0
22 assert result.output.strip() == 'Howdy, world.'
23
24
25 def test_cli_with_arg(runner):
26 result = runner.invoke(cli.main, ['Jicamarca'])
27 assert result.exit_code == 0
28 assert not result.exception
29 assert result.output.strip() == 'Hello, Jicamarca.'
@@ -0,0 +1,34
1 from schainpy.controller import Project
2
3 desc = "A schain project"
4
5 controller = Project()
6 controller.setup(id='191', name="project", description=desc)
7
8 readUnitConf = controller.addReadUnit(datatype='VoltageReader',
9 path="/home/nanosat/schain/schainpy",
10 startDate="1970/01/01",
11 endDate="2017/12/31",
12 startTime="00:00:00",
13 endTime="23:59:59",
14 online=0,
15 verbose=1,
16 walk=1,
17 )
18
19 procUnitConf1 = controller.addProcUnit(datatype='VoltageProc', inputId=readUnitConf.getId())
20
21 opObj11 = procUnitConf1.addOperation(name='ProfileSelector', optype='other')
22 opObj11.addParameter(name='profileRangeList', value='120,183', format='intlist')
23
24 opObj11 = procUnitConf1.addOperation(name='RTIPlot', optype='other')
25 opObj11.addParameter(name='wintitle', value='Jicamarca Radio Observatory', format='str')
26 opObj11.addParameter(name='showprofile', value='0', format='int')
27 opObj11.addParameter(name='xmin', value='0', format='int')
28 opObj11.addParameter(name='xmax', value='24', format='int')
29 opObj11.addParameter(name='figpath', value="/home/nanosat/schain/schainpy/figs", format='str')
30 opObj11.addParameter(name='wr_period', value='5', format='int')
31 opObj11.addParameter(name='exp_code', value='22', format='int')
32
33
34 controller.start()
@@ -0,0 +1,33
1 from schainpy.controller import Project
2
3 desc = "A schain project"
4
5 controller = Project()
6 controller.setup(id='191', name="project", description=desc)
7
8 readUnitConf = controller.addReadUnit(datatype='VoltageReader',
9 path="/home/nanosat/schain/schainpy/scripts",
10 startDate="1970/01/01",
11 endDate="2017/12/31",
12 startTime="00:00:00",
13 endTime="23:59:59",
14 online=0,
15 walk=1,
16 )
17
18 procUnitConf1 = controller.addProcUnit(datatype='VoltageProc', inputId=readUnitConf.getId())
19
20 opObj11 = procUnitConf1.addOperation(name='ProfileSelector', optype='other')
21 opObj11.addParameter(name='profileRangeList', value='120,183', format='intlist')
22
23 opObj11 = procUnitConf1.addOperation(name='RTIPlot', optype='other')
24 opObj11.addParameter(name='wintitle', value='Jicamarca Radio Observatory', format='str')
25 opObj11.addParameter(name='showprofile', value='0', format='int')
26 opObj11.addParameter(name='xmin', value='0', format='int')
27 opObj11.addParameter(name='xmax', value='24', format='int')
28 opObj11.addParameter(name='figpath', value="/home/nanosat/schain/schainpy/scripts/figs", format='str')
29 opObj11.addParameter(name='wr_period', value='5', format='int')
30 opObj11.addParameter(name='exp_code', value='22', format='int')
31
32
33 controller.start()
1 NO CONTENT: new file 100644
@@ -0,0 +1,45
1 """.
2 SCHAINPY - LOG
3 Simple helper for log standarization
4 Usage:
5 from schainpy.utils import log
6 log.error('A kitten died beacuse of you')
7 log.warning('You are doing it wrong but what the heck, I'll allow it)
8 log.succes('YOU ROCK!')
9 To create your own logger inside your class do it like this:
10 from schainpy.utils import log
11 awesomeLogger = log.makelogger("never gonna", bg="red", fg="white")
12 awesomeLogger('give you up')
13 which will look like this:
14 [NEVER GONNA] - give you up
15 with color red as background and white as foreground.
16 """
17
18 import click
19
20
21 def warning(message):
22 click.echo(click.style('[WARNING] - ' + message, fg='yellow'))
23 pass
24
25
26 def error(message):
27 click.echo(click.style('[ERROR] - ' + message, fg='red'))
28 pass
29
30
31 def success(message):
32 click.echo(click.style(message, fg='green'))
33 pass
34
35
36 def log(message):
37 click.echo('[LOG] - ' + message)
38 pass
39
40
41 def makelogger(topic, bg='reset', fg='reset'):
42 def func(message):
43 click.echo(click.style('[{}] - '.format(topic.upper()) + message,
44 bg=bg, fg=fg))
45 return func
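A minimal usage sketch, following the module docstring above ('rti plot' and the colors are arbitrary examples):

    from schainpy.utils import log

    log.warning('disk is almost full')         # "[WARNING] - disk is almost full" in yellow
    log.error('could not open the data file')  # "[ERROR] - ..." in red
    rtiLogger = log.makelogger('rti plot', bg='blue', fg='white')
    rtiLogger('figure saved')                  # "[RTI PLOT] - figure saved"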
@@ -0,0 +1,1
1 You should install the "digital_rf_hdf5" module if you want to read USRP data
@@ -1,1323 +1,1323
1 1 '''
2 2 Created on September, 2012
3 3 @author:
4 4 '''
5 5
6 6 import sys
7 7 import ast
8 8 import datetime
9 9 import traceback
10 10 import math
11 11 import time
12 12 from multiprocessing import Process, Queue, cpu_count
13 13
14 14 import schainpy
15 15 import schainpy.admin
16 16
17 17 from xml.etree.ElementTree import ElementTree, Element, SubElement, tostring
18 18 from xml.dom import minidom
19 19
20 20 from schainpy.model import *
21 21 from time import sleep
22 22
23 23 def prettify(elem):
24 24 """Return a pretty-printed XML string for the Element.
25 25 """
26 26 rough_string = tostring(elem, 'utf-8')
27 27 reparsed = minidom.parseString(rough_string)
28 28 return reparsed.toprettyxml(indent=" ")
29 29
30 30 def multiSchain(child, nProcess=cpu_count(), startDate=None, endDate=None, by_day=False):
31 31 skip = 0
32 32 cursor = 0
33 33 nFiles = None
34 34 processes = []
35 35 dt1 = datetime.datetime.strptime(startDate, '%Y/%m/%d')
36 36 dt2 = datetime.datetime.strptime(endDate, '%Y/%m/%d')
37 37 days = (dt2 - dt1).days
38 38
39 39 for day in range(days+1):
40 40 skip = 0
41 41 cursor = 0
42 42 q = Queue()
43 43 processes = []
44 44 dt = (dt1 + datetime.timedelta(day)).strftime('%Y/%m/%d')
45 45 firstProcess = Process(target=child, args=(cursor, skip, q, dt))
46 46 firstProcess.start()
47 47 if by_day:
48 48 continue
49 49 nFiles = q.get()
50 50 firstProcess.terminate()
51 51 skip = int(math.ceil(nFiles/nProcess))
52 52 while True:
53 53 processes.append(Process(target=child, args=(cursor, skip, q, dt)))
54 54 processes[cursor].start()
55 55 if nFiles < cursor*skip:
56 56 break
57 57 cursor += 1
58 58
59 59 def beforeExit(exctype, value, trace):
60 60 for process in processes:
61 61 process.terminate()
62 62 process.join()
63 63 print traceback.print_tb(trace)
64 64
65 65 sys.excepthook = beforeExit
66 66
67 67 for process in processes:
68 68 process.join()
69 69 process.terminate()
70 70
71 71 time.sleep(3)
72 72
73 73
74 74 class ParameterConf():
75 75
76 76 id = None
77 77 name = None
78 78 value = None
79 79 format = None
80 80
81 81 __formated_value = None
82 82
83 83 ELEMENTNAME = 'Parameter'
84 84
85 85 def __init__(self):
86 86
87 87 self.format = 'str'
88 88
89 89 def getElementName(self):
90 90
91 91 return self.ELEMENTNAME
92 92
93 93 def getValue(self):
94 94
95 95 value = self.value
96 96 format = self.format
97 97
98 98 if self.__formated_value != None:
99 99
100 100 return self.__formated_value
101 101
102 102 if format == 'obj':
103 103 return value
104 104
105 105 if format == 'str':
106 106 self.__formated_value = str(value)
107 107 return self.__formated_value
108 108
109 109 if value == '':
110 110 raise ValueError, "%s: This parameter value is empty" %self.name
111 111
112 112 if format == 'list':
113 113 strList = value.split(',')
114 114
115 115 self.__formated_value = strList
116 116
117 117 return self.__formated_value
118 118
119 119 if format == 'intlist':
120 120 """
121 121 Example:
122 122 value = (0,1,2)
123 123 """
124 124
125 125 new_value = ast.literal_eval(value)
126 126
127 127 if type(new_value) not in (tuple, list):
128 128 new_value = [int(new_value)]
129 129
130 130 self.__formated_value = new_value
131 131
132 132 return self.__formated_value
133 133
134 134 if format == 'floatlist':
135 135 """
136 136 Example:
137 137 value = (0.5, 1.4, 2.7)
138 138 """
139 139
140 140 new_value = ast.literal_eval(value)
141 141
142 142 if type(new_value) not in (tuple, list):
143 143 new_value = [float(new_value)]
144 144
145 145 self.__formated_value = new_value
146 146
147 147 return self.__formated_value
148 148
149 149 if format == 'date':
150 150 strList = value.split('/')
151 151 intList = [int(x) for x in strList]
152 152 date = datetime.date(intList[0], intList[1], intList[2])
153 153
154 154 self.__formated_value = date
155 155
156 156 return self.__formated_value
157 157
158 158 if format == 'time':
159 159 strList = value.split(':')
160 160 intList = [int(x) for x in strList]
161 161 time = datetime.time(intList[0], intList[1], intList[2])
162 162
163 163 self.__formated_value = time
164 164
165 165 return self.__formated_value
166 166
167 167 if format == 'pairslist':
168 168 """
169 169 Example:
170 170 value = (0,1),(1,2)
171 171 """
172 172
173 173 new_value = ast.literal_eval(value)
174 174
175 175 if type(new_value) not in (tuple, list):
176 176 raise ValueError, "%s has to be a tuple or list of pairs" %value
177 177
178 178 if type(new_value[0]) not in (tuple, list):
179 179 if len(new_value) != 2:
180 180 raise ValueError, "%s has to be a tuple or list of pairs" %value
181 181 new_value = [new_value]
182 182
183 183 for thisPair in new_value:
184 184 if len(thisPair) != 2:
185 185 raise ValueError, "%s has to be a tuple or list of pairs" %value
186 186
187 187 self.__formated_value = new_value
188 188
189 189 return self.__formated_value
190 190
191 191 if format == 'multilist':
192 192 """
193 193 Example:
194 194 value = (0,1,2),(3,4,5)
195 195 """
196 196 multiList = ast.literal_eval(value)
197 197
198 198 if type(multiList[0]) == int:
199 199 multiList = ast.literal_eval("(" + value + ")")
200 200
201 201 self.__formated_value = multiList
202 202
203 203 return self.__formated_value
204 204
205 205 if format == 'bool':
206 206 value = int(value)
207 207
208 208 if format == 'int':
209 209 value = float(value)
210 210
211 211 format_func = eval(format)
212 212
213 213 self.__formated_value = format_func(value)
214 214
215 215 return self.__formated_value
216 216
217 217 def updateId(self, new_id):
218 218
219 219 self.id = str(new_id)
220 220
221 221 def setup(self, id, name, value, format='str'):
222 222
223 223 self.id = str(id)
224 224 self.name = name
225 225 if format == 'obj':
226 226 self.value = value
227 227 else:
228 228 self.value = str(value)
229 229 self.format = str.lower(format)
230 230
231 231 self.getValue()
232 232
233 233 return 1
234 234
235 235 def update(self, name, value, format='str'):
236 236
237 237 self.name = name
238 238 self.value = str(value)
239 239 self.format = format
240 240
241 241 def makeXml(self, opElement):
242 242 if self.name not in ('queue',):
243 243 parmElement = SubElement(opElement, self.ELEMENTNAME)
244 244 parmElement.set('id', str(self.id))
245 245 parmElement.set('name', self.name)
246 246 parmElement.set('value', self.value)
247 247 parmElement.set('format', self.format)
248 248
249 249 def readXml(self, parmElement):
250 250
251 251 self.id = parmElement.get('id')
252 252 self.name = parmElement.get('name')
253 253 self.value = parmElement.get('value')
254 254 self.format = str.lower(parmElement.get('format'))
255 255
256 256 #Compatible with old signal chain version
257 257 if self.format == 'int' and self.name == 'idfigure':
258 258 self.name = 'id'
259 259
260 260 def printattr(self):
261 261
262 262 print "Parameter[%s]: name = %s, value = %s, format = %s" %(self.id, self.name, self.value, self.format)
263 263
264 264 class OperationConf():
265 265
266 266 id = None
267 267 name = None
268 268 priority = None
269 269 type = None
270 270
271 271 parmConfObjList = []
272 272
273 273 ELEMENTNAME = 'Operation'
274 274
275 275 def __init__(self):
276 276
277 277 self.id = '0'
278 278 self.name = None
279 279 self.priority = None
280 280 self.type = 'self'
281 281
282 282
283 283 def __getNewId(self):
284 284
285 285 return int(self.id)*10 + len(self.parmConfObjList) + 1
286 286
287 287 def updateId(self, new_id):
288 288
289 289 self.id = str(new_id)
290 290
291 291 n = 1
292 292 for parmObj in self.parmConfObjList:
293 293
294 294 idParm = str(int(new_id)*10 + n)
295 295 parmObj.updateId(idParm)
296 296
297 297 n += 1
298 298
299 299 def getElementName(self):
300 300
301 301 return self.ELEMENTNAME
302 302
303 303 def getParameterObjList(self):
304 304
305 305 return self.parmConfObjList
306 306
307 307 def getParameterObj(self, parameterName):
308 308
309 309 for parmConfObj in self.parmConfObjList:
310 310
311 311 if parmConfObj.name != parameterName:
312 312 continue
313 313
314 314 return parmConfObj
315 315
316 316 return None
317 317
318 318 def getParameterObjfromValue(self, parameterValue):
319 319
320 320 for parmConfObj in self.parmConfObjList:
321 321
322 322 if parmConfObj.getValue() != parameterValue:
323 323 continue
324 324
325 325 return parmConfObj.getValue()
326 326
327 327 return None
328 328
329 329 def getParameterValue(self, parameterName):
330 330
331 331 parameterObj = self.getParameterObj(parameterName)
332 332
333 333 # if not parameterObj:
334 334 # return None
335 335
336 336 value = parameterObj.getValue()
337 337
338 338 return value
339 339
340 340
341 341 def getKwargs(self):
342 342
343 343 kwargs = {}
344 344
345 345 for parmConfObj in self.parmConfObjList:
346 346 if self.name == 'run' and parmConfObj.name == 'datatype':
347 347 continue
348 348
349 349 kwargs[parmConfObj.name] = parmConfObj.getValue()
350 350
351 351 return kwargs
352 352
353 353 def setup(self, id, name, priority, type):
354 354
355 355 self.id = str(id)
356 356 self.name = name
357 357 self.type = type
358 358 self.priority = priority
359 359
360 360 self.parmConfObjList = []
361 361
362 362 def removeParameters(self):
363 363
364 364 for obj in self.parmConfObjList:
365 365 del obj
366 366
367 367 self.parmConfObjList = []
368 368
369 369 def addParameter(self, name, value, format='str'):
370 370
371 371 id = self.__getNewId()
372 372
373 373 parmConfObj = ParameterConf()
374 374 if not parmConfObj.setup(id, name, value, format):
375 375 return None
376 376
377 377 self.parmConfObjList.append(parmConfObj)
378 378
379 379 return parmConfObj
380 380
381 381 def changeParameter(self, name, value, format='str'):
382 382
383 383 parmConfObj = self.getParameterObj(name)
384 384 parmConfObj.update(name, value, format)
385 385
386 386 return parmConfObj
387 387
388 388 def makeXml(self, procUnitElement):
389 389
390 390 opElement = SubElement(procUnitElement, self.ELEMENTNAME)
391 391 opElement.set('id', str(self.id))
392 392 opElement.set('name', self.name)
393 393 opElement.set('type', self.type)
394 394 opElement.set('priority', str(self.priority))
395 395
396 396 for parmConfObj in self.parmConfObjList:
397 397 parmConfObj.makeXml(opElement)
398 398
399 399 def readXml(self, opElement):
400 400
401 401 self.id = opElement.get('id')
402 402 self.name = opElement.get('name')
403 403 self.type = opElement.get('type')
404 404 self.priority = opElement.get('priority')
405 405
406 406 #Compatible with old signal chain version
407 407 #Use the 'run' method instead of 'init'
408 408 if self.type == 'self' and self.name == 'init':
409 409 self.name = 'run'
410 410
411 411 self.parmConfObjList = []
412 412
413 413 parmElementList = opElement.iter(ParameterConf().getElementName())
414 414
415 415 for parmElement in parmElementList:
416 416 parmConfObj = ParameterConf()
417 417 parmConfObj.readXml(parmElement)
418 418
419 419 #Compatible with old signal chain version
420 420 #If a 'plot' OPERATION is found, replace the operation name with the value of its 'type' PARAMETER
421 421 if self.type != 'self' and self.name == 'Plot':
422 422 if parmConfObj.format == 'str' and parmConfObj.name == 'type':
423 423 self.name = parmConfObj.value
424 424 continue
425 425
426 426 self.parmConfObjList.append(parmConfObj)
427 427
428 428 def printattr(self):
429 429
430 430 print "%s[%s]: name = %s, type = %s, priority = %s" %(self.ELEMENTNAME,
431 431 self.id,
432 432 self.name,
433 433 self.type,
434 434 self.priority)
435 435
436 436 for parmConfObj in self.parmConfObjList:
437 437 parmConfObj.printattr()
438 438
439 439 def createObject(self, plotter_queue=None):
440 440
441 441
442 442 if self.type == 'self':
443 443 raise ValueError, "This operation type cannot be created"
444 444
445 445 if self.type == 'plotter':
446 446 #Plotter(plotter_name)
447 447 if not plotter_queue:
448 448 raise ValueError, "plotter_queue is not defined. Use:\nmyProject = Project()\nmyProject.setPlotterQueue(plotter_queue)"
449 449
450 450 opObj = Plotter(self.name, plotter_queue)
451 451
452 452 if self.type == 'external' or self.type == 'other':
453 453
454 454 className = eval(self.name)
455 455 kwargs = self.getKwargs()
456 456
457 457 opObj = className(**kwargs)
458 458
459 459 return opObj
460 460
461 461
462 462 class ProcUnitConf():
463 463
464 464 id = None
465 465 name = None
466 466 datatype = None
467 467 inputId = None
468 468 parentId = None
469 469
470 470 opConfObjList = []
471 471
472 472 procUnitObj = None
473 473 opObjList = []
474 474
475 475 ELEMENTNAME = 'ProcUnit'
476 476
477 477 def __init__(self):
478 478
479 479 self.id = None
480 480 self.datatype = None
481 481 self.name = None
482 482 self.inputId = None
483 483
484 484 self.opConfObjList = []
485 485
486 486 self.procUnitObj = None
487 487 self.opObjDict = {}
488 488
489 489 def __getPriority(self):
490 490
491 491 return len(self.opConfObjList)+1
492 492
493 493 def __getNewId(self):
494 494
495 495 return int(self.id)*10 + len(self.opConfObjList) + 1
496 496
497 497 def getElementName(self):
498 498
499 499 return self.ELEMENTNAME
500 500
501 501 def getId(self):
502 502
503 503 return self.id
504 504
505 505 def updateId(self, new_id, parentId=parentId):
506 506
507 507
508 508 new_id = int(parentId)*10 + (int(self.id) % 10)
509 509 new_inputId = int(parentId)*10 + (int(self.inputId) % 10)
510 510
511 511 #If this proc unit has no inputs
512 512 if self.inputId == '0':
513 513 new_inputId = 0
514 514
515 515 n = 1
516 516 for opConfObj in self.opConfObjList:
517 517
518 518 idOp = str(int(new_id)*10 + n)
519 519 opConfObj.updateId(idOp)
520 520
521 521 n += 1
522 522
523 523 self.parentId = str(parentId)
524 524 self.id = str(new_id)
525 525 self.inputId = str(new_inputId)
526 526
527 527
528 528 def getInputId(self):
529 529
530 530 return self.inputId
531 531
532 532 def getOperationObjList(self):
533 533
534 534 return self.opConfObjList
535 535
536 536 def getOperationObj(self, name=None):
537 537
538 538 for opConfObj in self.opConfObjList:
539 539
540 540 if opConfObj.name != name:
541 541 continue
542 542
543 543 return opConfObj
544 544
545 545 return None
546 546
547 547 def getOpObjfromParamValue(self, value=None):
548 548
549 549 for opConfObj in self.opConfObjList:
550 550 if opConfObj.getParameterObjfromValue(parameterValue=value) != value:
551 551 continue
552 552 return opConfObj
553 553 return None
554 554
555 555 def getProcUnitObj(self):
556 556
557 557 return self.procUnitObj
558 558
559 559 def setup(self, id, name, datatype, inputId, parentId=None):
560 560
561 561 #Compatible with old signal chain version
562 562 if datatype==None and name==None:
563 563 raise ValueError, "datatype or name should be defined"
564 564
565 565 if name==None:
566 566 if 'Proc' in datatype:
567 567 name = datatype
568 568 else:
569 569 name = '%sProc' %(datatype)
570 570
571 571 if datatype==None:
572 572 datatype = name.replace('Proc','')
573 573
574 574 self.id = str(id)
575 575 self.name = name
576 576 self.datatype = datatype
577 577 self.inputId = inputId
578 578 self.parentId = parentId
579 579
580 580 self.opConfObjList = []
581 581
582 582 self.addOperation(name='run', optype='self')
583 583
584 584 def removeOperations(self):
585 585
586 586 for obj in self.opConfObjList:
587 587 del obj
588 588
589 589 self.opConfObjList = []
590 590 self.addOperation(name='run')
591 591
592 592 def addParameter(self, **kwargs):
593 593 '''
594 594 Add parameters to "run" operation
595 595 '''
596 596 opObj = self.opConfObjList[0]
597 597
598 598 opObj.addParameter(**kwargs)
599 599
600 600 return opObj
601 601
602 602 def addOperation(self, name, optype='self'):
603 603
604 604 id = self.__getNewId()
605 605 priority = self.__getPriority()
606 606
607 607 opConfObj = OperationConf()
608 608 opConfObj.setup(id, name=name, priority=priority, type=optype)
609 609
610 610 self.opConfObjList.append(opConfObj)
611 611
612 612 return opConfObj
613 613
614 614 def makeXml(self, projectElement):
615 615
616 616 procUnitElement = SubElement(projectElement, self.ELEMENTNAME)
617 617 procUnitElement.set('id', str(self.id))
618 618 procUnitElement.set('name', self.name)
619 619 procUnitElement.set('datatype', self.datatype)
620 620 procUnitElement.set('inputId', str(self.inputId))
621 621
622 622 for opConfObj in self.opConfObjList:
623 623 opConfObj.makeXml(procUnitElement)
624 624
625 625 def readXml(self, upElement):
626 626
627 627 self.id = upElement.get('id')
628 628 self.name = upElement.get('name')
629 629 self.datatype = upElement.get('datatype')
630 630 self.inputId = upElement.get('inputId')
631 631
632 632 if self.ELEMENTNAME == "ReadUnit":
633 633 self.datatype = self.datatype.replace("Reader", "")
634 634
635 635 if self.ELEMENTNAME == "ProcUnit":
636 636 self.datatype = self.datatype.replace("Proc", "")
637 637
638 638 if self.inputId == 'None':
639 639 self.inputId = '0'
640 640
641 641 self.opConfObjList = []
642 642
643 643 opElementList = upElement.iter(OperationConf().getElementName())
644 644
645 645 for opElement in opElementList:
646 646 opConfObj = OperationConf()
647 647 opConfObj.readXml(opElement)
648 648 self.opConfObjList.append(opConfObj)
649 649
650 650 def printattr(self):
651 651
652 652 print "%s[%s]: name = %s, datatype = %s, inputId = %s" %(self.ELEMENTNAME,
653 653 self.id,
654 654 self.name,
655 655 self.datatype,
656 656 self.inputId)
657 657
658 658 for opConfObj in self.opConfObjList:
659 659 opConfObj.printattr()
660 660
661 661
662 662 def getKwargs(self):
663 663
664 664 opObj = self.opConfObjList[0]
665 665 kwargs = opObj.getKwargs()
666 666
667 667 return kwargs
668 668
669 669 def createObjects(self, plotter_queue=None):
670 670
671 671 className = eval(self.name)
672 672 kwargs = self.getKwargs()
673 673 procUnitObj = className(**kwargs)
674 674
675 675 for opConfObj in self.opConfObjList:
676 676
677 677 if opConfObj.type=='self' and self.name=='run':
678 678 continue
679 679 elif opConfObj.type=='self':
680 680 procUnitObj.addOperationKwargs(opConfObj.id, **opConfObj.getKwargs())
681 681 continue
682 682
683 683 opObj = opConfObj.createObject(plotter_queue)
684 684
685 685 self.opObjDict[opConfObj.id] = opObj
686 686
687 687 procUnitObj.addOperation(opObj, opConfObj.id)
688 688
689 689 self.procUnitObj = procUnitObj
690 690
691 691 return procUnitObj
692 692
693 693 def run(self):
694 694
695 695 is_ok = False
696 696
697 697 for opConfObj in self.opConfObjList:
698 698
699 699 kwargs = {}
700 700 for parmConfObj in opConfObj.getParameterObjList():
701 701 if opConfObj.name == 'run' and parmConfObj.name == 'datatype':
702 702 continue
703 703
704 704 kwargs[parmConfObj.name] = parmConfObj.getValue()
705 705
706 706 #ini = time.time()
707 707
708 708 #print "\tRunning the '%s' operation with %s" %(opConfObj.name, opConfObj.id)
709 709 sts = self.procUnitObj.call(opType = opConfObj.type,
710 710 opName = opConfObj.name,
711 711 opId = opConfObj.id,
712 712 )
713 713
714 714 # total_time = time.time() - ini
715 715 #
716 716 # if total_time > 0.002:
717 717 # print "%s::%s took %f seconds" %(self.name, opConfObj.name, total_time)
718 718
719 719 is_ok = is_ok or sts
720 720
721 721 return is_ok
722 722
723 723 def close(self):
724 724
725 725 for opConfObj in self.opConfObjList:
726 726 if opConfObj.type == 'self':
727 727 continue
728 728
729 729 opObj = self.procUnitObj.getOperationObj(opConfObj.id)
730 730 opObj.close()
731 731
732 732 self.procUnitObj.close()
733 733
734 734 return
735 735
736 736 class ReadUnitConf(ProcUnitConf):
737 737
738 738 path = None
739 739 startDate = None
740 740 endDate = None
741 741 startTime = None
742 742 endTime = None
743 743
744 744 ELEMENTNAME = 'ReadUnit'
745 745
746 746 def __init__(self):
747 747
748 748 self.id = None
749 749 self.datatype = None
750 750 self.name = None
751 751 self.inputId = None
752 752
753 753 self.parentId = None
754 754
755 755 self.opConfObjList = []
756 756 self.opObjList = []
757 757
758 758 def getElementName(self):
759 759
760 760 return self.ELEMENTNAME
761 761
762 762 def setup(self, id, name, datatype, path, startDate="", endDate="", startTime="", endTime="", parentId=None, queue=None, **kwargs):
763 763
764 764 #Compatible with old signal chain version
765 765 if datatype==None and name==None:
766 766 raise ValueError, "datatype or name should be defined"
767 767
768 768 if name==None:
769 769 if 'Reader' in datatype:
770 770 name = datatype
771 771 else:
772 772 name = '%sReader' %(datatype)
773 773
774 774 if datatype==None:
775 775 datatype = name.replace('Reader','')
776 776
777 777 self.id = id
778 778 self.name = name
779 779 self.datatype = datatype
780 780
781 781 self.path = os.path.abspath(path)
782 782 self.startDate = startDate
783 783 self.endDate = endDate
784 784 self.startTime = startTime
785 785 self.endTime = endTime
786 786
787 787 self.inputId = '0'
788 788 self.parentId = parentId
789 789 self.queue = queue
790 790 self.addRunOperation(**kwargs)
791 791
792 792 def update(self, datatype, path, startDate, endDate, startTime, endTime, parentId=None, name=None, **kwargs):
793 793
794 794 #Compatible with old signal chain version
795 795 if datatype==None and name==None:
796 796 raise ValueError, "datatype or name should be defined"
797 797
798 798 if name==None:
799 799 if 'Reader' in datatype:
800 800 name = datatype
801 801 else:
802 802 name = '%sReader' %(datatype)
803 803
804 804 if datatype==None:
805 805 datatype = name.replace('Reader','')
806 806
807 807 self.datatype = datatype
808 808 self.name = name
809 809 self.path = path
810 810 self.startDate = startDate
811 811 self.endDate = endDate
812 812 self.startTime = startTime
813 813 self.endTime = endTime
814 814
815 815 self.inputId = '0'
816 816 self.parentId = parentId
817 817
818 818 self.updateRunOperation(**kwargs)
819 819
820 820 def removeOperations(self):
821 821
822 822 for obj in self.opConfObjList:
823 823 del obj
824 824
825 825 self.opConfObjList = []
826 826
827 827 def addRunOperation(self, **kwargs):
828 828
829 829 opObj = self.addOperation(name = 'run', optype = 'self')
830 830
831 831 opObj.addParameter(name='datatype' , value=self.datatype, format='str')
832 832 opObj.addParameter(name='path' , value=self.path, format='str')
833 833 opObj.addParameter(name='startDate' , value=self.startDate, format='date')
834 834 opObj.addParameter(name='endDate' , value=self.endDate, format='date')
835 835 opObj.addParameter(name='startTime' , value=self.startTime, format='time')
836 836 opObj.addParameter(name='endTime' , value=self.endTime, format='time')
837 837 opObj.addParameter(name='queue' , value=self.queue, format='obj')
838 838
839 839 for key, value in kwargs.items():
840 840 opObj.addParameter(name=key, value=value, format=type(value).__name__)
841 841
842 842 return opObj
843 843
844 844 def updateRunOperation(self, **kwargs):
845 845
846 846 opObj = self.getOperationObj(name = 'run')
847 847 opObj.removeParameters()
848 848
849 849 opObj.addParameter(name='datatype' , value=self.datatype, format='str')
850 850 opObj.addParameter(name='path' , value=self.path, format='str')
851 851 opObj.addParameter(name='startDate' , value=self.startDate, format='date')
852 852 opObj.addParameter(name='endDate' , value=self.endDate, format='date')
853 853 opObj.addParameter(name='startTime' , value=self.startTime, format='time')
854 854 opObj.addParameter(name='endTime' , value=self.endTime, format='time')
855 855
856 856 for key, value in kwargs.items():
857 857 opObj.addParameter(name=key, value=value, format=type(value).__name__)
858 858
859 859 return opObj
860 860
861 861 # def makeXml(self, projectElement):
862 862 #
863 863 # procUnitElement = SubElement(projectElement, self.ELEMENTNAME)
864 864 # procUnitElement.set('id', str(self.id))
865 865 # procUnitElement.set('name', self.name)
866 866 # procUnitElement.set('datatype', self.datatype)
867 867 # procUnitElement.set('inputId', str(self.inputId))
868 868 #
869 869 # for opConfObj in self.opConfObjList:
870 870 # opConfObj.makeXml(procUnitElement)
871 871
872 872 def readXml(self, upElement):
873 873
874 874 self.id = upElement.get('id')
875 875 self.name = upElement.get('name')
876 876 self.datatype = upElement.get('datatype')
877 877 self.inputId = upElement.get('inputId')
878 878
879 879 if self.ELEMENTNAME == "ReadUnit":
880 880 self.datatype = self.datatype.replace("Reader", "")
881 881
882 882 if self.inputId == 'None':
883 883 self.inputId = '0'
884 884
885 885 self.opConfObjList = []
886 886
887 887 opElementList = upElement.iter(OperationConf().getElementName())
888 888
889 889 for opElement in opElementList:
890 890 opConfObj = OperationConf()
891 891 opConfObj.readXml(opElement)
892 892 self.opConfObjList.append(opConfObj)
893 893
894 894 if opConfObj.name == 'run':
895 895 self.path = opConfObj.getParameterValue('path')
896 896 self.startDate = opConfObj.getParameterValue('startDate')
897 897 self.endDate = opConfObj.getParameterValue('endDate')
898 898 self.startTime = opConfObj.getParameterValue('startTime')
899 899 self.endTime = opConfObj.getParameterValue('endTime')
900 900
901 901 class Project():
902 902
903 903 id = None
904 904 name = None
905 905 description = None
906 906 filename = None
907 907
908 908 procUnitConfObjDict = None
909 909
910 910 ELEMENTNAME = 'Project'
911 911
912 912 plotterQueue = None
913 913
914 914 def __init__(self, plotter_queue=None):
915 915
916 916 self.id = None
917 917 self.name = None
918 918 self.description = None
919 919
920 920 self.plotterQueue = plotter_queue
921 921
922 922 self.procUnitConfObjDict = {}
923 923
924 924 def __getNewId(self):
925 925
926 926 idList = self.procUnitConfObjDict.keys()
927 927
928 928 id = int(self.id)*10
929 929
930 930 while True:
931 931 id += 1
932 932
933 933 if str(id) in idList:
934 934 continue
935 935
936 936 break
937 937
938 938 return str(id)
939 939
940 940 def getElementName(self):
941 941
942 942 return self.ELEMENTNAME
943 943
944 944 def getId(self):
945 945
946 946 return self.id
947 947
948 948 def updateId(self, new_id):
949 949
950 950 self.id = str(new_id)
951 951
952 952 keyList = self.procUnitConfObjDict.keys()
953 953 keyList.sort()
954 954
955 955 n = 1
956 956 newProcUnitConfObjDict = {}
957 957
958 958 for procKey in keyList:
959 959
960 960 procUnitConfObj = self.procUnitConfObjDict[procKey]
961 961 idProcUnit = str(int(self.id)*10 + n)
962 962 procUnitConfObj.updateId(idProcUnit, parentId = self.id)
963 963
964 964 newProcUnitConfObjDict[idProcUnit] = procUnitConfObj
965 965 n += 1
966 966
967 967 self.procUnitConfObjDict = newProcUnitConfObjDict
968 968
969 969 def setup(self, id, name, description):
970 970
971 971 self.id = str(id)
972 972 self.name = name
973 973 self.description = description
974 974
975 975 def update(self, name, description):
976 976
977 977 self.name = name
978 978 self.description = description
979 979
980 980 def addReadUnit(self, id=None, datatype=None, name=None, **kwargs):
981 981
982 982 if id is None:
983 983 idReadUnit = self.__getNewId()
984 984 else:
985 985 idReadUnit = str(id)
986 986
987 987 readUnitConfObj = ReadUnitConf()
988 988 readUnitConfObj.setup(idReadUnit, name, datatype, parentId=self.id, **kwargs)
989 989
990 990 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
991 991
992 992 return readUnitConfObj
993 993
994 994 def addProcUnit(self, inputId='0', datatype=None, name=None):
995 995
996 996 idProcUnit = self.__getNewId()
997 997
998 998 procUnitConfObj = ProcUnitConf()
999 999 procUnitConfObj.setup(idProcUnit, name, datatype, inputId, parentId=self.id)
1000 1000
1001 1001 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
1002 1002
1003 1003 return procUnitConfObj
1004 1004
1005 1005 def removeProcUnit(self, id):
1006 1006
1007 1007 if id in self.procUnitConfObjDict.keys():
1008 1008 self.procUnitConfObjDict.pop(id)
1009 1009
1010 1010 def getReadUnitId(self):
1011 1011
1012 1012 readUnitConfObj = self.getReadUnitObj()
1013 1013
1014 1014 return readUnitConfObj.id
1015 1015
1016 1016 def getReadUnitObj(self):
1017 1017
1018 1018 for obj in self.procUnitConfObjDict.values():
1019 1019 if obj.getElementName() == "ReadUnit":
1020 1020 return obj
1021 1021
1022 1022 return None
1023 1023
1024 1024 def getProcUnitObj(self, id=None, name=None):
1025 1025
1026 1026 if id != None:
1027 1027 return self.procUnitConfObjDict[id]
1028 1028
1029 1029 if name != None:
1030 1030 return self.getProcUnitObjByName(name)
1031 1031
1032 1032 return None
1033 1033
1034 1034 def getProcUnitObjByName(self, name):
1035 1035
1036 1036 for obj in self.procUnitConfObjDict.values():
1037 1037 if obj.name == name:
1038 1038 return obj
1039 1039
1040 1040 return None
1041 1041
1042 1042 def procUnitItems(self):
1043 1043
1044 1044 return self.procUnitConfObjDict.items()
1045 1045
1046 1046 def makeXml(self):
1047 1047
1048 1048 projectElement = Element('Project')
1049 1049 projectElement.set('id', str(self.id))
1050 1050 projectElement.set('name', self.name)
1051 1051 projectElement.set('description', self.description)
1052 1052
1053 1053 for procUnitConfObj in self.procUnitConfObjDict.values():
1054 1054 procUnitConfObj.makeXml(projectElement)
1055 1055
1056 1056 self.projectElement = projectElement
1057 1057
1058 1058 def writeXml(self, filename=None):
1059 1059
1060 1060 if filename == None:
1061 1061 if self.filename:
1062 1062 filename = self.filename
1063 1063 else:
1064 1064 filename = "schain.xml"
1065 1065
1066 1066 if not filename:
1067 1067 print "filename has not been defined. Use setFilename(filename) for do it."
1068 1068 return 0
1069 1069
1070 1070 abs_file = os.path.abspath(filename)
1071 1071
1072 1072 if not os.access(os.path.dirname(abs_file), os.W_OK):
1073 1073 print "No write permission on %s" %os.path.dirname(abs_file)
1074 1074 return 0
1075 1075
1076 1076 if os.path.isfile(abs_file) and not(os.access(abs_file, os.W_OK)):
1077 1077 print "File %s already exists and it could not be overwriten" %abs_file
1078 1078 return 0
1079 1079
1080 1080 self.makeXml()
1081 1081
1082 1082 ElementTree(self.projectElement).write(abs_file, method='xml')
1083 1083
1084 1084 self.filename = abs_file
1085 1085
1086 1086 return 1
1087 1087
1088 1088 def readXml(self, filename = None):
1089 1089
1090 1090 if not filename:
1091 1091 print "filename is not defined"
1092 1092 return 0
1093 1093
1094 1094 abs_file = os.path.abspath(filename)
1095 1095
1096 1096 if not os.path.isfile(abs_file):
1097 1097 print "%s file does not exist" %abs_file
1098 1098 return 0
1099 1099
1100 1100 self.projectElement = None
1101 1101 self.procUnitConfObjDict = {}
1102 1102
1103 1103 try:
1104 1104 self.projectElement = ElementTree().parse(abs_file)
1105 1105 except:
1106 1106 print "Error reading %s, verify file format" %filename
1107 1107 return 0
1108 1108
1109 1109 self.project = self.projectElement.tag
1110 1110
1111 1111 self.id = self.projectElement.get('id')
1112 1112 self.name = self.projectElement.get('name')
1113 1113 self.description = self.projectElement.get('description')
1114 1114
1115 1115 readUnitElementList = self.projectElement.iter(ReadUnitConf().getElementName())
1116 1116
1117 1117 for readUnitElement in readUnitElementList:
1118 1118 readUnitConfObj = ReadUnitConf()
1119 1119 readUnitConfObj.readXml(readUnitElement)
1120 1120
1121 1121 if readUnitConfObj.parentId == None:
1122 1122 readUnitConfObj.parentId = self.id
1123 1123
1124 1124 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
1125 1125
1126 1126 procUnitElementList = self.projectElement.iter(ProcUnitConf().getElementName())
1127 1127
1128 1128 for procUnitElement in procUnitElementList:
1129 1129 procUnitConfObj = ProcUnitConf()
1130 1130 procUnitConfObj.readXml(procUnitElement)
1131 1131
1132 1132 if procUnitConfObj.parentId == None:
1133 1133 procUnitConfObj.parentId = self.id
1134 1134
1135 1135 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
1136 1136
1137 1137 self.filename = abs_file
1138 1138
1139 1139 return 1
1140 1140
1141 1141 def printattr(self):
1142 1142
1143 1143 print "Project[%s]: name = %s, description = %s" %(self.id,
1144 1144 self.name,
1145 1145 self.description)
1146 1146
1147 1147 for procUnitConfObj in self.procUnitConfObjDict.values():
1148 1148 procUnitConfObj.printattr()
1149 1149
1150 1150 def createObjects(self):
1151 1151
1152 1152 for procUnitConfObj in self.procUnitConfObjDict.values():
1153 1153 procUnitConfObj.createObjects(self.plotterQueue)
1154 1154
1155 1155 def __connect(self, objIN, thisObj):
1156 1156
1157 1157 thisObj.setInput(objIN.getOutputObj())
1158 1158
1159 1159 def connectObjects(self):
1160 1160
1161 1161 for thisPUConfObj in self.procUnitConfObjDict.values():
1162 1162
1163 1163 inputId = thisPUConfObj.getInputId()
1164 1164
1165 1165 if int(inputId) == 0:
1166 1166 continue
1167 1167
1168 1168 #Get input object
1169 1169 puConfINObj = self.procUnitConfObjDict[inputId]
1170 1170 puObjIN = puConfINObj.getProcUnitObj()
1171 1171
1172 1172 #Get current object
1173 1173 thisPUObj = thisPUConfObj.getProcUnitObj()
1174 1174
1175 1175 self.__connect(puObjIN, thisPUObj)
1176 1176
1177 1177 def __handleError(self, procUnitConfObj, send_email=True):
1178 1178
1179 1179 import socket
1180 1180
1181 1181 err = traceback.format_exception(sys.exc_info()[0],
1182 1182 sys.exc_info()[1],
1183 1183 sys.exc_info()[2])
1184 1184
1185 1185 print "***** Error occurred in %s *****" %(procUnitConfObj.name)
1186 1186 print "***** %s" %err[-1]
1187 1187
1188 1188 message = "".join(err)
1189 1189
1190 1190 sys.stderr.write(message)
1191 1191
1192 1192 if not send_email:
1193 1193 return
1194 1194
1195 1195 subject = "SChain v%s: Error running %s\n" %(schainpy.__version__, procUnitConfObj.name)
1196 1196
1197 1197 subtitle = "%s: %s\n" %(procUnitConfObj.getElementName() ,procUnitConfObj.name)
1198 1198 subtitle += "Hostname: %s\n" %socket.gethostbyname(socket.gethostname())
1199 1199 subtitle += "Working directory: %s\n" %os.path.abspath("./")
1200 1200 subtitle += "Configuration file: %s\n" %self.filename
1201 1201 subtitle += "Time: %s\n" %str(datetime.datetime.now())
1202 1202
1203 1203 readUnitConfObj = self.getReadUnitObj()
1204 1204 if readUnitConfObj:
1205 1205 subtitle += "\nInput parameters:\n"
1206 1206 subtitle += "[Data path = %s]\n" %readUnitConfObj.path
1207 1207 subtitle += "[Data type = %s]\n" %readUnitConfObj.datatype
1208 1208 subtitle += "[Start date = %s]\n" %readUnitConfObj.startDate
1209 1209 subtitle += "[End date = %s]\n" %readUnitConfObj.endDate
1210 1210 subtitle += "[Start time = %s]\n" %readUnitConfObj.startTime
1211 1211 subtitle += "[End time = %s]\n" %readUnitConfObj.endTime
1212 1212
1213 1213 adminObj = schainpy.admin.SchainNotify()
1214 1214 adminObj.sendAlert(message=message,
1215 1215 subject=subject,
1216 1216 subtitle=subtitle,
1217 1217 filename=self.filename)
1218 1218
1219 1219 def isPaused(self):
1220 1220 return 0
1221 1221
1222 1222 def isStopped(self):
1223 1223 return 0
1224 1224
1225 1225 def runController(self):
1226 1226 """
1227 1227 returns 0 when this process has been stopped, 1 otherwise
1228 1228 """
1229 1229
1230 1230 if self.isPaused():
1231 1231 print "Process suspended"
1232 1232
1233 1233 while True:
1234 1234 sleep(0.1)
1235 1235
1236 1236 if not self.isPaused():
1237 1237 break
1238 1238
1239 1239 if self.isStopped():
1240 1240 break
1241 1241
1242 1242 print "Process reinitialized"
1243 1243
1244 1244 if self.isStopped():
1245 1245 print "Process stopped"
1246 1246 return 0
1247 1247
1248 1248 return 1
1249 1249
1250 1250 def setFilename(self, filename):
1251 1251
1252 1252 self.filename = filename
1253 1253
1254 1254 def setPlotterQueue(self, plotter_queue):
1255 1255
1256 1256 raise NotImplementedError, "Use schainpy.controller_api.ControllerThread instead of the Project class"
1257 1257
1258 1258 def getPlotterQueue(self):
1259 1259
1260 1260 raise NotImplementedError, "Use schainpy.controller_api.ControllerThread instead of the Project class"
1261 1261
1262 1262 def useExternalPlotter(self):
1263 1263
1264 1264 raise NotImplementedError, "Use schainpy.controller_api.ControllerThread instead of the Project class"
1265 1265
1266 1266 def run(self):
1267 1267
1268 1268 print
1269 1269 print "*"*60
1270 1270 print " Starting SIGNAL CHAIN PROCESSING v%s " %schainpy.__version__
1271 1271 print "*"*60
1272 1272 print
1273 1273
1274 1274 keyList = self.procUnitConfObjDict.keys()
1275 1275 keyList.sort()
1276 1276
1277 1277 while(True):
1278 1278
1279 1279 is_ok = False
1280 1280
1281 1281 for procKey in keyList:
1282 1282 # print "Running the '%s' process with %s" %(procUnitConfObj.name, procUnitConfObj.id)
1283 1283
1284 1284 procUnitConfObj = self.procUnitConfObjDict[procKey]
1285 1285
1286 1286 try:
1287 1287 sts = procUnitConfObj.run()
1288 1288 is_ok = is_ok or sts
1289 1289 except KeyboardInterrupt:
1290 1290 is_ok = False
1291 1291 break
1292 1292 except ValueError, e:
1293 1293 sleep(0.5)
1294 1294 self.__handleError(procUnitConfObj, send_email=True)
1295 1295 is_ok = False
1296 1296 break
1297 1297 except:
1298 1298 sleep(0.5)
1299 1299 self.__handleError(procUnitConfObj)
1300 1300 is_ok = False
1301 1301 break
1302 1302
1303 1303 #If every process unit has finished, end the process
1304 1304 if not(is_ok):
1305 1305 # print "Every process unit have finished"
1306 1306 break
1307 1307
1308 1308 if not self.runController():
1309 1309 break
1310 1310
1311 1311 #Closing every process
1312 1312 for procKey in keyList:
1313 1313 procUnitConfObj = self.procUnitConfObjDict[procKey]
1314 1314 procUnitConfObj.close()
1315 1315
1316 1316 print "Process finished"
1317 1317
1318 def start(self):
1318 def start(self, filename=None):
1319 1319
1320 self.writeXml()
1320 self.writeXml(filename)
1321 1321 self.createObjects()
1322 1322 self.connectObjects()
1323 1323 self.run()
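As a standalone illustration of how ParameterConf.getValue() turns string parameters into typed values (parse_intlist is a hypothetical helper name; the logic mirrors the 'intlist' branch above, which is what parses profileRangeList='120,183' in the example scripts):

    import ast

    def parse_intlist(value):
        parsed = ast.literal_eval(value)        # '120,183' -> (120, 183)
        if type(parsed) not in (tuple, list):   # a single number becomes a one-item list
            parsed = [int(parsed)]
        return parsed

    print(parse_intlist('120,183'))             # (120, 183)
    print(parse_intlist('5'))                   # [5]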
1 NO CONTENT: modified file
@@ -1,1225 +1,1220
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import copy
8 8 import numpy
9 9 import datetime
10 10
11 11 from jroheaderIO import SystemHeader, RadarControllerHeader
12 12 from schainpy import cSchain
13 13
14 14
15 15 def getNumpyDtype(dataTypeCode):
16 16
17 17 if dataTypeCode == 0:
18 18 numpyDtype = numpy.dtype([('real','<i1'),('imag','<i1')])
19 19 elif dataTypeCode == 1:
20 20 numpyDtype = numpy.dtype([('real','<i2'),('imag','<i2')])
21 21 elif dataTypeCode == 2:
22 22 numpyDtype = numpy.dtype([('real','<i4'),('imag','<i4')])
23 23 elif dataTypeCode == 3:
24 24 numpyDtype = numpy.dtype([('real','<i8'),('imag','<i8')])
25 25 elif dataTypeCode == 4:
26 26 numpyDtype = numpy.dtype([('real','<f4'),('imag','<f4')])
27 27 elif dataTypeCode == 5:
28 28 numpyDtype = numpy.dtype([('real','<f8'),('imag','<f8')])
29 29 else:
30 30 raise ValueError, 'dataTypeCode was not defined'
31 31
32 32 return numpyDtype
33 33
34 34 def getDataTypeCode(numpyDtype):
35 35
36 36 if numpyDtype == numpy.dtype([('real','<i1'),('imag','<i1')]):
37 37 datatype = 0
38 38 elif numpyDtype == numpy.dtype([('real','<i2'),('imag','<i2')]):
39 39 datatype = 1
40 40 elif numpyDtype == numpy.dtype([('real','<i4'),('imag','<i4')]):
41 41 datatype = 2
42 42 elif numpyDtype == numpy.dtype([('real','<i8'),('imag','<i8')]):
43 43 datatype = 3
44 44 elif numpyDtype == numpy.dtype([('real','<f4'),('imag','<f4')]):
45 45 datatype = 4
46 46 elif numpyDtype == numpy.dtype([('real','<f8'),('imag','<f8')]):
47 47 datatype = 5
48 48 else:
49 49 datatype = None
50 50
51 51 return datatype
52 52
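
A small sketch of how these two helpers mirror each other when converting header data-type codes to numpy dtypes and back; the values come straight from the mapping above.

    import numpy

    dt = getNumpyDtype(4)                 # complex float32 samples: [('real','<f4'), ('imag','<f4')]
    assert getDataTypeCode(dt) == 4       # the round trip recovers the original code
    buff = numpy.zeros(10, dtype=dt)      # e.g. a buffer of 10 complex samples
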
53 53 def hildebrand_sekhon(data, navg):
54 54 """
55 55 This method is for the objective determination of the noise level in Doppler spectra. This
56 56 implementation technique is based on the fact that the standard deviation of the spectral
57 57 densities is equal to the mean spectral density for white Gaussian noise
58 58
59 59 Inputs:
60 60 Data : heights
61 61 navg : numbers of averages
62 62
63 63 Return:
64 64 -1 : any error
65 65 anoise : noise's level
66 66 """
67 67
68 68 sortdata = numpy.sort(data, axis=None)
69 69 # lenOfData = len(sortdata)
70 70 # nums_min = lenOfData*0.2
71 71 #
72 72 # if nums_min <= 5:
73 73 # nums_min = 5
74 74 #
75 75 # sump = 0.
76 76 #
77 77 # sumq = 0.
78 78 #
79 79 # j = 0
80 80 #
81 81 # cont = 1
82 82 #
83 83 # while((cont==1)and(j<lenOfData)):
84 84 #
85 85 # sump += sortdata[j]
86 86 #
87 87 # sumq += sortdata[j]**2
88 88 #
89 89 # if j > nums_min:
90 90 # rtest = float(j)/(j-1) + 1.0/navg
91 91 # if ((sumq*j) > (rtest*sump**2)):
92 92 # j = j - 1
93 93 # sump = sump - sortdata[j]
94 94 # sumq = sumq - sortdata[j]**2
95 95 # cont = 0
96 96 #
97 97 # j += 1
98 98 #
99 99 # lnoise = sump /j
100 100 #
101 101 # return lnoise
102 102
103 103 return cSchain.hildebrand_sekhon(sortdata, navg)
104 104
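
A hedged usage sketch with synthetic data only: for noise-only spectral densities the Hildebrand-Sekhon estimate should approach the mean spectral density (about 1.0 for the exponentially distributed samples below).

    import numpy

    power = numpy.random.exponential(scale=1.0, size=(64, 100))  # simulated noise-only spectra
    navg = 10                                                     # illustrative number of averages
    noise_level = hildebrand_sekhon(power, navg)                  # expected to be close to 1.0
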
105 105
106 106 class Beam:
107 107
108 108 def __init__(self):
109 109 self.codeList = []
110 110 self.azimuthList = []
111 111 self.zenithList = []
112 112
113 113 class GenericData(object):
114 114
115 115 flagNoData = True
116 116
117 def __init__(self):
118
119 raise NotImplementedError
120
121 117 def copy(self, inputObj=None):
122 118
123 119 if inputObj == None:
124 120 return copy.deepcopy(self)
125 121
126 122 for key in inputObj.__dict__.keys():
127 123
128 124 attribute = inputObj.__dict__[key]
129 125
130 126 #If this attribute is a tuple or list
131 127 if type(inputObj.__dict__[key]) in (tuple, list):
132 128 self.__dict__[key] = attribute[:]
133 129 continue
134 130
135 131 #If this attribute is another object or instance
136 132 if hasattr(attribute, '__dict__'):
137 133 self.__dict__[key] = attribute.copy()
138 134 continue
139 135
140 136 self.__dict__[key] = inputObj.__dict__[key]
141 137
142 138 def deepcopy(self):
143 139
144 140 return copy.deepcopy(self)
145 141
146 142 def isEmpty(self):
147 143
148 144 return self.flagNoData
149 145
150 146 class JROData(GenericData):
151 147
152 148 # m_BasicHeader = BasicHeader()
153 149 # m_ProcessingHeader = ProcessingHeader()
154 150
155 151 systemHeaderObj = SystemHeader()
156 152
157 153 radarControllerHeaderObj = RadarControllerHeader()
158 154
159 155 # data = None
160 156
161 157 type = None
162 158
163 159 datatype = None #dtype as a string
164 160
165 161 # dtype = None
166 162
167 163 # nChannels = None
168 164
169 165 # nHeights = None
170 166
171 167 nProfiles = None
172 168
173 169 heightList = None
174 170
175 171 channelList = None
176 172
177 173 flagDiscontinuousBlock = False
178 174
179 175 useLocalTime = False
180 176
181 177 utctime = None
182 178
183 179 timeZone = None
184 180
185 181 dstFlag = None
186 182
187 183 errorCount = None
188 184
189 185 blocksize = None
190 186
191 187 # nCode = None
192 188 #
193 189 # nBaud = None
194 190 #
195 191 # code = None
196 192
197 193 flagDecodeData = False #assume the data has not been decoded
198 194
199 195 flagDeflipData = False #assume the data has not been deflipped
200 196
201 197 flagShiftFFT = False
202 198
203 199 # ippSeconds = None
204 200
205 201 # timeInterval = None
206 202
207 203 nCohInt = None
208 204
209 205 # noise = None
210 206
211 207 windowOfFilter = 1
212 208
213 209 #Speed of light
214 210 C = 3e8
215 211
216 212 frequency = 49.92e6
217 213
218 214 realtime = False
219 215
220 216 beacon_heiIndexList = None
221 217
222 218 last_block = None
223 219
224 220 blocknow = None
225 221
226 222 azimuth = None
227 223
228 224 zenith = None
229 225
230 226 beam = Beam()
231 227
232 228 profileIndex = None
233 229
234 def __init__(self):
235
236 raise NotImplementedError
237
238 230 def getNoise(self):
239 231
240 232 raise NotImplementedError
241 233
242 234 def getNChannels(self):
243 235
244 236 return len(self.channelList)
245 237
246 238 def getChannelIndexList(self):
247 239
248 240 return range(self.nChannels)
249 241
250 242 def getNHeights(self):
251 243
252 244 return len(self.heightList)
253 245
254 246 def getHeiRange(self, extrapoints=0):
255 247
256 248 heis = self.heightList
257 249 # deltah = self.heightList[1] - self.heightList[0]
258 250 #
259 251 # heis.append(self.heightList[-1])
260 252
261 253 return heis
262 254
263 255 def getDeltaH(self):
264 256
265 257 delta = self.heightList[1] - self.heightList[0]
266 258
267 259 return delta
268 260
269 261 def getltctime(self):
270 262
271 263 if self.useLocalTime:
272 264 return self.utctime - self.timeZone*60
273 265
274 266 return self.utctime
275 267
276 268 def getDatatime(self):
277 269
278 270 datatimeValue = datetime.datetime.utcfromtimestamp(self.ltctime)
279 271 return datatimeValue
280 272
281 273 def getTimeRange(self):
282 274
283 275 datatime = []
284 276
285 277 datatime.append(self.ltctime)
286 278 datatime.append(self.ltctime + self.timeInterval+1)
287 279
288 280 datatime = numpy.array(datatime)
289 281
290 282 return datatime
291 283
292 284 def getFmaxTimeResponse(self):
293 285
294 286 period = (10**-6)*self.getDeltaH()/(0.15)
295 287
296 288 PRF = 1./(period * self.nCohInt)
297 289
298 290 fmax = PRF
299 291
300 292 return fmax
301 293
302 294 def getFmax(self):
303 295
304 296 PRF = 1./(self.ippSeconds * self.nCohInt)
305 297
306 298 fmax = PRF
307 299
308 300 return fmax
309 301
310 302 def getVmax(self):
311 303
312 304 _lambda = self.C/self.frequency
313 305
314 306 vmax = self.getFmax() * _lambda/2
315 307
316 308 return vmax
317 309
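
A short worked example of the aliasing limits these getters encode, using illustrative values (1 ms IPP, 4 coherent integrations and the 49.92 MHz default frequency):

    ippSeconds = 1e-3
    nCohInt = 4
    fmax = 1. / (ippSeconds * nCohInt)      # 250 Hz maximum unambiguous frequency
    _lambda = 3e8 / 49.92e6                 # ~6.01 m wavelength
    vmax = fmax * _lambda / 2               # ~751 m/s maximum unambiguous velocity
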
318 310 def get_ippSeconds(self):
319 311 '''
320 312 '''
321 313 return self.radarControllerHeaderObj.ippSeconds
322 314
323 315 def set_ippSeconds(self, ippSeconds):
324 316 '''
325 317 '''
326 318
327 319 self.radarControllerHeaderObj.ippSeconds = ippSeconds
328 320
329 321 return
330 322
331 323 def get_dtype(self):
332 324 '''
333 325 '''
334 326 return getNumpyDtype(self.datatype)
335 327
336 328 def set_dtype(self, numpyDtype):
337 329 '''
338 330 '''
339 331
340 332 self.datatype = getDataTypeCode(numpyDtype)
341 333
342 334 def get_code(self):
343 335 '''
344 336 '''
345 337 return self.radarControllerHeaderObj.code
346 338
347 339 def set_code(self, code):
348 340 '''
349 341 '''
350 342 self.radarControllerHeaderObj.code = code
351 343
352 344 return
353 345
354 346 def get_ncode(self):
355 347 '''
356 348 '''
357 349 return self.radarControllerHeaderObj.nCode
358 350
359 351 def set_ncode(self, nCode):
360 352 '''
361 353 '''
362 354 self.radarControllerHeaderObj.nCode = nCode
363 355
364 356 return
365 357
366 358 def get_nbaud(self):
367 359 '''
368 360 '''
369 361 return self.radarControllerHeaderObj.nBaud
370 362
371 363 def set_nbaud(self, nBaud):
372 364 '''
373 365 '''
374 366 self.radarControllerHeaderObj.nBaud = nBaud
375 367
376 368 return
377 369
378 370 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
379 371 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
380 372 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
381 373 #noise = property(getNoise, "I'm the 'nHeights' property.")
382 374 datatime = property(getDatatime, "I'm the 'datatime' property")
383 375 ltctime = property(getltctime, "I'm the 'ltctime' property")
384 376 ippSeconds = property(get_ippSeconds, set_ippSeconds)
385 377 dtype = property(get_dtype, set_dtype)
386 378 # timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
387 379 code = property(get_code, set_code)
388 380 nCode = property(get_ncode, set_ncode)
389 381 nBaud = property(get_nbaud, set_nbaud)
390 382
391 383 class Voltage(JROData):
392 384
393 385 #data is a 2-dimensional numpy array (channels, heights)
394 386 data = None
395 387
396 388 def __init__(self):
397 389 '''
398 390 Constructor
399 391 '''
400 392
401 393 self.useLocalTime = True
402 394
403 395 self.radarControllerHeaderObj = RadarControllerHeader()
404 396
405 397 self.systemHeaderObj = SystemHeader()
406 398
407 399 self.type = "Voltage"
408 400
409 401 self.data = None
410 402
411 403 # self.dtype = None
412 404
413 405 # self.nChannels = 0
414 406
415 407 # self.nHeights = 0
416 408
417 409 self.nProfiles = None
418 410
419 411 self.heightList = None
420 412
421 413 self.channelList = None
422 414
423 415 # self.channelIndexList = None
424 416
425 417 self.flagNoData = True
426 418
427 419 self.flagDiscontinuousBlock = False
428 420
429 421 self.utctime = None
430 422
431 423 self.timeZone = None
432 424
433 425 self.dstFlag = None
434 426
435 427 self.errorCount = None
436 428
437 429 self.nCohInt = None
438 430
439 431 self.blocksize = None
440 432
441 433 self.flagDecodeData = False #assume the data has not been decoded
442 434
443 435 self.flagDeflipData = False #assume the data has not been deflipped
444 436
445 437 self.flagShiftFFT = False
446 438
447 439 self.flagDataAsBlock = False #assume the data is read profile by profile
448 440
449 441 self.profileIndex = 0
450 442
451 443 def getNoisebyHildebrand(self, channel = None):
452 444 """
453 445 Determine the noise level using the Hildebrand-Sekhon method
454 446
455 447 Return:
456 448 noiselevel
457 449 """
458 450
459 451 if channel != None:
460 452 data = self.data[channel]
461 453 nChannels = 1
462 454 else:
463 455 data = self.data
464 456 nChannels = self.nChannels
465 457
466 458 noise = numpy.zeros(nChannels)
467 459 power = data * numpy.conjugate(data)
468 460
469 461 for thisChannel in range(nChannels):
470 462 if nChannels == 1:
471 463 daux = power[:].real
472 464 else:
473 465 daux = power[thisChannel,:].real
474 466 noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt)
475 467
476 468 return noise
477 469
478 470 def getNoise(self, type = 1, channel = None):
479 471
480 472 if type == 1:
481 473 noise = self.getNoisebyHildebrand(channel)
482 474
483 475 return noise
484 476
485 477 def getPower(self, channel = None):
486 478
487 479 if channel != None:
488 480 data = self.data[channel]
489 481 else:
490 482 data = self.data
491 483
492 484 power = data * numpy.conjugate(data)
493 485 powerdB = 10*numpy.log10(power.real)
494 486 powerdB = numpy.squeeze(powerdB)
495 487
496 488 return powerdB
497 489
498 490 def getTimeInterval(self):
499 491
500 492 timeInterval = self.ippSeconds * self.nCohInt
501 493
502 494 return timeInterval
503 495
504 496 noise = property(getNoise, "I'm the 'noise' property.")
505 497 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
506 498
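
A hedged sketch of how a filled Voltage object exposes these derived quantities; it assumes `dataOut` is a Voltage instance produced by a reader, with data, channelList and nCohInt already set.

    powerdB = dataOut.getPower()             # 10*log10(|data|^2) per channel and height
    noise_all = dataOut.getNoise()           # Hildebrand-Sekhon level, one value per channel
    noise_ch0 = dataOut.getNoise(channel=0)  # same estimate restricted to channel 0
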
507 499 class Spectra(JROData):
508 500
509 501 #data_spc is a 3-dimensional numpy array (channels, profiles, heights)
510 502 data_spc = None
511 503
512 504 #data_cspc is a 3-dimensional numpy array (pairs, profiles, heights)
513 505 data_cspc = None
514 506
515 507 #data_dc is a 2-dimensional numpy array (channels, heights)
516 508 data_dc = None
517 509
518 510 #data power
519 511 data_pwr = None
520 512
521 513 nFFTPoints = None
522 514
523 515 # nPairs = None
524 516
525 517 pairsList = None
526 518
527 519 nIncohInt = None
528 520
529 521 wavelength = None #needed to calculate the velocity range from the frequency
530 522
531 523 nCohInt = None #required to determine the value of timeInterval
532 524
533 525 ippFactor = None
534 526
535 527 profileIndex = 0
536 528
537 529 plotting = "spectra"
538 530
539 531 def __init__(self):
540 532 '''
541 533 Constructor
542 534 '''
543 535
544 536 self.useLocalTime = True
545 537
546 538 self.radarControllerHeaderObj = RadarControllerHeader()
547 539
548 540 self.systemHeaderObj = SystemHeader()
549 541
550 542 self.type = "Spectra"
551 543
552 544 # self.data = None
553 545
554 546 # self.dtype = None
555 547
556 548 # self.nChannels = 0
557 549
558 550 # self.nHeights = 0
559 551
560 552 self.nProfiles = None
561 553
562 554 self.heightList = None
563 555
564 556 self.channelList = None
565 557
566 558 # self.channelIndexList = None
567 559
568 560 self.pairsList = None
569 561
570 562 self.flagNoData = True
571 563
572 564 self.flagDiscontinuousBlock = False
573 565
574 566 self.utctime = None
575 567
576 568 self.nCohInt = None
577 569
578 570 self.nIncohInt = None
579 571
580 572 self.blocksize = None
581 573
582 574 self.nFFTPoints = None
583 575
584 576 self.wavelength = None
585 577
586 578 self.flagDecodeData = False #assume the data has not been decoded
587 579
588 580 self.flagDeflipData = False #assume the data has not been deflipped
589 581
590 582 self.flagShiftFFT = False
591 583
592 584 self.ippFactor = 1
593 585
594 586 #self.noise = None
595 587
596 588 self.beacon_heiIndexList = []
597 589
598 590 self.noise_estimation = None
599 591
600 592
601 593 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
602 594 """
603 595 Determine the noise level using the Hildebrand-Sekhon method
604 596
605 597 Return:
606 598 noiselevel
607 599 """
608 600
609 601 noise = numpy.zeros(self.nChannels)
610 602
611 603 for channel in range(self.nChannels):
612 604 daux = self.data_spc[channel,xmin_index:xmax_index,ymin_index:ymax_index]
613 605 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
614 606
615 607 return noise
616 608
617 609 def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
618 610
619 611 if self.noise_estimation is not None:
620 612 return self.noise_estimation #this was estimated by getNoise Operation defined in jroproc_spectra.py
621 613 else:
622 614 noise = self.getNoisebyHildebrand(xmin_index, xmax_index, ymin_index, ymax_index)
623 615 return noise
624 616
625 617 def getFreqRangeTimeResponse(self, extrapoints=0):
626 618
627 619 deltafreq = self.getFmaxTimeResponse() / (self.nFFTPoints*self.ippFactor)
628 620 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
629 621
630 622 return freqrange
631 623
632 624 def getAcfRange(self, extrapoints=0):
633 625
634 626 deltafreq = 10./(self.getFmax() / (self.nFFTPoints*self.ippFactor))
635 627 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
636 628
637 629 return freqrange
638 630
639 631 def getFreqRange(self, extrapoints=0):
640 632
641 633 deltafreq = self.getFmax() / (self.nFFTPoints*self.ippFactor)
642 634 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
643 635
644 636 return freqrange
645 637
646 638 def getVelRange(self, extrapoints=0):
647 639
648 640 deltav = self.getVmax() / (self.nFFTPoints*self.ippFactor)
649 641 velrange = deltav*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) #- deltav/2
650 642
651 643 return velrange
652 644
653 645 def getNPairs(self):
654 646
655 647 return len(self.pairsList)
656 648
657 649 def getPairsIndexList(self):
658 650
659 651 return range(self.nPairs)
660 652
661 653 def getNormFactor(self):
662 654
663 655 pwcode = 1
664 656
665 657 if self.flagDecodeData:
666 658 pwcode = numpy.sum(self.code[0]**2)
667 659 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
668 660 normFactor = self.nProfiles*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
669 661
670 662 return normFactor
671 663
672 664 def getFlagCspc(self):
673 665
674 666 if self.data_cspc is None:
675 667 return True
676 668
677 669 return False
678 670
679 671 def getFlagDc(self):
680 672
681 673 if self.data_dc is None:
682 674 return True
683 675
684 676 return False
685 677
686 678 def getTimeInterval(self):
687 679
688 680 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles
689 681
690 682 return timeInterval
691 683
692 684 def getPower(self):
693 685
694 686 factor = self.normFactor
695 687 z = self.data_spc/factor
696 688 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
697 689 avg = numpy.average(z, axis=1)
698 690
699 691 return 10*numpy.log10(avg)
700 692
701 693 def getCoherence(self, pairsList=None, phase=False):
702 694
703 695 z = []
704 696 if pairsList is None:
705 697 pairsIndexList = self.pairsIndexList
706 698 else:
707 699 pairsIndexList = []
708 700 for pair in pairsList:
709 701 if pair not in self.pairsList:
710 702 raise ValueError, "Pair %s is not in dataOut.pairsList" %(pair)
711 703 pairsIndexList.append(self.pairsList.index(pair))
712 704 for i in range(len(pairsIndexList)):
713 705 pair = self.pairsList[pairsIndexList[i]]
714 706 ccf = numpy.average(self.data_cspc[pairsIndexList[i], :, :], axis=0)
715 707 powa = numpy.average(self.data_spc[pair[0], :, :], axis=0)
716 708 powb = numpy.average(self.data_spc[pair[1], :, :], axis=0)
717 709 avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
718 710 if phase:
719 711 data = numpy.arctan2(avgcoherenceComplex.imag,
720 712 avgcoherenceComplex.real)*180/numpy.pi
721 713 else:
722 714 data = numpy.abs(avgcoherenceComplex)
723 715
724 716 z.append(data)
725 717
726 718 return numpy.array(z)
727 719
728 720 def setValue(self, value):
729 721
730 722 print "This property should not be initialized"
731 723
732 724 return
733 725
734 726 nPairs = property(getNPairs, setValue, "I'm the 'nPairs' property.")
735 727 pairsIndexList = property(getPairsIndexList, setValue, "I'm the 'pairsIndexList' property.")
736 728 normFactor = property(getNormFactor, setValue, "I'm the 'getNormFactor' property.")
737 729 flag_cspc = property(getFlagCspc, setValue)
738 730 flag_dc = property(getFlagDc, setValue)
739 731 noise = property(getNoise, setValue, "I'm the 'noise' property.")
740 732 timeInterval = property(getTimeInterval, setValue, "I'm the 'timeInterval' property")
741 733
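
A hedged sketch of the coherence getter above, which returns the magnitude (or phase) of the averaged cross-spectrum normalized by the auto-spectra, one profile per height; it assumes `dataOut` is a Spectra instance whose pairsList contains the pair (0, 1).

    coh01 = dataOut.getCoherence(pairsList=[(0, 1)])                # |coherence| per height, between 0 and 1
    phase01 = dataOut.getCoherence(pairsList=[(0, 1)], phase=True)  # phase of the coherence, in degrees
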
742 734 class SpectraHeis(Spectra):
743 735
744 736 data_spc = None
745 737
746 738 data_cspc = None
747 739
748 740 data_dc = None
749 741
750 742 nFFTPoints = None
751 743
752 744 # nPairs = None
753 745
754 746 pairsList = None
755 747
756 748 nCohInt = None
757 749
758 750 nIncohInt = None
759 751
760 752 def __init__(self):
761 753
762 754 self.radarControllerHeaderObj = RadarControllerHeader()
763 755
764 756 self.systemHeaderObj = SystemHeader()
765 757
766 758 self.type = "SpectraHeis"
767 759
768 760 # self.dtype = None
769 761
770 762 # self.nChannels = 0
771 763
772 764 # self.nHeights = 0
773 765
774 766 self.nProfiles = None
775 767
776 768 self.heightList = None
777 769
778 770 self.channelList = None
779 771
780 772 # self.channelIndexList = None
781 773
782 774 self.flagNoData = True
783 775
784 776 self.flagDiscontinuousBlock = False
785 777
786 778 # self.nPairs = 0
787 779
788 780 self.utctime = None
789 781
790 782 self.blocksize = None
791 783
792 784 self.profileIndex = 0
793 785
794 786 self.nCohInt = 1
795 787
796 788 self.nIncohInt = 1
797 789
798 790 def getNormFactor(self):
799 791 pwcode = 1
800 792 if self.flagDecodeData:
801 793 pwcode = numpy.sum(self.code[0]**2)
802 794
803 795 normFactor = self.nIncohInt*self.nCohInt*pwcode
804 796
805 797 return normFactor
806 798
807 799 def getTimeInterval(self):
808 800
809 801 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
810 802
811 803 return timeInterval
812 804
813 805 normFactor = property(getNormFactor, "I'm the 'getNormFactor' property.")
814 806 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
815 807
816 808 class Fits(JROData):
817 809
818 810 heightList = None
819 811
820 812 channelList = None
821 813
822 814 flagNoData = True
823 815
824 816 flagDiscontinuousBlock = False
825 817
826 818 useLocalTime = False
827 819
828 820 utctime = None
829 821
830 822 timeZone = None
831 823
832 824 # ippSeconds = None
833 825
834 826 # timeInterval = None
835 827
836 828 nCohInt = None
837 829
838 830 nIncohInt = None
839 831
840 832 noise = None
841 833
842 834 windowOfFilter = 1
843 835
844 836 #Speed of light
845 837 C = 3e8
846 838
847 839 frequency = 49.92e6
848 840
849 841 realtime = False
850 842
851 843
852 844 def __init__(self):
853 845
854 846 self.type = "Fits"
855 847
856 848 self.nProfiles = None
857 849
858 850 self.heightList = None
859 851
860 852 self.channelList = None
861 853
862 854 # self.channelIndexList = None
863 855
864 856 self.flagNoData = True
865 857
866 858 self.utctime = None
867 859
868 860 self.nCohInt = 1
869 861
870 862 self.nIncohInt = 1
871 863
872 864 self.useLocalTime = True
873 865
874 866 self.profileIndex = 0
875 867
876 868 # self.utctime = None
877 869 # self.timeZone = None
878 870 # self.ltctime = None
879 871 # self.timeInterval = None
880 872 # self.header = None
881 873 # self.data_header = None
882 874 # self.data = None
883 875 # self.datatime = None
884 876 # self.flagNoData = False
885 877 # self.expName = ''
886 878 # self.nChannels = None
887 879 # self.nSamples = None
888 880 # self.dataBlocksPerFile = None
889 881 # self.comments = ''
890 882 #
891 883
892 884
893 885 def getltctime(self):
894 886
895 887 if self.useLocalTime:
896 888 return self.utctime - self.timeZone*60
897 889
898 890 return self.utctime
899 891
900 892 def getDatatime(self):
901 893
902 894 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
903 895 return datatime
904 896
905 897 def getTimeRange(self):
906 898
907 899 datatime = []
908 900
909 901 datatime.append(self.ltctime)
910 902 datatime.append(self.ltctime + self.timeInterval)
911 903
912 904 datatime = numpy.array(datatime)
913 905
914 906 return datatime
915 907
916 908 def getHeiRange(self):
917 909
918 910 heis = self.heightList
919 911
920 912 return heis
921 913
922 914 def getNHeights(self):
923 915
924 916 return len(self.heightList)
925 917
926 918 def getNChannels(self):
927 919
928 920 return len(self.channelList)
929 921
930 922 def getChannelIndexList(self):
931 923
932 924 return range(self.nChannels)
933 925
934 926 def getNoise(self, type = 1):
935 927
936 928 #noise = numpy.zeros(self.nChannels)
937 929
938 930 if type == 1:
939 931 noise = self.getNoisebyHildebrand()
940 932
941 933 if type == 2:
942 934 noise = self.getNoisebySort()
943 935
944 936 if type == 3:
945 937 noise = self.getNoisebyWindow()
946 938
947 939 return noise
948 940
949 941 def getTimeInterval(self):
950 942
951 943 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
952 944
953 945 return timeInterval
954 946
955 947 datatime = property(getDatatime, "I'm the 'datatime' property")
956 948 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
957 949 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
958 950 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
959 951 noise = property(getNoise, "I'm the 'noise' property.")
960 952
961 953 ltctime = property(getltctime, "I'm the 'ltctime' property")
962 954 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
963 955
964 956
965 957 class Correlation(JROData):
966 958
967 959 noise = None
968 960
969 961 SNR = None
970 962
971 963 #--------------------------------------------------
972 964
973 965 mode = None
974 966
975 967 split = False
976 968
977 969 data_cf = None
978 970
979 971 lags = None
980 972
981 973 lagRange = None
982 974
983 975 pairsList = None
984 976
985 977 normFactor = None
986 978
987 979 #--------------------------------------------------
988 980
989 981 # calculateVelocity = None
990 982
991 983 nLags = None
992 984
993 985 nPairs = None
994 986
995 987 nAvg = None
996 988
997 989
998 990 def __init__(self):
999 991 '''
1000 992 Constructor
1001 993 '''
1002 994 self.radarControllerHeaderObj = RadarControllerHeader()
1003 995
1004 996 self.systemHeaderObj = SystemHeader()
1005 997
1006 998 self.type = "Correlation"
1007 999
1008 1000 self.data = None
1009 1001
1010 1002 self.dtype = None
1011 1003
1012 1004 self.nProfiles = None
1013 1005
1014 1006 self.heightList = None
1015 1007
1016 1008 self.channelList = None
1017 1009
1018 1010 self.flagNoData = True
1019 1011
1020 1012 self.flagDiscontinuousBlock = False
1021 1013
1022 1014 self.utctime = None
1023 1015
1024 1016 self.timeZone = None
1025 1017
1026 1018 self.dstFlag = None
1027 1019
1028 1020 self.errorCount = None
1029 1021
1030 1022 self.blocksize = None
1031 1023
1032 1024 self.flagDecodeData = False #assume the data has not been decoded
1033 1025
1034 1026 self.flagDeflipData = False #assume the data has not been deflipped
1035 1027
1036 1028 self.pairsList = None
1037 1029
1038 1030 self.nPoints = None
1039 1031
1040 1032 def getPairsList(self):
1041 1033
1042 1034 return self.pairsList
1043 1035
1044 1036 def getNoise(self, mode = 2):
1045 1037
1046 1038 indR = numpy.where(self.lagR == 0)[0][0]
1047 1039 indT = numpy.where(self.lagT == 0)[0][0]
1048 1040
1049 1041 jspectra0 = self.data_corr[:,:,indR,:]
1050 1042 jspectra = copy.copy(jspectra0)
1051 1043
1052 1044 num_chan = jspectra.shape[0]
1053 1045 num_hei = jspectra.shape[2]
1054 1046
1055 1047 freq_dc = jspectra.shape[1]/2
1056 1048 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
1057 1049
1058 1050 if ind_vel[0]<0:
1059 1051 ind_vel[range(0,1)] = ind_vel[range(0,1)] + self.num_prof
1060 1052
1061 1053 if mode == 1:
1062 1054 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 #CORRECTION
1063 1055
1064 1056 if mode == 2:
1065 1057
1066 1058 vel = numpy.array([-2,-1,1,2])
1067 1059 xx = numpy.zeros([4,4])
1068 1060
1069 1061 for fil in range(4):
1070 1062 xx[fil,:] = vel[fil]**numpy.asarray(range(4))
1071 1063
1072 1064 xx_inv = numpy.linalg.inv(xx)
1073 1065 xx_aux = xx_inv[0,:]
1074 1066
1075 1067 for ich in range(num_chan):
1076 1068 yy = jspectra[ich,ind_vel,:]
1077 1069 jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
1078 1070
1079 1071 junkid = jspectra[ich,freq_dc,:]<=0
1080 1072 cjunkid = sum(junkid)
1081 1073
1082 1074 if cjunkid.any():
1083 1075 jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
1084 1076
1085 1077 noise = jspectra0[:,freq_dc,:] - jspectra[:,freq_dc,:]
1086 1078
1087 1079 return noise
1088 1080
1089 1081 def getTimeInterval(self):
1090 1082
1091 1083 timeInterval = self.ippSeconds * self.nCohInt * self.nProfiles
1092 1084
1093 1085 return timeInterval
1094 1086
1095 1087 def splitFunctions(self):
1096 1088
1097 1089 pairsList = self.pairsList
1098 1090 ccf_pairs = []
1099 1091 acf_pairs = []
1100 1092 ccf_ind = []
1101 1093 acf_ind = []
1102 1094 for l in range(len(pairsList)):
1103 1095 chan0 = pairsList[l][0]
1104 1096 chan1 = pairsList[l][1]
1105 1097
1106 1098 #Getting autocorrelation pairs
1107 1099 if chan0 == chan1:
1108 1100 acf_pairs.append(chan0)
1109 1101 acf_ind.append(l)
1110 1102 else:
1111 1103 ccf_pairs.append(pairsList[l])
1112 1104 ccf_ind.append(l)
1113 1105
1114 1106 data_acf = self.data_cf[acf_ind]
1115 1107 data_ccf = self.data_cf[ccf_ind]
1116 1108
1117 1109 return acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf
1118 1110
1119 1111 def getNormFactor(self):
1120 1112 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.splitFunctions()
1121 1113 acf_pairs = numpy.array(acf_pairs)
1122 1114 normFactor = numpy.zeros((self.nPairs,self.nHeights))
1123 1115
1124 1116 for p in range(self.nPairs):
1125 1117 pair = self.pairsList[p]
1126 1118
1127 1119 ch0 = pair[0]
1128 1120 ch1 = pair[1]
1129 1121
1130 1122 ch0_max = numpy.max(data_acf[acf_pairs==ch0,:,:], axis=1)
1131 1123 ch1_max = numpy.max(data_acf[acf_pairs==ch1,:,:], axis=1)
1132 1124 normFactor[p,:] = numpy.sqrt(ch0_max*ch1_max)
1133 1125
1134 1126 return normFactor
1135 1127
1136 1128 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
1137 1129 normFactor = property(getNormFactor, "I'm the 'normFactor property'")
1138 1130
1139 1131 class Parameters(Spectra):
1140 1132
1141 1133 experimentInfo = None #Information about the experiment
1142 1134
1143 1135 #Information from previous data
1144 1136
1145 1137 inputUnit = None #Type of data to be processed
1146 1138
1147 1139 operation = None #Type of operation to parametrize
1148 1140
1149 1141 #normFactor = None #Normalization Factor
1150 1142
1151 1143 groupList = None #List of Pairs, Groups, etc
1152 1144
1153 1145 #Parameters
1154 1146
1155 1147 data_param = None #Parameters obtained
1156 1148
1157 1149 data_pre = None #Data Pre Parametrization
1158 1150
1159 1151 data_SNR = None #Signal to Noise Ratio
1160 1152
1161 1153 # heightRange = None #Heights
1162 1154
1163 1155 abscissaList = None #Abscissa, can be velocities, lags or time
1164 1156
1165 1157 # noise = None #Noise Potency
1166 1158
1167 1159 utctimeInit = None #Initial UTC time
1168 1160
1169 1161 paramInterval = None #Time interval to calculate Parameters in seconds
1170 1162
1171 1163 useLocalTime = True
1172 1164
1173 1165 #Fitting
1174 1166
1175 1167 data_error = None #Error of the estimation
1176 1168
1177 1169 constants = None
1178 1170
1179 1171 library = None
1180 1172
1181 1173 #Output signal
1182 1174
1183 1175 outputInterval = None #Time interval to calculate output signal in seconds
1184 1176
1185 1177 data_output = None #Out signal
1186 1178
1187 1179 nAvg = None
1188 1180
1189 1181 noise_estimation = None
1190 1182
1191 1183
1192 1184 def __init__(self):
1193 1185 '''
1194 1186 Constructor
1195 1187 '''
1196 1188 self.radarControllerHeaderObj = RadarControllerHeader()
1197 1189
1198 1190 self.systemHeaderObj = SystemHeader()
1199 1191
1200 1192 self.type = "Parameters"
1201 1193
1202 1194 def getTimeRange1(self, interval):
1203 1195
1204 1196 datatime = []
1205 1197
1206 1198 if self.useLocalTime:
1207 1199 time1 = self.utctimeInit - self.timeZone*60
1208 1200 else:
1209 1201 time1 = self.utctimeInit
1210 1202
1211 1203 datatime.append(time1)
1212 1204 datatime.append(time1 + interval)
1213 1205 datatime = numpy.array(datatime)
1214 1206
1215 1207 return datatime
1216 1208
1217 1209 def getTimeInterval(self):
1218 1210
1211 if hasattr(self, 'timeInterval1'):
1219 1212 return self.timeInterval1
1213 else:
1214 return self.paramInterval
1220 1215
1221 1216 def getNoise(self):
1222 1217
1223 1218 return self.spc_noise
1224 1219
1225 1220 timeInterval = property(getTimeInterval)
@@ -1,736 +1,811
1 1
2 2 import os
3 3 import zmq
4 4 import time
5 5 import numpy
6 6 import datetime
7 7 import numpy as np
8 8 import matplotlib
9 9 matplotlib.use('TkAgg')
10 10 import matplotlib.pyplot as plt
11 11 from mpl_toolkits.axes_grid1 import make_axes_locatable
12 12 from matplotlib.ticker import FuncFormatter, LinearLocator
13 13 from multiprocessing import Process
14 14
15 15 from schainpy.model.proc.jroproc_base import Operation
16 16
17 plt.ioff()
17 plt.ion()
18 18
19 19 func = lambda x, pos: ('%s') %(datetime.datetime.fromtimestamp(x).strftime('%H:%M'))
20 20
21 21 d1970 = datetime.datetime(1970,1,1)
22 22
23 23 class PlotData(Operation, Process):
24 24
25 25 CODE = 'Figure'
26 26 colormap = 'jro'
27 27 CONFLATE = False
28 28 __MAXNUMX = 80
29 29 __missing = 1E30
30 30
31 31 def __init__(self, **kwargs):
32 32
33 33 Operation.__init__(self, plot=True, **kwargs)
34 34 Process.__init__(self)
35 35 self.kwargs['code'] = self.CODE
36 36 self.mp = False
37 37 self.dataOut = None
38 38 self.isConfig = False
39 39 self.figure = None
40 40 self.figure2 = None #JM modification
41 41 self.axes = []
42 42 self.localtime = kwargs.pop('localtime', True)
43 43 self.show = kwargs.get('show', True)
44 44 self.save = kwargs.get('save', False)
45 45 self.colormap = kwargs.get('colormap', self.colormap)
46 46 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
47 47 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
48 48 self.showprofile = kwargs.get('showprofile', True)
49 49 self.title = kwargs.get('wintitle', '')
50 50 self.xaxis = kwargs.get('xaxis', 'frequency')
51 51 self.zmin = kwargs.get('zmin', None)
52 52 self.zmax = kwargs.get('zmax', None)
53 53 self.xmin = kwargs.get('xmin', None)
54 54 self.xmax = kwargs.get('xmax', None)
55 55 self.xrange = kwargs.get('xrange', 24)
56 56 self.ymin = kwargs.get('ymin', None)
57 57 self.ymax = kwargs.get('ymax', None)
58 58 self.__MAXNUMY = kwargs.get('decimation', 80)
59 59 self.throttle_value = 5
60 60 self.times = []
61 #self.interactive = self.kwargs['parent']
62
61 63
62 64 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
63 65
64 66 if x_buffer.shape[0] < 2:
65 67 return x_buffer, y_buffer, z_buffer
66 68
67 69 deltas = x_buffer[1:] - x_buffer[0:-1]
68 70 x_median = np.median(deltas)
69 71
70 72 index = np.where(deltas > 5*x_median)
71 73
72 74 if len(index[0]) != 0:
73 75 z_buffer[::, index[0], ::] = self.__missing
74 76 z_buffer = np.ma.masked_inside(z_buffer,
75 77 0.99*self.__missing,
76 78 1.01*self.__missing)
77 79
78 80 return x_buffer, y_buffer, z_buffer
79 81
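
A hedged illustration of the gap handling above using plain numpy: timestamps separated by more than five times the median spacing mark the corresponding buffer columns with the missing sentinel, and the masked result is what lets pcolormesh leave those gaps blank.

    import numpy as np

    x = np.array([0, 60, 120, 1200, 1260])        # a large hole after the third sample
    z = np.ones((1, x.size, 40))                  # (channels, times, heights) buffer

    deltas = x[1:] - x[:-1]
    gaps = np.where(deltas > 5 * np.median(deltas))[0]
    z[:, gaps, :] = 1e30                          # same sentinel as the class' missing value
    z = np.ma.masked_inside(z, 0.99e30, 1.01e30)  # masked columns will not be drawn
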
80 82 def decimate(self):
81 83
82 84 # dx = int(len(self.x)/self.__MAXNUMX) + 1
83 85 dy = int(len(self.y)/self.__MAXNUMY) + 1
84 86
85 87 # x = self.x[::dx]
86 88 x = self.x
87 89 y = self.y[::dy]
88 90 z = self.z[::, ::, ::dy]
89 91
90 92 return x, y, z
91 93
92 94 def __plot(self):
93 95
94 96 print 'plotting...{}'.format(self.CODE)
95 97
96 98 if self.show:
97 print 'showing'
98 99 self.figure.show()
99 100 self.figure2.show()
100 101
101 102 self.plot()
102 103 plt.tight_layout()
103 self.figure.canvas.manager.set_window_title('{} {} - Date:{}'.format(self.title, self.CODE.upper(),
104 datetime.datetime.fromtimestamp(self.max_time).strftime('%y/%m/%d %H:%M:%S')))
105 self.figure2.canvas.manager.set_window_title('{} {} - Date:{}'.format(self.title, self.CODE.upper(),
106 datetime.datetime.fromtimestamp(self.max_time).strftime('%y/%m/%d %H:%M:%S')))
104
105 # self.figure.canvas.manager.set_window_title('{} {} - Date:{}'.format(self.title, self.CODE.upper(),
106 # datetime.datetime.fromtimestamp(self.max_time).strftime('%y/%m/%d %H:%M:%S')))
107 # self.figure2.canvas.manager.set_window_title('{} {} - Date:{}'.format(self.title, self.CODE.upper(),
108 # datetime.datetime.fromtimestamp(self.max_time).strftime('%y/%m/%d %H:%M:%S')))
110 self.figure.canvas.manager.set_window_title('{} {} - {}'.format(self.title, self.CODE.upper(),
111 datetime.datetime.fromtimestamp(self.max_time).strftime('%Y/%m/%d')))
112 self.figure2.canvas.manager.set_window_title('{} {} - {}'.format(self.title, self.CODE.upper(),
113 datetime.datetime.fromtimestamp(self.max_time).strftime('%Y/%m/%d')))
114
107 115
108 116 if self.save:
109 117 figname = os.path.join(self.save, '{}_{}.png'.format(self.CODE,
110 118 datetime.datetime.fromtimestamp(self.saveTime).strftime('%y%m%d_%H%M%S')))
111 119 print 'Saving figure: {}'.format(figname)
112 120 self.figure.savefig(figname)
113 121 figname2 = os.path.join(self.save, '{}_{}2.png'.format(self.CODE,
114 122 datetime.datetime.fromtimestamp(self.saveTime).strftime('%y%m%d_%H%M%S')))
115 123 print 'Saving figure: {}'.format(figname2)
116 124 self.figure2.savefig(figname2)
117 125
118 126 self.figure.canvas.draw()
119 127 self.figure2.canvas.draw()
120 128
121 129 def plot(self):
122 130
123 131 print 'plotting...{}'.format(self.CODE.upper())
124 132 return
125 133
126 134 def run(self):
127 135
128 136 print '[Starting] {}'.format(self.name)
137
129 138 context = zmq.Context()
130 139 receiver = context.socket(zmq.SUB)
131 140 receiver.setsockopt(zmq.SUBSCRIBE, '')
132 141 receiver.setsockopt(zmq.CONFLATE, self.CONFLATE)
142
143 if 'server' in self.kwargs['parent']:
144 receiver.connect('ipc:///tmp/{}.plots'.format(self.kwargs['parent']['server']))
145 else:
133 146 receiver.connect("ipc:///tmp/zmq.plots")
147
134 148 seconds_passed = 0
149
135 150 while True:
136 151 try:
137 152 self.data = receiver.recv_pyobj(flags=zmq.NOBLOCK)#flags=zmq.NOBLOCK
138 153 self.started = self.data['STARTED']
139 154 self.dataOut = self.data['dataOut']
140 155
141 156 if (len(self.times) < len(self.data['times']) and not self.started and self.data['ENDED']):
142 157 continue
143 158
144 159 self.times = self.data['times']
145 160 self.times.sort()
146 161 self.throttle_value = self.data['throttle']
147 162 self.min_time = self.times[0]
148 163 self.max_time = self.times[-1]
149 164
150 165 if self.isConfig is False:
151 166 print 'setting up'
152 167 self.setup()
153 168 self.isConfig = True
154 169 self.__plot()
155 170
156 171 if self.data['ENDED'] is True:
157 172 print '********GRAPHIC ENDED********'
158 173 self.ended = True
159 174 self.isConfig = False
160 175 self.__plot()
161 176 elif seconds_passed >= self.data['throttle']:
162 177 print 'passed', seconds_passed
163 178 self.__plot()
164 179 seconds_passed = 0
165 180
166 181 except zmq.Again as e:
167 182 print 'Waiting for data...'
168 183 plt.pause(2)
169 184 seconds_passed += 2
170 185
171 186 def close(self):
172 187 if self.dataOut:
173 188 self.__plot()
174 189
175 190
176 191 class PlotSpectraData(PlotData):
177 192
178 193 CODE = 'spc'
179 194 colormap = 'jro'
180 195 CONFLATE = False
181 196
182 197 def setup(self):
183 198
184 199 ncolspan = 1
185 200 colspan = 1
186 201 self.ncols = int(numpy.sqrt(self.dataOut.nChannels)+0.9)
187 202 self.nrows = int(self.dataOut.nChannels*1./self.ncols + 0.9)
188 203 self.width = 3.6*self.ncols
189 204 self.height = 3.2*self.nrows
190 205 if self.showprofile:
191 206 ncolspan = 3
192 207 colspan = 2
193 208 self.width += 1.2*self.ncols
194 209
195 210 self.ylabel = 'Range [Km]'
196 211 self.titles = ['Channel {}'.format(x) for x in self.dataOut.channelList]
197 212
198 213 if self.figure is None:
199 214 self.figure = plt.figure(figsize=(self.width, self.height),
200 215 edgecolor='k',
201 216 facecolor='w')
202 217 else:
203 218 self.figure.clf()
204 219
205 220 n = 0
206 221 for y in range(self.nrows):
207 222 for x in range(self.ncols):
208 223 if n >= self.dataOut.nChannels:
209 224 break
210 225 ax = plt.subplot2grid((self.nrows, self.ncols*ncolspan), (y, x*ncolspan), 1, colspan)
211 226 if self.showprofile:
212 227 ax.ax_profile = plt.subplot2grid((self.nrows, self.ncols*ncolspan), (y, x*ncolspan+colspan), 1, 1)
213 228
214 229 ax.firsttime = True
215 230 self.axes.append(ax)
216 231 n += 1
217 232
218 233 def plot(self):
219 234
220 235 if self.xaxis == "frequency":
221 236 x = self.dataOut.getFreqRange(1)/1000.
222 237 xlabel = "Frequency (kHz)"
223 238 elif self.xaxis == "time":
224 239 x = self.dataOut.getAcfRange(1)
225 240 xlabel = "Time (ms)"
226 241 else:
227 242 x = self.dataOut.getVelRange(1)
228 243 xlabel = "Velocity (m/s)"
229 244
230 245 y = self.dataOut.getHeiRange()
231 246 z = self.data[self.CODE]
232 247
233 248 for n, ax in enumerate(self.axes):
234 249
235 250 if ax.firsttime:
236 251 self.xmax = self.xmax if self.xmax else np.nanmax(x)
237 252 self.xmin = self.xmin if self.xmin else -self.xmax
238 253 self.ymin = self.ymin if self.ymin else np.nanmin(y)
239 254 self.ymax = self.ymax if self.ymax else np.nanmax(y)
240 255 self.zmin = self.zmin if self.zmin else np.nanmin(z)
241 256 self.zmax = self.zmax if self.zmax else np.nanmax(z)
242 257 ax.plot = ax.pcolormesh(x, y, z[n].T,
243 258 vmin=self.zmin,
244 259 vmax=self.zmax,
245 260 cmap=plt.get_cmap(self.colormap)
246 261 )
247 262 divider = make_axes_locatable(ax)
248 263 cax = divider.new_horizontal(size='3%', pad=0.05)
249 264 self.figure.add_axes(cax)
250 265 plt.colorbar(ax.plot, cax)
251 266
252 267 ax.set_xlim(self.xmin, self.xmax)
253 268 ax.set_ylim(self.ymin, self.ymax)
254 269
255 270 ax.set_ylabel(self.ylabel)
256 271 ax.set_xlabel(xlabel)
257 272
258 273 ax.firsttime = False
259 274
260 275 if self.showprofile:
261 276 ax.plot_profile= ax.ax_profile.plot(self.data['rti'][self.max_time][n], y)[0]
262 277 ax.ax_profile.set_xlim(self.zmin, self.zmax)
263 278 ax.ax_profile.set_ylim(self.ymin, self.ymax)
264 279 ax.ax_profile.set_xlabel('dB')
265 280 ax.ax_profile.grid(b=True, axis='x')
266 281 ax.plot_noise = ax.ax_profile.plot(numpy.repeat(self.data['noise'][self.max_time][n], len(y)), y,
267 282 color="k", linestyle="dashed", lw=2)[0]
268 283 [tick.set_visible(False) for tick in ax.ax_profile.get_yticklabels()]
269 284 else:
270 285 ax.plot.set_array(z[n].T.ravel())
271 286 if self.showprofile:
272 287 ax.plot_profile.set_data(self.data['rti'][self.max_time][n], y)
273 288 ax.plot_noise.set_data(numpy.repeat(self.data['noise'][self.max_time][n], len(y)), y)
274 289
275 290 ax.set_title('{} - Noise: {:.2f} dB'.format(self.titles[n], self.data['noise'][self.max_time][n]),
276 291 size=8)
277 292 self.saveTime = self.max_time
278 293
279 294
280 295 class PlotCrossSpectraData(PlotData):
281 296
282 297 CODE = 'cspc'
283 298 zmin_coh = None
284 299 zmax_coh = None
285 300 zmin_phase = None
286 301 zmax_phase = None
287 302 CONFLATE = False
288 303
289 304 def setup(self):
290 305
291 306 ncolspan = 1
292 307 colspan = 1
293 308 self.ncols = 2
294 309 self.nrows = self.dataOut.nPairs
295 310 self.width = 3.6*self.ncols
296 311 self.height = 3.2*self.nrows
297 312
298 313 self.ylabel = 'Range [Km]'
299 314 self.titles = ['Channel {}'.format(x) for x in self.dataOut.channelList]
300 315
301 316 if self.figure is None:
302 317 self.figure = plt.figure(figsize=(self.width, self.height),
303 318 edgecolor='k',
304 319 facecolor='w')
305 320 else:
306 321 self.figure.clf()
307 322
308 323 for y in range(self.nrows):
309 324 for x in range(self.ncols):
310 325 ax = plt.subplot2grid((self.nrows, self.ncols), (y, x), 1, 1)
311 326 ax.firsttime = True
312 327 self.axes.append(ax)
313 328
314 329 def plot(self):
315 330
316 331 if self.xaxis == "frequency":
317 332 x = self.dataOut.getFreqRange(1)/1000.
318 333 xlabel = "Frequency (kHz)"
319 334 elif self.xaxis == "time":
320 335 x = self.dataOut.getAcfRange(1)
321 336 xlabel = "Time (ms)"
322 337 else:
323 338 x = self.dataOut.getVelRange(1)
324 339 xlabel = "Velocity (m/s)"
325 340
326 341 y = self.dataOut.getHeiRange()
327 342 z_coh = self.data['cspc_coh']
328 343 z_phase = self.data['cspc_phase']
329 344
330 345 for n in range(self.nrows):
331 346 ax = self.axes[2*n]
332 347 ax1 = self.axes[2*n+1]
333 348 if ax.firsttime:
334 349 self.xmax = self.xmax if self.xmax else np.nanmax(x)
335 350 self.xmin = self.xmin if self.xmin else -self.xmax
336 351 self.ymin = self.ymin if self.ymin else np.nanmin(y)
337 352 self.ymax = self.ymax if self.ymax else np.nanmax(y)
338 353 self.zmin_coh = self.zmin_coh if self.zmin_coh else 0.0
339 354 self.zmax_coh = self.zmax_coh if self.zmax_coh else 1.0
340 355 self.zmin_phase = self.zmin_phase if self.zmin_phase else -180
341 356 self.zmax_phase = self.zmax_phase if self.zmax_phase else 180
342 357
343 358 ax.plot = ax.pcolormesh(x, y, z_coh[n].T,
344 359 vmin=self.zmin_coh,
345 360 vmax=self.zmax_coh,
346 361 cmap=plt.get_cmap(self.colormap_coh)
347 362 )
348 363 divider = make_axes_locatable(ax)
349 364 cax = divider.new_horizontal(size='3%', pad=0.05)
350 365 self.figure.add_axes(cax)
351 366 plt.colorbar(ax.plot, cax)
352 367
353 368 ax.set_xlim(self.xmin, self.xmax)
354 369 ax.set_ylim(self.ymin, self.ymax)
355 370
356 371 ax.set_ylabel(self.ylabel)
357 372 ax.set_xlabel(xlabel)
358 373 ax.firsttime = False
359 374
360 375 ax1.plot = ax1.pcolormesh(x, y, z_phase[n].T,
361 376 vmin=self.zmin_phase,
362 377 vmax=self.zmax_phase,
363 378 cmap=plt.get_cmap(self.colormap_phase)
364 379 )
365 380 divider = make_axes_locatable(ax1)
366 381 cax = divider.new_horizontal(size='3%', pad=0.05)
367 382 self.figure.add_axes(cax)
368 383 plt.colorbar(ax1.plot, cax)
369 384
370 385 ax1.set_xlim(self.xmin, self.xmax)
371 386 ax1.set_ylim(self.ymin, self.ymax)
372 387
373 388 ax1.set_ylabel(self.ylabel)
374 389 ax1.set_xlabel(xlabel)
375 390 ax1.firsttime = False
376 391 else:
377 392 ax.plot.set_array(z_coh[n].T.ravel())
378 393 ax1.plot.set_array(z_phase[n].T.ravel())
379 394
380 395 ax.set_title('Coherence Ch{} * Ch{}'.format(self.dataOut.pairsList[n][0], self.dataOut.pairsList[n][1]), size=8)
381 396 ax1.set_title('Phase Ch{} * Ch{}'.format(self.dataOut.pairsList[n][0], self.dataOut.pairsList[n][1]), size=8)
382 397 self.saveTime = self.max_time
383 398
384 399
385 400 class PlotSpectraMeanData(PlotSpectraData):
386 401
387 402 CODE = 'spc_mean'
388 403 colormap = 'jet'
389 404
390 405 def plot(self):
391 406
392 407 if self.xaxis == "frequency":
393 408 x = self.dataOut.getFreqRange(1)/1000.
394 409 xlabel = "Frequency (kHz)"
395 410 elif self.xaxis == "time":
396 411 x = self.dataOut.getAcfRange(1)
397 412 xlabel = "Time (ms)"
398 413 else:
399 414 x = self.dataOut.getVelRange(1)
400 415 xlabel = "Velocity (m/s)"
401 416
402 417 y = self.dataOut.getHeiRange()
403 418 z = self.data['spc']
404 419 mean = self.data['mean'][self.max_time]
405 420
406 421 for n, ax in enumerate(self.axes):
407 422
408 423 if ax.firsttime:
409 424 self.xmax = self.xmax if self.xmax else np.nanmax(x)
410 425 self.xmin = self.xmin if self.xmin else -self.xmax
411 426 self.ymin = self.ymin if self.ymin else np.nanmin(y)
412 427 self.ymax = self.ymax if self.ymax else np.nanmax(y)
413 428 self.zmin = self.zmin if self.zmin else np.nanmin(z)
414 429 self.zmax = self.zmax if self.zmax else np.nanmax(z)
415 430 ax.plt = ax.pcolormesh(x, y, z[n].T,
416 431 vmin=self.zmin,
417 432 vmax=self.zmax,
418 433 cmap=plt.get_cmap(self.colormap)
419 434 )
420 435 ax.plt_dop = ax.plot(mean[n], y,
421 436 color='k')[0]
422 437
423 438 divider = make_axes_locatable(ax)
424 439 cax = divider.new_horizontal(size='3%', pad=0.05)
425 440 self.figure.add_axes(cax)
426 441 plt.colorbar(ax.plt, cax)
427 442
428 443 ax.set_xlim(self.xmin, self.xmax)
429 444 ax.set_ylim(self.ymin, self.ymax)
430 445
431 446 ax.set_ylabel(self.ylabel)
432 447 ax.set_xlabel(xlabel)
433 448
434 449 ax.firsttime = False
435 450
436 451 if self.showprofile:
437 452 ax.plt_profile= ax.ax_profile.plot(self.data['rti'][self.max_time][n], y)[0]
438 453 ax.ax_profile.set_xlim(self.zmin, self.zmax)
439 454 ax.ax_profile.set_ylim(self.ymin, self.ymax)
440 455 ax.ax_profile.set_xlabel('dB')
441 456 ax.ax_profile.grid(b=True, axis='x')
442 457 ax.plt_noise = ax.ax_profile.plot(numpy.repeat(self.data['noise'][self.max_time][n], len(y)), y,
443 458 color="k", linestyle="dashed", lw=2)[0]
444 459 [tick.set_visible(False) for tick in ax.ax_profile.get_yticklabels()]
445 460 else:
446 461 ax.plt.set_array(z[n].T.ravel())
447 462 ax.plt_dop.set_data(mean[n], y)
448 463 if self.showprofile:
449 464 ax.plt_profile.set_data(self.data['rti'][self.max_time][n], y)
450 465 ax.plt_noise.set_data(numpy.repeat(self.data['noise'][self.max_time][n], len(y)), y)
451 466
452 467 ax.set_title('{} - Noise: {:.2f} dB'.format(self.titles[n], self.data['noise'][self.max_time][n]),
453 468 size=8)
454 469 self.saveTime = self.max_time
455 470
456 471
457 472 class PlotRTIData(PlotData):
458 473
459 474 CODE = 'rti'
460 475 colormap = 'jro'
461 476
462 477 def setup(self):
463 478 self.ncols = 1
464 479 self.nrows = self.dataOut.nChannels
465 480 self.width = 10
466 481 self.height = 2.2*self.nrows if self.nrows<6 else 12
467 482 if self.nrows==1:
468 483 self.height += 1
469 484 self.ylabel = 'Range [Km]'
470 485 self.titles = ['Channel {}'.format(x) for x in self.dataOut.channelList]
471 486
472 487 if self.figure is None:
473 488 self.figure = plt.figure(figsize=(self.width, self.height),
474 489 edgecolor='k',
475 490 facecolor='w')
476 491 else:
477 492 self.figure.clf()
478 493 self.axes = []
479 494
480 495 if self.figure2 is None:
481 496 self.figure2 = plt.figure(figsize=(self.width, self.height),
482 497 edgecolor='k',
483 498 facecolor='w')
484 499 else:
485 500 self.figure2.clf()
486 501 self.axes = []
487 502
488 503 ax = self.figure.add_subplot(1,1,1)
489 504 #ax = self.figure( n+1)
490 505 ax.firsttime = True
491 506 self.axes.append(ax)
492 507
493 508 ax = self.figure2.add_subplot(1,1,1)
494 509 #ax = self.figure( n+1)
495 510 ax.firsttime = True
496 511 self.axes.append(ax)
497 512 # for n in range(self.nrows):
498 513 # ax = self.figure.add_subplot(self.nrows, self.ncols, n+1)
499 514 # #ax = self.figure( n+1)
500 515 # ax.firsttime = True
501 516 # self.axes.append(ax)
502 517
503 518
504 519 def plot(self):
505 520
506 521 self.x = np.array(self.times)
507 522 self.y = self.dataOut.getHeiRange()
508 523 self.z = []
509 524
510 525 for ch in range(self.nrows):
511 526 self.z.append([self.data[self.CODE][t][ch] for t in self.times])
512 527
513 528 self.z = np.array(self.z)
514 529 for n, ax in enumerate(self.axes):
515 530 x, y, z = self.fill_gaps(*self.decimate())
516 531 xmin = self.min_time
517 532 xmax = xmin+self.xrange*60*60
518 533 self.zmin = self.zmin if self.zmin else np.min(self.z)
519 534 self.zmax = self.zmax if self.zmax else np.max(self.z)
520 535 if ax.firsttime:
521 536 self.ymin = self.ymin if self.ymin else np.nanmin(self.y)
522 537 self.ymax = self.ymax if self.ymax else np.nanmax(self.y)
523 538 plot = ax.pcolormesh(x, y, z[n].T,
524 539 vmin=self.zmin,
525 540 vmax=self.zmax,
526 541 cmap=plt.get_cmap(self.colormap)
527 542 )
528 543 divider = make_axes_locatable(ax)
529 544 cax = divider.new_horizontal(size='2%', pad=0.05)
530 545 #self.figure.add_axes(cax)
531 546 #self.figure2.add_axes(cax)
532 547 plt.colorbar(plot, cax)
533 548 ax.set_ylim(self.ymin, self.ymax)
534 549
535 550 ax.xaxis.set_major_formatter(FuncFormatter(func))
536 551 ax.xaxis.set_major_locator(LinearLocator(6))
537 552
538 553 ax.set_ylabel(self.ylabel)
539 554
540 555 # if self.xmin is None:
541 556 # xmin = self.min_time
542 557 # else:
543 558 # xmin = (datetime.datetime.combine(self.dataOut.datatime.date(),
544 559 # datetime.time(self.xmin, 0, 0))-d1970).total_seconds()
545 560
546 561 ax.set_xlim(xmin, xmax)
547 562 ax.firsttime = False
548 563 else:
549 564 ax.collections.remove(ax.collections[0])
550 565 ax.set_xlim(xmin, xmax)
551 566 plot = ax.pcolormesh(x, y, z[n].T,
552 567 vmin=self.zmin,
553 568 vmax=self.zmax,
554 569 cmap=plt.get_cmap(self.colormap)
555 570 )
556 571 ax.set_title('{} {}'.format(self.titles[n],
557 572 datetime.datetime.fromtimestamp(self.max_time).strftime('%y/%m/%d %H:%M:%S')),
558 573 size=8)
559 574
560 575 self.saveTime = self.min_time
561 576
562 577
563 578 class PlotCOHData(PlotRTIData):
564 579
565 580 CODE = 'coh'
566 581
567 582 def setup(self):
568 583
569 584 self.ncols = 1
570 585 self.nrows = self.dataOut.nPairs
571 586 self.width = 10
572 587 self.height = 2.2*self.nrows if self.nrows<6 else 12
573 588 if self.nrows==1:
574 589 self.height += 1
575 590 self.ylabel = 'Range [Km]'
576 591 self.titles = ['{} Ch{} * Ch{}'.format(self.CODE.upper(), x[0], x[1]) for x in self.dataOut.pairsList]
577 592
578 593 if self.figure is None:
579 594 self.figure = plt.figure(figsize=(self.width, self.height),
580 595 edgecolor='k',
581 596 facecolor='w')
582 597 else:
583 598 self.figure.clf()
584 599 self.axes = []
585 600
586 601 for n in range(self.nrows):
587 602 ax = self.figure.add_subplot(self.nrows, self.ncols, n+1)
588 603 ax.firsttime = True
589 604 self.axes.append(ax)
590 605
591 606
592 607 class PlotNoiseData(PlotData):
593 608 CODE = 'noise'
594 609
595 610 def setup(self):
596 611
597 612 self.ncols = 1
598 613 self.nrows = 1
599 614 self.width = 10
600 615 self.height = 3.2
601 616 self.ylabel = 'Intensity [dB]'
602 617 self.titles = ['Noise']
603 618
604 619 if self.figure is None:
605 620 self.figure = plt.figure(figsize=(self.width, self.height),
606 621 edgecolor='k',
607 622 facecolor='w')
608 623 else:
609 624 self.figure.clf()
610 625 self.axes = []
611 626
612 627 self.ax = self.figure.add_subplot(self.nrows, self.ncols, 1)
613 628 self.ax.firsttime = True
614 629
615 630 def plot(self):
616 631
617 632 x = self.times
618 633 xmin = self.min_time
619 634 xmax = xmin+self.xrange*60*60
620 635 if self.ax.firsttime:
621 636 for ch in self.dataOut.channelList:
622 637 y = [self.data[self.CODE][t][ch] for t in self.times]
623 638 self.ax.plot(x, y, lw=1, label='Ch{}'.format(ch))
624 639 self.ax.firsttime = False
625 640 self.ax.xaxis.set_major_formatter(FuncFormatter(func))
626 641 self.ax.xaxis.set_major_locator(LinearLocator(6))
627 642 self.ax.set_ylabel(self.ylabel)
628 643 plt.legend()
629 644 else:
630 645 for ch in self.dataOut.channelList:
631 646 y = [self.data[self.CODE][t][ch] for t in self.times]
632 647 self.ax.lines[ch].set_data(x, y)
633 648
634 649 self.ax.set_xlim(xmin, xmax)
635 650 self.ax.set_ylim(min(y)-5, max(y)+5)
636 651 self.saveTime = self.min_time
637 652
638 653
639 654 class PlotWindProfilerData(PlotRTIData):
655
640 656 CODE = 'wind'
641 657 colormap = 'seismic'
642 658
643 659 def setup(self):
644 660 self.ncols = 1
645 661 self.nrows = self.dataOut.data_output.shape[0]
646 662 self.width = 10
647 663 self.height = 2.2*self.nrows
648 664 self.ylabel = 'Height [Km]'
649 self.titles = ['Zonal' ,'Meridional', 'Vertical']
665 self.titles = ['Zonal Wind' ,'Meridional Wind', 'Vertical Wind']
650 666 self.clabels = ['Velocity (m/s)','Velocity (m/s)','Velocity (cm/s)']
651 667 self.windFactor = [1, 1, 100]
652 668
653 669 if self.figure is None:
654 670 self.figure = plt.figure(figsize=(self.width, self.height),
655 671 edgecolor='k',
656 672 facecolor='w')
657 673 else:
658 674 self.figure.clf()
659 675 self.axes = []
660 676
661 677 for n in range(self.nrows):
662 678 ax = self.figure.add_subplot(self.nrows, self.ncols, n+1)
663 679 ax.firsttime = True
664 680 self.axes.append(ax)
665 681
666 682 def plot(self):
667 683
668 684 self.x = np.array(self.times)
669 685 self.y = self.dataOut.heightList
670 686 self.z = []
671 687
672 688 for ch in range(self.nrows):
673 self.z.append([self.data[self.CODE][t][ch] for t in self.times])
689 self.z.append([self.data['output'][t][ch] for t in self.times])
674 690
675 691 self.z = np.array(self.z)
676 692 self.z = numpy.ma.masked_invalid(self.z)
677 693
678 694 cmap=plt.get_cmap(self.colormap)
679 cmap.set_bad('white', 1.)
695 cmap.set_bad('black', 1.)
680 696
681 697 for n, ax in enumerate(self.axes):
682 698 x, y, z = self.fill_gaps(*self.decimate())
683 699 xmin = self.min_time
684 700 xmax = xmin+self.xrange*60*60
685 701 if ax.firsttime:
686 702 self.ymin = self.ymin if self.ymin else np.nanmin(self.y)
687 703 self.ymax = self.ymax if self.ymax else np.nanmax(self.y)
688 704 self.zmax = self.zmax if self.zmax else numpy.nanmax(abs(self.z[:-1, :]))
689 705 self.zmin = self.zmin if self.zmin else -self.zmax
690 706
691 707 plot = ax.pcolormesh(x, y, z[n].T*self.windFactor[n],
692 708 vmin=self.zmin,
693 709 vmax=self.zmax,
694 710 cmap=cmap
695 711 )
696 712 divider = make_axes_locatable(ax)
697 713 cax = divider.new_horizontal(size='2%', pad=0.05)
698 cax.set_ylabel(self.clabels[n])
699 714 self.figure.add_axes(cax)
700 plt.colorbar(plot, cax)
715 cb = plt.colorbar(plot, cax)
716 cb.set_label(self.clabels[n])
701 717 ax.set_ylim(self.ymin, self.ymax)
702 718
703 719 ax.xaxis.set_major_formatter(FuncFormatter(func))
704 720 ax.xaxis.set_major_locator(LinearLocator(6))
705 721
706 722 ax.set_ylabel(self.ylabel)
707 723
708 724 ax.set_xlim(xmin, xmax)
709 725 ax.firsttime = False
710 726 else:
711 727 ax.collections.remove(ax.collections[0])
712 728 ax.set_xlim(xmin, xmax)
713 729 plot = ax.pcolormesh(x, y, z[n].T*self.windFactor[n],
714 730 vmin=self.zmin,
715 731 vmax=self.zmax,
716 732 cmap=plt.get_cmap(self.colormap)
717 733 )
718 734 ax.set_title('{} {}'.format(self.titles[n],
719 735 datetime.datetime.fromtimestamp(self.max_time).strftime('%y/%m/%d %H:%M:%S')),
720 736 size=8)
721 737
722 738 self.saveTime = self.min_time
723 739
724 740
725 741 class PlotSNRData(PlotRTIData):
726 742 CODE = 'snr'
727 743 colormap = 'jet'
728 744
729 745 class PlotDOPData(PlotRTIData):
730 746 CODE = 'dop'
731 747 colormap = 'jet'
732 748
733 749
734 750 class PlotPHASEData(PlotCOHData):
735 751 CODE = 'phase'
736 752 colormap = 'seismic'
753
754
755 class PlotSkyMapData(PlotData):
756
757 CODE = 'met'
758
759 def setup(self):
760
761 self.ncols = 1
762 self.nrows = 1
763 self.width = 7.2
764 self.height = 7.2
765
766 self.xlabel = 'Zonal Zenith Angle (deg)'
767 self.ylabel = 'Meridional Zenith Angle (deg)'
768
769 if self.figure is None:
770 self.figure = plt.figure(figsize=(self.width, self.height),
771 edgecolor='k',
772 facecolor='w')
773 else:
774 self.figure.clf()
775
776 self.ax = plt.subplot2grid((self.nrows, self.ncols), (0, 0), 1, 1, polar=True)
777 self.ax.firsttime = True
778
779
780 def plot(self):
781
782 arrayParameters = np.concatenate([self.data['param'][t] for t in self.times])
783 error = arrayParameters[:,-1]
784 indValid = numpy.where(error == 0)[0]
785 finalMeteor = arrayParameters[indValid,:]
786 finalAzimuth = finalMeteor[:,3]
787 finalZenith = finalMeteor[:,4]
788
789 x = finalAzimuth*numpy.pi/180
790 y = finalZenith
791
792 if self.ax.firsttime:
793 self.ax.plot = self.ax.plot(x, y, 'bo', markersize=5)[0]
794 self.ax.set_ylim(0,90)
795 self.ax.set_yticks(numpy.arange(0,90,20))
796 self.ax.set_xlabel(self.xlabel)
797 self.ax.set_ylabel(self.ylabel)
798 self.ax.yaxis.labelpad = 40
799 self.ax.firsttime = False
800 else:
801 self.ax.plot.set_data(x, y)
802
803
804 dt1 = datetime.datetime.fromtimestamp(self.min_time).strftime('%y/%m/%d %H:%M:%S')
805 dt2 = datetime.datetime.fromtimestamp(self.max_time).strftime('%y/%m/%d %H:%M:%S')
806 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
807 dt2,
808 len(x))
809 self.ax.set_title(title, size=8)
810
811 self.saveTime = self.max_time
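PlotSkyMapData puts each valid detection on a polar axis: rows whose last column (the error code) is non-zero are discarded, column 3 (azimuth, in degrees) becomes the angle and column 4 (zenith angle) the radius. A self-contained sketch of the same conversion, with synthetic values in place of self.data['param']:

import numpy
import matplotlib.pyplot as plt

# Synthetic detections: columns 3 and 4 mimic azimuth and zenith (deg),
# the last column mimics the error code used to keep only valid events.
params = numpy.array([[0, 0, 0,  45.0, 30.0, 0],
                      [0, 0, 0, 120.0, 60.0, 0],
                      [0, 0, 0, 200.0, 10.0, 1]])  # last row rejected (error != 0)
valid = params[params[:, -1] == 0]
theta = valid[:, 3] * numpy.pi / 180   # azimuth in radians
r = valid[:, 4]                        # zenith angle in degrees

ax = plt.subplot(111, polar=True)
ax.plot(theta, r, 'bo', markersize=5)
ax.set_ylim(0, 90)
plt.show()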
@@ -1,468 +1,468
1 1 import numpy
2 2 import datetime
3 3 import sys
4 4 import matplotlib
5 5
6 6 if 'linux' in sys.platform:
7 7 matplotlib.use("TKAgg")
8 8
9 9 if 'darwin' in sys.platform:
10 10 matplotlib.use('TKAgg')
11 11 #Qt4Agg', 'GTK', 'GTKAgg', 'ps', 'agg', 'cairo', 'MacOSX', 'GTKCairo', 'WXAgg', 'template', 'TkAgg', 'GTK3Cairo', 'GTK3Agg', 'svg', 'WebAgg', 'CocoaAgg', 'emf', 'gdk', 'WX'
12 12 import matplotlib.pyplot
13 13
14 14 from mpl_toolkits.axes_grid1 import make_axes_locatable
15 15 from matplotlib.ticker import FuncFormatter, LinearLocator
16 16
17 17 ###########################################
18 18 # Update of the driver functions
19 19 ###########################################
20 20
21 21 # create jro colormap
22 22
23 23 jet_values = matplotlib.pyplot.get_cmap("jet", 100)(numpy.arange(100))[10:90]
24 24 blu_values = matplotlib.pyplot.get_cmap("seismic_r", 20)(numpy.arange(20))[10:15]
25 25 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list("jro", numpy.vstack((blu_values, jet_values)))
26 26 matplotlib.pyplot.register_cmap(cmap=ncmap)
27 27
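Once registered, the custom 'jro' colormap can be requested by name anywhere matplotlib accepts a colormap string. A small sketch, assuming this module has already been imported so the registration above has run:

import numpy
import matplotlib.pyplot as plt

data = numpy.random.rand(50, 50)
plt.pcolormesh(data, cmap=plt.get_cmap("jro"))  # 'jro' was registered above
plt.colorbar()
plt.show()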
28 28 def createFigure(id, wintitle, width, height, facecolor="w", show=True, dpi = 80):
29 29
30 30 matplotlib.pyplot.ioff()
31 31
32 32 fig = matplotlib.pyplot.figure(num=id, facecolor=facecolor, figsize=(1.0*width/dpi, 1.0*height/dpi))
33 33 fig.canvas.manager.set_window_title(wintitle)
34 34 # fig.canvas.manager.resize(width, height)
35 35 matplotlib.pyplot.ion()
36 36
37 37 if show:
38 38 matplotlib.pyplot.show()
39 39
40 40 return fig
41 41
42 42 def closeFigure(show=False, fig=None):
43 43
44 44 # matplotlib.pyplot.ioff()
45 45 # matplotlib.pyplot.pause(0)
46 46
47 47 if show:
48 48 matplotlib.pyplot.show()
49 49
50 50 if fig != None:
51 51 matplotlib.pyplot.close(fig)
52 52 # matplotlib.pyplot.pause(0)
53 53 # matplotlib.pyplot.ion()
54 54
55 55 return
56 56
57 57 matplotlib.pyplot.close("all")
58 58 # matplotlib.pyplot.pause(0)
59 59 # matplotlib.pyplot.ion()
60 60
61 61 return
62 62
63 63 def saveFigure(fig, filename):
64 64
65 65 # matplotlib.pyplot.ioff()
66 66 fig.savefig(filename, dpi=matplotlib.pyplot.gcf().dpi)
67 67 # matplotlib.pyplot.ion()
68 68
69 69 def clearFigure(fig):
70 70
71 71 fig.clf()
72 72
73 73 def setWinTitle(fig, title):
74 74
75 75 fig.canvas.manager.set_window_title(title)
76 76
77 77 def setTitle(fig, title):
78 78
79 79 fig.suptitle(title)
80 80
81 81 def createAxes(fig, nrow, ncol, xpos, ypos, colspan, rowspan, polar=False):
82 82
83 83 matplotlib.pyplot.ioff()
84 84 matplotlib.pyplot.figure(fig.number)
85 85 axes = matplotlib.pyplot.subplot2grid((nrow, ncol),
86 86 (xpos, ypos),
87 87 colspan=colspan,
88 88 rowspan=rowspan,
89 89 polar=polar)
90 90
91 91 matplotlib.pyplot.ion()
92 92 return axes
93 93
94 94 def setAxesText(ax, text):
95 95
96 96 ax.annotate(text,
97 97 xy = (.1, .99),
98 98 xycoords = 'figure fraction',
99 99 horizontalalignment = 'left',
100 100 verticalalignment = 'top',
101 101 fontsize = 10)
102 102
103 103 def printLabels(ax, xlabel, ylabel, title):
104 104
105 105 ax.set_xlabel(xlabel, size=11)
106 106 ax.set_ylabel(ylabel, size=11)
107 107 ax.set_title(title, size=8)
108 108
109 109 def createPline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='',
110 110 ticksize=9, xtick_visible=True, ytick_visible=True,
111 111 nxticks=4, nyticks=10,
112 112 grid=None,color='blue'):
113 113
114 114 """
115 115
116 116 Input:
117 117 grid : None, 'both', 'x', 'y'
118 118 """
119 119
120 120 matplotlib.pyplot.ioff()
121 121
122 122 ax.set_xlim([xmin,xmax])
123 123 ax.set_ylim([ymin,ymax])
124 124
125 125 printLabels(ax, xlabel, ylabel, title)
126 126
127 127 ######################################################
128 128 if (xmax-xmin)<=1:
129 129 xtickspos = numpy.linspace(xmin,xmax,nxticks)
130 130 xtickspos = numpy.array([float("%.1f"%i) for i in xtickspos])
131 131 ax.set_xticks(xtickspos)
132 132 else:
133 133 xtickspos = numpy.arange(nxticks)*int((xmax-xmin)/(nxticks)) + int(xmin)
134 134 # xtickspos = numpy.arange(nxticks)*float(xmax-xmin)/float(nxticks) + int(xmin)
135 135 ax.set_xticks(xtickspos)
136 136
137 137 for tick in ax.get_xticklabels():
138 138 tick.set_visible(xtick_visible)
139 139
140 140 for tick in ax.xaxis.get_major_ticks():
141 141 tick.label.set_fontsize(ticksize)
142 142
143 143 ######################################################
144 144 for tick in ax.get_yticklabels():
145 145 tick.set_visible(ytick_visible)
146 146
147 147 for tick in ax.yaxis.get_major_ticks():
148 148 tick.label.set_fontsize(ticksize)
149 149
150 150 ax.plot(x, y, color=color)
151 151 iplot = ax.lines[-1]
152 152
153 153 ######################################################
154 154 if '0.' in matplotlib.__version__[0:2]:
155 155 print "The matplotlib version has to be updated to 1.1 or newer"
156 156 return iplot
157 157
158 158 if '1.0.' in matplotlib.__version__[0:4]:
159 159 print "The matplotlib version has to be updated to 1.1 or newer"
160 160 return iplot
161 161
162 162 if grid != None:
163 163 ax.grid(b=True, which='major', axis=grid)
164 164
165 165 matplotlib.pyplot.tight_layout()
166 166
167 167 matplotlib.pyplot.ion()
168 168
169 169 return iplot
170 170
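createPline returns the Line2D handle it adds to the axes, and pline() below reuses that handle to swap in new data without rebuilding the axes. A usage sketch with synthetic data, assuming the functions of this module are in scope:

import numpy

fig = createFigure(id=1, wintitle="pline demo", width=600, height=400, show=False)
ax = createAxes(fig, nrow=1, ncol=1, xpos=0, ypos=0, colspan=1, rowspan=1)

x = numpy.linspace(0, 10, 100)
iplot = createPline(ax, x, numpy.sin(x), xmin=0, xmax=10, ymin=-1, ymax=1,
                    xlabel="x", ylabel="sin(x)", title="first call")

# Later calls only replace the line data, keeping limits, ticks and labels.
pline(iplot, x, numpy.cos(x), xlabel="x", ylabel="cos(x)", title="updated")
draw(fig)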
171 171 def set_linedata(ax, x, y, idline):
172 172
173 173 ax.lines[idline].set_data(x,y)
174 174
175 175 def pline(iplot, x, y, xlabel='', ylabel='', title=''):
176 176
177 177 ax = iplot.axes
178 178
179 179 printLabels(ax, xlabel, ylabel, title)
180 180
181 181 set_linedata(ax, x, y, idline=0)
182 182
183 183 def addpline(ax, x, y, color, linestyle, lw):
184 184
185 185 ax.plot(x,y,color=color,linestyle=linestyle,lw=lw)
186 186
187 187
188 188 def createPcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax,
189 189 xlabel='', ylabel='', title='', ticksize = 9,
190 190 colormap='jet',cblabel='', cbsize="5%",
191 191 XAxisAsTime=False):
192 192
193 193 matplotlib.pyplot.ioff()
194 194
195 195 divider = make_axes_locatable(ax)
196 196 ax_cb = divider.new_horizontal(size=cbsize, pad=0.05)
197 197 fig = ax.get_figure()
198 198 fig.add_axes(ax_cb)
199 199
200 200 ax.set_xlim([xmin,xmax])
201 201 ax.set_ylim([ymin,ymax])
202 202
203 203 printLabels(ax, xlabel, ylabel, title)
204 204
205 205 z = numpy.ma.masked_invalid(z)
206 206 cmap=matplotlib.pyplot.get_cmap(colormap)
207 cmap.set_bad('white', 1.)
207 cmap.set_bad('black', 1.)
208 208 imesh = ax.pcolormesh(x,y,z.T, vmin=zmin, vmax=zmax, cmap=cmap)
209 209 cb = matplotlib.pyplot.colorbar(imesh, cax=ax_cb)
210 210 cb.set_label(cblabel)
211 211
212 212 # for tl in ax_cb.get_yticklabels():
213 213 # tl.set_visible(True)
214 214
215 215 for tick in ax.yaxis.get_major_ticks():
216 216 tick.label.set_fontsize(ticksize)
217 217
218 218 for tick in ax.xaxis.get_major_ticks():
219 219 tick.label.set_fontsize(ticksize)
220 220
221 221 for tick in cb.ax.get_yticklabels():
222 222 tick.set_fontsize(ticksize)
223 223
224 224 ax_cb.yaxis.tick_right()
225 225
226 226 if '0.' in matplotlib.__version__[0:2]:
227 227 print "The matplotlib version has to be updated to 1.1 or newer"
228 228 return imesh
229 229
230 230 if '1.0.' in matplotlib.__version__[0:4]:
231 231 print "The matplotlib version has to be updated to 1.1 or newer"
232 232 return imesh
233 233
234 234 matplotlib.pyplot.tight_layout()
235 235
236 236 if XAxisAsTime:
237 237
238 238 func = lambda x, pos: ('%s') %(datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S"))
239 239 ax.xaxis.set_major_formatter(FuncFormatter(func))
240 240 ax.xaxis.set_major_locator(LinearLocator(7))
241 241
242 242 matplotlib.pyplot.ion()
243 243 return imesh
244 244
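createPcolor builds the mesh plus its attached colorbar and returns the QuadMesh; pcolor() below only calls set_array on it, which is what the RTI-style plots rely on to refresh a panel cheaply. A sketch with random data, again assuming this module's functions are in scope:

import numpy

fig = createFigure(id=2, wintitle="pcolor demo", width=600, height=400, show=False)
ax = createAxes(fig, nrow=1, ncol=1, xpos=0, ypos=0, colspan=1, rowspan=1)

x = numpy.arange(24)            # e.g. hours
y = numpy.arange(100)           # e.g. range gates
z = numpy.random.rand(24, 100)  # shape (len(x), len(y)); createPcolor transposes it

imesh = createPcolor(ax, x, y, z, xmin=0, xmax=24, ymin=0, ymax=100,
                     zmin=0, zmax=1, xlabel="Time", ylabel="Range",
                     title="demo", cblabel="power")

# Subsequent blocks only update the stored array.
pcolor(imesh, numpy.random.rand(24, 100), xlabel="Time", ylabel="Range", title="updated")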
245 245 def pcolor(imesh, z, xlabel='', ylabel='', title=''):
246 246
247 247 z = z.T
248 248 ax = imesh.axes
249 249 printLabels(ax, xlabel, ylabel, title)
250 250 imesh.set_array(z.ravel())
251 251
252 252 def addpcolor(ax, x, y, z, zmin, zmax, xlabel='', ylabel='', title='', colormap='jet'):
253 253
254 254 printLabels(ax, xlabel, ylabel, title)
255 255
256 256 ax.pcolormesh(x,y,z.T,vmin=zmin,vmax=zmax, cmap=matplotlib.pyplot.get_cmap(colormap))
257 257
258 258 def addpcolorbuffer(ax, x, y, z, zmin, zmax, xlabel='', ylabel='', title='', colormap='jet'):
259 259
260 260 printLabels(ax, xlabel, ylabel, title)
261 261
262 262 ax.collections.remove(ax.collections[0])
263 263
264 264 z = numpy.ma.masked_invalid(z)
265 265
266 266 cmap=matplotlib.pyplot.get_cmap(colormap)
267 cmap.set_bad('white', 1.)
267 cmap.set_bad('black', 1.)
268 268
269 269
270 270 ax.pcolormesh(x,y,z.T,vmin=zmin,vmax=zmax, cmap=cmap)
271 271
272 272 def createPmultiline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', legendlabels=None,
273 273 ticksize=9, xtick_visible=True, ytick_visible=True,
274 274 nxticks=4, nyticks=10,
275 275 grid=None):
276 276
277 277 """
278 278
279 279 Input:
280 280 grid : None, 'both', 'x', 'y'
281 281 """
282 282
283 283 matplotlib.pyplot.ioff()
284 284
285 285 lines = ax.plot(x.T, y)
286 286 leg = ax.legend(lines, legendlabels, loc='upper right')
287 287 leg.get_frame().set_alpha(0.5)
288 288 ax.set_xlim([xmin,xmax])
289 289 ax.set_ylim([ymin,ymax])
290 290 printLabels(ax, xlabel, ylabel, title)
291 291
292 292 xtickspos = numpy.arange(nxticks)*int((xmax-xmin)/(nxticks)) + int(xmin)
293 293 ax.set_xticks(xtickspos)
294 294
295 295 for tick in ax.get_xticklabels():
296 296 tick.set_visible(xtick_visible)
297 297
298 298 for tick in ax.xaxis.get_major_ticks():
299 299 tick.label.set_fontsize(ticksize)
300 300
301 301 for tick in ax.get_yticklabels():
302 302 tick.set_visible(ytick_visible)
303 303
304 304 for tick in ax.yaxis.get_major_ticks():
305 305 tick.label.set_fontsize(ticksize)
306 306
307 307 iplot = ax.lines[-1]
308 308
309 309 if '0.' in matplotlib.__version__[0:2]:
310 310 print "The matplotlib version has to be updated to 1.1 or newer"
311 311 return iplot
312 312
313 313 if '1.0.' in matplotlib.__version__[0:4]:
314 314 print "The matplotlib version has to be updated to 1.1 or newer"
315 315 return iplot
316 316
317 317 if grid != None:
318 318 ax.grid(b=True, which='major', axis=grid)
319 319
320 320 matplotlib.pyplot.tight_layout()
321 321
322 322 matplotlib.pyplot.ion()
323 323
324 324 return iplot
325 325
326 326
327 327 def pmultiline(iplot, x, y, xlabel='', ylabel='', title=''):
328 328
329 329 ax = iplot.axes
330 330
331 331 printLabels(ax, xlabel, ylabel, title)
332 332
333 333 for i in range(len(ax.lines)):
334 334 line = ax.lines[i]
335 335 line.set_data(x[i,:],y)
336 336
337 337 def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', legendlabels=None,
338 338 ticksize=9, xtick_visible=True, ytick_visible=True,
339 339 nxticks=4, nyticks=10, marker='.', markersize=10, linestyle="None",
340 340 grid=None, XAxisAsTime=False):
341 341
342 342 """
343 343
344 344 Input:
345 345 grid : None, 'both', 'x', 'y'
346 346 """
347 347
348 348 matplotlib.pyplot.ioff()
349 349
350 350 # lines = ax.plot(x, y.T, marker=marker,markersize=markersize,linestyle=linestyle)
351 351 lines = ax.plot(x, y.T)
352 352 # leg = ax.legend(lines, legendlabels, loc=2, bbox_to_anchor=(1.01, 1.00), numpoints=1, handlelength=1.5, \
353 353 # handletextpad=0.5, borderpad=0.5, labelspacing=0.5, borderaxespad=0.)
354 354
355 355 leg = ax.legend(lines, legendlabels,
356 356 loc='upper right', bbox_to_anchor=(1.16, 1), borderaxespad=0)
357 357
358 358 for label in leg.get_texts(): label.set_fontsize(9)
359 359
360 360 ax.set_xlim([xmin,xmax])
361 361 ax.set_ylim([ymin,ymax])
362 362 printLabels(ax, xlabel, ylabel, title)
363 363
364 364 # xtickspos = numpy.arange(nxticks)*int((xmax-xmin)/(nxticks)) + int(xmin)
365 365 # ax.set_xticks(xtickspos)
366 366
367 367 for tick in ax.get_xticklabels():
368 368 tick.set_visible(xtick_visible)
369 369
370 370 for tick in ax.xaxis.get_major_ticks():
371 371 tick.label.set_fontsize(ticksize)
372 372
373 373 for tick in ax.get_yticklabels():
374 374 tick.set_visible(ytick_visible)
375 375
376 376 for tick in ax.yaxis.get_major_ticks():
377 377 tick.label.set_fontsize(ticksize)
378 378
379 379 iplot = ax.lines[-1]
380 380
381 381 if '0.' in matplotlib.__version__[0:2]:
382 382 print "The matplotlib version has to be updated to 1.1 or newer"
383 383 return iplot
384 384
385 385 if '1.0.' in matplotlib.__version__[0:4]:
386 386 print "The matplotlib version has to be updated to 1.1 or newer"
387 387 return iplot
388 388
389 389 if grid != None:
390 390 ax.grid(b=True, which='major', axis=grid)
391 391
392 392 matplotlib.pyplot.tight_layout()
393 393
394 394 if XAxisAsTime:
395 395
396 396 func = lambda x, pos: ('%s') %(datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S"))
397 397 ax.xaxis.set_major_formatter(FuncFormatter(func))
398 398 ax.xaxis.set_major_locator(LinearLocator(7))
399 399
400 400 matplotlib.pyplot.ion()
401 401
402 402 return iplot
403 403
404 404 def pmultilineyaxis(iplot, x, y, xlabel='', ylabel='', title=''):
405 405
406 406 ax = iplot.axes
407 407
408 408 printLabels(ax, xlabel, ylabel, title)
409 409
410 410 for i in range(len(ax.lines)):
411 411 line = ax.lines[i]
412 412 line.set_data(x,y[i,:])
413 413
414 414 def createPolar(ax, x, y,
415 415 xlabel='', ylabel='', title='', ticksize = 9,
416 416 colormap='jet',cblabel='', cbsize="5%",
417 417 XAxisAsTime=False):
418 418
419 419 matplotlib.pyplot.ioff()
420 420
421 421 ax.plot(x,y,'bo', markersize=5)
422 422 # ax.set_rmax(90)
423 423 ax.set_ylim(0,90)
424 424 ax.set_yticks(numpy.arange(0,90,20))
425 425 # ax.text(0, -110, ylabel, rotation='vertical', va ='center', ha = 'center' ,size='11')
426 426 # ax.text(0, 50, ylabel, rotation='vertical', va ='center', ha = 'left' ,size='11')
427 427 # ax.text(100, 100, 'example', ha='left', va='center', rotation='vertical')
428 428 ax.yaxis.labelpad = 40
429 429 printLabels(ax, xlabel, ylabel, title)
430 430 iplot = ax.lines[-1]
431 431
432 432 if '0.' in matplotlib.__version__[0:2]:
433 433 print "The matplotlib version has to be updated to 1.1 or newer"
434 434 return iplot
435 435
436 436 if '1.0.' in matplotlib.__version__[0:4]:
437 437 print "The matplotlib version has to be updated to 1.1 or newer"
438 438 return iplot
439 439
440 440 # if grid != None:
441 441 # ax.grid(b=True, which='major', axis=grid)
442 442
443 443 matplotlib.pyplot.tight_layout()
444 444
445 445 matplotlib.pyplot.ion()
446 446
447 447
448 448 return iplot
449 449
450 450 def polar(iplot, x, y, xlabel='', ylabel='', title=''):
451 451
452 452 ax = iplot.axes
453 453
454 454 # ax.text(0, -110, ylabel, rotation='vertical', va ='center', ha = 'center',size='11')
455 455 printLabels(ax, xlabel, ylabel, title)
456 456
457 457 set_linedata(ax, x, y, idline=0)
458 458
459 459 def draw(fig):
460 460
461 461 if isinstance(fig, int): # note: comparing type(fig) to the string 'int' was always False
462 462 raise ValueError, "Error drawing: Fig parameter should be a matplotlib figure object"
463 463
464 464 fig.canvas.draw()
465 465
466 466 def pause(interval=0.000001):
467 467
468 468 matplotlib.pyplot.pause(interval)
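draw() and pause() are the hooks used to refresh a figure between data blocks while keeping the GUI event loop alive. A sketch of that refresh loop, assuming this module's helpers are in scope:

import numpy

fig = createFigure(id=3, wintitle="live demo", width=600, height=400, show=True)
ax = createAxes(fig, nrow=1, ncol=1, xpos=0, ypos=0, colspan=1, rowspan=1)
x = numpy.linspace(0, 10, 200)
iplot = createPline(ax, x, numpy.sin(x), 0, 10, -1.5, 1.5, title="live")

for k in range(10):
    pline(iplot, x, numpy.sin(x + 0.3 * k), title="frame %d" % k)
    draw(fig)    # push the new data to the canvas
    pause(0.1)   # give the GUI time to repaint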
@@ -1,1750 +1,1794
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 import inspect
12 13 import time, datetime
13 14 #import h5py
14 15 import traceback
15 16
16 17 try:
17 18 from gevent import sleep
18 19 except:
19 20 from time import sleep
20 21
21 22 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
22 23 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
23 24
24 25 LOCALTIME = True
25 26
26 27 def isNumber(cad):
27 28 """
28 29 Checks whether the characters that make up a string can be converted to a number.
29 30
30 31 Exceptions:
31 32 If the given string cannot be converted to a number
32 33 Input:
33 34 str, the string to analyze to determine whether or not it can be converted to a number
34 35
35 36 Return:
36 37 True : if the string is numeric
37 38 False : if it is not a numeric string
38 39 """
39 40 try:
40 41 float( cad )
41 42 return True
42 43 except:
43 44 return False
44 45
45 46 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
46 47 """
47 48 This function determines whether or not a data file falls inside the specified date range.
48 49
49 50 Inputs:
50 51 filename : full name of the data file in Jicamarca format (.r)
51 52
52 53 startUTSeconds : start of the selected range. The date is given in
53 54 seconds counted from 01/01/1970.
54 55 endUTSeconds : end of the selected range. The date is given in
55 56 seconds counted from 01/01/1970.
56 57
57 58 Return:
58 59 Boolean : returns True if the data file contains data inside the specified
59 60 date range, otherwise it returns False.
60 61
61 62 Exceptions:
62 63 If the file does not exist or cannot be opened
63 64 If the header cannot be read.
64 65
65 66 """
66 67 basicHeaderObj = BasicHeader(LOCALTIME)
67 68
68 69 try:
69 70 fp = open(filename,'rb')
70 71 except IOError:
71 72 print "The file %s can't be opened" %(filename)
72 73 return 0
73 74
74 75 sts = basicHeaderObj.read(fp)
75 76 fp.close()
76 77
77 78 if not(sts):
78 79 print "Skipping the file %s because it does not have a valid header" %(filename)
79 80 return 0
80 81
81 82 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
82 83 return 0
83 84
84 85 return 1
85 86
86 87 def isTimeInRange(thisTime, startTime, endTime):
87 88
88 89 if endTime >= startTime:
89 90 if (thisTime < startTime) or (thisTime > endTime):
90 91 return 0
91 92
92 93 return 1
93 94 else:
94 95 if (thisTime < startTime) and (thisTime > endTime):
95 96 return 0
96 97
97 98 return 1
98 99
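isTimeInRange treats an endTime earlier than startTime as a window that wraps past midnight, which is why both branches are needed. For example, with the function above in scope:

import datetime

night = (datetime.time(22, 0, 0), datetime.time(2, 0, 0))  # wraps past midnight
print(isTimeInRange(datetime.time(23, 30, 0), *night))     # 1 -> inside the window
print(isTimeInRange(datetime.time(12, 0, 0), *night))      # 0 -> outside the window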
99 100 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
100 101 """
101 102 Returns 1 if the data file falls inside the specified time range.
102 103
103 104 Inputs:
104 105 filename : full name of the data file in Jicamarca format (.r)
105 106
106 107 startDate : start date of the selected range, as datetime.date
107 108
108 109 endDate : end date of the selected range, as datetime.date
109 110
110 111 startTime : start time of the selected range, as datetime.time
111 112
112 113 endTime : end time of the selected range, as datetime.time
113 114
114 115 Return:
115 116 Boolean : returns True if the data file contains data inside the specified
116 117 range, otherwise it returns False.
117 118
118 119 Exceptions:
119 120 If the file does not exist or cannot be opened
120 121 If the header cannot be read.
121 122
122 123 """
123 124
124 125
125 126 try:
126 127 fp = open(filename,'rb')
127 128 except IOError:
128 129 print "The file %s can't be opened" %(filename)
129 130 return None
130 131
131 132 firstBasicHeaderObj = BasicHeader(LOCALTIME)
132 133 systemHeaderObj = SystemHeader()
133 134 radarControllerHeaderObj = RadarControllerHeader()
134 135 processingHeaderObj = ProcessingHeader()
135 136
136 137 lastBasicHeaderObj = BasicHeader(LOCALTIME)
137 138
138 139 sts = firstBasicHeaderObj.read(fp)
139 140
140 141 if not(sts):
141 142 print "[Reading] Skipping the file %s because it does not have a valid header" %(filename)
142 143 return None
143 144
144 145 if not systemHeaderObj.read(fp):
145 146 return None
146 147
147 148 if not radarControllerHeaderObj.read(fp):
148 149 return None
149 150
150 151 if not processingHeaderObj.read(fp):
151 152 return None
152 153
153 154 filesize = os.path.getsize(filename)
154 155
155 156 offset = processingHeaderObj.blockSize + 24 #header size
156 157
157 158 if filesize <= offset:
158 159 print "[Reading] %s: This file does not have enough data" %filename
159 160 return None
160 161
161 162 fp.seek(-offset, 2)
162 163
163 164 sts = lastBasicHeaderObj.read(fp)
164 165
165 166 fp.close()
166 167
167 168 thisDatetime = lastBasicHeaderObj.datatime
168 169 thisTime_last_block = thisDatetime.time()
169 170
170 171 thisDatetime = firstBasicHeaderObj.datatime
171 172 thisDate = thisDatetime.date()
172 173 thisTime_first_block = thisDatetime.time()
173 174
174 175 #General case
175 176 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
176 177 #-----------o----------------------------o-----------
177 178 # startTime endTime
178 179
179 180 if endTime >= startTime:
180 181 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
181 182 return None
182 183
183 184 return thisDatetime
184 185
185 186 #If endTime < startTime then endTime belongs to the next day
186 187
187 188
188 189 #<<<<<<<<<<<o o>>>>>>>>>>>
189 190 #-----------o----------------------------o-----------
190 191 # endTime startTime
191 192
192 193 if (thisDate == startDate) and (thisTime_last_block < startTime):
193 194 return None
194 195
195 196 if (thisDate == endDate) and (thisTime_first_block > endTime):
196 197 return None
197 198
198 199 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
199 200 return None
200 201
201 202 return thisDatetime
202 203
203 204 def isFolderInDateRange(folder, startDate=None, endDate=None):
204 205 """
205 206 Returns 1 if the data folder falls inside the specified date range.
206 207
207 208 Inputs:
208 209 folder : full name of the directory.
209 210 Its format should be "/path_root/?YYYYDDD"
210 211
211 212 where:
212 213 YYYY : year (e.g. 2015)
213 214 DDD : day of the year (e.g. 305)
214 215
215 216 startDate : start date of the selected range, as datetime.date
216 217
217 218 endDate : end date of the selected range, as datetime.date
218 219
219 220 Return:
220 221 Boolean : returns True if the folder contains data inside the specified
221 222 date range, otherwise it returns False.
222 223 Exceptions:
223 224 If the directory does not have the expected format
224 225 """
225 226
226 227 basename = os.path.basename(folder)
227 228
228 229 if not isRadarFolder(basename):
229 230 print "The folder %s does not have the right format" %folder
230 231 return 0
231 232
232 233 if startDate and endDate:
233 234 thisDate = getDateFromRadarFolder(basename)
234 235
235 236 if thisDate < startDate:
236 237 return 0
237 238
238 239 if thisDate > endDate:
239 240 return 0
240 241
241 242 return 1
242 243
243 244 def isFileInDateRange(filename, startDate=None, endDate=None):
244 245 """
245 246 Returns 1 if the data file falls inside the specified date range.
246 247
247 248 Inputs:
248 249 filename : full name of the data file in Jicamarca format (.r)
249 250
250 251 Its format should be "?YYYYDDDsss"
251 252
252 253 where:
253 254 YYYY : year (e.g. 2015)
254 255 DDD : day of the year (e.g. 305)
255 256 sss : set
256 257
257 258 startDate : start date of the selected range, as datetime.date
258 259
259 260 endDate : end date of the selected range, as datetime.date
260 261
261 262 Return:
262 263 Boolean : returns True if the data file contains data inside the specified
263 264 date range, otherwise it returns False.
264 265 Exceptions:
265 266 If the file does not have the expected format
266 267 """
267 268
268 269 basename = os.path.basename(filename)
269 270
270 271 if not isRadarFile(basename):
271 272 print "The filename %s does not have the right format" %filename
272 273 return 0
273 274
274 275 if startDate and endDate:
275 276 thisDate = getDateFromRadarFile(basename)
276 277
277 278 if thisDate < startDate:
278 279 return 0
279 280
280 281 if thisDate > endDate:
281 282 return 0
282 283
283 284 return 1
284 285
285 286 def getFileFromSet(path, ext, set):
286 287 validFilelist = []
287 288 fileList = os.listdir(path)
288 289
289 290 # 0 1234 567 89A BCDE
290 291 # H YYYY DDD SSS .ext
291 292
292 293 for thisFile in fileList:
293 294 try:
294 295 year = int(thisFile[1:5])
295 296 doy = int(thisFile[5:8])
296 297 except:
297 298 continue
298 299
299 300 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
300 301 continue
301 302
302 303 validFilelist.append(thisFile)
303 304
304 305 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
305 306
306 307 if len(myfile)!= 0:
307 308 return myfile[0]
308 309 else:
309 310 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
310 311 print 'the filename %s does not exist'%filename
311 312 print '...going to the last file: '
312 313
313 314 if validFilelist:
314 315 validFilelist = sorted( validFilelist, key=str.lower )
315 316 return validFilelist[-1]
316 317
317 318 return None
318 319
319 320 def getlastFileFromPath(path, ext):
320 321 """
321 322 Filters fileList, keeping only the entries that match the "PYYYYDDDSSS.ext" format,
322 323 and returns the last file of the filtered list.
323 324
324 325 Input:
325 326 fileList : list containing all the files (without path) found in a given folder
326 327 ext : extension of the files contained in the folder
327 328
328 329 Return:
329 330 The last file of the given folder; the path is not included.
330 331 """
331 332 validFilelist = []
332 333 fileList = os.listdir(path)
333 334
334 335 # 0 1234 567 89A BCDE
335 336 # H YYYY DDD SSS .ext
336 337
337 338 for thisFile in fileList:
338 339
339 340 year = thisFile[1:5]
340 341 if not isNumber(year):
341 342 continue
342 343
343 344 doy = thisFile[5:8]
344 345 if not isNumber(doy):
345 346 continue
346 347
347 348 year = int(year)
348 349 doy = int(doy)
349 350
350 351 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
351 352 continue
352 353
353 354 validFilelist.append(thisFile)
354 355
355 356 if validFilelist:
356 357 validFilelist = sorted( validFilelist, key=str.lower )
357 358 return validFilelist[-1]
358 359
359 360 return None
360 361
361 362 def checkForRealPath(path, foldercounter, year, doy, set, ext):
362 363 """
363 364 Because Linux is case sensitive, checkForRealPath finds the correct name of a path.
364 365 It tries several upper/lower-case combinations of the name in order to determine
365 366 the exact path of a given file.
366 367
367 368 Example :
368 369 the correct file name is .../.../D2009307/P2009307367.ext
369 370
370 371 The function then tries the following combinations
371 372 .../.../y2009307367.ext
372 373 .../.../Y2009307367.ext
373 374 .../.../x2009307/y2009307367.ext
374 375 .../.../x2009307/Y2009307367.ext
375 376 .../.../X2009307/y2009307367.ext
376 377 .../.../X2009307/Y2009307367.ext
377 378 where, in this case, the last letter combination is identical to the file being searched for
378 379
379 380 Return:
380 381 If the right combination is found, it returns the full path and the file name;
381 382 otherwise it returns None as the path and the last upper-case name combination
382 383 as the filename
383 384 """
384 385 fullfilename = None
385 386 find_flag = False
386 387 filename = None
387 388
388 389 prefixDirList = [None,'d','D']
389 390 if ext.lower() == ".r": #voltage
390 391 prefixFileList = ['d','D']
391 392 elif ext.lower() == ".pdata": #spectra
392 393 prefixFileList = ['p','P']
393 394 else:
394 395 return None, filename
395 396
396 397 # sweep over the possible combinations
397 398 for prefixDir in prefixDirList:
398 399 thispath = path
399 400 if prefixDir != None:
400 401 # build the directory name xYYYYDDD (x=d or x=D)
401 402 if foldercounter == 0:
402 403 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
403 404 else:
404 405 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
405 406 for prefixFile in prefixFileList: # try the two possible case combinations of "D"
406 407 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) # build the file name xYYYYDDDSSS.ext
407 408 fullfilename = os.path.join( thispath, filename ) # build the full path
408 409
409 410 if os.path.exists( fullfilename ): # check that it exists
410 411 find_flag = True
411 412 break
412 413 if find_flag:
413 414 break
414 415
415 416 if not(find_flag):
416 417 return None, filename
417 418
418 419 return fullfilename, filename
419 420
420 421 def isRadarFolder(folder):
421 422 try:
422 423 year = int(folder[1:5])
423 424 doy = int(folder[5:8])
424 425 except:
425 426 return 0
426 427
427 428 return 1
428 429
429 430 def isRadarFile(file):
430 431 try:
431 432 year = int(file[1:5])
432 433 doy = int(file[5:8])
433 434 set = int(file[8:11])
434 435 except:
435 436 return 0
436 437
437 438 return 1
438 439
439 440 def getDateFromRadarFile(file):
440 441 try:
441 442 year = int(file[1:5])
442 443 doy = int(file[5:8])
443 444 set = int(file[8:11])
444 445 except:
445 446 return None
446 447
447 448 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
448 449 return thisDate
449 450
450 451 def getDateFromRadarFolder(folder):
451 452 try:
452 453 year = int(folder[1:5])
453 454 doy = int(folder[5:8])
454 455 except:
455 456 return None
456 457
457 458 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
458 459 return thisDate
459 460
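The Jicamarca naming convention encodes the year and the day of the year directly in folder names ('dYYYYDDD') and file names ('dYYYYDDDSSS.r'), and the two helpers above turn those into datetime.date objects. For example:

print(getDateFromRadarFolder("d2015305"))     # 2015-11-01 (day 305 of 2015)
print(getDateFromRadarFile("d2015305001.r"))  # 2015-11-01 (same day, set 001)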
460 461 class JRODataIO:
461 462
462 463 c = 3E8
463 464
464 465 isConfig = False
465 466
466 467 basicHeaderObj = None
467 468
468 469 systemHeaderObj = None
469 470
470 471 radarControllerHeaderObj = None
471 472
472 473 processingHeaderObj = None
473 474
474 475 dtype = None
475 476
476 477 pathList = []
477 478
478 479 filenameList = []
479 480
480 481 filename = None
481 482
482 483 ext = None
483 484
484 485 flagIsNewFile = 1
485 486
486 487 flagDiscontinuousBlock = 0
487 488
488 489 flagIsNewBlock = 0
489 490
490 491 fp = None
491 492
492 493 firstHeaderSize = 0
493 494
494 495 basicHeaderSize = 24
495 496
496 497 versionFile = 1103
497 498
498 499 fileSize = None
499 500
500 501 # ippSeconds = None
501 502
502 503 fileSizeByHeader = None
503 504
504 505 fileIndex = None
505 506
506 507 profileIndex = None
507 508
508 509 blockIndex = None
509 510
510 511 nTotalBlocks = None
511 512
512 513 maxTimeStep = 30
513 514
514 515 lastUTTime = None
515 516
516 517 datablock = None
517 518
518 519 dataOut = None
519 520
520 521 blocksize = None
521 522
522 523 getByBlock = False
523 524
524 525 def __init__(self):
525 526
526 527 raise NotImplementedError
527 528
528 529 def run(self):
529 530
530 531 raise NotImplementedError
531 532
532 533 def getDtypeWidth(self):
533 534
534 535 dtype_index = get_dtype_index(self.dtype)
535 536 dtype_width = get_dtype_width(dtype_index)
536 537
537 538 return dtype_width
538 539
540 def getAllowedArgs(self):
541 return inspect.getargspec(self.run).args
542
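getAllowedArgs, added in this revision, reads the argument names of run() with the Python 2 inspect.getargspec API, presumably so the controller or CLI can discover and validate the keyword arguments a unit accepts. A self-contained sketch of how such a check could work (the Demo class and kwargs are illustrative, not part of schainpy):

import inspect

class Demo(object):
    def run(self, path=None, startDate=None, endDate=None, walk=True):
        pass
    def getAllowedArgs(self):
        return inspect.getargspec(self.run).args

kwargs = {'path': '/data', 'walk': False, 'bad_arg': 1}
allowed = Demo().getAllowedArgs()   # ['self', 'path', 'startDate', 'endDate', 'walk']
unknown = [k for k in kwargs if k not in allowed]
print(unknown)                      # ['bad_arg'] -> an argument run() would not accept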
539 543 class JRODataReader(JRODataIO):
540 544
541 545
542 546 online = 0
543 547
544 548 realtime = 0
545 549
546 550 nReadBlocks = 0
547 551
548 552 delay = 10 #number of seconds waiting a new file
549 553
550 554 nTries = 3 #quantity tries
551 555
552 556 nFiles = 3 #number of files for searching
553 557
554 558 path = None
555 559
556 560 foldercounter = 0
557 561
558 562 flagNoMoreFiles = 0
559 563
560 564 datetimeList = []
561 565
562 566 __isFirstTimeOnline = 1
563 567
564 568 __printInfo = True
565 569
566 570 profileIndex = None
567 571
568 572 nTxs = 1
569 573
570 574 txIndex = None
571 575
572 576 #Added--------------------
573 577
574 578 selBlocksize = None
575 579
576 580 selBlocktime = None
577 581
578 582
579 583 def __init__(self):
580 584
581 585 """
582 586 This class is used to find data files
583 587
584 588 Example:
585 589 reader = JRODataReader()
586 590 fileList = reader.findDataFiles()
587 591
588 592 """
589 593 pass
590 594
591 595
592 596 def createObjByDefault(self):
593 597 """
594 598
595 599 """
596 600 raise NotImplementedError
597 601
598 602 def getBlockDimension(self):
599 603
600 604 raise NotImplementedError
601 605
602 606 def __searchFilesOffLine(self,
603 607 path,
604 608 startDate=None,
605 609 endDate=None,
606 610 startTime=datetime.time(0,0,0),
607 611 endTime=datetime.time(23,59,59),
608 612 set=None,
609 613 expLabel='',
610 614 ext='.r',
611 615 queue=None,
612 616 cursor=None,
613 617 skip=None,
614 618 walk=True):
615 619
616 620 self.filenameList = []
617 621 self.datetimeList = []
618 622
619 623 pathList = []
620 624
621 625 dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
622 626
623 627 if dateList == []:
624 628 # print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" %(startDate, endDate, ext, path)
625 629 return None, None
626 630
627 631 if len(dateList) > 1:
628 632 print "[Reading] Data found for date range [%s - %s]: total days = %d" %(startDate, endDate, len(dateList))
629 633 else:
630 634 print "[Reading] Data found for date range [%s - %s]: date = %s" %(startDate, endDate, dateList[0])
631 635
632 636 filenameList = []
633 637 datetimeList = []
634 638
635 639 for thisPath in pathList:
636 640 # thisPath = pathList[pathDict[file]]
637 641
638 642 fileList = glob.glob1(thisPath, "*%s" %ext)
639 643 fileList.sort()
640 644
641 645 skippedFileList = []
642 646
643 647 if cursor is not None and skip is not None:
644 648 # if cursor*skip > len(fileList):
645 649 if skip == 0:
646 650 if queue is not None:
647 651 queue.put(len(fileList))
648 652 skippedFileList = []
649 653 else:
650 654 skippedFileList = fileList[cursor*skip: cursor*skip + skip]
651 655
652 656 else:
653 657 skippedFileList = fileList
654 658
655 659 for file in skippedFileList:
656 660
657 661 filename = os.path.join(thisPath,file)
658 662
659 663 if not isFileInDateRange(filename, startDate, endDate):
660 664 continue
661 665
662 666 thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
663 667
664 668 if not(thisDatetime):
665 669 continue
666 670
667 671 filenameList.append(filename)
668 672 datetimeList.append(thisDatetime)
669 673
670 674 if not(filenameList):
671 675 print "[Reading] Selected time range is invalid [%s - %s]: no *%s files in %s" %(startTime, endTime, ext, path)
672 676 return None, None
673 677
674 678 print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
675 679 print
676 680
677 681 for i in range(len(filenameList)):
678 682 print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
679 683
680 684 self.filenameList = filenameList
681 685 self.datetimeList = datetimeList
682 686
683 687 return pathList, filenameList
684 688
685 689 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
686 690
687 691 """
688 692 Looks for the last file of the last folder (whether or not determined by startDateTime) and
689 693 returns the file found, along with some additional information.
690 694
691 695 Input:
692 696 path : folder that contains the data files
693 697
694 698 expLabel : name of the sub-experiment (subfolder)
695 699
696 700 ext : extension of the files
697 701
698 702 walk : when enabled, the search descends into the day subdirectories (doypath)
699 703
700 704 Return:
701 705 directory : the directory where the file was found
702 706 filename : the last file of the given folder
703 707 year : the year
704 708 doy : the day of the year
705 709 set : the set number of the file
706 710
707 711
708 712 """
709 713 if not os.path.isdir(path):
710 714 return None, None, None, None, None, None
711 715
712 716 dirList = []
713 717
714 718 if not walk:
715 719 fullpath = path
716 720 foldercounter = 0
717 721 else:
719 723 # keep only the directories
719 723 for thisPath in os.listdir(path):
720 724 if not os.path.isdir(os.path.join(path,thisPath)):
721 725 continue
722 726 if not isRadarFolder(thisPath):
723 727 continue
724 728
725 729 dirList.append(thisPath)
726 730
727 731 if not(dirList):
728 732 return None, None, None, None, None, None
729 733
730 734 dirList = sorted( dirList, key=str.lower )
731 735
732 736 doypath = dirList[-1]
733 737 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
734 738 fullpath = os.path.join(path, doypath, expLabel)
735 739
736 740
737 741 print "[Reading] %s folder was found: " %(fullpath )
738 742
739 743 if set == None:
740 744 filename = getlastFileFromPath(fullpath, ext)
741 745 else:
742 746 filename = getFileFromSet(fullpath, ext, set)
743 747
744 748 if not(filename):
745 749 return None, None, None, None, None, None
746 750
747 751 print "[Reading] %s file was found" %(filename)
748 752
749 753 if not(self.__verifyFile(os.path.join(fullpath, filename))):
750 754 return None, None, None, None, None, None
751 755
752 756 year = int( filename[1:5] )
753 757 doy = int( filename[5:8] )
754 758 set = int( filename[8:11] )
755 759
756 760 return fullpath, foldercounter, filename, year, doy, set
757 761
758 762 def __setNextFileOffline(self):
759 763
760 764 idFile = self.fileIndex
761 765
762 766 while (True):
763 767 idFile += 1
764 768 if not(idFile < len(self.filenameList)):
765 769 self.flagNoMoreFiles = 1
766 770 # print "[Reading] No more Files"
767 771 return 0
768 772
769 773 filename = self.filenameList[idFile]
770 774
771 775 if not(self.__verifyFile(filename)):
772 776 continue
773 777
774 778 fileSize = os.path.getsize(filename)
775 779 fp = open(filename,'rb')
776 780 break
777 781
778 782 self.flagIsNewFile = 1
779 783 self.fileIndex = idFile
780 784 self.filename = filename
781 785 self.fileSize = fileSize
782 786 self.fp = fp
783 787
784 788 # print "[Reading] Setting the file: %s"%self.filename
785 789
786 790 return 1
787 791
788 792 def __setNextFileOnline(self):
789 793 """
790 794 Looks for the next file with enough data to be read inside a specific folder; if
791 795 no valid file is found it waits for a given time and then searches the next n
792 796 possible files.
793 797
794 798 Affected:
795 799 self.flagIsNewFile
796 800 self.filename
797 801 self.fileSize
798 802 self.fp
799 803 self.set
800 804 self.flagNoMoreFiles
801 805
802 806 Return:
803 807 0 : if, after searching for the next valid file, none could be found
804 808 1 : if the file was opened successfully and is ready to be read
805 809
806 810 Exceptions:
807 811 If a given file cannot be opened
808 812 """
809 813 nFiles = 0
810 814 fileOk_flag = False
811 815 firstTime_flag = True
812 816
813 817 self.set += 1
814 818
815 819 if self.set > 999:
816 820 self.set = 0
817 821 self.foldercounter += 1
818 822
819 823 # look for the first available file
820 824 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
821 825 if fullfilename:
822 826 if self.__verifyFile(fullfilename, False):
823 827 fileOk_flag = True
824 828
825 829 # if no file is found, wait and then search again
826 830 if not(fileOk_flag):
827 831 for nFiles in range(self.nFiles+1): # search the next self.nFiles+1 possible files
828 832
829 833 if firstTime_flag: # on the first pass, retry up to self.nTries times
830 834 tries = self.nTries
831 835 else:
832 836 tries = 1 # on later passes, try only once
833 837
834 838 for nTries in range( tries ):
835 839 if firstTime_flag:
836 840 print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
837 841 sleep( self.delay )
838 842 else:
839 843 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
840 844
841 845 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
842 846 if fullfilename:
843 847 if self.__verifyFile(fullfilename):
844 848 fileOk_flag = True
845 849 break
846 850
847 851 if fileOk_flag:
848 852 break
849 853
850 854 firstTime_flag = False
851 855
852 856 print "\t[Reading] Skipping the file \"%s\" because this file doesn't exist" % filename
853 857 self.set += 1
854 858
855 859 if nFiles == (self.nFiles-1): # if the requested file is not found, move on and search the next folder
856 860 self.set = 0
857 861 self.doy += 1
858 862 self.foldercounter = 0
859 863
860 864 if fileOk_flag:
861 865 self.fileSize = os.path.getsize( fullfilename )
862 866 self.filename = fullfilename
863 867 self.flagIsNewFile = 1
864 868 if self.fp != None: self.fp.close()
865 869 self.fp = open(fullfilename, 'rb')
866 870 self.flagNoMoreFiles = 0
867 871 # print '[Reading] Setting the file: %s' % fullfilename
868 872 else:
869 873 self.fileSize = 0
870 874 self.filename = None
871 875 self.flagIsNewFile = 0
872 876 self.fp = None
873 877 self.flagNoMoreFiles = 1
874 878 # print '[Reading] No more files to read'
875 879
876 880 return fileOk_flag
877 881
878 882 def setNextFile(self):
879 883 if self.fp != None:
880 884 self.fp.close()
881 885
882 886 if self.online:
883 887 newFile = self.__setNextFileOnline()
884 888 else:
885 889 newFile = self.__setNextFileOffline()
886 890
887 891 if not(newFile):
888 892 print '[Reading] No more files to read'
889 893 return 0
890 894
891 895 if self.verbose:
892 896 print '[Reading] Setting the file: %s' % self.filename
893 897
894 898 self.__readFirstHeader()
895 899 self.nReadBlocks = 0
896 900 return 1
897 901
898 902 def __waitNewBlock(self):
899 903 """
900 904 Returns 1 if a new data block was found, 0 otherwise.
901 905
902 906 If the reading mode is offline it always returns 0
903 907 """
904 908 if not self.online:
905 909 return 0
906 910
907 911 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
908 912 return 0
909 913
910 914 currentPointer = self.fp.tell()
911 915
912 916 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
913 917
914 918 for nTries in range( self.nTries ):
915 919
916 920 self.fp.close()
917 921 self.fp = open( self.filename, 'rb' )
918 922 self.fp.seek( currentPointer )
919 923
920 924 self.fileSize = os.path.getsize( self.filename )
921 925 currentSize = self.fileSize - currentPointer
922 926
923 927 if ( currentSize >= neededSize ):
924 928 self.basicHeaderObj.read(self.fp)
925 929 return 1
926 930
927 931 if self.fileSize == self.fileSizeByHeader:
928 932 # self.flagEoF = True
929 933 return 0
930 934
931 935 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
932 936 sleep( self.delay )
933 937
934 938
935 939 return 0
936 940
937 941 def waitDataBlock(self,pointer_location):
938 942
939 943 currentPointer = pointer_location
940 944
941 945 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
942 946
943 947 for nTries in range( self.nTries ):
944 948 self.fp.close()
945 949 self.fp = open( self.filename, 'rb' )
946 950 self.fp.seek( currentPointer )
947 951
948 952 self.fileSize = os.path.getsize( self.filename )
949 953 currentSize = self.fileSize - currentPointer
950 954
951 955 if ( currentSize >= neededSize ):
952 956 return 1
953 957
954 958 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
955 959 sleep( self.delay )
956 960
957 961 return 0
958 962
959 963 def __jumpToLastBlock(self):
960 964
961 965 if not(self.__isFirstTimeOnline):
962 966 return
963 967
964 968 csize = self.fileSize - self.fp.tell()
965 969 blocksize = self.processingHeaderObj.blockSize
966 970
967 971 # skip the first data block
968 972 if csize > self.processingHeaderObj.blockSize:
969 973 self.fp.seek(self.fp.tell() + blocksize)
970 974 else:
971 975 return
972 976
973 977 csize = self.fileSize - self.fp.tell()
974 978 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
975 979 while True:
976 980
977 981 if self.fp.tell()<self.fileSize:
978 982 self.fp.seek(self.fp.tell() + neededsize)
979 983 else:
980 984 self.fp.seek(self.fp.tell() - neededsize)
981 985 break
982 986
983 987 # csize = self.fileSize - self.fp.tell()
984 988 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
985 989 # factor = int(csize/neededsize)
986 990 # if factor > 0:
987 991 # self.fp.seek(self.fp.tell() + factor*neededsize)
988 992
989 993 self.flagIsNewFile = 0
990 994 self.__isFirstTimeOnline = 0
991 995
992 996 def __setNewBlock(self):
993 997
994 998 if self.fp == None:
995 999 return 0
996 1000
997 1001 # if self.online:
998 1002 # self.__jumpToLastBlock()
999 1003
1000 1004 if self.flagIsNewFile:
1001 1005 self.lastUTTime = self.basicHeaderObj.utc
1002 1006 return 1
1003 1007
1004 1008 if self.realtime:
1005 1009 self.flagDiscontinuousBlock = 1
1006 1010 if not(self.setNextFile()):
1007 1011 return 0
1008 1012 else:
1009 1013 return 1
1010 1014
1011 1015 currentSize = self.fileSize - self.fp.tell()
1012 1016 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1013 1017
1014 1018 if (currentSize >= neededSize):
1015 1019 self.basicHeaderObj.read(self.fp)
1016 1020 self.lastUTTime = self.basicHeaderObj.utc
1017 1021 return 1
1018 1022
1019 1023 if self.__waitNewBlock():
1020 1024 self.lastUTTime = self.basicHeaderObj.utc
1021 1025 return 1
1022 1026
1023 1027 if not(self.setNextFile()):
1024 1028 return 0
1025 1029
1026 1030 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
1027 1031 self.lastUTTime = self.basicHeaderObj.utc
1028 1032
1029 1033 self.flagDiscontinuousBlock = 0
1030 1034
1031 1035 if deltaTime > self.maxTimeStep:
1032 1036 self.flagDiscontinuousBlock = 1
1033 1037
1034 1038 return 1
1035 1039
1036 1040 def readNextBlock(self):
1037 1041
1038 1042 #Skip block out of startTime and endTime
1039 1043 while True:
1040 1044 if not(self.__setNewBlock()):
1041 1045 return 0
1042 1046
1043 1047 if not(self.readBlock()):
1044 1048 return 0
1045 1049
1046 1050 self.getBasicHeader()
1047 1051
1048 1052 if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime):
1049 1053
1050 1054 print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks,
1051 1055 self.processingHeaderObj.dataBlocksPerFile,
1052 1056 self.dataOut.datatime.ctime())
1053 1057 continue
1054 1058
1055 1059 break
1056 1060
1057 1061 if self.verbose:
1058 1062 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1059 1063 self.processingHeaderObj.dataBlocksPerFile,
1060 1064 self.dataOut.datatime.ctime())
1061 1065 return 1
1062 1066
1063 1067 def __readFirstHeader(self):
1064 1068
1065 1069 self.basicHeaderObj.read(self.fp)
1066 1070 self.systemHeaderObj.read(self.fp)
1067 1071 self.radarControllerHeaderObj.read(self.fp)
1068 1072 self.processingHeaderObj.read(self.fp)
1069 1073
1070 1074 self.firstHeaderSize = self.basicHeaderObj.size
1071 1075
1072 1076 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
1073 1077 if datatype == 0:
1074 1078 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
1075 1079 elif datatype == 1:
1076 1080 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
1077 1081 elif datatype == 2:
1078 1082 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
1079 1083 elif datatype == 3:
1080 1084 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
1081 1085 elif datatype == 4:
1082 1086 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
1083 1087 elif datatype == 5:
1084 1088 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
1085 1089 else:
1086 1090 raise ValueError, 'Data type was not defined'
1087 1091
1088 1092 self.dtype = datatype_str
1089 1093 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1090 1094 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
1091 1095 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1092 1096 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1093 1097 self.getBlockDimension()
1094 1098
1095 1099 def __verifyFile(self, filename, msgFlag=True):
1096 1100
1097 1101 msg = None
1098 1102
1099 1103 try:
1100 1104 fp = open(filename, 'rb')
1101 1105 except IOError:
1102 1106
1103 1107 if msgFlag:
1104 1108 print "[Reading] File %s can't be opened" % (filename)
1105 1109
1106 1110 return False
1107 1111
1108 1112 currentPosition = fp.tell()
1109 1113 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
1110 1114
1111 1115 if neededSize == 0:
1112 1116 basicHeaderObj = BasicHeader(LOCALTIME)
1113 1117 systemHeaderObj = SystemHeader()
1114 1118 radarControllerHeaderObj = RadarControllerHeader()
1115 1119 processingHeaderObj = ProcessingHeader()
1116 1120
1117 1121 if not( basicHeaderObj.read(fp) ):
1118 1122 fp.close()
1119 1123 return False
1120 1124
1121 1125 if not( systemHeaderObj.read(fp) ):
1122 1126 fp.close()
1123 1127 return False
1124 1128
1125 1129 if not( radarControllerHeaderObj.read(fp) ):
1126 1130 fp.close()
1127 1131 return False
1128 1132
1129 1133 if not( processingHeaderObj.read(fp) ):
1130 1134 fp.close()
1131 1135 return False
1132 1136
1133 1137 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1134 1138 else:
1135 1139 msg = "[Reading] Skipping the file %s because it doesn't have enough data" %filename
1136 1140
1137 1141 fp.close()
1138 1142
1139 1143 fileSize = os.path.getsize(filename)
1140 1144 currentSize = fileSize - currentPosition
1141 1145
1142 1146 if currentSize < neededSize:
1143 1147 if msgFlag and (msg != None):
1144 1148 print msg
1145 1149 return False
1146 1150
1147 1151 return True
1148 1152
1149 1153 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1150 1154
1151 1155 path_empty = True
1152 1156
1153 1157 dateList = []
1154 1158 pathList = []
1155 1159
1156 1160 multi_path = path.split(',')
1157 1161
1158 1162 if not walk:
1159 1163
1160 1164 for single_path in multi_path:
1161 1165
1162 1166 if not os.path.isdir(single_path):
1163 1167 continue
1164 1168
1165 1169 fileList = glob.glob1(single_path, "*"+ext)
1166 1170
1167 1171 if not fileList:
1168 1172 continue
1169 1173
1170 1174 path_empty = False
1171 1175
1172 1176 fileList.sort()
1173 1177
1174 1178 for thisFile in fileList:
1175 1179
1176 1180 if not os.path.isfile(os.path.join(single_path, thisFile)):
1177 1181 continue
1178 1182
1179 1183 if not isRadarFile(thisFile):
1180 1184 continue
1181 1185
1182 1186 if not isFileInDateRange(thisFile, startDate, endDate):
1183 1187 continue
1184 1188
1185 1189 thisDate = getDateFromRadarFile(thisFile)
1186 1190
1187 1191 if thisDate in dateList:
1188 1192 continue
1189 1193
1190 1194 dateList.append(thisDate)
1191 1195 pathList.append(single_path)
1192 1196
1193 1197 else:
1194 1198 for single_path in multi_path:
1195 1199
1196 1200 if not os.path.isdir(single_path):
1197 1201 continue
1198 1202
1199 1203 dirList = []
1200 1204
1201 1205 for thisPath in os.listdir(single_path):
1202 1206
1203 1207 if not os.path.isdir(os.path.join(single_path,thisPath)):
1204 1208 continue
1205 1209
1206 1210 if not isRadarFolder(thisPath):
1207 1211 continue
1208 1212
1209 1213 if not isFolderInDateRange(thisPath, startDate, endDate):
1210 1214 continue
1211 1215
1212 1216 dirList.append(thisPath)
1213 1217
1214 1218 if not dirList:
1215 1219 continue
1216 1220
1217 1221 dirList.sort()
1218 1222
1219 1223 for thisDir in dirList:
1220 1224
1221 1225 datapath = os.path.join(single_path, thisDir, expLabel)
1222 1226 fileList = glob.glob1(datapath, "*"+ext)
1223 1227
1224 1228 if not fileList:
1225 1229 continue
1226 1230
1227 1231 path_empty = False
1228 1232
1229 1233 thisDate = getDateFromRadarFolder(thisDir)
1230 1234
1231 1235 pathList.append(datapath)
1232 1236 dateList.append(thisDate)
1233 1237
1234 1238 dateList.sort()
1235 1239
1236 1240 if walk:
1237 1241 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1238 1242 else:
1239 1243 pattern_path = multi_path[0]
1240 1244
1241 1245 if path_empty:
1242 1246 print "[Reading] No *%s files in %s for %s to %s" %(ext, pattern_path, startDate, endDate)
1243 1247 else:
1244 1248 if not dateList:
1245 1249 print "[Reading] Selected date range is invalid [%s - %s]: no *%s files in %s" %(startDate, endDate, ext, path)
1246 1250
1247 1251 if include_path:
1248 1252 return dateList, pathList
1249 1253
1250 1254 return dateList
1251 1255
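findDatafiles only inspects names (it never opens the files), so it is a cheap way to ask which dates are available before configuring a full reading unit. A usage sketch, assuming Jicamarca-style dYYYYDDD folders under a hypothetical /data path:

import datetime

reader = JRODataReader()
dates = reader.findDatafiles(path="/data",
                             startDate=datetime.date(2017, 1, 1),
                             endDate=datetime.date(2017, 1, 31),
                             ext=".r",
                             walk=True)
for d in dates:
    print(d)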
1252 1256 def setup(self,
1253 1257 path=None,
1254 1258 startDate=None,
1255 1259 endDate=None,
1256 1260 startTime=datetime.time(0,0,0),
1257 1261 endTime=datetime.time(23,59,59),
1258 1262 set=None,
1259 1263 expLabel = "",
1260 1264 ext = None,
1261 1265 online = False,
1262 1266 delay = 60,
1263 1267 walk = True,
1264 1268 getblock = False,
1265 1269 nTxs = 1,
1266 1270 realtime=False,
1267 1271 blocksize=None,
1268 1272 blocktime=None,
1269 1273 queue=None,
1270 1274 skip=None,
1271 1275 cursor=None,
1272 1276 warnings=True,
1273 1277 verbose=True):
1274 1278
1275 1279 if path == None:
1276 1280 raise ValueError, "[Reading] The path is not valid"
1277 1281
1278 1282 if ext == None:
1279 1283 ext = self.ext
1280 1284
1281 1285 if online:
1282 1286 print "[Reading] Searching files in online mode..."
1283 1287
1284 1288 for nTries in range( self.nTries ):
1285 1289 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1286 1290
1287 1291 if fullpath:
1288 1292 break
1289 1293
1290 1294 print '[Reading] Waiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
1291 1295 sleep( self.delay )
1292 1296
1293 1297 if not(fullpath):
1294 1298 print "[Reading] There isn't any valid file in %s" % path
1295 1299 return
1296 1300
1297 1301 self.year = year
1298 1302 self.doy = doy
1299 1303 self.set = set - 1
1300 1304 self.path = path
1301 1305 self.foldercounter = foldercounter
1302 1306 last_set = None
1303 1307
1304 1308 else:
1305 1309 print "[Reading] Searching files in offline mode ..."
1306 1310 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1307 1311 startTime=startTime, endTime=endTime,
1308 1312 set=set, expLabel=expLabel, ext=ext,
1309 1313 walk=walk, cursor=cursor,
1310 1314 skip=skip, queue=queue)
1311 1315
1312 1316 if not(pathList):
1313 1317 # print "[Reading] No *%s files in %s (%s - %s)"%(ext, path,
1314 1318 # datetime.datetime.combine(startDate,startTime).ctime(),
1315 1319 # datetime.datetime.combine(endDate,endTime).ctime())
1316 1320
1317 1321 # sys.exit(-1)
1318 1322
1319 1323 self.fileIndex = -1
1320 1324 self.pathList = []
1321 1325 self.filenameList = []
1322 1326 return
1323 1327
1324 1328 self.fileIndex = -1
1325 1329 self.pathList = pathList
1326 1330 self.filenameList = filenameList
1327 1331 file_name = os.path.basename(filenameList[-1])
1328 1332 basename, ext = os.path.splitext(file_name)
1329 1333 last_set = int(basename[-3:])
1330 1334
1331 1335 self.online = online
1332 1336 self.realtime = realtime
1333 1337 self.delay = delay
1334 1338 ext = ext.lower()
1335 1339 self.ext = ext
1336 1340 self.getByBlock = getblock
1337 1341 self.nTxs = nTxs
1338 1342 self.startTime = startTime
1339 1343 self.endTime = endTime
1340 1344
1341 1345 #Added-----------------
1342 1346 self.selBlocksize = blocksize
1343 1347 self.selBlocktime = blocktime
1344 1348
1345 1349 # Verbose-----------
1346 1350 self.verbose = verbose
1347 1351 self.warnings = warnings
1348 1352
1349 1353 if not(self.setNextFile()):
1350 1354 if (startDate!=None) and (endDate!=None):
1351 1355 print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1352 1356 elif startDate != None:
1353 1357 print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1354 1358 else:
1355 1359 print "[Reading] No files"
1356 1360
1357 1361 self.fileIndex = -1
1358 1362 self.pathList = []
1359 1363 self.filenameList = []
1360 1364 return
1361 1365
1362 1366 # self.getBasicHeader()
1363 1367
1364 1368 if last_set != None:
1365 1369 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1366 1370 return
1367 1371
1368 1372 def getBasicHeader(self):
1369 1373
1370 1374 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1371 1375
1372 1376 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1373 1377
1374 1378 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1375 1379
1376 1380 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1377 1381
1378 1382 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1379 1383
1380 1384 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1381 1385
1382 1386 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1383 1387
1384 1388 # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1385 1389
1386 1390
1387 1391 def getFirstHeader(self):
1388 1392
1389 1393 raise NotImplementedError
1390 1394
1391 1395 def getData(self):
1392 1396
1393 1397 raise NotImplementedError
1394 1398
1395 1399 def hasNotDataInBuffer(self):
1396 1400
1397 1401 raise NotImplementedError
1398 1402
1399 1403 def readBlock(self):
1400 1404
1401 1405 raise NotImplementedError
1402 1406
1403 1407 def isEndProcess(self):
1404 1408
1405 1409 return self.flagNoMoreFiles
1406 1410
1407 1411 def printReadBlocks(self):
1408 1412
1409 1413 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1410 1414
1411 1415 def printTotalBlocks(self):
1412 1416
1413 1417 print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1414 1418
1415 1419 def printNumberOfBlock(self):
1416 1420
1417 1421 if self.flagIsNewBlock:
1418 1422 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1419 1423 self.processingHeaderObj.dataBlocksPerFile,
1420 1424 self.dataOut.datatime.ctime())
1421 1425
1422 1426 def printInfo(self):
1423 1427
1424 1428 if self.__printInfo == False:
1425 1429 return
1426 1430
1427 1431 self.basicHeaderObj.printInfo()
1428 1432 self.systemHeaderObj.printInfo()
1429 1433 self.radarControllerHeaderObj.printInfo()
1430 1434 self.processingHeaderObj.printInfo()
1431 1435
1432 1436 self.__printInfo = False
1433 1437
1434 1438
1435 def run(self, **kwargs):
1439 def run(self,
1440 path=None,
1441 startDate=None,
1442 endDate=None,
1443 startTime=datetime.time(0,0,0),
1444 endTime=datetime.time(23,59,59),
1445 set=None,
1446 expLabel = "",
1447 ext = None,
1448 online = False,
1449 delay = 60,
1450 walk = True,
1451 getblock = False,
1452 nTxs = 1,
1453 realtime=False,
1454 blocksize=None,
1455 blocktime=None,
1456 queue=None,
1457 skip=None,
1458 cursor=None,
1459 warnings=True,
1460 verbose=True, **kwargs):
1436 1461
1437 1462 if not(self.isConfig):
1438
1439 1463 # self.dataOut = dataOut
1440 self.setup(**kwargs)
1464 self.setup( path=path,
1465 startDate=startDate,
1466 endDate=endDate,
1467 startTime=startTime,
1468 endTime=endTime,
1469 set=set,
1470 expLabel=expLabel,
1471 ext=ext,
1472 online=online,
1473 delay=delay,
1474 walk=walk,
1475 getblock=getblock,
1476 nTxs=nTxs,
1477 realtime=realtime,
1478 blocksize=blocksize,
1479 blocktime=blocktime,
1480 queue=queue,
1481 skip=skip,
1482 cursor=cursor,
1483 warnings=warnings,
1484 verbose=verbose)
1441 1485 self.isConfig = True
1442 1486
1443 1487 self.getData()
1444 1488
1445 1489 class JRODataWriter(JRODataIO):
1446 1490
1447 1491 """
1448 1492 This class writes data to processed files (.r or .pdata). Data is always
1449 1493 written in blocks.
1450 1494 """
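# A minimal usage sketch of the writer interface defined below (setup()/run()/putData()).
# It assumes a concrete subclass -- VoltageWriter is used here only as an assumed name,
# check your schainpy version -- and an iterable of already-processed dataOut blocks:
#
#     writer = VoltageWriter()
#     for dataOut in blocks:
#         writer.run(dataOut,
#                    path='/home/nanosat/schain/output',  # directory where data will be saved
#                    blocksPerFile=100,                   # blocks written before rolling to a new file
#                    profilesPerBlock=64,
#                    datatype=4)                          # 4 -> float32, see setup() below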
1451 1495
1452 1496 blockIndex = 0
1453 1497
1454 1498 path = None
1455 1499
1456 1500 setFile = None
1457 1501
1458 1502 profilesPerBlock = None
1459 1503
1460 1504 blocksPerFile = None
1461 1505
1462 1506 nWriteBlocks = 0
1463 1507
1464 1508 fileDate = None
1465 1509
1466 1510 def __init__(self, dataOut=None):
1467 1511 raise NotImplementedError
1468 1512
1469 1513
1470 1514 def hasAllDataInBuffer(self):
1471 1515 raise NotImplementedError
1472 1516
1473 1517
1474 1518 def setBlockDimension(self):
1475 1519 raise NotImplementedError
1476 1520
1477 1521
1478 1522 def writeBlock(self):
1479 1523 raise NotImplementedError
1480 1524
1481 1525
1482 1526 def putData(self):
1483 1527 raise NotImplementedError
1484 1528
1485 1529
1486 1530 def getProcessFlags(self):
1487 1531
1488 1532 processFlags = 0
1489 1533
1490 1534 dtype_index = get_dtype_index(self.dtype)
1491 1535 procflag_dtype = get_procflag_dtype(dtype_index)
1492 1536
1493 1537 processFlags += procflag_dtype
1494 1538
1495 1539 if self.dataOut.flagDecodeData:
1496 1540 processFlags += PROCFLAG.DECODE_DATA
1497 1541
1498 1542 if self.dataOut.flagDeflipData:
1499 1543 processFlags += PROCFLAG.DEFLIP_DATA
1500 1544
1501 1545 if self.dataOut.code is not None:
1502 1546 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1503 1547
1504 1548 if self.dataOut.nCohInt > 1:
1505 1549 processFlags += PROCFLAG.COHERENT_INTEGRATION
1506 1550
1507 1551 if self.dataOut.type == "Spectra":
1508 1552 if self.dataOut.nIncohInt > 1:
1509 1553 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1510 1554
1511 1555 if self.dataOut.data_dc is not None:
1512 1556 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1513 1557
1514 1558 if self.dataOut.flagShiftFFT:
1515 1559 processFlags += PROCFLAG.SHIFT_FFT_DATA
1516 1560
1517 1561 return processFlags
1518 1562
1519 1563 def setBasicHeader(self):
1520 1564
1521 1565 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1522 1566 self.basicHeaderObj.version = self.versionFile
1523 1567 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1524 1568
1525 1569 utc = numpy.floor(self.dataOut.utctime)
1526 1570 milisecond = (self.dataOut.utctime - utc)* 1000.0
1527 1571
1528 1572 self.basicHeaderObj.utc = utc
1529 1573 self.basicHeaderObj.miliSecond = milisecond
1530 1574 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1531 1575 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1532 1576 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1533 1577
1534 1578 def setFirstHeader(self):
1535 1579 """
1536 1580 Gets a copy of the First Header
1537 1581
1538 1582 Affected:
1539 1583
1540 1584 self.basicHeaderObj
1541 1585 self.systemHeaderObj
1542 1586 self.radarControllerHeaderObj
1543 1587 self.processingHeaderObj
1544 1588
1545 1589 Return:
1546 1590 None
1547 1591 """
1548 1592
1549 1593 raise NotImplementedError
1550 1594
1551 1595 def __writeFirstHeader(self):
1552 1596 """
1553 1597 Writes the first header of the file, i.e. the Basic header plus the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1554 1598
1555 1599 Affected:
1556 1600 __dataType
1557 1601
1558 1602 Return:
1559 1603 None
1560 1604 """
1561 1605
1562 1606 # CALCULATE PARAMETERS
1563 1607
1564 1608 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1565 1609 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1566 1610
1567 1611 self.basicHeaderObj.write(self.fp)
1568 1612 self.systemHeaderObj.write(self.fp)
1569 1613 self.radarControllerHeaderObj.write(self.fp)
1570 1614 self.processingHeaderObj.write(self.fp)
1571 1615
1572 1616 def __setNewBlock(self):
1573 1617 """
1574 1618 If this is a new file it writes the First Header, otherwise it writes only the Basic Header
1575 1619
1576 1620 Return:
1577 1621 0 : if nothing could be written
1578 1622 1 : if the Basic or First Header was written
1579 1623 """
1580 1624 if self.fp == None:
1581 1625 self.setNextFile()
1582 1626
1583 1627 if self.flagIsNewFile:
1584 1628 return 1
1585 1629
1586 1630 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1587 1631 self.basicHeaderObj.write(self.fp)
1588 1632 return 1
1589 1633
1590 1634 if not( self.setNextFile() ):
1591 1635 return 0
1592 1636
1593 1637 return 1
1594 1638
1595 1639
1596 1640 def writeNextBlock(self):
1597 1641 """
1598 1642 Selects the next data block and writes it to a file
1599 1643
1600 1644 Return:
1601 1645 0 : if the data block could not be written
1602 1646 1 : if the data block was written
1603 1647 """
1604 1648 if not( self.__setNewBlock() ):
1605 1649 return 0
1606 1650
1607 1651 self.writeBlock()
1608 1652
1609 1653 print "[Writing] Block No. %d/%d" %(self.blockIndex,
1610 1654 self.processingHeaderObj.dataBlocksPerFile)
1611 1655
1612 1656 return 1
1613 1657
1614 1658 def setNextFile(self):
1615 1659 """
1616 1660 Determines the next file to be written
1617 1661
1618 1662 Affected:
1619 1663 self.filename
1620 1664 self.subfolder
1621 1665 self.fp
1622 1666 self.setFile
1623 1667 self.flagIsNewFile
1624 1668
1625 1669 Return:
1626 1670 0 : if the file cannot be written
1627 1671 1 : if the file is ready to be written
1628 1672 """
1629 1673 ext = self.ext
1630 1674 path = self.path
1631 1675
1632 1676 if self.fp != None:
1633 1677 self.fp.close()
1634 1678
1635 1679 timeTuple = time.localtime( self.dataOut.utctime)
1636 1680 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1637 1681
1638 1682 fullpath = os.path.join( path, subfolder )
1639 1683 setFile = self.setFile
1640 1684
1641 1685 if not( os.path.exists(fullpath) ):
1642 1686 os.mkdir(fullpath)
1643 1687 setFile = -1 # initialize the set counter
1644 1688 else:
1645 1689 filesList = os.listdir( fullpath )
1646 1690 if len( filesList ) > 0:
1647 1691 filesList = sorted( filesList, key=str.lower )
1648 1692 filen = filesList[-1]
1649 1693 # the filename must have the following format
1650 1694 # 0 1234 567 89A BCDE (hex)
1651 1695 # x YYYY DDD SSS .ext
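# Worked example of this naming scheme ('d' is only a placeholder optchar and '.r'
# a typical extension; the real values come from self.optchar and self.ext):
#
#     filen = '%s%4.4d%3.3d%3.3d%s' % ('d', 2017, 123, 1, '.r')
#     # -> 'd2017123001.r' : year 2017, day-of-year 123, set 001
#     int(filen[8:11])   # -> 1, which is what isNumber(filen[8:11]) validates below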
1652 1696 if isNumber( filen[8:11] ):
1653 1697 setFile = int( filen[8:11] ) # initialize the set counter to the set of the last file
1654 1698 else:
1655 1699 setFile = -1
1656 1700 else:
1657 1701 setFile = -1 # initialize the set counter
1658 1702
1659 1703 setFile += 1
1660 1704
1661 1705 #If this is a new day it resets some values
1662 1706 if self.dataOut.datatime.date() > self.fileDate:
1663 1707 setFile = 0
1664 1708 self.nTotalBlocks = 0
1665 1709
1666 1710 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext )
1667 1711
1668 1712 filename = os.path.join( path, subfolder, filen )
1669 1713
1670 1714 fp = open( filename,'wb' )
1671 1715
1672 1716 self.blockIndex = 0
1673 1717
1674 1718 # saving attributes
1675 1719 self.filename = filename
1676 1720 self.subfolder = subfolder
1677 1721 self.fp = fp
1678 1722 self.setFile = setFile
1679 1723 self.flagIsNewFile = 1
1680 1724 self.fileDate = self.dataOut.datatime.date()
1681 1725
1682 1726 self.setFirstHeader()
1683 1727
1684 1728 print '[Writing] Opening file: %s'%self.filename
1685 1729
1686 1730 self.__writeFirstHeader()
1687 1731
1688 1732 return 1
1689 1733
1690 1734 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1691 1735 """
1692 1736 Sets the format in which the data will be saved and writes the First Header
1693 1737
1694 1738 Inputs:
1695 1739 path : directory where data will be saved
1696 1740 profilesPerBlock : number of profiles per block
1697 1741 set : initial file set
1698 1742 datatype : An integer number that defines data type:
1699 1743 0 : int8 (1 byte)
1700 1744 1 : int16 (2 bytes)
1701 1745 2 : int32 (4 bytes)
1702 1746 3 : int64 (8 bytes)
1703 1747 4 : float32 (4 bytes)
1704 1748 5 : double64 (8 bytes)
1705 1749
1706 1750 Return:
1707 1751 0 : if the setup was not successful
1708 1752 1 : if the setup was successful
1709 1753 """
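# Illustrative mapping of the datatype codes listed above to numpy dtypes. This is an
# assumption drawn from this docstring; the authoritative mapping is whatever
# get_numpy_dtype() returns:
#
#     import numpy
#     DATATYPE_SKETCH = {0: numpy.int8,  1: numpy.int16,   2: numpy.int32,
#                        3: numpy.int64, 4: numpy.float32, 5: numpy.float64}
#     numpy.dtype(DATATYPE_SKETCH[4]).itemsize   # -> 4 bytes (float32), the default used by run()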
1710 1754
1711 1755 if ext == None:
1712 1756 ext = self.ext
1713 1757
1714 1758 self.ext = ext.lower()
1715 1759
1716 1760 self.path = path
1717 1761
1718 1762 if set is None:
1719 1763 self.setFile = -1
1720 1764 else:
1721 1765 self.setFile = set - 1
1722 1766
1723 1767 self.blocksPerFile = blocksPerFile
1724 1768
1725 1769 self.profilesPerBlock = profilesPerBlock
1726 1770
1727 1771 self.dataOut = dataOut
1728 1772 self.fileDate = self.dataOut.datatime.date()
1729 1773 #By default
1730 1774 self.dtype = self.dataOut.dtype
1731 1775
1732 1776 if datatype is not None:
1733 1777 self.dtype = get_numpy_dtype(datatype)
1734 1778
1735 1779 if not(self.setNextFile()):
1736 1780 print "[Writing] There isn't a next file"
1737 1781 return 0
1738 1782
1739 1783 self.setBlockDimension()
1740 1784
1741 1785 return 1
1742 1786
1743 def run(self, dataOut, **kwargs):
1787 def run(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1744 1788
1745 1789 if not(self.isConfig):
1746 1790
1747 self.setup(dataOut, **kwargs)
1791 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, set=set, ext=ext, datatype=datatype, **kwargs)
1748 1792 self.isConfig = True
1749 1793
1750 1794 self.putData()
@@ -1,346 +1,349
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: jroproc_base.py 1 2012-11-12 18:56:07Z murco $
5 5 '''
6 6 import inspect
7 7 from fuzzywuzzy import process
8 8
9 9 def checkKwargs(method, kwargs):
10 10 currentKwargs = kwargs
11 11 choices = inspect.getargspec(method).args
12 12 try:
13 13 choices.remove('self')
14 14 except Exception as e:
15 15 pass
16 16
17 17 try:
18 18 choices.remove('dataOut')
19 19 except Exception as e:
20 20 pass
21 21
22 22 for kwarg in kwargs:
23 23 fuzz = process.extractOne(kwarg, choices)
24 24 if fuzz is None:
25 25 continue
26 26 if fuzz[1] < 100:
27 27 raise Exception('\x1b[0;32;40mDid you mean {} instead of {} in {}? \x1b[0m'.
28 28 format(fuzz[0], kwarg, method.__self__.__class__.__name__))
29 29
30 30 class ProcessingUnit(object):
31 31
32 32 """
33 33 This is the base class for data processing.
34 34
35 35 It provides the "call" method to execute operations. Operations can be:
36 36 - Internal methods (callMethod)
37 37 - Objects of type Operation (callObject). Before being called, these objects
38 38 have to be added with the "addOperation" method.
39 39
40 40 """
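# A minimal wiring sketch for the classes in this file (illustrative names only;
# ScaleData is hypothetical and not part of schainpy). It shows how an external
# Operation is registered on a ProcessingUnit and later executed through call():

class ScaleData(Operation):
    # multiplies dataOut.data in place by a constant factor
    def run(self, dataOut, factor=1.0):
        dataOut.data = dataOut.data * factor
        return dataOut

procUnit = ProcessingUnit()
opObj = ScaleData(factor=2.0)                 # kwargs are checked against run() by checkKwargs()
opId = procUnit.addOperation(opObj, objId=1)
procUnit.addOperationKwargs(opId, factor=2.0)
# once procUnit.dataOut holds data, the framework runs the operation with:
#     procUnit.call(opType='other', opId=opId)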
41 41 # input data object (Voltage, Spectra or Correlation)
42 42 dataIn = None
43 43 dataInList = []
44 44
45 45 # output data object (Voltage, Spectra or Correlation)
46 46 dataOut = None
47 47
48 48 operations2RunDict = None
49 49
50 50 isConfig = False
51 51
52 52
53 53 def __init__(self, *args, **kwargs):
54 54
55 55 self.dataIn = None
56 56 self.dataInList = []
57 57
58 58 self.dataOut = None
59 59
60 60 self.operations2RunDict = {}
61 61 self.operationKwargs = {}
62 62
63 63 self.isConfig = False
64 64
65 65 self.args = args
66 66 self.kwargs = kwargs
67 67 checkKwargs(self.run, kwargs)
68 68
69 69 def getAllowedArgs(self):
70 70 return inspect.getargspec(self.run).args
71 71
72 72 def addOperationKwargs(self, objId, **kwargs):
73 73 '''
74 74 '''
75 75
76 76 self.operationKwargs[objId] = kwargs
77 77
78 78
79 79 def addOperation(self, opObj, objId):
80 80
81 81 """
82 82 Adds an "Operation" object (opObj) to the "self.operations2RunDict" dictionary and returns the
83 83 identifier associated with this object.
84 84
85 85 Input:
86 86
87 87 opObj : object of the "Operation" class
88 88
89 89 Return:
90 90
91 91 objId : identifier of the object, needed to execute the operation
92 92 """
93 93
94 94 self.operations2RunDict[objId] = opObj
95 95
96 96 return objId
97 97
98 98 def getOperationObj(self, objId):
99 99
100 100 if objId not in self.operations2RunDict.keys():
101 101 return None
102 102
103 103 return self.operations2RunDict[objId]
104 104
105 105 def operation(self, **kwargs):
106 106
107 107 """
108 108 Direct operation on the data (dataOut.data). The values of the dataOut object
109 109 attributes must be updated accordingly
110 110
111 111 Input:
112 112
113 113 **kwargs : dictionary of arguments for the function to execute
114 114 """
115 115
116 116 raise NotImplementedError
117 117
118 118 def callMethod(self, name, opId):
119 119
120 120 """
121 121 Executes the method named "name" of this class, using the kwargs registered for it.
122 122
123 123 Input:
124 124 name : name of the method to execute
125 125
126 126 opId : identifier used to look up the kwargs registered for the operation to execute.
127 127
128 128 """
129 129
130 130 #Checking the inputs
131 131 if name == 'run':
132 132
133 133 if not self.checkInputs():
134 134 self.dataOut.flagNoData = True
135 135 return False
136 136 else:
137 137 # If it is not a RUN method, the input is dataOut itself (internal)
138 138 if self.dataOut is not None and self.dataOut.isEmpty():
139 139 return False
140 140
141 141 #Getting the pointer to method
142 142 methodToCall = getattr(self, name)
143 143
144 144 #Executing the self method
145 145
146 146 if hasattr(self, 'mp'):
147 147 if name=='run':
148 148 if self.mp is False:
149 149 self.mp = True
150 150 self.start()
151 151 else:
152 self.operationKwargs[opId]['parent'] = self.kwargs
152 153 methodToCall(**self.operationKwargs[opId])
153 154 else:
154 155 if name=='run':
155 156 methodToCall(**self.kwargs)
156 157 else:
157 158 methodToCall(**self.operationKwargs[opId])
158 159
159 160 if self.dataOut is None:
160 161 return False
161 162
162 163 if self.dataOut.isEmpty():
163 164 return False
164 165
165 166 return True
166 167
167 168 def callObject(self, objId):
168 169
169 170 """
170 171 Executes the operation associated with the object identifier "objId"
171 172
172 173 Input:
173 174
174 175 objId : identifier of the object to execute
175 176
176 177 **kwargs : dictionary with the names and values for the function to execute.
177 178
178 179 Return:
179 180
180 181 None
181 182 """
182 183
183 184 if self.dataOut is not None and self.dataOut.isEmpty():
184 185 return False
185 186
186 187 externalProcObj = self.operations2RunDict[objId]
187 188
188 189 if hasattr(externalProcObj, 'mp'):
189 190 if externalProcObj.mp is False:
191 externalProcObj.kwargs['parent'] = self.kwargs
190 192 self.operationKwargs[objId] = externalProcObj.kwargs
191 193 externalProcObj.mp = True
192 194 externalProcObj.start()
193 195 else:
194 196 externalProcObj.run(self.dataOut, **externalProcObj.kwargs)
195 197 self.operationKwargs[objId] = externalProcObj.kwargs
196 198
199
197 200 return True
198 201
199 202 def call(self, opType, opName=None, opId=None):
200 203
201 204 """
202 205 Returns True if it executes the internal operation named "opName" or the external operation
203 206 identified by the id "opId", with the registered arguments "**kwargs".
204 207
205 208 Returns False if the operation was not executed.
206 209
207 210 Input:
208 211
209 212 opType : can be "self", "other" (also "external") or "plotter"
210 213
211 214 Depending on the operation type, it calls either callMethod or callObject:
212 215
213 216 1. If opType = "self": calls a method of this class itself:
214 217
215 218 name_method = getattr(self, name)
216 219 name_method(**kwargs)
217 220
218 221
219 222 2. If opType = "other" or "external": calls the "run()" method of an instance of the
220 223 "Operation" class or of one of its subclasses:
221 224
222 225 instanceName = self.operationList[opId]
223 226 instanceName.run(**kwargs)
224 227
225 228 opName : if the operation is internal (opType = 'self'), then "opName" is
226 229 used to call an internal method of the Processing class
227 230
228 231 opId : if the operation is external (opType = 'other' or 'external'), then
229 232 "opId" is used to call the "run" method of the Operation object
230 233 previously registered with that Id
231 234
232 235 Exception:
233 236 This Operation object must have been previously added with the
234 237 "addOperation" method and identified with the value "opId" = the operation id.
235 238 Otherwise a ValueError exception will be raised
236 239
237 240 """
238 241
239 242 if opType == 'self':
240 243
241 244 if not opName:
242 245 raise ValueError, "opName parameter should be defined"
243 246
244 247 sts = self.callMethod(opName, opId)
245 248
246 249 elif opType == 'other' or opType == 'external' or opType == 'plotter':
247 250
248 251 if not opId:
249 252 raise ValueError, "opId parameter should be defined"
250 253
251 254 if opId not in self.operations2RunDict.keys():
252 255 raise ValueError, "No operation with id=%s has been added" %str(opId)
253 256
254 257 sts = self.callObject(opId)
255 258
256 259 else:
257 260 raise ValueError, "opType should be 'self', 'other', 'external' or 'plotter'; not '%s'" %opType
258 261
259 262 return sts
260 263
261 264 def setInput(self, dataIn):
262 265
263 266 self.dataIn = dataIn
264 267 self.dataInList.append(dataIn)
265 268
266 269 def getOutputObj(self):
267 270
268 271 return self.dataOut
269 272
270 273 def checkInputs(self):
271 274
272 275 for thisDataIn in self.dataInList:
273 276
274 277 if thisDataIn.isEmpty():
275 278 return False
276 279
277 280 return True
278 281
279 282 def setup(self):
280 283
281 284 raise NotImplementedError
282 285
283 286 def run(self):
284 287
285 288 raise NotImplementedError
286 289
287 290 def close(self):
288 291 # Close every thread, queue or any other object here if it is necessary.
289 292 return
290 293
291 294 class Operation(object):
292 295
293 296 """
294 297 Base class for defining additional operations that can be added to a ProcessingUnit
295 298 and that need to accumulate previous information from the data being processed. Preferably use an
296 299 accumulation buffer inside this class
297 300
298 301 Example: coherent integration, which needs the previous n profiles (buffer)
299 302
300 303 """
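# A minimal sketch of the buffering pattern described above (a hypothetical operation,
# not the actual coherent-integration code in schainpy): it accumulates n inputs in a
# buffer and only updates dataOut once the buffer is full.

import numpy

class IntegrateN(Operation):
    def setup(self, n):
        self.__buffer = []
        self.n = n
        self.isConfig = True

    def run(self, dataOut, n=2):
        if not self.isConfig:
            self.setup(n)
        self.__buffer.append(dataOut.data.copy())
        dataOut.flagNoData = True                     # nothing to deliver yet
        if len(self.__buffer) == self.n:
            dataOut.data = numpy.mean(numpy.array(self.__buffer), axis=0)
            dataOut.flagNoData = False
            self.__buffer = []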
301 304
302 305 __buffer = None
303 306 isConfig = False
304 307
305 308 def __init__(self, **kwargs):
306 309
307 310 self.__buffer = None
308 311 self.isConfig = False
309 312 self.kwargs = kwargs
310 313 checkKwargs(self.run, kwargs)
311 314
312 315 def getAllowedArgs(self):
313 316 return inspect.getargspec(self.run).args
314 317
315 318 def setup(self):
316 319
317 320 self.isConfig = True
318 321
319 322 raise NotImplementedError
320 323
321 324 def run(self, dataIn, **kwargs):
322 325
323 326 """
324 327 Performs the required operations on dataIn.data and updates the
325 328 attributes of the dataIn object.
326 329
327 330 Input:
328 331
329 332 dataIn : object of type JROData
330 333
331 334 Return:
332 335
333 336 None
334 337
335 338 Affected:
336 339 __buffer : data reception buffer.
337 340
338 341 """
339 342 if not self.isConfig:
340 343 self.setup(**kwargs)
341 344
342 345 raise NotImplementedError
343 346
344 347 def close(self):
345 348
346 349 pass
@@ -1,2749 +1,2749
1 1 import numpy
2 2 import math
3 3 from scipy import optimize, interpolate, signal, stats, ndimage
4 4 import re
5 5 import datetime
6 6 import copy
7 7 import sys
8 8 import importlib
9 9 import itertools
10 10
11 11 from jroproc_base import ProcessingUnit, Operation
12 12 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
13 13
14 14
15 15 class ParametersProc(ProcessingUnit):
16 16
17 17 nSeconds = None
18 18
19 19 def __init__(self):
20 20 ProcessingUnit.__init__(self)
21 21
22 22 # self.objectDict = {}
23 23 self.buffer = None
24 24 self.firstdatatime = None
25 25 self.profIndex = 0
26 26 self.dataOut = Parameters()
27 27
28 28 def __updateObjFromInput(self):
29 29
30 30 self.dataOut.inputUnit = self.dataIn.type
31 31
32 32 self.dataOut.timeZone = self.dataIn.timeZone
33 33 self.dataOut.dstFlag = self.dataIn.dstFlag
34 34 self.dataOut.errorCount = self.dataIn.errorCount
35 35 self.dataOut.useLocalTime = self.dataIn.useLocalTime
36 36
37 37 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
38 38 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
39 39 self.dataOut.channelList = self.dataIn.channelList
40 40 self.dataOut.heightList = self.dataIn.heightList
41 41 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
42 42 # self.dataOut.nHeights = self.dataIn.nHeights
43 43 # self.dataOut.nChannels = self.dataIn.nChannels
44 44 self.dataOut.nBaud = self.dataIn.nBaud
45 45 self.dataOut.nCode = self.dataIn.nCode
46 46 self.dataOut.code = self.dataIn.code
47 47 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
48 48 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
49 49 # self.dataOut.utctime = self.firstdatatime
50 50 self.dataOut.utctime = self.dataIn.utctime
51 51 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assume the data is already decoded
52 52 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assume the data is not flipped
53 53 self.dataOut.nCohInt = self.dataIn.nCohInt
54 54 # self.dataOut.nIncohInt = 1
55 55 self.dataOut.ippSeconds = self.dataIn.ippSeconds
56 56 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
57 57 self.dataOut.timeInterval1 = self.dataIn.timeInterval
58 58 self.dataOut.heightList = self.dataIn.getHeiRange()
59 59 self.dataOut.frequency = self.dataIn.frequency
60 60 #self.dataOut.noise = self.dataIn.noise
61 61
62 62 def run(self):
63 63
64 64 #---------------------- Voltage Data ---------------------------
65 65
66 66 if self.dataIn.type == "Voltage":
67 67
68 68 self.__updateObjFromInput()
69 69 self.dataOut.data_pre = self.dataIn.data.copy()
70 70 self.dataOut.flagNoData = False
71 71 self.dataOut.utctimeInit = self.dataIn.utctime
72 72 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
73 73 return
74 74
75 75 #---------------------- Spectra Data ---------------------------
76 76
77 77 if self.dataIn.type == "Spectra":
78 78
79 79 self.dataOut.data_pre = (self.dataIn.data_spc, self.dataIn.data_cspc)
80 80 self.dataOut.data_spc = self.dataIn.data_spc
81 81 self.dataOut.data_cspc = self.dataIn.data_cspc
82 82 self.dataOut.nProfiles = self.dataIn.nProfiles
83 83 self.dataOut.nIncohInt = self.dataIn.nIncohInt
84 84 self.dataOut.nFFTPoints = self.dataIn.nFFTPoints
85 85 self.dataOut.ippFactor = self.dataIn.ippFactor
86 86 #self.dataOut.normFactor = self.dataIn.getNormFactor()
87 87 self.dataOut.pairsList = self.dataIn.pairsList
88 88 self.dataOut.groupList = self.dataIn.pairsList
89 89 self.dataOut.abscissaList = self.dataIn.getVelRange(1)
90 90 self.dataOut.flagNoData = False
91 91
92 92 #---------------------- Correlation Data ---------------------------
93 93
94 94 if self.dataIn.type == "Correlation":
95 95 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.dataIn.splitFunctions()
96 96
97 97 self.dataOut.data_pre = (self.dataIn.data_cf[acf_ind,:], self.dataIn.data_cf[ccf_ind,:,:])
98 98 self.dataOut.normFactor = (self.dataIn.normFactor[acf_ind,:], self.dataIn.normFactor[ccf_ind,:])
99 99 self.dataOut.groupList = (acf_pairs, ccf_pairs)
100 100
101 101 self.dataOut.abscissaList = self.dataIn.lagRange
102 102 self.dataOut.noise = self.dataIn.noise
103 103 self.dataOut.data_SNR = self.dataIn.SNR
104 104 self.dataOut.flagNoData = False
105 105 self.dataOut.nAvg = self.dataIn.nAvg
106 106
107 107 #---------------------- Parameters Data ---------------------------
108 108
109 109 if self.dataIn.type == "Parameters":
110 110 self.dataOut.copy(self.dataIn)
111 111 self.dataOut.utctimeInit = self.dataIn.utctime
112 112 self.dataOut.flagNoData = False
113 113
114 114 return True
115 115
116 116 self.__updateObjFromInput()
117 117 self.dataOut.utctimeInit = self.dataIn.utctime
118 118 self.dataOut.paramInterval = self.dataIn.timeInterval
119 119
120 120 return
121 121
122 122 class SpectralMoments(Operation):
123 123
124 124 '''
125 125 Function SpectralMoments()
126 126
127 127 Calculates moments (power, mean, standard deviation) and SNR of the signal
128 128
129 129 Type of dataIn: Spectra
130 130
131 131 Configuration Parameters:
132 132
133 133 dirCosx : Direction cosine in the X axis
134 134 dirCosy : Direction cosine in the Y axis
135 135
136 136 elevation :
137 137 azimuth :
138 138
139 139 Input:
140 140 channelList : simple channel list to select e.g. [2,3,7]
141 141 self.dataOut.data_pre : Spectral data
142 142 self.dataOut.abscissaList : List of frequencies
143 143 self.dataOut.noise : Noise level per channel
144 144
145 145 Affected:
146 146 self.dataOut.data_param : Parameters per channel
147 147 self.dataOut.data_SNR : SNR per channel
148 148
149 149 '''
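# Self-contained numeric sketch of the moment estimates computed by __calculateMoments
# below (power, Doppler shift, spectral width, SNR), using the same weighted-sum
# formulas on a synthetic Gaussian line over a flat noise floor. Values are illustrative.

import numpy

freq = numpy.linspace(-100., 100., 256)                            # frequency/velocity axis
n0 = 1.0                                                           # noise level
spec = n0 + 50.0 * numpy.exp(-0.5 * ((freq - 20.0) / 5.0)**2)      # synthetic spectral line

valid = spec > n0
power = (spec[valid] - n0).sum()
fd = ((spec[valid] - n0) * freq[valid]).sum() / power              # first moment (Doppler shift)
w = numpy.sqrt(((spec[valid] - n0) * (freq[valid] - fd)**2).sum() / power)  # spectral width
snr = (spec.mean() - n0) / n0

print "power=%.1f fd=%.2f width=%.2f snr=%.2f" % (power, fd, w, snr)   # fd ~ 20, width ~ 5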
150 150
151 151 def run(self, dataOut):
152 152
153 153 #dataOut.data_pre = dataOut.data_pre[0]
154 154 data = dataOut.data_pre[0]
155 155 absc = dataOut.abscissaList[:-1]
156 156 noise = dataOut.noise
157 157 nChannel = data.shape[0]
158 158 data_param = numpy.zeros((nChannel, 4, data.shape[2]))
159 159
160 160 for ind in range(nChannel):
161 161 data_param[ind,:,:] = self.__calculateMoments(data[ind,:,:], absc, noise[ind])
162 162
163 163 dataOut.data_param = data_param[:,1:,:]
164 164 dataOut.data_SNR = data_param[:,0]
165 165 dataOut.data_DOP = data_param[:,1]
166 166 dataOut.data_MEAN = data_param[:,2]
167 167 dataOut.data_STD = data_param[:,3]
168 168 return
169 169
170 170 def __calculateMoments(self, oldspec, oldfreq, n0, nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
171 171
172 172 if (nicoh is None): nicoh = 1
173 173 if (graph is None): graph = 0
174 174 if (smooth is None): smooth = 0
175 175 elif (smooth < 3): smooth = 0
176 176
177 177 if (type1 is None): type1 = 0
178 178 if (fwindow is None): fwindow = numpy.zeros(oldfreq.size) + 1
179 179 if (snrth is None): snrth = -3
180 180 if (dc is None): dc = 0
181 181 if (aliasing is None): aliasing = 0
182 182 if (oldfd is None): oldfd = 0
183 183 if (wwauto is None): wwauto = 0
184 184
185 185 if (n0 < 1.e-20): n0 = 1.e-20
186 186
187 187 freq = oldfreq
188 188 vec_power = numpy.zeros(oldspec.shape[1])
189 189 vec_fd = numpy.zeros(oldspec.shape[1])
190 190 vec_w = numpy.zeros(oldspec.shape[1])
191 191 vec_snr = numpy.zeros(oldspec.shape[1])
192 192
193 193 for ind in range(oldspec.shape[1]):
194 194
195 195 spec = oldspec[:,ind]
196 196 aux = spec*fwindow
197 197 max_spec = aux.max()
198 198 m = list(aux).index(max_spec)
199 199
200 200 #Smooth
201 201 if (smooth == 0): spec2 = spec
202 202 else: spec2 = ndimage.filters.uniform_filter1d(spec,size=smooth) # ndimage is imported from scipy above
203 203
204 204 # Moments calculation
205 205 bb = spec2[range(m,spec2.size)]
206 206 bb = (bb<n0).nonzero()
207 207 bb = bb[0]
208 208
209 209 ss = spec2[range(0,m + 1)]
210 210 ss = (ss<n0).nonzero()
211 211 ss = ss[0]
212 212
213 213 if (bb.size == 0):
214 214 bb0 = spec.size - 1 - m
215 215 else:
216 216 bb0 = bb[0] - 1
217 217 if (bb0 < 0):
218 218 bb0 = 0
219 219
220 220 if (ss.size == 0): ss1 = 1
221 221 else: ss1 = max(ss) + 1
222 222
223 223 if (ss1 > m): ss1 = m
224 224
225 225 valid = numpy.asarray(range(int(m + bb0 - ss1 + 1))) + ss1
226 226 power = ((spec2[valid] - n0)*fwindow[valid]).sum()
227 227 fd = ((spec2[valid]- n0)*freq[valid]*fwindow[valid]).sum()/power
228 228 w = math.sqrt(((spec2[valid] - n0)*fwindow[valid]*(freq[valid]- fd)**2).sum()/power)
229 229 snr = (spec2.mean()-n0)/n0
230 230
231 231 if (snr < 1.e-20) :
232 232 snr = 1.e-20
233 233
234 234 vec_power[ind] = power
235 235 vec_fd[ind] = fd
236 236 vec_w[ind] = w
237 237 vec_snr[ind] = snr
238 238
239 239 moments = numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))
240 240 return moments
241 241
242 242 #------------------ Get SA Parameters --------------------------
243 243
244 244 def GetSAParameters(self):
245 245 # SA in frequency
246 246 pairslist = self.dataOut.groupList
247 247 num_pairs = len(pairslist)
248 248
249 249 vel = self.dataOut.abscissaList
250 250 spectra = self.dataOut.data_pre[0]
251 251 cspectra = self.dataOut.data_pre[1]
252 252 delta_v = vel[1] - vel[0]
253 253
254 254 #Calculating the power spectrum
255 255 spc_pow = numpy.sum(spectra, 3)*delta_v
256 256 #Normalizing Spectra
257 257 norm_spectra = spectra/spc_pow
258 258 #Calculating the norm_spectra at peak
259 259 max_spectra = numpy.max(norm_spectra, 3)
260 260
261 261 #Normalizing Cross Spectra
262 262 norm_cspectra = numpy.zeros(cspectra.shape)
263 263
264 264 for i in range(num_chan):
265 265 norm_cspectra[i,:,:] = cspectra[i,:,:]/numpy.sqrt(spc_pow[pairslist[i][0],:]*spc_pow[pairslist[i][1],:])
266 266
267 267 max_cspectra = numpy.max(norm_cspectra,2)
268 268 max_cspectra_index = numpy.argmax(norm_cspectra, 2)
269 269
270 270 for i in range(num_pairs):
271 271 cspc_par[i,:,:] = __calculateMoments(norm_cspectra)
272 272 #------------------- Get Lags ----------------------------------
273 273
274 274 class SALags(Operation):
275 275 '''
276 276 Function SALags()
277 277
278 278 Input:
279 279 self.dataOut.data_pre
280 280 self.dataOut.abscissaList
281 281 self.dataOut.noise
282 282 self.dataOut.normFactor
283 283 self.dataOut.data_SNR
284 284 self.dataOut.groupList
285 285 self.dataOut.nChannels
286 286
287 287 Affected:
288 288 self.dataOut.data_param
289 289
290 290 '''
291 291 def run(self, dataOut):
292 292 data_acf = dataOut.data_pre[0]
293 293 data_ccf = dataOut.data_pre[1]
294 294 normFactor_acf = dataOut.normFactor[0]
295 295 normFactor_ccf = dataOut.normFactor[1]
296 296 pairs_acf = dataOut.groupList[0]
297 297 pairs_ccf = dataOut.groupList[1]
298 298
299 299 nHeights = dataOut.nHeights
300 300 absc = dataOut.abscissaList
301 301 noise = dataOut.noise
302 302 SNR = dataOut.data_SNR
303 303 nChannels = dataOut.nChannels
304 304 # pairsList = dataOut.groupList
305 305 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairsList, nChannels)
306 306
307 307 for l in range(len(pairs_acf)):
308 308 data_acf[l,:,:] = data_acf[l,:,:]/normFactor_acf[l,:]
309 309
310 310 for l in range(len(pairs_ccf)):
311 311 data_ccf[l,:,:] = data_ccf[l,:,:]/normFactor_ccf[l,:]
312 312
313 313 dataOut.data_param = numpy.zeros((len(pairs_ccf)*2 + 1, nHeights))
314 314 dataOut.data_param[:-1,:] = self.__calculateTaus(data_acf, data_ccf, absc)
315 315 dataOut.data_param[-1,:] = self.__calculateLag1Phase(data_acf, absc)
316 316 return
317 317
318 318 # def __getPairsAutoCorr(self, pairsList, nChannels):
319 319 #
320 320 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
321 321 #
322 322 # for l in range(len(pairsList)):
323 323 # firstChannel = pairsList[l][0]
324 324 # secondChannel = pairsList[l][1]
325 325 #
326 326 # #Obteniendo pares de Autocorrelacion
327 327 # if firstChannel == secondChannel:
328 328 # pairsAutoCorr[firstChannel] = int(l)
329 329 #
330 330 # pairsAutoCorr = pairsAutoCorr.astype(int)
331 331 #
332 332 # pairsCrossCorr = range(len(pairsList))
333 333 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
334 334 #
335 335 # return pairsAutoCorr, pairsCrossCorr
336 336
337 337 def __calculateTaus(self, data_acf, data_ccf, lagRange):
338 338
339 339 lag0 = data_acf.shape[1]/2
340 340 # Autocorrelation function
341 341 mean_acf = stats.nanmean(data_acf, axis = 0)
342 342
343 343 # Get TauCross index
344 344 ind_ccf = data_ccf.argmax(axis = 1)
345 345 # Get TauAuto index
346 346 ind_acf = numpy.zeros(ind_ccf.shape,dtype = 'int')
347 347 ccf_lag0 = data_ccf[:,lag0,:]
348 348
349 349 for i in range(ccf_lag0.shape[0]):
350 350 ind_acf[i,:] = numpy.abs(mean_acf - ccf_lag0[i,:]).argmin(axis = 0)
351 351
352 352 # Get TauCross and TauAuto
353 353 tau_ccf = lagRange[ind_ccf]
354 354 tau_acf = lagRange[ind_acf]
355 355
356 356 Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0])
357 357
358 358 tau_ccf[Nan1,Nan2] = numpy.nan
359 359 tau_acf[Nan1,Nan2] = numpy.nan
360 360 tau = numpy.vstack((tau_ccf,tau_acf))
361 361
362 362 return tau
363 363
364 364 def __calculateLag1Phase(self, data, lagTRange):
365 365 data1 = stats.nanmean(data, axis = 0)
366 366 lag1 = numpy.where(lagTRange == 0)[0][0] + 1
367 367
368 368 phase = numpy.angle(data1[lag1,:])
369 369
370 370 return phase
371 371
372 372 class SpectralFitting(Operation):
373 373 '''
374 374 Function SpectralFitting()
375 375
376 376 Input:
377 377 Output:
378 378 Variables modified:
379 379 '''
380 380
381 381 def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
382 382
383 383
384 384 if path != None:
385 385 sys.path.append(path)
386 386 self.dataOut.library = importlib.import_module(file)
387 387
388 388 #To be inserted as a parameter
389 389 groupArray = numpy.array(groupList)
390 390 # groupArray = numpy.array([[0,1],[2,3]])
391 391 self.dataOut.groupList = groupArray
392 392
393 393 nGroups = groupArray.shape[0]
394 394 nChannels = self.dataIn.nChannels
395 395 nHeights=self.dataIn.heightList.size
396 396
397 397 #Parameters Array
398 398 self.dataOut.data_param = None
399 399
400 400 #Set constants
401 401 constants = self.dataOut.library.setConstants(self.dataIn)
402 402 self.dataOut.constants = constants
403 403 M = self.dataIn.normFactor
404 404 N = self.dataIn.nFFTPoints
405 405 ippSeconds = self.dataIn.ippSeconds
406 406 K = self.dataIn.nIncohInt
407 407 pairsArray = numpy.array(self.dataIn.pairsList)
408 408
409 409 #List of possible combinations
410 410 listComb = list(itertools.combinations(numpy.arange(groupArray.shape[1]),2))
411 411 indCross = numpy.zeros(len(listComb), dtype = 'int')
412 412
413 413 if getSNR:
414 414 listChannels = groupArray.reshape((groupArray.size))
415 415 listChannels.sort()
416 416 noise = self.dataIn.getNoise()
417 417 self.dataOut.data_SNR = self.__getSNR(self.dataIn.data_spc[listChannels,:,:], noise[listChannels])
418 418
419 419 for i in range(nGroups):
420 420 coord = groupArray[i,:]
421 421
422 422 #Input data array
423 423 data = self.dataIn.data_spc[coord,:,:]/(M*N)
424 424 data = data.reshape((data.shape[0]*data.shape[1],data.shape[2]))
425 425
426 426 #Cross Spectra data array for Covariance Matrices
427 427 ind = 0
428 428 for pairs in listComb:
429 429 pairsSel = numpy.array([coord[pairs[0]],coord[pairs[1]]])
430 430 indCross[ind] = int(numpy.where(numpy.all(pairsArray == pairsSel, axis = 1))[0][0])
431 431 ind += 1
432 432 dataCross = self.dataIn.data_cspc[indCross,:,:]/(M*N)
433 433 dataCross = dataCross**2/K
434 434
435 435 for h in range(nHeights):
436 436 # print self.dataOut.heightList[h]
437 437
438 438 #Input
439 439 d = data[:,h]
440 440
441 441 #Covariance Matrix
442 442 D = numpy.diag(d**2/K)
443 443 ind = 0
444 444 for pairs in listComb:
445 445 #Coordinates in Covariance Matrix
446 446 x = pairs[0]
447 447 y = pairs[1]
448 448 #Channel Index
449 449 S12 = dataCross[ind,:,h]
450 450 D12 = numpy.diag(S12)
451 451 #Completing Covariance Matrix with Cross Spectra
452 452 D[x*N:(x+1)*N,y*N:(y+1)*N] = D12
453 453 D[y*N:(y+1)*N,x*N:(x+1)*N] = D12
454 454 ind += 1
455 455 Dinv=numpy.linalg.inv(D)
456 456 L=numpy.linalg.cholesky(Dinv)
457 457 LT=L.T
458 458
459 459 dp = numpy.dot(LT,d)
460 460
461 461 #Initial values
462 462 data_spc = self.dataIn.data_spc[coord,:,h]
463 463
464 464 if (h>0)and(error1[3]<5):
465 465 p0 = self.dataOut.data_param[i,:,h-1]
466 466 else:
467 467 p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i))
468 468
469 469 try:
470 470 #Least Squares
471 471 minp,covp,infodict,mesg,ier = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants),full_output=True)
472 472 # minp,covp = optimize.leastsq(self.__residFunction,p0,args=(dp,LT,constants))
473 473 #Chi square error
474 474 error0 = numpy.sum(infodict['fvec']**2)/(2*N)
475 475 #Error with Jacobian
476 476 error1 = self.dataOut.library.errorFunction(minp,constants,LT)
477 477 except:
478 478 minp = p0*numpy.nan
479 479 error0 = numpy.nan
480 480 error1 = p0*numpy.nan
481 481
482 482 #Save
483 483 if self.dataOut.data_param is None:
484 484 self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights))*numpy.nan
485 485 self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights))*numpy.nan
486 486
487 487 self.dataOut.data_error[i,:,h] = numpy.hstack((error0,error1))
488 488 self.dataOut.data_param[i,:,h] = minp
489 489 return
490 490
491 491 def __residFunction(self, p, dp, LT, constants):
492 492
493 493 fm = self.dataOut.library.modelFunction(p, constants)
494 494 fmp=numpy.dot(LT,fm)
495 495
496 496 return dp-fmp
497 497
498 498 def __getSNR(self, z, noise):
499 499
500 500 avg = numpy.average(z, axis=1)
501 501 SNR = (avg.T-noise)/noise
502 502 SNR = SNR.T
503 503 return SNR
504 504
505 505 def __chisq(p,chindex,hindex):
506 506 #similar to Resid but calculates CHI**2
507 507 [LT,d,fm]=setupLTdfm(p,chindex,hindex)
508 508 dp=numpy.dot(LT,d)
509 509 fmp=numpy.dot(LT,fm)
510 510 chisq=numpy.dot((dp-fmp).T,(dp-fmp))
511 511 return chisq
512 512
513 513 class WindProfiler(Operation):
514 514
515 515 __isConfig = False
516 516
517 517 __initime = None
518 518 __lastdatatime = None
519 519 __integrationtime = None
520 520
521 521 __buffer = None
522 522
523 523 __dataReady = False
524 524
525 525 __firstdata = None
526 526
527 527 n = None
528 528
529 529 def __calculateCosDir(self, elev, azim):
530 530 zen = (90 - elev)*numpy.pi/180
531 531 azim = azim*numpy.pi/180
532 532 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
533 533 cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2)
534 534
535 535 signX = numpy.sign(numpy.cos(azim))
536 536 signY = numpy.sign(numpy.sin(azim))
537 537
538 538 cosDirX = numpy.copysign(cosDirX, signX)
539 539 cosDirY = numpy.copysign(cosDirY, signY)
540 540 return cosDirX, cosDirY
541 541
542 542 def __calculateAngles(self, theta_x, theta_y, azimuth):
543 543
544 544 dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2)
545 545 zenith_arr = numpy.arccos(dir_cosw)
546 546 azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180
547 547
548 548 dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr)
549 549 dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr)
550 550
551 551 return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw
552 552
553 553 def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly):
554 554
555 555 #
556 556 if horOnly:
557 557 A = numpy.c_[dir_cosu,dir_cosv]
558 558 else:
559 559 A = numpy.c_[dir_cosu,dir_cosv,dir_cosw]
560 560 A = numpy.asmatrix(A)
561 561 A1 = numpy.linalg.inv(A.transpose()*A)*A.transpose()
562 562
563 563 return A1
564 564
565 565 def __correctValues(self, heiRang, phi, velRadial, SNR):
566 566 listPhi = phi.tolist()
567 567 maxid = listPhi.index(max(listPhi))
568 568 minid = listPhi.index(min(listPhi))
569 569
570 570 rango = range(len(phi))
571 571 # rango = numpy.delete(rango,maxid)
572 572
573 573 heiRang1 = heiRang*math.cos(phi[maxid])
574 574 heiRangAux = heiRang*math.cos(phi[minid])
575 575 indOut = (heiRang1 < heiRangAux[0]).nonzero()
576 576 heiRang1 = numpy.delete(heiRang1,indOut)
577 577
578 578 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
579 579 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
580 580
581 581 for i in rango:
582 582 x = heiRang*math.cos(phi[i])
583 583 y1 = velRadial[i,:]
584 584 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
585 585
586 586 x1 = heiRang1
587 587 y11 = f1(x1)
588 588
589 589 y2 = SNR[i,:]
590 590 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
591 591 y21 = f2(x1)
592 592
593 593 velRadial1[i,:] = y11
594 594 SNR1[i,:] = y21
595 595
596 596 return heiRang1, velRadial1, SNR1
597 597
598 598 def __calculateVelUVW(self, A, velRadial):
599 599
600 600 # Matrix operation
601 601 # velUVW = numpy.zeros((velRadial.shape[1],3))
602 602 # for ind in range(velRadial.shape[1]):
603 603 # velUVW[ind,:] = numpy.dot(A,velRadial[:,ind])
604 604 # velUVW = velUVW.transpose()
605 605 velUVW = numpy.zeros((A.shape[0],velRadial.shape[1]))
606 606 velUVW[:,:] = numpy.dot(A,velRadial)
607 607
608 608
609 609 return velUVW
610 610
611 611 # def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0):
612 612
613 613 def techniqueDBS(self, kwargs):
614 614 """
615 615 Function that implements Doppler Beam Swinging (DBS) technique.
616 616
617 617 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
618 618 Direction correction (if necessary), Ranges and SNR
619 619
620 620 Output: Winds estimation (Zonal, Meridional and Vertical)
621 621
622 622 Parameters affected: Winds, height range, SNR
623 623 """
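# Self-contained numeric sketch of the DBS inversion performed in this method:
# build the direction-cosine matrix A from the beam pointing (same convention as
# __calculateAngles above) and recover (zonal, meridional, vertical) winds from the
# radial velocities by least squares. Beam angles and winds are illustrative only.

import numpy

zen = numpy.radians([10., 10., 0.])                    # zenith angle of each beam
azi = numpy.radians([0., 90., 0.])                     # azimuth of each beam

A = numpy.column_stack((numpy.sin(azi) * numpy.sin(zen),   # dir_cosu
                        numpy.cos(azi) * numpy.sin(zen),   # dir_cosv
                        numpy.cos(zen)))                   # dir_cosw

true_wind = numpy.array([10., -5., 0.5])               # (u, v, w)
vrad = A.dot(true_wind)                                # simulated radial velocity per beam

print numpy.linalg.pinv(A).dot(vrad)                   # -> approximately [10., -5., 0.5]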
624 624 velRadial0 = kwargs['velRadial']
625 625 heiRang = kwargs['heightList']
626 626 SNR0 = kwargs['SNR']
627 627
628 628 if kwargs.has_key('dirCosx') and kwargs.has_key('dirCosy'):
629 629 theta_x = numpy.array(kwargs['dirCosx'])
630 630 theta_y = numpy.array(kwargs['dirCosy'])
631 631 else:
632 632 elev = numpy.array(kwargs['elevation'])
633 633 azim = numpy.array(kwargs['azimuth'])
634 634 theta_x, theta_y = self.__calculateCosDir(elev, azim)
635 635 azimuth = kwargs['correctAzimuth']
636 636 if kwargs.has_key('horizontalOnly'):
637 637 horizontalOnly = kwargs['horizontalOnly']
638 638 else: horizontalOnly = False
639 639 if kwargs.has_key('correctFactor'):
640 640 correctFactor = kwargs['correctFactor']
641 641 else: correctFactor = 1
642 642 if kwargs.has_key('channelList'):
643 643 channelList = kwargs['channelList']
644 644 if len(channelList) == 2:
645 645 horizontalOnly = True
646 646 arrayChannel = numpy.array(channelList)
647 647 param = param[arrayChannel,:,:]
648 648 theta_x = theta_x[arrayChannel]
649 649 theta_y = theta_y[arrayChannel]
650 650
651 651 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
652 652 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
653 653 A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly)
654 654
655 655 # Compute velocity components with DBS
656 656 winds = self.__calculateVelUVW(A,velRadial1)
657 657
658 658 return winds, heiRang1, SNR1
659 659
660 660 def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None):
661 661
662 662 nPairs = len(pairs_ccf)
663 663 posx = numpy.asarray(posx)
664 664 posy = numpy.asarray(posy)
665 665
666 666 # Inverse rotation to align with the azimuth
667 667 if azimuth!= None:
668 668 azimuth = azimuth*math.pi/180
669 669 posx1 = posx*math.cos(azimuth) + posy*math.sin(azimuth)
670 670 posy1 = -posx*math.sin(azimuth) + posy*math.cos(azimuth)
671 671 else:
672 672 posx1 = posx
673 673 posy1 = posy
674 674
675 675 # Distance calculation
676 676 distx = numpy.zeros(nPairs)
677 677 disty = numpy.zeros(nPairs)
678 678 dist = numpy.zeros(nPairs)
679 679 ang = numpy.zeros(nPairs)
680 680
681 681 for i in range(nPairs):
682 682 distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]]
683 683 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
684 684 dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2)
685 685 ang[i] = numpy.arctan2(disty[i],distx[i])
686 686
687 687 return distx, disty, dist, ang
688 688 # Matrix calculation
689 689 # nPairs = len(pairs)
690 690 # ang1 = numpy.zeros((nPairs, 2, 1))
691 691 # dist1 = numpy.zeros((nPairs, 2, 1))
692 692 #
693 693 # for j in range(nPairs):
694 694 # dist1[j,0,0] = dist[pairs[j][0]]
695 695 # dist1[j,1,0] = dist[pairs[j][1]]
696 696 # ang1[j,0,0] = ang[pairs[j][0]]
697 697 # ang1[j,1,0] = ang[pairs[j][1]]
698 698 #
699 699 # return distx,disty, dist1,ang1
700 700
701 701
702 702 def __calculateVelVer(self, phase, lagTRange, _lambda):
703 703
704 704 Ts = lagTRange[1] - lagTRange[0]
705 705 velW = -_lambda*phase/(4*math.pi*Ts)
706 706
707 707 return velW
708 708
709 709 def __calculateVelHorDir(self, dist, tau1, tau2, ang):
710 710 nPairs = tau1.shape[0]
711 711 nHeights = tau1.shape[1]
712 712 vel = numpy.zeros((nPairs,3,nHeights))
713 713 dist1 = numpy.reshape(dist, (dist.size,1))
714 714
715 715 angCos = numpy.cos(ang)
716 716 angSin = numpy.sin(ang)
717 717
718 718 vel0 = dist1*tau1/(2*tau2**2)
719 719 vel[:,0,:] = (vel0*angCos).sum(axis = 1)
720 720 vel[:,1,:] = (vel0*angSin).sum(axis = 1)
721 721
722 722 ind = numpy.where(numpy.isinf(vel))
723 723 vel[ind] = numpy.nan
724 724
725 725 return vel
726 726
727 727 # def __getPairsAutoCorr(self, pairsList, nChannels):
728 728 #
729 729 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
730 730 #
731 731 # for l in range(len(pairsList)):
732 732 # firstChannel = pairsList[l][0]
733 733 # secondChannel = pairsList[l][1]
734 734 #
735 735 # #Obteniendo pares de Autocorrelacion
736 736 # if firstChannel == secondChannel:
737 737 # pairsAutoCorr[firstChannel] = int(l)
738 738 #
739 739 # pairsAutoCorr = pairsAutoCorr.astype(int)
740 740 #
741 741 # pairsCrossCorr = range(len(pairsList))
742 742 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
743 743 #
744 744 # return pairsAutoCorr, pairsCrossCorr
745 745
746 746 # def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor):
747 747 def techniqueSA(self, kwargs):
748 748
749 749 """
750 750 Function that implements Spaced Antenna (SA) technique.
751 751
752 752 Input: Antenna positions, cross-correlation/auto-correlation lag times (taus),
753 753 antenna azimuth, wavelength and correction factor (if necessary)
754 754
755 755 Output: Winds estimation (Zonal, Meridional and Vertical)
756 756
757 757 Parameters affected: Winds
758 758 """
759 759 position_x = kwargs['positionX']
760 760 position_y = kwargs['positionY']
761 761 azimuth = kwargs['azimuth']
762 762
763 763 if kwargs.has_key('correctFactor'):
764 764 correctFactor = kwargs['correctFactor']
765 765 else:
766 766 correctFactor = 1
767 767
768 768 groupList = kwargs['groupList']
769 769 pairs_ccf = groupList[1]
770 770 tau = kwargs['tau']
771 771 _lambda = kwargs['_lambda']
772 772
773 773 #Cross Correlation pairs obtained
774 774 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels)
775 775 # pairsArray = numpy.array(pairsList)[pairsCrossCorr]
776 776 # pairsSelArray = numpy.array(pairsSelected)
777 777 # pairs = []
778 778 #
779 779 # #Wind estimation pairs obtained
780 780 # for i in range(pairsSelArray.shape[0]/2):
781 781 # ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0]
782 782 # ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0]
783 783 # pairs.append((ind1,ind2))
784 784
785 785 indtau = tau.shape[0]/2
786 786 tau1 = tau[:indtau,:]
787 787 tau2 = tau[indtau:-1,:]
788 788 # tau1 = tau1[pairs,:]
789 789 # tau2 = tau2[pairs,:]
790 790 phase1 = tau[-1,:]
791 791
792 792 #---------------------------------------------------------------------
793 793 # Direct method
794 794 distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth)
795 795 winds = self.__calculateVelHorDir(dist, tau1, tau2, ang)
796 796 winds = stats.nanmean(winds, axis=0)
797 797 #---------------------------------------------------------------------
798 798 # General method
799 799 # distx, disty, dist = self.calculateDistance(position_x,position_y,pairsCrossCorr, pairsList, azimuth)
800 800 # #Calculo Coeficientes de Funcion de Correlacion
801 801 # F,G,A,B,H = self.calculateCoef(tau1,tau2,distx,disty,n)
802 802 # #Calculo de Velocidades
803 803 # winds = self.calculateVelUV(F,G,A,B,H)
804 804
805 805 #---------------------------------------------------------------------
806 806 winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda)
807 807 winds = correctFactor*winds
808 808 return winds
809 809
810 810 def __checkTime(self, currentTime, paramInterval, outputInterval):
811 811
812 812 dataTime = currentTime + paramInterval
813 813 deltaTime = dataTime - self.__initime
814 814
815 815 if deltaTime >= outputInterval or deltaTime < 0:
816 816 self.__dataReady = True
817 817 return
818 818
819 819 def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax, binkm=2):
820 820 '''
821 821 Function that implements winds estimation technique with detected meteors.
822 822
823 823 Input: Detected meteors, Minimum meteor quantity to wind estimation
824 824
825 825 Output: Winds estimation (Zonal and Meridional)
826 826
827 827 Parameters affected: Winds
828 828 '''
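# Self-contained sketch of the per-bin fit performed below: every detected meteor
# contributes one equation vr = l*u + m*v, with l and m derived from its zenith and
# azimuth, and the horizontal wind follows by least squares. Synthetic values only.

import numpy

numpy.random.seed(0)
nMet = 30
zen = numpy.radians(numpy.random.uniform(10., 60., nMet))      # zenith angles
azim = numpy.radians(numpy.random.uniform(0., 360., nMet))     # azimuth angles

l = numpy.sin(zen) * numpy.sin(azim)
m = numpy.sin(zen) * numpy.cos(azim)

true_u, true_v = 25., -10.                                     # zonal / meridional wind
vel = l * true_u + m * true_v + numpy.random.normal(0., 1., nMet)   # radial velocities + noise

A = numpy.vstack((l, m)).transpose()
A1 = numpy.dot(numpy.linalg.inv(numpy.dot(A.transpose(), A)), A.transpose())
print numpy.dot(A1, vel)                                       # -> approximately [25., -10.]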
829 829 # print arrayMeteor.shape
830 830 #Settings
831 831 nInt = (heightMax - heightMin)/binkm
832 832 # print nInt
833 833 nInt = int(nInt)
834 834 # print nInt
835 835 winds = numpy.zeros((2,nInt))*numpy.nan
836 836
837 837 #Filter errors
838 838 error = numpy.where(arrayMeteor[:,-1] == 0)[0]
839 839 finalMeteor = arrayMeteor[error,:]
840 840
841 841 #Meteor Histogram
842 842 finalHeights = finalMeteor[:,2]
843 843 hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax))
844 844 nMeteorsPerI = hist[0]
845 845 heightPerI = hist[1]
846 846
847 847 #Sort of meteors
848 848 indSort = finalHeights.argsort()
849 849 finalMeteor2 = finalMeteor[indSort,:]
850 850
851 851 # Calculating winds
852 852 ind1 = 0
853 853 ind2 = 0
854 854
855 855 for i in range(nInt):
856 856 nMet = nMeteorsPerI[i]
857 857 ind1 = ind2
858 858 ind2 = ind1 + nMet
859 859
860 860 meteorAux = finalMeteor2[ind1:ind2,:]
861 861
862 862 if meteorAux.shape[0] >= meteorThresh:
863 863 vel = meteorAux[:, 6]
864 864 zen = meteorAux[:, 4]*numpy.pi/180
865 865 azim = meteorAux[:, 3]*numpy.pi/180
866 866
867 867 n = numpy.cos(zen)
868 868 # m = (1 - n**2)/(1 - numpy.tan(azim)**2)
869 869 # l = m*numpy.tan(azim)
870 870 l = numpy.sin(zen)*numpy.sin(azim)
871 871 m = numpy.sin(zen)*numpy.cos(azim)
872 872
873 873 A = numpy.vstack((l, m)).transpose()
874 874 A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose())
875 875 windsAux = numpy.dot(A1, vel)
876 876
877 877 winds[0,i] = windsAux[0]
878 878 winds[1,i] = windsAux[1]
879 879
880 880 return winds, heightPerI[:-1]
881 881
882 882 def techniqueNSM_SA(self, **kwargs):
883 883 metArray = kwargs['metArray']
884 884 heightList = kwargs['heightList']
885 885 timeList = kwargs['timeList']
886 886
887 887 rx_location = kwargs['rx_location']
888 888 groupList = kwargs['groupList']
889 889 azimuth = kwargs['azimuth']
890 890 dfactor = kwargs['dfactor']
891 891 k = kwargs['k']
892 892
893 893 azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth)
894 894 d = dist*dfactor
895 895 #Phase calculation
896 896 metArray1 = self.__getPhaseSlope(metArray, heightList, timeList)
897 897
898 898 metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities
899 899
900 900 velEst = numpy.zeros((heightList.size,2))*numpy.nan
901 901 azimuth1 = azimuth1*numpy.pi/180
902 902
903 903 for i in range(heightList.size):
904 904 h = heightList[i]
905 905 indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0]
906 906 metHeight = metArray1[indH,:]
907 907 if metHeight.shape[0] >= 2:
908 908 velAux = numpy.asmatrix(metHeight[:,-2]).T #Radial Velocities
909 909 iazim = metHeight[:,1].astype(int)
910 910 azimAux = numpy.asmatrix(azimuth1[iazim]).T #Azimuths
911 911 A = numpy.hstack((numpy.cos(azimAux),numpy.sin(azimAux)))
912 912 A = numpy.asmatrix(A)
913 913 A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose()
914 914 velHor = numpy.dot(A1,velAux)
915 915
916 916 velEst[i,:] = numpy.squeeze(velHor)
917 917 return velEst
918 918
919 919 def __getPhaseSlope(self, metArray, heightList, timeList):
920 920 meteorList = []
921 921 #utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2
922 922 #Putting back together the meteor matrix
923 923 utctime = metArray[:,0]
924 924 uniqueTime = numpy.unique(utctime)
925 925
926 926 phaseDerThresh = 0.5
927 927 ippSeconds = timeList[1] - timeList[0]
928 928 sec = numpy.where(timeList>1)[0][0]
929 929 nPairs = metArray.shape[1] - 6
930 930 nHeights = len(heightList)
931 931
932 932 for t in uniqueTime:
933 933 metArray1 = metArray[utctime==t,:]
934 934 # phaseDerThresh = numpy.pi/4 #reducir Phase thresh
935 935 tmet = metArray1[:,1].astype(int)
936 936 hmet = metArray1[:,2].astype(int)
937 937
938 938 metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1))
939 939 metPhase[:,:] = numpy.nan
940 940 metPhase[:,hmet,tmet] = metArray1[:,6:].T
941 941
942 942 #Delete short trails
943 943 metBool = ~numpy.isnan(metPhase[0,:,:])
944 944 heightVect = numpy.sum(metBool, axis = 1)
945 945 metBool[heightVect<sec,:] = False
946 946 metPhase[:,heightVect<sec,:] = numpy.nan
947 947
948 948 #Derivative
949 949 metDer = numpy.abs(metPhase[:,:,1:] - metPhase[:,:,:-1])
950 950 phDerAux = numpy.dstack((numpy.full((nPairs,nHeights,1), False, dtype=bool),metDer > phaseDerThresh))
951 951 metPhase[phDerAux] = numpy.nan
952 952
953 953 #--------------------------METEOR DETECTION -----------------------------------------
954 954 indMet = numpy.where(numpy.any(metBool,axis=1))[0]
955 955
956 956 for p in numpy.arange(nPairs):
957 957 phase = metPhase[p,:,:]
958 958 phDer = metDer[p,:,:]
959 959
960 960 for h in indMet:
961 961 height = heightList[h]
962 962 phase1 = phase[h,:] #82
963 963 phDer1 = phDer[h,:]
964 964
965 965 phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap
966 966
967 967 indValid = numpy.where(~numpy.isnan(phase1))[0]
968 968 initMet = indValid[0]
969 969 endMet = 0
970 970
971 971 for i in range(len(indValid)-1):
972 972
973 973 #Time difference
974 974 inow = indValid[i]
975 975 inext = indValid[i+1]
976 976 idiff = inext - inow
977 977 #Phase difference
978 978 phDiff = numpy.abs(phase1[inext] - phase1[inow])
979 979
980 980 if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor
981 981 sizeTrail = inow - initMet + 1
982 982 if sizeTrail>3*sec: #Only trails longer than 3*sec (discard too-short meteors)
983 983 x = numpy.arange(initMet,inow+1)*ippSeconds
984 984 y = phase1[initMet:inow+1]
985 985 ynnan = ~numpy.isnan(y)
986 986 x = x[ynnan]
987 987 y = y[ynnan]
988 988 slope, intercept, r_value, p_value, std_err = stats.linregress(x,y)
989 989 ylin = x*slope + intercept
990 990 rsq = r_value**2
991 991 if rsq > 0.5:
992 992 vel = slope#*height*1000/(k*d)
993 993 estAux = numpy.array([utctime,p,height, vel, rsq])
994 994 meteorList.append(estAux)
995 995 initMet = inext
996 996 metArray2 = numpy.array(meteorList)
997 997
998 998 return metArray2
999 999
1000 1000 def __calculateAzimuth1(self, rx_location, pairslist, azimuth0):
1001 1001
1002 1002 azimuth1 = numpy.zeros(len(pairslist))
1003 1003 dist = numpy.zeros(len(pairslist))
1004 1004
1005 1005 for i in range(len(rx_location)):
1006 1006 ch0 = pairslist[i][0]
1007 1007 ch1 = pairslist[i][1]
1008 1008
1009 1009 diffX = rx_location[ch0][0] - rx_location[ch1][0]
1010 1010 diffY = rx_location[ch0][1] - rx_location[ch1][1]
1011 1011 azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi
1012 1012 dist[i] = numpy.sqrt(diffX**2 + diffY**2)
1013 1013
1014 1014 azimuth1 -= azimuth0
1015 1015 return azimuth1, dist
1016 1016
1017 1017 def techniqueNSM_DBS(self, **kwargs):
1018 1018 metArray = kwargs['metArray']
1019 1019 heightList = kwargs['heightList']
1020 1020 timeList = kwargs['timeList']
1021 1021 zenithList = kwargs['zenithList']
1022 1022 utctime = metArray[:,0]
1023 1023 cmet = metArray[:,1].astype(int)
1024 1024 nChan = numpy.max(cmet) + 1
1025 1025 nHeights = len(heightList)
1026 1026 
1027 1027 hmet = metArray[:,3].astype(int)
1028 1028 h1met = heightList[hmet]*zenithList[cmet]
1029 1029 vmet = metArray[:,5]
1030 1030
1031 1031 for i in range(nHeights - 1):
1032 1032 hmin = heightList[i]
1033 1033 hmax = heightList[i + 1]
1034 1034
1035 1035 vthisH = vmet[(h1met>=hmin) & (h1met<hmax)]
1036 1036
1037 1037
1038 1038
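#TODO: aggregate vthisH into data_output for each height bin; data_output is
#never assigned in this method as it stands.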
1039 1039 return data_output
1040 1040
1041 def run(self, dataOut, technique, **kwargs):
1041 def run(self, dataOut, technique, hmin=70, hmax=110, nHours=1, **kwargs):
1042 1042
1043 1043 param = dataOut.data_param
1044 1044 if dataOut.abscissaList is not None:
1045 1045 absc = dataOut.abscissaList[:-1]
1046 1046 #noise = dataOut.noise
1047 1047 heightList = dataOut.heightList
1048 1048 SNR = dataOut.data_SNR
1049 1049
1050 1050 if technique == 'DBS':
1051 1051
1052 1052 kwargs['velRadial'] = param[:,1,:] #Radial velocity
1053 1053 kwargs['heightList'] = heightList
1054 1054 kwargs['SNR'] = SNR
1055 1055
1056 1056 dataOut.data_output, dataOut.heightList, dataOut.data_SNR = self.techniqueDBS(kwargs) #DBS Function
1057 1057 dataOut.utctimeInit = dataOut.utctime
1058 1058 dataOut.outputInterval = dataOut.paramInterval
1059 1059
1060 1060 elif technique == 'SA':
1061 1061
1062 1062 #Parameters
1063 1063 # position_x = kwargs['positionX']
1064 1064 # position_y = kwargs['positionY']
1065 1065 # azimuth = kwargs['azimuth']
1066 1066 #
1067 1067 # if kwargs.has_key('crosspairsList'):
1068 1068 # pairs = kwargs['crosspairsList']
1069 1069 # else:
1070 1070 # pairs = None
1071 1071 #
1072 1072 # if kwargs.has_key('correctFactor'):
1073 1073 # correctFactor = kwargs['correctFactor']
1074 1074 # else:
1075 1075 # correctFactor = 1
1076 1076
1077 1077 # tau = dataOut.data_param
1078 1078 # _lambda = dataOut.C/dataOut.frequency
1079 1079 # pairsList = dataOut.groupList
1080 1080 # nChannels = dataOut.nChannels
1081 1081
1082 1082 kwargs['groupList'] = dataOut.groupList
1083 1083 kwargs['tau'] = dataOut.data_param
1084 1084 kwargs['_lambda'] = dataOut.C/dataOut.frequency
1085 1085 # dataOut.data_output = self.techniqueSA(pairs, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, absc, correctFactor)
1086 1086 dataOut.data_output = self.techniqueSA(kwargs)
1087 1087 dataOut.utctimeInit = dataOut.utctime
1088 1088 dataOut.outputInterval = dataOut.timeInterval
1089 1089
1090 1090 elif technique == 'Meteors':
1091 1091 dataOut.flagNoData = True
1092 1092 self.__dataReady = False
1093 1093
1094 1094 #hmin, hmax and nHours come from the run() arguments; only the remaining options are read from kwargs
1095 1095 meteorThresh = kwargs.get('meteorsPerBin', 6)
1096 1096 binkm = kwargs.get('BinKm', 2)
1115 1115
1116 1116 dataOut.outputInterval = nHours*3600
1117 1117
1118 1118 if self.__isConfig == False:
1119 1119 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
1120 1120 #Get Initial LTC time
1121 1121 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
1122 1122 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
1123 1123
1124 1124 self.__isConfig = True
1125 1125
1126 1126 if self.__buffer is None:
1127 1127 self.__buffer = dataOut.data_param
1128 1128 self.__firstdata = copy.copy(dataOut)
1129 1129
1130 1130 else:
1131 1131 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
1132 1132
1133 1133 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
1134 1134
1135 1135 if self.__dataReady:
1136 1136 dataOut.utctimeInit = self.__initime
1137 1137
1138 1138 self.__initime += dataOut.outputInterval #to erase time offset
1139 1139
1140 1140 dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax, binkm)
1141 1141 dataOut.flagNoData = False
1142 1142 self.__buffer = None
1143 1143
1144 1144 elif technique == 'Meteors1':
1145 1145 dataOut.flagNoData = True
1146 1146 self.__dataReady = False
1147 1147
1148 1148 nMins = kwargs.get('nMins', 20)
1149 1149 rx_location = kwargs.get('rx_location', [(0,1),(1,1),(1,0)])
1150 1150 azimuth = kwargs.get('azimuth', 51)
1151 1151 dfactor = kwargs.get('dfactor') #no default: must be provided for mode 'SA'
1152 1152 mode = kwargs.get('mode', 'SA')
1162 1162
1163 1163 #TODO: remove this later
1164 1164 if dataOut.groupList is None:
1165 1165 dataOut.groupList = [(0,1),(0,2),(1,2)]
1166 1166 groupList = dataOut.groupList
1167 1167 C = 3e8
1168 1168 freq = 50e6
1169 1169 lamb = C/freq
1170 1170 k = 2*numpy.pi/lamb
1171 1171
1172 1172 timeList = dataOut.abscissaList
1173 1173 heightList = dataOut.heightList
1174 1174
1175 1175 if self.__isConfig == False:
1176 1176 dataOut.outputInterval = nMins*60
1177 1177 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
1178 1178 #Get Initial LTC time
1179 1179 initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
1180 1180 minuteAux = initime.minute
1181 1181 minuteNew = int(numpy.floor(minuteAux/nMins)*nMins)
1182 1182 self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
1183 1183
1184 1184 self.__isConfig = True
1185 1185
1186 1186 if self.__buffer is None:
1187 1187 self.__buffer = dataOut.data_param
1188 1188 self.__firstdata = copy.copy(dataOut)
1189 1189
1190 1190 else:
1191 1191 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
1192 1192
1193 1193 self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
1194 1194
1195 1195 if self.__dataReady:
1196 1196 dataOut.utctimeInit = self.__initime
1197 1197 self.__initime += dataOut.outputInterval #to erase time offset
1198 1198
1199 1199 metArray = self.__buffer
1200 1200 if mode == 'SA':
1201 1201 dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList)
1202 1202 elif mode == 'DBS':
1203 1203 dataOut.data_output = self.techniqueNSM_DBS(metArray=metArray,heightList=heightList,timeList=timeList)
1204 1204 dataOut.data_output = dataOut.data_output.T
1205 1205 dataOut.flagNoData = False
1206 1206 self.__buffer = None
1207 1207
1208 1208 return
1209 1209
1210 1210 class EWDriftsEstimation(Operation):
1211 1211
1212 1212
1213 1213 def __correctValues(self, heiRang, phi, velRadial, SNR):
1214 1214 listPhi = phi.tolist()
1215 1215 maxid = listPhi.index(max(listPhi))
1216 1216 minid = listPhi.index(min(listPhi))
1217 1217
1218 1218 rango = range(len(phi))
1219 1219 # rango = numpy.delete(rango,maxid)
1220 1220
1221 1221 heiRang1 = heiRang*math.cos(phi[maxid])
1222 1222 heiRangAux = heiRang*math.cos(phi[minid])
1223 1223 indOut = (heiRang1 < heiRangAux[0]).nonzero()
1224 1224 heiRang1 = numpy.delete(heiRang1,indOut)
1225 1225
1226 1226 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
1227 1227 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
1228 1228
1229 1229 for i in rango:
1230 1230 x = heiRang*math.cos(phi[i])
1231 1231 y1 = velRadial[i,:]
1232 1232 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
1233 1233
1234 1234 x1 = heiRang1
1235 1235 y11 = f1(x1)
1236 1236
1237 1237 y2 = SNR[i,:]
1238 1238 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
1239 1239 y21 = f2(x1)
1240 1240
1241 1241 velRadial1[i,:] = y11
1242 1242 SNR1[i,:] = y21
1243 1243
1244 1244 return heiRang1, velRadial1, SNR1
1245 1245
1246 1246 def run(self, dataOut, zenith, zenithCorrection):
1247 1247 heiRang = dataOut.heightList
1248 1248 velRadial = dataOut.data_param[:,3,:]
1249 1249 SNR = dataOut.data_SNR
1250 1250
1251 1251 zenith = numpy.array(zenith)
1252 1252 zenith -= zenithCorrection
1253 1253 zenith *= numpy.pi/180
1254 1254
1255 1255 heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)
1256 1256
1257 1257 alp = zenith[0]
1258 1258 bet = zenith[1]
1259 1259
1260 1260 w_w = velRadial1[0,:]
1261 1261 w_e = velRadial1[1,:]
1262 1262
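# Assuming each oblique beam measures w_beam = u*sin(zen) + w*cos(zen), the two
# lines below are the closed-form solution of that 2x2 system (zenith angles
# alp and bet) for the horizontal (u) and vertical (w) components.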
1263 1263 w = (w_w*numpy.sin(bet) - w_e*numpy.sin(alp))/(numpy.cos(alp)*numpy.sin(bet) - numpy.cos(bet)*numpy.sin(alp))
1264 1264 u = (w_w*numpy.cos(bet) - w_e*numpy.cos(alp))/(numpy.sin(alp)*numpy.cos(bet) - numpy.sin(bet)*numpy.cos(alp))
1265 1265
1266 1266 winds = numpy.vstack((u,w))
1267 1267
1268 1268 dataOut.heightList = heiRang1
1269 1269 dataOut.data_output = winds
1270 1270 dataOut.data_SNR = SNR1
1271 1271
1272 1272 dataOut.utctimeInit = dataOut.utctime
1273 1273 dataOut.outputInterval = dataOut.timeInterval
1274 1274 return
1275 1275
1276 1276 #--------------- Non Specular Meteor ----------------
1277 1277
1278 1278 class NonSpecularMeteorDetection(Operation):
1279 1279
1280 1280 def run(self, mode, SNRthresh=8, phaseDerThresh=0.5, cohThresh=0.8, allData = False):
1281 1281 data_acf = self.dataOut.data_pre[0]
1282 1282 data_ccf = self.dataOut.data_pre[1]
1283 1283
1284 1284 lamb = self.dataOut.C/self.dataOut.frequency
1285 1285 tSamp = self.dataOut.ippSeconds*self.dataOut.nCohInt
1286 1286 paramInterval = self.dataOut.paramInterval
1287 1287
1288 1288 nChannels = data_acf.shape[0]
1289 1289 nLags = data_acf.shape[1]
1290 1290 nProfiles = data_acf.shape[2]
1291 1291 nHeights = self.dataOut.nHeights
1292 1292 nCohInt = self.dataOut.nCohInt
1293 1293 sec = numpy.round(nProfiles/self.dataOut.paramInterval)
1294 1294 heightList = self.dataOut.heightList
1295 1295 ippSeconds = self.dataOut.ippSeconds*self.dataOut.nCohInt*self.dataOut.nAvg
1296 1296 utctime = self.dataOut.utctime
1297 1297
1298 1298 self.dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds)
1299 1299
1300 1300 #------------------------ SNR --------------------------------------
1301 1301 power = data_acf[:,0,:,:].real
1302 1302 noise = numpy.zeros(nChannels)
1303 1303 SNR = numpy.zeros(power.shape)
1304 1304 for i in range(nChannels):
1305 1305 noise[i] = hildebrand_sekhon(power[i,:], nCohInt)
1306 1306 SNR[i] = (power[i]-noise[i])/noise[i]
1307 1307 SNRm = numpy.nanmean(SNR, axis = 0)
1308 1308 SNRdB = 10*numpy.log10(SNR)
1309 1309
1310 1310 if mode == 'SA':
1311 1311 nPairs = data_ccf.shape[0]
1312 1312 #---------------------- Coherence and Phase --------------------------
1313 1313 phase = numpy.zeros(data_ccf[:,0,:,:].shape)
1314 1314 # phase1 = numpy.copy(phase)
1315 1315 coh1 = numpy.zeros(data_ccf[:,0,:,:].shape)
1316 1316
1317 1317 for p in range(nPairs):
1318 1318 ch0 = self.dataOut.groupList[p][0]
1319 1319 ch1 = self.dataOut.groupList[p][1]
1320 1320 ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:])
1321 1321 phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
1322 1322 # phase1[p,:,:] = numpy.angle(ccf) #median filter
1323 1323 coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5) #median filter
1324 1324 # coh1[p,:,:] = numpy.abs(ccf) #median filter
1325 1325 coh = numpy.nanmax(coh1, axis = 0)
1326 1326 # struc = numpy.ones((5,1))
1327 1327 # coh = ndimage.morphology.grey_dilation(coh, size=(10,1))
1328 1328 #---------------------- Radial Velocity ----------------------------
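# Doppler velocity from the mean lag-1 ACF phase: velRad = lambda*phase/(4*pi*tSamp)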
1329 1329 phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0)
1330 1330 velRad = phaseAux*lamb/(4*numpy.pi*tSamp)
1331 1331
1332 1332 if allData:
1333 1333 boolMetFin = ~numpy.isnan(SNRm)
1334 1334 # coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
1335 1335 else:
1336 1336 #------------------------ Meteor mask ---------------------------------
1337 1337 # #SNR mask
1338 1338 # boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB))
1339 1339 #
1340 1340 # #Erase small objects
1341 1341 # boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
1342 1342 #
1343 1343 # auxEEJ = numpy.sum(boolMet1,axis=0)
1344 1344 # indOver = auxEEJ>nProfiles*0.8 #Use this later
1345 1345 # indEEJ = numpy.where(indOver)[0]
1346 1346 # indNEEJ = numpy.where(~indOver)[0]
1347 1347 #
1348 1348 # boolMetFin = boolMet1
1349 1349 #
1350 1350 # if indEEJ.size > 0:
1351 1351 # boolMet1[:,indEEJ] = False #Erase heights with EEJ
1352 1352 #
1353 1353 # boolMet2 = coh > cohThresh
1354 1354 # boolMet2 = self.__erase_small(boolMet2, 2*sec,5)
1355 1355 #
1356 1356 # #Final Meteor mask
1357 1357 # boolMetFin = boolMet1|boolMet2
1358 1358
1359 1359 #Coherence mask
1360 1360 boolMet1 = coh > 0.75
1361 1361 struc = numpy.ones((30,1))
1362 1362 boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc)
1363 1363
1364 1364 #Derivative mask
1365 1365 derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
1366 1366 boolMet2 = derPhase < 0.2
1367 1367 # boolMet2 = ndimage.morphology.binary_opening(boolMet2)
1368 1368 # boolMet2 = ndimage.morphology.binary_closing(boolMet2, structure = numpy.ones((10,1)))
1369 1369 boolMet2 = ndimage.median_filter(boolMet2,size=5)
1370 1370 boolMet2 = numpy.vstack((boolMet2,numpy.full((1,nHeights), True, dtype=bool)))
1371 1371 # #Final mask
1372 1372 # boolMetFin = boolMet2
1373 1373 boolMetFin = boolMet1&boolMet2
1374 1374 # boolMetFin = ndimage.morphology.binary_dilation(boolMetFin)
1375 1375 #Creating data_param
1376 1376 coordMet = numpy.where(boolMetFin)
1377 1377
1378 1378 tmet = coordMet[0]
1379 1379 hmet = coordMet[1]
1380 1380
1381 1381 data_param = numpy.zeros((tmet.size, 6 + nPairs))
1382 1382 data_param[:,0] = utctime
1383 1383 data_param[:,1] = tmet
1384 1384 data_param[:,2] = hmet
1385 1385 data_param[:,3] = SNRm[tmet,hmet]
1386 1386 data_param[:,4] = velRad[tmet,hmet]
1387 1387 data_param[:,5] = coh[tmet,hmet]
1388 1388 data_param[:,6:] = phase[:,tmet,hmet].T
1389 1389
1390 1390 elif mode == 'DBS':
1391 1391 self.dataOut.groupList = numpy.arange(nChannels)
1392 1392
1393 1393 #Radial Velocities
1394 1394 # phase = numpy.angle(data_acf[:,1,:,:])
1395 1395 phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1))
1396 1396 velRad = phase*lamb/(4*numpy.pi*tSamp)
1397 1397
1398 1398 #Spectral width
1399 1399 acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1))
1400 1400 acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1))
1401 1401
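# Gaussian spectral width from the ratio of |ACF| at lags 1 and 2:
# spcWidth = (lambda/(2*sqrt(6)*pi*tSamp)) * sqrt(ln(|R1|/|R2|))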
1402 1402 spcWidth = (lamb/(2*numpy.sqrt(6)*numpy.pi*tSamp))*numpy.sqrt(numpy.log(acf1/acf2))
1403 1403 # velRad = ndimage.median_filter(velRad, size = (1,5,1))
1404 1404 if allData:
1405 1405 boolMetFin = ~numpy.isnan(SNRdB)
1406 1406 else:
1407 1407 #SNR
1408 1408 boolMet1 = (SNRdB>SNRthresh) #SNR mask
1409 1409 boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5))
1410 1410
1411 1411 #Radial velocity
1412 1412 boolMet2 = numpy.abs(velRad) < 30
1413 1413 boolMet2 = ndimage.median_filter(boolMet2, (1,5,5))
1414 1414
1415 1415 #Spectral Width
1416 1416 boolMet3 = spcWidth < 30
1417 1417 boolMet3 = ndimage.median_filter(boolMet3, (1,5,5))
1418 1418 # boolMetFin = self.__erase_small(boolMet1, 10,5)
1419 1419 boolMetFin = boolMet1&boolMet2&boolMet3
1420 1420
1421 1421 #Creating data_param
1422 1422 coordMet = numpy.where(boolMetFin)
1423 1423
1424 1424 cmet = coordMet[0]
1425 1425 tmet = coordMet[1]
1426 1426 hmet = coordMet[2]
1427 1427
1428 1428 data_param = numpy.zeros((tmet.size, 7))
1429 1429 data_param[:,0] = utctime
1430 1430 data_param[:,1] = cmet
1431 1431 data_param[:,2] = tmet
1432 1432 data_param[:,3] = hmet
1433 1433 data_param[:,4] = SNR[cmet,tmet,hmet].T
1434 1434 data_param[:,5] = velRad[cmet,tmet,hmet].T
1435 1435 data_param[:,6] = spcWidth[cmet,tmet,hmet].T
1436 1436
1437 1437 # self.dataOut.data_param = data_int
1438 1438 if len(data_param) == 0:
1439 1439 self.dataOut.flagNoData = True
1440 1440 else:
1441 1441 self.dataOut.data_param = data_param
1442 1442
1443 1443 def __erase_small(self, binArray, threshX, threshY):
1444 1444 labarray, numfeat = ndimage.measurements.label(binArray)
1445 1445 binArray1 = numpy.copy(binArray)
1446 1446
1447 1447 for i in range(1,numfeat + 1):
1448 1448 auxBin = (labarray==i)
1449 1449 auxSize = auxBin.sum()
1450 1450
1451 1451 x,y = numpy.where(auxBin)
1452 1452 widthX = x.max() - x.min()
1453 1453 widthY = y.max() - y.min()
1454 1454
1455 1455 #width X: 3 s -> 12.5*3
1456 1456 #width Y:
1457 1457
1458 1458 if (auxSize < 50) or (widthX < threshX) or (widthY < threshY):
1459 1459 binArray1[auxBin] = False
1460 1460
1461 1461 return binArray1
1462 1462
1463 1463 #--------------- Specular Meteor ----------------
1464 1464
1465 1465 class SMDetection(Operation):
1466 1466 '''
1467 1467 Function DetectMeteors()
1468 1468 Project developed with paper:
1469 1469 HOLDSWORTH ET AL. 2004
1470 1470
1471 1471 Input:
1472 1472 self.dataOut.data_pre
1473 1473
1474 1474 centerReceiverIndex: Index of the center receiver among the channels
1475 1475
1476 1476 hei_ref: Height reference for the Beacon signal extraction
1477 1477 tauindex:
1478 1478 predefinedPhaseShifts: Predefined phase offset for the voltage signals
1479 1479
1480 1480 cohDetection: Whether to use coherent detection or not
1481 1481 cohDet_timeStep: Coherent Detection calculation time step
1482 1482 cohDet_thresh: Coherent Detection phase threshold to correct phases
1483 1483
1484 1484 noise_timeStep: Noise calculation time step
1485 1485 noise_multiple: Noise multiple to define signal threshold
1486 1486
1487 1487 multDet_timeLimit: Multiple Detection Removal time limit in seconds
1488 1488 multDet_rangeLimit: Multiple Detection Removal range limit in km
1489 1489
1490 1490 phaseThresh: Maximum phase difference between receivers for an echo to be considered a meteor
1491 1491 SNRThresh: Minimum SNR threshold of the meteor signal to be considered a meteor
1492 1492
1493 1493 hmin: Minimum Height of the meteor to use it in the further wind estimations
1494 1494 hmax: Maximum Height of the meteor to use it in the further wind estimations
1495 1495 azimuth: Azimuth angle correction
1496 1496
1497 1497 Affected:
1498 1498 self.dataOut.data_param
1499 1499
1500 1500 Rejection Criteria (Errors):
1501 1501 0: No error; analysis OK
1502 1502 1: SNR < SNR threshold
1503 1503 2: angle of arrival (AOA) ambiguously determined
1504 1504 3: AOA estimate not feasible
1505 1505 4: Large difference in AOAs obtained from different antenna baselines
1506 1506 5: echo at start or end of time series
1507 1507 6: echo less than 5 samples long; too short for analysis
1508 1508 7: echo rise exceeds 0.3s
1509 1509 8: echo decay time less than twice rise time
1510 1510 9: large power level before echo
1511 1511 10: large power level after echo
1512 1512 11: poor fit to amplitude for estimation of decay time
1513 1513 12: poor fit to CCF phase variation for estimation of radial drift velocity
1514 1514 13: height unresolvable echo: not valid height within 70 to 110 km
1515 1515 14: height ambiguous echo: more than one possible height within 70 to 110 km
1516 1516 15: radial drift velocity or projected horizontal velocity exceeds 200 m/s
1517 1517 16: oscillatory echo, indicating event most likely not an underdense echo
1518 1518
1519 1519 17: phase difference in meteor Reestimation
1520 1520
1521 1521 Data Storage:
1522 1522 Meteors for Wind Estimation (8):
1523 1523 Utc Time | Range Height
1524 1524 Azimuth Zenith errorCosDir
1525 1525 VelRad errorVelRad
1526 1526 Phase0 Phase1 Phase2 Phase3
1527 1527 TypeError
1528 1528
1529 1529 '''
1530 1530
1531 1531 def run(self, dataOut, hei_ref = None, tauindex = 0,
1532 1532 phaseOffsets = None,
1533 1533 cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
1534 1534 noise_timeStep = 4, noise_multiple = 4,
1535 1535 multDet_timeLimit = 1, multDet_rangeLimit = 3,
1536 1536 phaseThresh = 20, SNRThresh = 5,
1537 1537 hmin = 50, hmax=150, azimuth = 0,
1538 1538 channelPositions = None) :
1539 1539
1540 1540
1541 1541 #Getting Pairslist
1542 1542 if channelPositions is None:
1543 1543 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
1544 1544 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
1545 1545 meteorOps = SMOperations()
1546 1546 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
1547 1547 heiRang = dataOut.getHeiRange()
1548 1548 #Get Beacon signal - No Beacon signal anymore
1549 1549 # newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
1550 1550 #
1551 1551 # if hei_ref != None:
1552 1552 # newheis = numpy.where(self.dataOut.heightList>hei_ref)
1553 1553 #
1554 1554
1555 1555
1556 1556 #****************REMOVING HARDWARE PHASE DIFFERENCES***************
1557 1557 # see if the user put in pre defined phase shifts
1558 1558 voltsPShift = dataOut.data_pre.copy()
1559 1559
1560 1560 # if predefinedPhaseShifts != None:
1561 1561 # hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180
1562 1562 #
1563 1563 # # elif beaconPhaseShifts:
1564 1564 # # #get hardware phase shifts using beacon signal
1565 1565 # # hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10)
1566 1566 # # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0)
1567 1567 #
1568 1568 # else:
1569 1569 # hardwarePhaseShifts = numpy.zeros(5)
1570 1570 #
1571 1571 # voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex')
1572 1572 # for i in range(self.dataOut.data_pre.shape[0]):
1573 1573 # voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i])
1574 1574
1575 1575 #******************END OF REMOVING HARDWARE PHASE DIFFERENCES*********
1576 1576
1577 1577 #Remove DC
1578 1578 voltsDC = numpy.mean(voltsPShift,1)
1579 1579 voltsDC = numpy.mean(voltsDC,1)
1580 1580 for i in range(voltsDC.shape[0]):
1581 1581 voltsPShift[i] = voltsPShift[i] - voltsDC[i]
1582 1582
1583 1583 #Don't consider the last heights; they're used to calculate the Hardware Phase Shift
1584 1584 # voltsPShift = voltsPShift[:,:,:newheis[0][0]]
1585 1585
1586 1586 #************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) **********
1587 1587 #Coherent Detection
1588 1588 if cohDetection:
1589 1589 #use coherent detection to get the net power
1590 1590 cohDet_thresh = cohDet_thresh*numpy.pi/180
1591 1591 voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh)
1592 1592
1593 1593 #Non-coherent detection!
1594 1594 powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0)
1595 1595 #********** END OF COH/NON-COH POWER CALCULATION**********************
1596 1596
1597 1597 #********** FIND THE NOISE LEVEL AND POSSIBLE METEORS ****************
1598 1598 #Get noise
1599 1599 noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval)
1600 1600 # noise = self.getNoise1(powerNet, noise_timeStep, self.dataOut.timeInterval)
1601 1601 #Get signal threshold
1602 1602 signalThresh = noise_multiple*noise
1603 1603 #Meteor echoes detection
1604 1604 listMeteors = self.__findMeteors(powerNet, signalThresh)
1605 1605 #******* END OF NOISE LEVEL AND POSSIBLE METEORS CACULATION **********
1606 1606
1607 1607 #************** REMOVE MULTIPLE DETECTIONS (3.5) ***************************
1608 1608 #Parameters
1609 1609 heiRange = dataOut.getHeiRange()
1610 1610 rangeInterval = heiRange[1] - heiRange[0]
1611 1611 rangeLimit = multDet_rangeLimit/rangeInterval
1612 1612 timeLimit = multDet_timeLimit/dataOut.timeInterval
1613 1613 #Multiple detection removals
1614 1614 listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit)
1615 1615 #************ END OF REMOVE MULTIPLE DETECTIONS **********************
1616 1616
1617 1617 #********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ********************
1618 1618 #Parameters
1619 1619 phaseThresh = phaseThresh*numpy.pi/180
1620 1620 thresh = [phaseThresh, noise_multiple, SNRThresh]
1621 1621 #Meteor reestimation (Errors N 1, 6, 12, 17)
1622 1622 listMeteors2, listMeteorsPower, listMeteorsVolts = self.__meteorReestimation(listMeteors1, voltsPShift, pairslist0, thresh, noise, dataOut.timeInterval, dataOut.frequency)
1623 1623 # listMeteors2, listMeteorsPower, listMeteorsVolts = self.meteorReestimation3(listMeteors2, listMeteorsPower, listMeteorsVolts, voltsPShift, pairslist, thresh, noise)
1624 1624 #Estimation of decay times (Errors N 7, 8, 11)
1625 1625 listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency)
1626 1626 #******************* END OF METEOR REESTIMATION *******************
1627 1627
1628 1628 #********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) **************************
1629 1629 #Calculating Radial Velocity (Error N 15)
1630 1630 radialStdThresh = 10
1631 1631 listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
1632 1632
1633 1633 if len(listMeteors4) > 0:
1634 1634 #Setting New Array
1635 1635 date = dataOut.utctime
1636 1636 arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang)
1637 1637
1638 1638 #Correcting phase offset
1639 1639 if phaseOffsets is not None:
1640 1640 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
1641 1641 arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
1642 1642
1643 1643 #Second Pairslist
1644 1644 pairsList = []
1645 1645 pairx = (0,1)
1646 1646 pairy = (2,3)
1647 1647 pairsList.append(pairx)
1648 1648 pairsList.append(pairy)
1649 1649
1650 1650 jph = numpy.array([0,0,0,0])
1651 1651 h = (hmin,hmax)
1652 1652 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
1653 1653
1654 1654 # #Calculate AOA (Error N 3, 4)
1655 1655 # #JONES ET AL. 1998
1656 1656 # error = arrayParameters[:,-1]
1657 1657 # AOAthresh = numpy.pi/8
1658 1658 # phases = -arrayParameters[:,9:13]
1659 1659 # arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth)
1660 1660 #
1661 1661 # #Calculate Heights (Error N 13 and 14)
1662 1662 # error = arrayParameters[:,-1]
1663 1663 # Ranges = arrayParameters[:,2]
1664 1664 # zenith = arrayParameters[:,5]
1665 1665 # arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax)
1666 1666 # error = arrayParameters[:,-1]
1667 1667 #********************* END OF PARAMETERS CALCULATION **************************
1668 1668
1669 1669 #***************************+ PASS DATA TO NEXT STEP **********************
1670 1670 # arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1]))
1671 1671 dataOut.data_param = arrayParameters
1672 1672
1673 1673 if arrayParameters is None:
1674 1674 dataOut.flagNoData = True
1675 1675 else:
1676 1676 dataOut.flagNoData = False
1677 1677
1678 1678 return
1679 1679
1680 1680 def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n):
1681 1681
1682 1682 minIndex = min(newheis[0])
1683 1683 maxIndex = max(newheis[0])
1684 1684
1685 1685 voltage = voltage0[:,:,minIndex:maxIndex+1]
1686 1686 nLength = voltage.shape[1]/n
1687 1687 nMin = 0
1688 1688 nMax = 0
1689 1689 phaseOffset = numpy.zeros((len(pairslist),n))
1690 1690
1691 1691 for i in range(n):
1692 1692 nMax += nLength
1693 1693 phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:,nMin:nMax,:], pairslist, [0]))
1694 1694 phaseCCF = numpy.mean(phaseCCF, axis = 2)
1695 1695 phaseOffset[:,i] = phaseCCF.transpose()
1696 1696 nMin = nMax
1697 1697 # phaseDiff, phaseArrival = self.estimatePhaseDifference(voltage, pairslist)
1698 1698
1699 1699 #Remove Outliers
1700 1700 factor = 2
1701 1701 wt = phaseOffset - signal.medfilt(phaseOffset,(1,5))
1702 1702 dw = numpy.std(wt,axis = 1)
1703 1703 dw = dw.reshape((dw.size,1))
1704 1704 ind = numpy.where(numpy.logical_or(wt>dw*factor,wt<-dw*factor))
1705 1705 phaseOffset[ind] = numpy.nan
1706 1706 phaseOffset = stats.nanmean(phaseOffset, axis=1)
1707 1707
1708 1708 return phaseOffset
1709 1709
1710 1710 def __shiftPhase(self, data, phaseShift):
1711 1711 #this will shift the phase of a complex number
1712 1712 dataShifted = numpy.abs(data) * numpy.exp((numpy.angle(data)+phaseShift)*1j)
1713 1713 return dataShifted
1714 1714
1715 1715 def __estimatePhaseDifference(self, array, pairslist):
1716 1716 nChannel = array.shape[0]
1717 1717 nHeights = array.shape[2]
1718 1718 numPairs = len(pairslist)
1719 1719 # phaseCCF = numpy.zeros((nChannel, 5, nHeights))
1720 1720 phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2]))
1721 1721
1722 1722 #Correct phases
1723 1723 derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:]
1724 1724 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
1725 1725
1726 1726 if indDer[0].shape[0] > 0:
1727 1727 for i in range(indDer[0].shape[0]):
1728 1728 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]])
1729 1729 phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi
1730 1730
1731 1731 # for j in range(numSides):
1732 1732 # phaseCCFAux = self.calculateCCF(arrayCenter, arraySides[j,:,:], [-2,1,0,1,2])
1733 1733 # phaseCCF[j,:,:] = numpy.angle(phaseCCFAux)
1734 1734 #
1735 1735 #Linear
1736 1736 phaseInt = numpy.zeros((numPairs,1))
1737 1737 angAllCCF = phaseCCF[:,[0,1,3,4],0]
1738 1738 for j in range(numPairs):
1739 1739 fit = stats.linregress([-2,-1,1,2],angAllCCF[j,:])
1740 1740 phaseInt[j] = fit[1]
1741 1741 #Phase Differences
1742 1742 phaseDiff = phaseInt - phaseCCF[:,2,:]
1743 1743 phaseArrival = phaseInt.reshape(phaseInt.size)
1744 1744
1745 1745 #Dealias
1746 1746 phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival))
1747 1747 # indAlias = numpy.where(phaseArrival > numpy.pi)
1748 1748 # phaseArrival[indAlias] -= 2*numpy.pi
1749 1749 # indAlias = numpy.where(phaseArrival < -numpy.pi)
1750 1750 # phaseArrival[indAlias] += 2*numpy.pi
1751 1751
1752 1752 return phaseDiff, phaseArrival
1753 1753
1754 1754 def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh):
1755 1755 #this function will run the coherent detection used in Holdworth et al. 2004 and return the net power
1756 1756 #find the phase shifts of each channel over 1 second intervals
1757 1757 #only look at ranges below the beacon signal
1758 1758 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
1759 1759 numBlocks = int(volts.shape[1]/numProfPerBlock)
1760 1760 numHeights = volts.shape[2]
1761 1761 nChannel = volts.shape[0]
1762 1762 voltsCohDet = volts.copy()
1763 1763
1764 1764 pairsarray = numpy.array(pairslist)
1765 1765 indSides = pairsarray[:,1]
1766 1766 # indSides = numpy.array(range(nChannel))
1767 1767 # indSides = numpy.delete(indSides, indCenter)
1768 1768 #
1769 1769 # listCenter = numpy.array_split(volts[indCenter,:,:], numBlocks, 0)
1770 1770 listBlocks = numpy.array_split(volts, numBlocks, 1)
1771 1771
1772 1772 startInd = 0
1773 1773 endInd = 0
1774 1774
1775 1775 for i in range(numBlocks):
1776 1776 startInd = endInd
1777 1777 endInd = endInd + listBlocks[i].shape[1]
1778 1778
1779 1779 arrayBlock = listBlocks[i]
1780 1780 # arrayBlockCenter = listCenter[i]
1781 1781
1782 1782 #Estimate the Phase Difference
1783 1783 phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist)
1784 1784 #Phase Difference RMS
1785 1785 arrayPhaseRMS = numpy.abs(phaseDiff)
1786 1786 phaseRMSaux = numpy.sum(arrayPhaseRMS < thresh,0)
1787 1787 indPhase = numpy.where(phaseRMSaux==4)
1788 1788 #Shifting
1789 1789 if indPhase[0].shape[0] > 0:
1790 1790 for j in range(indSides.size):
1791 1791 arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose())
1792 1792 voltsCohDet[:,startInd:endInd,:] = arrayBlock
1793 1793
1794 1794 return voltsCohDet
1795 1795
1796 1796 def __calculateCCF(self, volts, pairslist ,laglist):
1797 1797
1798 1798 nHeights = volts.shape[2]
1799 1799 nPoints = volts.shape[1]
1800 1800 voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights),dtype = 'complex')
1801 1801
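# Cross-correlation at each requested lag: volts2 is shifted by idxT (zero-padded)
# and correlated against volts1, accumulated over profiles.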
1802 1802 for i in range(len(pairslist)):
1803 1803 volts1 = volts[pairslist[i][0]]
1804 1804 volts2 = volts[pairslist[i][1]]
1805 1805
1806 1806 for t in range(len(laglist)):
1807 1807 idxT = laglist[t]
1808 1808 if idxT >= 0:
1809 1809 vStacked = numpy.vstack((volts2[idxT:,:],
1810 1810 numpy.zeros((idxT, nHeights),dtype='complex')))
1811 1811 else:
1812 1812 vStacked = numpy.vstack((numpy.zeros((-idxT, nHeights),dtype='complex'),
1813 1813 volts2[:(nPoints + idxT),:]))
1814 1814 voltsCCF[i,t,:] = numpy.sum((numpy.conjugate(volts1)*vStacked),axis=0)
1815 1815
1816 1816 vStacked = None
1817 1817 return voltsCCF
1818 1818
1819 1819 def __getNoise(self, power, timeSegment, timeInterval):
1820 1820 numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
1821 1821 numBlocks = int(power.shape[0]/numProfPerBlock)
1822 1822 numHeights = power.shape[1]
1823 1823
1824 1824 listPower = numpy.array_split(power, numBlocks, 0)
1825 1825 noise = numpy.zeros((power.shape[0], power.shape[1]))
1826 1826 noise1 = numpy.zeros((power.shape[0], power.shape[1]))
1827 1827
1828 1828 startInd = 0
1829 1829 endInd = 0
1830 1830
1831 1831 for i in range(numBlocks): #split per channel
1832 1832 startInd = endInd
1833 1833 endInd = endInd + listPower[i].shape[0]
1834 1834
1835 1835 arrayBlock = listPower[i]
1836 1836 noiseAux = numpy.mean(arrayBlock, 0)
1837 1837 # noiseAux = numpy.median(noiseAux)
1838 1838 # noiseAux = numpy.mean(arrayBlock)
1839 1839 noise[startInd:endInd,:] = noise[startInd:endInd,:] + noiseAux
1840 1840
1841 1841 noiseAux1 = numpy.mean(arrayBlock)
1842 1842 noise1[startInd:endInd,:] = noise1[startInd:endInd,:] + noiseAux1
1843 1843
1844 1844 return noise, noise1
1845 1845
1846 1846 def __findMeteors(self, power, thresh):
1847 1847 nProf = power.shape[0]
1848 1848 nHeights = power.shape[1]
1849 1849 listMeteors = []
1850 1850
1851 1851 for i in range(nHeights):
1852 1852 powerAux = power[:,i]
1853 1853 threshAux = thresh[:,i]
1854 1854
1855 1855 indUPthresh = numpy.where(powerAux > threshAux)[0]
1856 1856 indDNthresh = numpy.where(powerAux <= threshAux)[0]
1857 1857
1858 1858 j = 0
1859 1859
1860 1860 while (j < indUPthresh.size - 2):
1861 1861 if (indUPthresh[j + 2] == indUPthresh[j] + 2):
1862 1862 indDNAux = numpy.where(indDNthresh > indUPthresh[j])
1863 1863 indDNthresh = indDNthresh[indDNAux]
1864 1864
1865 1865 if (indDNthresh.size > 0):
1866 1866 indEnd = indDNthresh[0] - 1
1867 1867 indInit = indUPthresh[j] if isinstance(indUPthresh[j], (int, float)) else indUPthresh[j][0] ##CHECK!!!!
1868 1868
1869 1869 meteor = powerAux[indInit:indEnd + 1]
1870 1870 indPeak = meteor.argmax() + indInit
1871 1871 FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0)))
1872 1872
1873 1873 listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #CHECK!!!!!
1874 1874 j = numpy.where(indUPthresh == indEnd)[0] + 1
1875 1875 else: j+=1
1876 1876 else: j+=1
1877 1877
1878 1878 return listMeteors
1879 1879
1880 1880 def __removeMultipleDetections(self,listMeteors, rangeLimit, timeLimit):
1881 1881
1882 1882 arrayMeteors = numpy.asarray(listMeteors)
1883 1883 listMeteors1 = []
1884 1884
1885 1885 while arrayMeteors.shape[0] > 0:
1886 1886 FLAs = arrayMeteors[:,4]
1887 1887 maxFLA = FLAs.argmax()
1888 1888 listMeteors1.append(arrayMeteors[maxFLA,:])
1889 1889
1890 1890 MeteorInitTime = arrayMeteors[maxFLA,1]
1891 1891 MeteorEndTime = arrayMeteors[maxFLA,3]
1892 1892 MeteorHeight = arrayMeteors[maxFLA,0]
1893 1893
1894 1894 #Check neighborhood
1895 1895 maxHeightIndex = MeteorHeight + rangeLimit
1896 1896 minHeightIndex = MeteorHeight - rangeLimit
1897 1897 minTimeIndex = MeteorInitTime - timeLimit
1898 1898 maxTimeIndex = MeteorEndTime + timeLimit
1899 1899
1900 1900 #Check Heights
1901 1901 indHeight = numpy.logical_and(arrayMeteors[:,0] >= minHeightIndex, arrayMeteors[:,0] <= maxHeightIndex)
1902 1902 indTime = numpy.logical_and(arrayMeteors[:,3] >= minTimeIndex, arrayMeteors[:,1] <= maxTimeIndex)
1903 1903 indBoth = numpy.where(numpy.logical_and(indTime,indHeight))
1904 1904
1905 1905 arrayMeteors = numpy.delete(arrayMeteors, indBoth, axis = 0)
1906 1906
1907 1907 return listMeteors1
1908 1908
1909 1909 def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval,frequency):
1910 1910 numHeights = volts.shape[2]
1911 1911 nChannel = volts.shape[0]
1912 1912
1913 1913 thresholdPhase = thresh[0]
1914 1914 thresholdNoise = thresh[1]
1915 1915 thresholdDB = float(thresh[2])
1916 1916
1917 1917 thresholdDB1 = 10**(thresholdDB/10)
1918 1918 pairsarray = numpy.array(pairslist)
1919 1919 indSides = pairsarray[:,1]
1920 1920
1921 1921 pairslist1 = list(pairslist)
1922 1922 pairslist1.append((0,4))
1923 1923 pairslist1.append((1,3))
1924 1924
1925 1925 listMeteors1 = []
1926 1926 listPowerSeries = []
1927 1927 listVoltageSeries = []
1928 1928 #volts holds the raw data
1929 1929
1930 1930 if frequency == 30.175e6:
1931 1931 timeLag = 45*10**-3
1932 1932 else:
1933 1933 timeLag = 15*10**-3
1934 1934 lag = int(numpy.ceil(timeLag/timeInterval))
1935 1935
1936 1936 for i in range(len(listMeteors)):
1937 1937
1938 1938 ###################### 3.6 - 3.7 PARAMETERS REESTIMATION #########################
1939 1939 meteorAux = numpy.zeros(16)
1940 1940
1941 1941 #Loading meteor Data (mHeight, mStart, mPeak, mEnd)
1942 1942 mHeight = int(listMeteors[i][0])
1943 1943 mStart = int(listMeteors[i][1])
1944 1944 mPeak = int(listMeteors[i][2])
1945 1945 mEnd = int(listMeteors[i][3])
1946 1946
1947 1947 #get the volt data between the start and end times of the meteor
1948 1948 meteorVolts = volts[:,mStart:mEnd+1,mHeight]
1949 1949 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
1950 1950
1951 1951 #3.6. Phase Difference estimation
1952 1952 phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist)
1953 1953
1954 1954 #3.7. Phase difference removal & meteor start, peak and end times reestimated
1955 1955 #meteorVolts0.- all Channels, all Profiles
1956 1956 meteorVolts0 = volts[:,:,mHeight]
1957 1957 meteorThresh = noise[:,mHeight]*thresholdNoise
1958 1958 meteorNoise = noise[:,mHeight]
1959 1959 meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting
1960 1960 powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power
1961 1961
1962 1962 #Times reestimation
1963 1963 mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0]
1964 1964 if mStart1.size > 0:
1965 1965 mStart1 = mStart1[-1] + 1
1966 1966
1967 1967 else:
1968 1968 mStart1 = mPeak
1969 1969
1970 1970 mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1
1971 1971 mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0]
1972 1972 if mEndDecayTime1.size == 0:
1973 1973 mEndDecayTime1 = powerNet0.size
1974 1974 else:
1975 1975 mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1
1976 1976 # mPeak1 = meteorVolts0[mStart1:mEnd1 + 1].argmax()
1977 1977
1978 1978 #meteorVolts1.- all Channels, from start to end
1979 1979 meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1]
1980 1980 meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1]
1981 1981 if meteorVolts2.shape[1] == 0:
1982 1982 meteorVolts2 = meteorVolts0[:,mPeak:mEnd1 + 1]
1983 1983 meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1)
1984 1984 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1)
1985 1985 ##################### END PARAMETERS REESTIMATION #########################
1986 1986
1987 1987 ##################### 3.8 PHASE DIFFERENCE REESTIMATION ########################
1988 1988 # if mEnd1 - mStart1 > 4: #Error Number 6: echo less than 5 samples long; too short for analysis
1989 1989 if meteorVolts2.shape[1] > 0:
1990 1990 #Phase Difference re-estimation
1991 1991 phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation
1992 1992 # phaseDiff1, phaseDiffint = self.estimatePhaseDifference(meteorVolts2, pairslist)
1993 1993 meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1])
1994 1994 phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1))
1995 1995 meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting
1996 1996
1997 1997 #Phase Difference RMS
1998 1998 phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1)))
1999 1999 powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0)
2000 2000 #Data from Meteor
2001 2001 mPeak1 = powerNet1.argmax() + mStart1
2002 2002 mPeakPower1 = powerNet1.max()
2003 2003 noiseAux = sum(noise[mStart1:mEnd1 + 1,mHeight])
2004 2004 mSNR1 = (sum(powerNet1)-noiseAux)/noiseAux
2005 2005 Meteor1 = numpy.array([mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1])
2006 2006 Meteor1 = numpy.hstack((Meteor1,phaseDiffint))
2007 2007 PowerSeries = powerNet0[mStart1:mEndDecayTime1 + 1]
2008 2008 #Vectorize
2009 2009 meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1]
2010 2010 meteorAux[7:11] = phaseDiffint[0:4]
2011 2011
2012 2012 #Rejection Criteria
2013 2013 if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation
2014 2014 meteorAux[-1] = 17
2015 2015 elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB
2016 2016 meteorAux[-1] = 1
2017 2017
2018 2018
2019 2019 else:
2020 2020 meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd]
2021 2021 meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis
2022 2022 PowerSeries = 0
2023 2023
2024 2024 listMeteors1.append(meteorAux)
2025 2025 listPowerSeries.append(PowerSeries)
2026 2026 listVoltageSeries.append(meteorVolts1)
2027 2027
2028 2028 return listMeteors1, listPowerSeries, listVoltageSeries
2029 2029
2030 2030 def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency):
2031 2031
2032 2032 threshError = 10
2033 2033 #Depending if it is 30 or 50 MHz
2034 2034 if frequency == 30.175e6:
2035 2035 timeLag = 45*10**-3
2036 2036 else:
2037 2037 timeLag = 15*10**-3
2038 2038 lag = int(numpy.ceil(timeLag/timeInterval))
2039 2039
2040 2040 listMeteors1 = []
2041 2041
2042 2042 for i in range(len(listMeteors)):
2043 2043 meteorPower = listPower[i]
2044 2044 meteorAux = listMeteors[i]
2045 2045
2046 2046 if meteorAux[-1] == 0:
2047 2047
2048 2048 try:
2049 2049 indmax = meteorPower.argmax()
2050 2050 indlag = indmax + lag
2051 2051
2052 2052 y = meteorPower[indlag:]
2053 2053 x = numpy.arange(0, y.size)*timeLag
2054 2054
2055 2055 #first guess
2056 2056 a = y[0]
2057 2057 tau = timeLag
2058 2058 #exponential fit
2059 2059 popt, pcov = optimize.curve_fit(self.__exponential_function, x, y, p0 = [a, tau])
2060 2060 y1 = self.__exponential_function(x, *popt)
2061 2061 #error estimation
2062 2062 error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size))
2063 2063
2064 2064 decayTime = popt[1]
2065 2065 riseTime = indmax*timeInterval
2066 2066 meteorAux[11:13] = [decayTime, error]
2067 2067
2068 2068 #Table items 7, 8 and 11
2069 2069 if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s
2070 2070 meteorAux[-1] = 7
2071 2071 elif (decayTime < 2*riseTime): #Number 8: Echo decay time less than twice rise time
2072 2072 meteorAux[-1] = 8
2073 2073 if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time
2074 2074 meteorAux[-1] = 11
2075 2075
2076 2076
2077 2077 except:
2078 2078 meteorAux[-1] = 11
2079 2079
2080 2080
2081 2081 listMeteors1.append(meteorAux)
2082 2082
2083 2083 return listMeteors1
2084 2084
2085 2085 #Exponential Function
2086 2086
2087 2087 def __exponential_function(self, x, a, tau):
2088 2088 y = a*numpy.exp(-x/tau)
2089 2089 return y
2090 2090
2091 2091 def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval):
2092 2092
2093 2093 pairslist1 = list(pairslist)
2094 2094 pairslist1.append((0,4))
2095 2095 pairslist1.append((1,3))
2096 2096 numPairs = len(pairslist1)
2097 2097 #Time Lag
2098 2098 timeLag = 45*10**-3
2099 2099 c = 3e8
2100 2100 lag = numpy.ceil(timeLag/timeInterval)
2101 2101 freq = 30.175e6
2102 2102
2103 2103 listMeteors1 = []
2104 2104
2105 2105 for i in range(len(listMeteors)):
2106 2106 meteorAux = listMeteors[i]
2107 2107 if meteorAux[-1] == 0:
2108 2108 mStart = listMeteors[i][1]
2109 2109 mPeak = listMeteors[i][2]
2110 2110 mLag = mPeak - mStart + lag
2111 2111
2112 2112 #get the volt data between the start and end times of the meteor
2113 2113 meteorVolts = listVolts[i]
2114 2114 meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)
2115 2115
2116 2116 #Get CCF
2117 2117 allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2])
2118 2118
2119 2119 #Method 2
2120 2120 slopes = numpy.zeros(numPairs)
2121 2121 time = numpy.array([-2,-1,1,2])*timeInterval
2122 2122 angAllCCF = numpy.angle(allCCFs[:,[0,4,2,3],0])
2123 2123
2124 2124 #Correct phases
2125 2125 derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1]
2126 2126 indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)
2127 2127
2128 2128 if indDer[0].shape[0] > 0:
2129 2129 for i in range(indDer[0].shape[0]):
2130 2130 signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i]])
2131 2131 angAllCCF[indDer[0][i],indDer[1][i]+1:] += signo*2*numpy.pi
2132 2132
2133 2133 # fit = scipy.stats.linregress(numpy.array([-2,-1,1,2])*timeInterval, numpy.array([phaseLagN2s[i],phaseLagN1s[i],phaseLag1s[i],phaseLag2s[i]]))
2134 2134 for j in range(numPairs):
2135 2135 fit = stats.linregress(time, angAllCCF[j,:])
2136 2136 slopes[j] = fit[0]
2137 2137
2138 2138 #Remove Outlier
2139 2139 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
2140 2140 # slopes = numpy.delete(slopes,indOut)
2141 2141 # indOut = numpy.argmax(numpy.abs(slopes - numpy.mean(slopes)))
2142 2142 # slopes = numpy.delete(slopes,indOut)
2143 2143
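# Radial drift from the mean CCF phase slope (rad/s): v_r = -(dphi/dt)*lambda/(4*pi),
# written below with lambda = c/freq and 0.25/pi = 1/(4*pi).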
2144 2144 radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq)
2145 2145 radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq)
2146 2146 meteorAux[-2] = radialError
2147 2147 meteorAux[-3] = radialVelocity
2148 2148
2149 2149 #Setting Error
2150 2150 #Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s
2151 2151 if numpy.abs(radialVelocity) > 200:
2152 2152 meteorAux[-1] = 15
2153 2153 #Number 12: Poor fit to CCF variation for estimation of radial drift velocity
2154 2154 elif radialError > radialStdThresh:
2155 2155 meteorAux[-1] = 12
2156 2156
2157 2157 listMeteors1.append(meteorAux)
2158 2158 return listMeteors1
2159 2159
2160 2160 def __setNewArrays(self, listMeteors, date, heiRang):
2161 2161
2162 2162 #New arrays
2163 2163 arrayMeteors = numpy.array(listMeteors)
2164 2164 arrayParameters = numpy.zeros((len(listMeteors), 13))
2165 2165
2166 2166 #Date inclusion
2167 2167 # date = re.findall(r'\((.*?)\)', date)
2168 2168 # date = date[0].split(',')
2169 2169 # date = map(int, date)
2170 2170 #
2171 2171 # if len(date)<6:
2172 2172 # date.append(0)
2173 2173 #
2174 2174 # date = [date[0]*10000 + date[1]*100 + date[2], date[3]*10000 + date[4]*100 + date[5]]
2175 2175 # arrayDate = numpy.tile(date, (len(listMeteors), 1))
2176 2176 arrayDate = numpy.tile(date, (len(listMeteors)))
2177 2177
2178 2178 #Meteor array
2179 2179 # arrayMeteors[:,0] = heiRang[arrayMeteors[:,0].astype(int)]
2180 2180 # arrayMeteors = numpy.hstack((arrayDate, arrayMeteors))
2181 2181
2182 2182 #Parameters Array
2183 2183 arrayParameters[:,0] = arrayDate #Date
2184 2184 arrayParameters[:,1] = heiRang[arrayMeteors[:,0].astype(int)] #Range
2185 2185 arrayParameters[:,6:8] = arrayMeteors[:,-3:-1] #Radial velocity and its error
2186 2186 arrayParameters[:,8:12] = arrayMeteors[:,7:11] #Phases
2187 2187 arrayParameters[:,-1] = arrayMeteors[:,-1] #Error
2188 2188
2189 2189
2190 2190 return arrayParameters
2191 2191
2192 2192 class CorrectSMPhases(Operation):
2193 2193
2194 2194 def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None):
2195 2195
2196 2196 arrayParameters = dataOut.data_param
2197 2197 pairsList = []
2198 2198 pairx = (0,1)
2199 2199 pairy = (2,3)
2200 2200 pairsList.append(pairx)
2201 2201 pairsList.append(pairy)
2202 2202 jph = numpy.zeros(4)
2203 2203
2204 2204 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
2205 2205 # arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
2206 2206 arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + phaseOffsets)))
2207 2207
2208 2208 meteorOps = SMOperations()
2209 2209 if channelPositions is None:
2210 2210 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
2211 2211 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
2212 2212
2213 2213 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
2214 2214 h = (hmin,hmax)
2215 2215
2216 2216 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
2217 2217
2218 2218 dataOut.data_param = arrayParameters
2219 2219 return
2220 2220
2221 2221 class SMPhaseCalibration(Operation):
2222 2222
2223 2223 __buffer = None
2224 2224
2225 2225 __initime = None
2226 2226
2227 2227 __dataReady = False
2228 2228
2229 2229 __isConfig = False
2230 2230
2231 2231 def __checkTime(self, currentTime, initTime, paramInterval, outputInterval):
2232 2232
2233 2233 dataTime = currentTime + paramInterval
2234 2234 deltaTime = dataTime - initTime
2235 2235
2236 2236 if deltaTime >= outputInterval or deltaTime < 0:
2237 2237 return True
2238 2238
2239 2239 return False
2240 2240
2241 2241 def __getGammas(self, pairs, d, phases):
2242 2242 gammas = numpy.zeros(2)
2243 2243
2244 2244 for i in range(len(pairs)):
2245 2245
2246 2246 pairi = pairs[i]
2247 2247
2248 2248 phip3 = phases[:,pairi[1]]
2249 2249 d3 = d[pairi[1]]
2250 2250 phip2 = phases[:,pairi[0]]
2251 2251 d2 = d[pairi[0]]
2252 2252 #Calculating gamma
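# gamma combines the two pair phases as gamma = -(phi_p2*d3/d2 + phi_p3),
# wrapped into (-pi, pi]; its histogram peak (fitted with a Gaussian below)
# gives the calibration value for this pair.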
2253 2253 # jdcos = alp1/(k*d1)
2254 2254 # jgamma = numpy.angle(numpy.exp(1j*(d0*alp1/d1 - alp0)))
2255 2255 jgamma = -phip2*d3/d2 - phip3
2256 2256 jgamma = numpy.angle(numpy.exp(1j*jgamma))
2257 2257 # jgamma[jgamma>numpy.pi] -= 2*numpy.pi
2258 2258 # jgamma[jgamma<-numpy.pi] += 2*numpy.pi
2259 2259
2260 2260 #Revised distribution
2261 2261 jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi))
2262 2262
2263 2263 #Histogram
2264 2264 nBins = 64
2265 2265 rmin = -0.5*numpy.pi
2266 2266 rmax = 0.5*numpy.pi
2267 2267 phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax))
2268 2268
2269 2269 meteorsY = phaseHisto[0]
2270 2270 phasesX = phaseHisto[1][:-1]
2271 2271 width = phasesX[1] - phasesX[0]
2272 2272 phasesX += width/2
2273 2273
2274 2274 #Gaussian aproximation
2275 2275 bpeak = meteorsY.argmax()
2276 2276 peak = meteorsY.max()
2277 2277 jmin = bpeak - 5
2278 2278 jmax = bpeak + 5 + 1
2279 2279
2280 2280 if jmin<0:
2281 2281 jmin = 0
2282 2282 jmax = 6
2283 2283 elif jmax > meteorsY.size:
2284 2284 jmin = meteorsY.size - 6
2285 2285 jmax = meteorsY.size
2286 2286
2287 2287 x0 = numpy.array([peak,bpeak,50])
2288 2288 coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax]))
2289 2289
2290 2290 #Gammas
2291 2291 gammas[i] = coeff[0][1]
2292 2292
2293 2293 return gammas
2294 2294
2295 2295 def __residualFunction(self, coeffs, y, t):
2296 2296
2297 2297 return y - self.__gauss_function(t, coeffs)
2298 2298
2299 2299 def __gauss_function(self, t, coeffs):
2300 2300
2301 2301 return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2)
2302 2302
2303 2303 def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray):
2304 2304 meteorOps = SMOperations()
2305 2305 nchan = 4
2306 2306 pairx = pairsList[0]
2307 2307 pairy = pairsList[1]
2308 2308 center_xangle = 0
2309 2309 center_yangle = 0
2310 2310 range_angle = numpy.array([10*numpy.pi,numpy.pi,numpy.pi/2,numpy.pi/4])
2311 2311 ntimes = len(range_angle)
2312 2312
2313 2313 nstepsx = 20
2314 2314 nstepsy = 20
2315 2315
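# Coarse-to-fine grid search over the antenna phase offsets: on each pass the
# angular window (range_angle) shrinks and is re-centred on the best offset so
# far; the candidate maximizing the number of meteors with zero error (penalty)
# is kept.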
2316 2316 for iz in range(ntimes):
2317 2317 min_xangle = -range_angle[iz]/2 + center_xangle
2318 2318 max_xangle = range_angle[iz]/2 + center_xangle
2319 2319 min_yangle = -range_angle[iz]/2 + center_yangle
2320 2320 max_yangle = range_angle[iz]/2 + center_yangle
2321 2321
2322 2322 inc_x = (max_xangle-min_xangle)/nstepsx
2323 2323 inc_y = (max_yangle-min_yangle)/nstepsy
2324 2324
2325 2325 alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle
2326 2326 alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle
2327 2327 penalty = numpy.zeros((nstepsx,nstepsy))
2328 2328 jph_array = numpy.zeros((nchan,nstepsx,nstepsy))
2329 2329 jph = numpy.zeros(nchan)
2330 2330
2331 2331 # Iterations looking for the offset
2332 2332 for iy in range(int(nstepsy)):
2333 2333 for ix in range(int(nstepsx)):
2334 2334 jph[pairy[1]] = alpha_y[iy]
2335 2335 jph[pairy[0]] = -gammas[1] - alpha_y[iy]*d[pairy[1]]/d[pairy[0]]
2336 2336
2337 2337 jph[pairx[1]] = alpha_x[ix]
2338 2338 jph[pairx[0]] = -gammas[0] - alpha_x[ix]*d[pairx[1]]/d[pairx[0]]
2339 2339
2340 2340 jph_array[:,ix,iy] = jph
2341 2341
2342 2342 meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph)
2343 2343 error = meteorsArray1[:,-1]
2344 2344 ind1 = numpy.where(error==0)[0]
2345 2345 penalty[ix,iy] = ind1.size
2346 2346
2347 2347 i,j = numpy.unravel_index(penalty.argmax(), penalty.shape)
2348 2348 phOffset = jph_array[:,i,j]
2349 2349
2350 2350 center_xangle = phOffset[pairx[1]]
2351 2351 center_yangle = phOffset[pairy[1]]
2352 2352
2353 2353 phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j]))
2354 2354 phOffset = phOffset*180/numpy.pi
2355 2355 return phOffset
2356 2356
2357 2357
2358 2358 def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1):
2359 2359
2360 2360 dataOut.flagNoData = True
2361 2361 self.__dataReady = False
2362 2362 dataOut.outputInterval = nHours*3600
2363 2363
2364 2364 if self.__isConfig == False:
2365 2365 # self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
2366 2366 #Get Initial LTC time
2367 2367 self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
2368 2368 self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()
2369 2369
2370 2370 self.__isConfig = True
2371 2371
2372 2372 if self.__buffer is None:
2373 2373 self.__buffer = dataOut.data_param.copy()
2374 2374
2375 2375 else:
2376 2376 self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))
2377 2377
2378 2378 self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready
2379 2379
2380 2380 if self.__dataReady:
2381 2381 dataOut.utctimeInit = self.__initime
2382 2382 self.__initime += dataOut.outputInterval #to erase time offset
2383 2383
2384 2384 freq = dataOut.frequency
2385 2385 c = dataOut.C #m/s
2386 2386 lamb = c/freq
2387 2387 k = 2*numpy.pi/lamb
2388 2388 azimuth = 0
2389 2389 h = (hmin, hmax)
2390 2390 pairs = ((0,1),(2,3))
2391 2391
2392 2392 if channelPositions is None:
2393 2393 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
2394 2394 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
2395 2395 meteorOps = SMOperations()
2396 2396 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
2397 2397
2398 2398 # distances1 = [-distances[0]*lamb, distances[1]*lamb, -distances[2]*lamb, distances[3]*lamb]
2399 2399
2400 2400 meteorsArray = self.__buffer
2401 2401 error = meteorsArray[:,-1]
2402 2402 boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14)
2403 2403 ind1 = numpy.where(boolError)[0]
2404 2404 meteorsArray = meteorsArray[ind1,:]
2405 2405 meteorsArray[:,-1] = 0
2406 2406 phases = meteorsArray[:,8:12]
2407 2407
2408 2408 #Calculate Gammas
2409 2409 gammas = self.__getGammas(pairs, distances, phases)
2410 2410 # gammas = numpy.array([-21.70409463,45.76935864])*numpy.pi/180
2411 2411 #Calculate Phases
2412 2412 phasesOff = self.__getPhases(azimuth, h, pairs, distances, gammas, meteorsArray)
2413 2413 phasesOff = phasesOff.reshape((1,phasesOff.size))
2414 2414 dataOut.data_output = -phasesOff
2415 2415 dataOut.flagNoData = False
2416 2416 dataOut.channelList = pairslist0
2417 2417 self.__buffer = None
2418 2418
2419 2419
2420 2420 return
2421 2421
2422 2422 class SMOperations():
2423 2423
2424 2424 def __init__(self):
2425 2425
2426 2426 return
2427 2427
2428 2428 def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph):
2429 2429
2430 2430 arrayParameters = arrayParameters0.copy()
2431 2431 hmin = h[0]
2432 2432 hmax = h[1]
2433 2433
2434 2434 #Calculate AOA (Error N 3, 4)
2435 2435 #JONES ET AL. 1998
2436 2436 AOAthresh = numpy.pi/8
2437 2437 error = arrayParameters[:,-1]
2438 2438 phases = -arrayParameters[:,8:12] + jph
2439 2439 # phases = numpy.unwrap(phases)
2440 2440 arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth)
2441 2441
2442 2442 #Calculate Heights (Error N 13 and 14)
2443 2443 error = arrayParameters[:,-1]
2444 2444 Ranges = arrayParameters[:,1]
2445 2445 zenith = arrayParameters[:,4]
2446 2446 arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax)
2447 2447
2448 2448 #----------------------- Get Final data ------------------------------------
2449 2449 # error = arrayParameters[:,-1]
2450 2450 # ind1 = numpy.where(error==0)[0]
2451 2451 # arrayParameters = arrayParameters[ind1,:]
2452 2452
2453 2453 return arrayParameters
2454 2454
2455 2455 def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth):
2456 2456
2457 2457 arrayAOA = numpy.zeros((phases.shape[0],3))
2458 2458 cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList,directions)
2459 2459
2460 2460 arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
2461 2461 cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
2462 2462 arrayAOA[:,2] = cosDirError
2463 2463
2464 2464 azimuthAngle = arrayAOA[:,0]
2465 2465 zenithAngle = arrayAOA[:,1]
2466 2466
2467 2467 #Setting Error
2468 2468 indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0]
2469 2469 error[indError] = 0
2470 2470 #Number 3: AOA not feasible
2471 2471 indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
2472 2472 error[indInvalid] = 3
2473 2473 #Number 4: Large difference in AOAs obtained from different antenna baselines
2474 2474 indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
2475 2475 error[indInvalid] = 4
2476 2476 return arrayAOA, error
2477 2477
2478 2478 def __getDirectionCosines(self, arrayPhase, pairsList, distances):
2479 2479
2480 2480 #Initializing some variables
2481 2481 ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
2482 2482 ang_aux = ang_aux.reshape(1,ang_aux.size)
2483 2483
2484 2484 cosdir = numpy.zeros((arrayPhase.shape[0],2))
2485 2485 cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
2486 2486
2487 2487
2488 2488 for i in range(2):
2489 2489 ph0 = arrayPhase[:,pairsList[i][0]]
2490 2490 ph1 = arrayPhase[:,pairsList[i][1]]
2491 2491 d0 = distances[pairsList[i][0]]
2492 2492 d1 = distances[pairsList[i][1]]
2493 2493
2494 2494 ph0_aux = ph0 + ph1
2495 2495 ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux))
2496 2496 # ph0_aux[ph0_aux > numpy.pi] -= 2*numpy.pi
2497 2497 # ph0_aux[ph0_aux < -numpy.pi] += 2*numpy.pi
2498 2498 #First Estimation
2499 2499 cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1))
2500 2500
2501 2501 #Most-Accurate Second Estimation
2502 2502 phi1_aux = ph0 - ph1
2503 2503 phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
2504 2504 #Direction Cosine 1
2505 2505 cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1))
2506 2506
2507 2507 #Searching the correct Direction Cosine
2508 2508 cosdir0_aux = cosdir0[:,i]
2509 2509 cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
2510 2510 #Minimum Distance
2511 2511 cosDiff = (cosdir1 - cosdir0_aux)**2
2512 2512 indcos = cosDiff.argmin(axis = 1)
2513 2513 #Saving Value obtained
2514 2514 cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
2515 2515
2516 2516 return cosdir0, cosdir
2517 2517
2518 2518 def __calculateAOA(self, cosdir, azimuth):
2519 2519 cosdirX = cosdir[:,0]
2520 2520 cosdirY = cosdir[:,1]
2521 2521
2522 2522 zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
2523 2523 azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth#0 deg north, 90 deg east
2524 2524 angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
2525 2525
2526 2526 return angles
2527 2527
2528 2528 def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
2529 2529
2530 2530 Ramb = 375 #Ramb = c/(2*PRF)
2531 2531 Re = 6371 #Earth Radius
2532 2532 heights = numpy.zeros(Ranges.shape)
2533 2533
2534 2534 R_aux = numpy.array([0,1,2])*Ramb
2535 2535 R_aux = R_aux.reshape(1,R_aux.size)
2536 2536
2537 2537 Ranges = Ranges.reshape(Ranges.size,1)
2538 2538
2539 2539 Ri = Ranges + R_aux
2540 2540 hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
2541 2541
2542 2542 #Check if there is a height between 70 and 110 km
2543 2543 h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
2544 2544 ind_h = numpy.where(h_bool == 1)[0]
2545 2545
2546 2546 hCorr = hi[ind_h, :]
2547 2547 ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
2548 2548
2549 2549 hCorr = hi[ind_hCorr]
2550 2550 heights[ind_h] = hCorr
2551 2551
2552 2552 #Setting Error
2553 2553 #Number 13: Height unresolvable echo: no valid height within 70 to 110 km
2554 2554 #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
2555 2555 indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0]
2556 2556 error[indError] = 0
2557 2557 indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
2558 2558 error[indInvalid2] = 14
2559 2559 indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
2560 2560 error[indInvalid1] = 13
2561 2561
2562 2562 return heights, error
2563 2563
2564 2564 def getPhasePairs(self, channelPositions):
2565 2565 chanPos = numpy.array(channelPositions)
2566 2566 listOper = list(itertools.combinations(range(5),2))
2567 2567
2568 2568 distances = numpy.zeros(4)
2569 2569 axisX = []
2570 2570 axisY = []
2571 2571 distX = numpy.zeros(3)
2572 2572 distY = numpy.zeros(3)
2573 2573 ix = 0
2574 2574 iy = 0
2575 2575
2576 2576 pairX = numpy.zeros((2,2))
2577 2577 pairY = numpy.zeros((2,2))
2578 2578
2579 2579 for i in range(len(listOper)):
2580 2580 pairi = listOper[i]
2581 2581
2582 2582 posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:])
2583 2583
2584 2584 if posDif[0] == 0:
2585 2585 axisY.append(pairi)
2586 2586 distY[iy] = posDif[1]
2587 2587 iy += 1
2588 2588 elif posDif[1] == 0:
2589 2589 axisX.append(pairi)
2590 2590 distX[ix] = posDif[0]
2591 2591 ix += 1
2592 2592
2593 2593 for i in range(2):
2594 2594 if i==0:
2595 2595 dist0 = distX
2596 2596 axis0 = axisX
2597 2597 else:
2598 2598 dist0 = distY
2599 2599 axis0 = axisY
2600 2600
2601 2601 side = numpy.argsort(dist0)[:-1]
2602 2602 axis0 = numpy.array(axis0)[side,:]
2603 2603 chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0])
2604 2604 axis1 = numpy.unique(numpy.reshape(axis0,4))
2605 2605 side = axis1[axis1 != chanC]
2606 2606 diff1 = chanPos[chanC,i] - chanPos[side[0],i]
2607 2607 diff2 = chanPos[chanC,i] - chanPos[side[1],i]
2608 2608 if diff1<0:
2609 2609 chan2 = side[0]
2610 2610 d2 = numpy.abs(diff1)
2611 2611 chan1 = side[1]
2612 2612 d1 = numpy.abs(diff2)
2613 2613 else:
2614 2614 chan2 = side[1]
2615 2615 d2 = numpy.abs(diff2)
2616 2616 chan1 = side[0]
2617 2617 d1 = numpy.abs(diff1)
2618 2618
2619 2619 if i==0:
2620 2620 chanCX = chanC
2621 2621 chan1X = chan1
2622 2622 chan2X = chan2
2623 2623 distances[0:2] = numpy.array([d1,d2])
2624 2624 else:
2625 2625 chanCY = chanC
2626 2626 chan1Y = chan1
2627 2627 chan2Y = chan2
2628 2628 distances[2:4] = numpy.array([d1,d2])
2629 2629 # axisXsides = numpy.reshape(axisX[ix,:],4)
2630 2630 #
2631 2631 # channelCentX = int(numpy.intersect1d(pairX[0,:], pairX[1,:])[0])
2632 2632 # channelCentY = int(numpy.intersect1d(pairY[0,:], pairY[1,:])[0])
2633 2633 #
2634 2634 # ind25X = numpy.where(pairX[0,:] != channelCentX)[0][0]
2635 2635 # ind20X = numpy.where(pairX[1,:] != channelCentX)[0][0]
2636 2636 # channel25X = int(pairX[0,ind25X])
2637 2637 # channel20X = int(pairX[1,ind20X])
2638 2638 # ind25Y = numpy.where(pairY[0,:] != channelCentY)[0][0]
2639 2639 # ind20Y = numpy.where(pairY[1,:] != channelCentY)[0][0]
2640 2640 # channel25Y = int(pairY[0,ind25Y])
2641 2641 # channel20Y = int(pairY[1,ind20Y])
2642 2642
2643 2643 # pairslist = [(channelCentX, channel25X),(channelCentX, channel20X),(channelCentY,channel25Y),(channelCentY, channel20Y)]
2644 2644 pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]
2645 2645
2646 2646 return pairslist, distances
2647 2647 # def __getAOA(self, phases, pairsList, error, AOAthresh, azimuth):
2648 2648 #
2649 2649 # arrayAOA = numpy.zeros((phases.shape[0],3))
2650 2650 # cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList)
2651 2651 #
2652 2652 # arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
2653 2653 # cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
2654 2654 # arrayAOA[:,2] = cosDirError
2655 2655 #
2656 2656 # azimuthAngle = arrayAOA[:,0]
2657 2657 # zenithAngle = arrayAOA[:,1]
2658 2658 #
2659 2659 # #Setting Error
2660 2660 # #Number 3: AOA not feasible
2661 2661 # indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
2662 2662 # error[indInvalid] = 3
2663 2663 # #Number 4: Large difference in AOAs obtained from different antenna baselines
2664 2664 # indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
2665 2665 # error[indInvalid] = 4
2666 2666 # return arrayAOA, error
2667 2667 #
2668 2668 # def __getDirectionCosines(self, arrayPhase, pairsList):
2669 2669 #
2670 2670 # #Initializing some variables
2671 2671 # ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
2672 2672 # ang_aux = ang_aux.reshape(1,ang_aux.size)
2673 2673 #
2674 2674 # cosdir = numpy.zeros((arrayPhase.shape[0],2))
2675 2675 # cosdir0 = numpy.zeros((arrayPhase.shape[0],2))
2676 2676 #
2677 2677 #
2678 2678 # for i in range(2):
2679 2679 # #First Estimation
2680 2680 # phi0_aux = arrayPhase[:,pairsList[i][0]] + arrayPhase[:,pairsList[i][1]]
2681 2681 # #Dealias
2682 2682 # indcsi = numpy.where(phi0_aux > numpy.pi)
2683 2683 # phi0_aux[indcsi] -= 2*numpy.pi
2684 2684 # indcsi = numpy.where(phi0_aux < -numpy.pi)
2685 2685 # phi0_aux[indcsi] += 2*numpy.pi
2686 2686 # #Direction Cosine 0
2687 2687 # cosdir0[:,i] = -(phi0_aux)/(2*numpy.pi*0.5)
2688 2688 #
2689 2689 # #Most-Accurate Second Estimation
2690 2690 # phi1_aux = arrayPhase[:,pairsList[i][0]] - arrayPhase[:,pairsList[i][1]]
2691 2691 # phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
2692 2692 # #Direction Cosine 1
2693 2693 # cosdir1 = -(phi1_aux + ang_aux)/(2*numpy.pi*4.5)
2694 2694 #
2695 2695 # #Searching the correct Direction Cosine
2696 2696 # cosdir0_aux = cosdir0[:,i]
2697 2697 # cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
2698 2698 # #Minimum Distance
2699 2699 # cosDiff = (cosdir1 - cosdir0_aux)**2
2700 2700 # indcos = cosDiff.argmin(axis = 1)
2701 2701 # #Saving Value obtained
2702 2702 # cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]
2703 2703 #
2704 2704 # return cosdir0, cosdir
2705 2705 #
2706 2706 # def __calculateAOA(self, cosdir, azimuth):
2707 2707 # cosdirX = cosdir[:,0]
2708 2708 # cosdirY = cosdir[:,1]
2709 2709 #
2710 2710 # zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
2711 2711 # azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east
2712 2712 # angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()
2713 2713 #
2714 2714 # return angles
2715 2715 #
2716 2716 # def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
2717 2717 #
2718 2718 # Ramb = 375 #Ramb = c/(2*PRF)
2719 2719 # Re = 6371 #Earth Radius
2720 2720 # heights = numpy.zeros(Ranges.shape)
2721 2721 #
2722 2722 # R_aux = numpy.array([0,1,2])*Ramb
2723 2723 # R_aux = R_aux.reshape(1,R_aux.size)
2724 2724 #
2725 2725 # Ranges = Ranges.reshape(Ranges.size,1)
2726 2726 #
2727 2727 # Ri = Ranges + R_aux
2728 2728 # hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re
2729 2729 #
2730 2730 # #Check if there is a height between 70 and 110 km
2731 2731 # h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
2732 2732 # ind_h = numpy.where(h_bool == 1)[0]
2733 2733 #
2734 2734 # hCorr = hi[ind_h, :]
2735 2735 # ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
2736 2736 #
2737 2737 # hCorr = hi[ind_hCorr]
2738 2738 # heights[ind_h] = hCorr
2739 2739 #
2740 2740 # #Setting Error
2741 2741 # #Number 13: Height unresolvable echo: not valid height within 70 to 110 km
2742 2742 # #Number 14: Height ambiguous echo: more than one possible height within 70 to 110 km
2743 2743 #
2744 2744 # indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
2745 2745 # error[indInvalid2] = 14
2746 2746 # indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
2747 2747 # error[indInvalid1] = 13
2748 2748 #
2749 2749 # return heights, error
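As a side note on the height step above: __getHeights resolves the aliased echo range Ri = R + n*Ramb (with Ramb = c/(2*PRF) = 375 km) into a geometric height via the law of cosines with an Earth radius of 6371 km, and keeps the echo only when exactly one candidate falls inside the requested height window (error codes 13 and 14 otherwise). A minimal numpy sketch of that selection for a single echo; the range and zenith values are illustrative, not taken from real data.

import numpy

Ramb = 375.0   # km, ambiguity range c/(2*PRF)
Re = 6371.0    # km, Earth radius
minHeight, maxHeight = 70.0, 110.0   # km, valid meteor height window

R = 130.0      # km, measured (aliased) range -- illustrative value
zenith = 40.0  # deg, zenith angle from the AOA step -- illustrative value

Ri = R + numpy.arange(3)*Ramb   # candidate unaliased ranges
hi = numpy.sqrt(Re**2 + Ri**2 + 2*Re*Ri*numpy.cos(zenith*numpy.pi/180)) - Re

valid = numpy.logical_and(hi > minHeight, hi < maxHeight)
if valid.sum() == 1:
    height = hi[valid][0]   # unambiguous height
    error = 0
elif valid.sum() == 0:
    error = 13              # no valid height in the window
else:
    error = 14              # ambiguous: more than one candidate height

if error == 0:
    print 'height: %.1f km' % height
else:
    print 'flagged with error %d' % error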
@@ -1,457 +1,502
1 1 '''
2 2 @author: Juan C. Espinoza
3 3 '''
4 4
5 5 import time
6 6 import json
7 7 import numpy
8 8 import paho.mqtt.client as mqtt
9 9 import zmq
10 10 from profilehooks import profile
11 11 import datetime
12 12 from zmq.utils.monitor import recv_monitor_message
13 13 from functools import wraps
14 14 from threading import Thread
15 15 from multiprocessing import Process
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit
18 from schainpy.model.data.jrodata import JROData
18 19
19 20 MAXNUMX = 100
20 21 MAXNUMY = 100
21 22
22 23 class PrettyFloat(float):
23 24 def __repr__(self):
24 25 return '%.2f' % self
25 26
26 27 def roundFloats(obj):
27 28 if isinstance(obj, list):
28 29 return map(roundFloats, obj)
29 30 elif isinstance(obj, float):
30 31 return round(obj, 2)
31 32
32 33 def decimate(z, MAXNUMY=MAXNUMY):
33 34 # dx = int(len(self.x)/self.__MAXNUMX) + 1
34 35
35 36 dy = int(len(z[0])/MAXNUMY) + 1
36 37
37 38 return z[::, ::dy]
38 39
39 40 class throttle(object):
40 41 """Decorator that prevents a function from being called more than once every
41 42 time period.
42 43 To create a function that cannot be called more than once a minute, but
43 44 will sleep until it can be called:
44 45 @throttle(minutes=1)
45 46 def foo():
46 47 pass
47 48
48 49 for i in range(10):
49 50 foo()
50 51 print "This function has run %s times." % i
51 52 """
52 53
53 54 def __init__(self, seconds=0, minutes=0, hours=0):
54 55 self.throttle_period = datetime.timedelta(
55 56 seconds=seconds, minutes=minutes, hours=hours
56 57 )
57 58
58 59 self.time_of_last_call = datetime.datetime.min
59 60
60 61 def __call__(self, fn):
61 62 @wraps(fn)
62 63 def wrapper(*args, **kwargs):
63 64 now = datetime.datetime.now()
64 65 time_since_last_call = now - self.time_of_last_call
65 66 time_left = self.throttle_period - time_since_last_call
66 67
67 68 if time_left > datetime.timedelta(seconds=0):
68 69 return
69 70
70 71 self.time_of_last_call = datetime.datetime.now()
71 72 return fn(*args, **kwargs)
72 73
73 74 return wrapper
74 75
75 76
76 77 class PublishData(Operation):
77 78 """Publish class."""
78 79
79 80 def __init__(self, **kwargs):
80 81 """Init."""
81 82 Operation.__init__(self, **kwargs)
82 83 self.isConfig = False
83 84 self.client = None
84 85 self.zeromq = None
85 86 self.mqtt = None
86 87
87 88 def on_disconnect(self, client, userdata, rc):
88 89 if rc != 0:
89 90 print("Unexpected disconnection.")
90 91 self.connect()
91 92
92 93 def connect(self):
93 94 print 'trying to connect'
94 95 try:
95 96 self.client.connect(
96 97 host=self.host,
97 98 port=self.port,
98 99 keepalive=60*10,
99 100 bind_address='')
100 101 self.client.loop_start()
101 102 # self.client.publish(
102 103 # self.topic + 'SETUP',
103 104 # json.dumps(setup),
104 105 # retain=True
105 106 # )
106 107 except:
107 108 print "MQTT Connection error."
108 109 self.client = False
109 110
110 111 def setup(self, port=1883, username=None, password=None, clientId="user", zeromq=1, verbose=True, **kwargs):
111 112 self.counter = 0
112 113 self.topic = kwargs.get('topic', 'schain')
113 114 self.delay = kwargs.get('delay', 0)
114 115 self.plottype = kwargs.get('plottype', 'spectra')
115 116 self.host = kwargs.get('host', "10.10.10.82")
116 117 self.port = kwargs.get('port', 3000)
117 118 self.clientId = clientId
118 119 self.cnt = 0
119 120 self.zeromq = zeromq
120 121 self.mqtt = kwargs.get('mqtt', 0)
121 122 self.client = None
122 123 self.verbose = verbose
123 124 self.dataOut.firstdata = True
124 125 setup = []
125 126 if self.mqtt == 1:
126 127 self.client = mqtt.Client(
127 128 client_id=self.clientId + self.topic + 'SCHAIN',
128 129 clean_session=True)
129 130 self.client.on_disconnect = self.on_disconnect
130 131 self.connect()
131 132 for plot in self.plottype:
132 133 setup.append({
133 134 'plot': plot,
134 135 'topic': self.topic + plot,
135 136 'title': getattr(self, plot + '_' + 'title', False),
136 137 'xlabel': getattr(self, plot + '_' + 'xlabel', False),
137 138 'ylabel': getattr(self, plot + '_' + 'ylabel', False),
138 139 'xrange': getattr(self, plot + '_' + 'xrange', False),
139 140 'yrange': getattr(self, plot + '_' + 'yrange', False),
140 141 'zrange': getattr(self, plot + '_' + 'zrange', False),
141 142 })
142 143 if zeromq == 1:
143 144 context = zmq.Context()
144 145 self.zmq_socket = context.socket(zmq.PUSH)
145 146 server = kwargs.get('server', 'zmq.pipe')
146 147
147 148 if 'tcp://' in server:
148 149 address = server
149 150 else:
150 151 address = 'ipc:///tmp/%s' % server
151 152
152 153 self.zmq_socket.connect(address)
153 154 time.sleep(1)
154 155
155 156
156 157 def publish_data(self):
157 158 self.dataOut.finished = False
158 159 if self.mqtt == 1:
159 160 yData = self.dataOut.heightList[:2].tolist()
160 161 if self.plottype == 'spectra':
161 162 data = getattr(self.dataOut, 'data_spc')
162 163 z = data/self.dataOut.normFactor
163 164 zdB = 10*numpy.log10(z)
164 165 xlen, ylen = zdB[0].shape
165 166 dx = int(xlen/MAXNUMX) + 1
166 167 dy = int(ylen/MAXNUMY) + 1
167 168 Z = [0 for i in self.dataOut.channelList]
168 169 for i in self.dataOut.channelList:
169 170 Z[i] = zdB[i][::dx, ::dy].tolist()
170 171 payload = {
171 172 'timestamp': self.dataOut.utctime,
172 173 'data': roundFloats(Z),
173 174 'channels': ['Ch %s' % ch for ch in self.dataOut.channelList],
174 175 'interval': self.dataOut.getTimeInterval(),
175 176 'type': self.plottype,
176 177 'yData': yData
177 178 }
178 179 # print payload
179 180
180 181 elif self.plottype in ('rti', 'power'):
181 182 data = getattr(self.dataOut, 'data_spc')
182 183 z = data/self.dataOut.normFactor
183 184 avg = numpy.average(z, axis=1)
184 185 avgdB = 10*numpy.log10(avg)
185 186 xlen, ylen = z[0].shape
186 187 dy = int(ylen/MAXNUMY) + 1
187 188 AVG = [0 for i in self.dataOut.channelList]
188 189 for i in self.dataOut.channelList:
189 190 AVG[i] = avgdB[i][::dy].tolist()
190 191 payload = {
191 192 'timestamp': self.dataOut.utctime,
192 193 'data': roundFloats(AVG),
193 194 'channels': ['Ch %s' % ch for ch in self.dataOut.channelList],
194 195 'interval': self.dataOut.getTimeInterval(),
195 196 'type': self.plottype,
196 197 'yData': yData
197 198 }
198 199 elif self.plottype == 'noise':
199 200 noise = self.dataOut.getNoise()/self.dataOut.normFactor
200 201 noisedB = 10*numpy.log10(noise)
201 202 payload = {
202 203 'timestamp': self.dataOut.utctime,
203 204 'data': roundFloats(noisedB.reshape(-1, 1).tolist()),
204 205 'channels': ['Ch %s' % ch for ch in self.dataOut.channelList],
205 206 'interval': self.dataOut.getTimeInterval(),
206 207 'type': self.plottype,
207 208 'yData': yData
208 209 }
209 210 elif self.plottype == 'snr':
210 211 data = getattr(self.dataOut, 'data_SNR')
211 212 avgdB = 10*numpy.log10(data)
212 213
213 214 ylen = data[0].size
214 215 dy = int(ylen/MAXNUMY) + 1
215 216 AVG = [0 for i in self.dataOut.channelList]
216 217 for i in self.dataOut.channelList:
217 218 AVG[i] = avgdB[i][::dy].tolist()
218 219 payload = {
219 220 'timestamp': self.dataOut.utctime,
220 221 'data': roundFloats(AVG),
221 222 'channels': ['Ch %s' % ch for ch in self.dataOut.channelList],
222 223 'type': self.plottype,
223 224 'yData': yData
224 225 }
225 226 else:
226 227 print "Invalid plot type"
227 228 payload = {
228 229 'data': 'None',
229 230 'timestamp': 'None',
230 231 'type': None
231 232 }
232 233 # print 'Publishing data to {}'.format(self.host)
233 234 self.client.publish(self.topic + self.plottype, json.dumps(payload), qos=0)
234 235
235 236 if self.zeromq == 1:
236 237 if self.verbose:
237 238 print '[Sending] {} - {}'.format(self.dataOut.type, self.dataOut.datatime)
238 239 self.zmq_socket.send_pyobj(self.dataOut)
239 240 self.dataOut.firstdata = False
240 241
241 242
242 243 def run(self, dataOut, **kwargs):
243 244 self.dataOut = dataOut
244 245 if not self.isConfig:
245 246 self.setup(**kwargs)
246 247 self.isConfig = True
247 248
248 249 self.publish_data()
249 250 time.sleep(self.delay)
250 251
251 252 def close(self):
252 253 if self.zeromq == 1:
253 254 self.dataOut.finished = True
254 255 self.zmq_socket.send_pyobj(self.dataOut)
255 256 self.zmq_socket.close()
256 257 if self.client:
257 258 self.client.loop_stop()
258 259 self.client.disconnect()
259 260
260 class ReceiverData(ProcessingUnit, Process):
261
262 class ReceiverData(ProcessingUnit):
263
264 def __init__(self, **kwargs):
265
266 ProcessingUnit.__init__(self, **kwargs)
267
268 self.isConfig = False
269 server = kwargs.get('server', 'zmq.pipe')
270 if 'tcp://' in server:
271 address = server
272 else:
273 address = 'ipc:///tmp/%s' % server
274
275 self.address = address
276 self.dataOut = JROData()
277
278 def setup(self):
279
280 self.context = zmq.Context()
281 self.receiver = self.context.socket(zmq.PULL)
282 self.receiver.bind(self.address)
283 time.sleep(0.5)
284 print '[Starting] ReceiverData from {}'.format(self.address)
285
286
287 def run(self):
288
289 if not self.isConfig:
290 self.setup()
291 self.isConfig = True
292
293 self.dataOut = self.receiver.recv_pyobj()
294 print '[Receiving] {} - {}'.format(self.dataOut.type,
295 self.dataOut.datatime.ctime())
296
297
298 class PlotterReceiver(ProcessingUnit, Process):
261 299
262 300 throttle_value = 5
263 301
264 302 def __init__(self, **kwargs):
265 303
266 304 ProcessingUnit.__init__(self, **kwargs)
267 305 Process.__init__(self)
268 306 self.mp = False
269 307 self.isConfig = False
270 308 self.isWebConfig = False
271 309 self.plottypes =[]
272 310 self.connections = 0
273 311 server = kwargs.get('server', 'zmq.pipe')
274 312 plot_server = kwargs.get('plot_server', 'zmq.web')
275 313 if 'tcp://' in server:
276 314 address = server
277 315 else:
278 316 address = 'ipc:///tmp/%s' % server
279 317
280 318 if 'tcp://' in plot_server:
281 319 plot_address = plot_server
282 320 else:
283 321 plot_address = 'ipc:///tmp/%s' % plot_server
284 322
285 323 self.address = address
286 324 self.plot_address = plot_address
287 325 self.plottypes = [s.strip() for s in kwargs.get('plottypes', 'rti').split(',')]
288 326 self.realtime = kwargs.get('realtime', False)
289 327 self.throttle_value = kwargs.get('throttle', 5)
290 328 self.sendData = self.initThrottle(self.throttle_value)
291 329 self.setup()
292 330
293 331 def setup(self):
294 332
295 333 self.data = {}
296 334 self.data['times'] = []
297 335 for plottype in self.plottypes:
298 336 self.data[plottype] = {}
299 337 self.data['noise'] = {}
300 338 self.data['throttle'] = self.throttle_value
301 339 self.data['ENDED'] = False
302 340 self.isConfig = True
303 341 self.data_web = {}
304 342
305 343 def event_monitor(self, monitor):
306 344
307 345 events = {}
308 346
309 347 for name in dir(zmq):
310 348 if name.startswith('EVENT_'):
311 349 value = getattr(zmq, name)
312 350 events[value] = name
313 351
314 352 while monitor.poll():
315 353 evt = recv_monitor_message(monitor)
316 354 if evt['event'] == 32:
317 355 self.connections += 1
318 356 if evt['event'] == 512:
319 357 pass
320 358 if self.connections == 0 and self.started is True:
321 359 self.ended = True
322 360
323 361 evt.update({'description': events[evt['event']]})
324 362
325 363 if evt['event'] == zmq.EVENT_MONITOR_STOPPED:
326 364 break
327 365 monitor.close()
328 366 print("event monitor thread done!")
329 367
330 368 def initThrottle(self, throttle_value):
331 369
332 370 @throttle(seconds=throttle_value)
333 371 def sendDataThrottled(fn_sender, data):
334 372 fn_sender(data)
335 373
336 374 return sendDataThrottled
337 375
338 376
339 377 def send(self, data):
340 378 # print '[sending] data=%s size=%s' % (data.keys(), len(data['times']))
341 379 self.sender.send_pyobj(data)
342 380
343 381
344 382 def update(self):
345 383 t = self.dataOut.utctime
346 384
347 385 if t in self.data['times']:
348 386 return
349 387
350 388 self.data['times'].append(t)
351 389 self.data['dataOut'] = self.dataOut
352 390
353 391 for plottype in self.plottypes:
354 392 if plottype == 'spc':
355 393 z = self.dataOut.data_spc/self.dataOut.normFactor
356 394 self.data[plottype] = 10*numpy.log10(z)
357 395 self.data['noise'][t] = 10*numpy.log10(self.dataOut.getNoise()/self.dataOut.normFactor)
358 396 if plottype == 'cspc':
359 397 jcoherence = self.dataOut.data_cspc/numpy.sqrt(self.dataOut.data_spc*self.dataOut.data_spc)
360 398 self.data['cspc_coh'] = numpy.abs(jcoherence)
361 399 self.data['cspc_phase'] = numpy.arctan2(jcoherence.imag, jcoherence.real)*180/numpy.pi
362 400 if plottype == 'rti':
363 401 self.data[plottype][t] = self.dataOut.getPower()
364 402 if plottype == 'snr':
365 403 self.data[plottype][t] = 10*numpy.log10(self.dataOut.data_SNR)
366 404 if plottype == 'dop':
367 405 self.data[plottype][t] = 10*numpy.log10(self.dataOut.data_DOP)
368 406 if plottype == 'mean':
369 407 self.data[plottype][t] = self.dataOut.data_MEAN
370 408 if plottype == 'std':
371 409 self.data[plottype][t] = self.dataOut.data_STD
372 410 if plottype == 'coh':
373 411 self.data[plottype][t] = self.dataOut.getCoherence()
374 412 if plottype == 'phase':
375 413 self.data[plottype][t] = self.dataOut.getCoherence(phase=True)
376 if plottype == 'wind':
414 if plottype == 'output':
377 415 self.data[plottype][t] = self.dataOut.data_output
416 if plottype == 'param':
417 self.data[plottype][t] = self.dataOut.data_param
378 418 if self.realtime:
379 419 self.data_web['timestamp'] = t
380 420 if plottype == 'spc':
381 421 self.data_web[plottype] = roundFloats(decimate(self.data[plottype]).tolist())
382 422 elif plottype == 'cspc':
383 423 self.data_web['cspc_coh'] = roundFloats(decimate(self.data['cspc_coh']).tolist())
384 424 self.data_web['cspc_phase'] = roundFloats(decimate(self.data['cspc_phase']).tolist())
385 425 elif plottype == 'noise':
386 426 self.data_web['noise'] = roundFloats(self.data['noise'][t].tolist())
387 427 else:
388 428 self.data_web[plottype] = roundFloats(decimate(self.data[plottype][t]).tolist())
389 429 self.data_web['interval'] = self.dataOut.getTimeInterval()
390 430 self.data_web['type'] = plottype
391 431
392 432 def run(self):
393 433
394 434 print '[Starting] {} from {}'.format(self.name, self.address)
395 435
396 436 self.context = zmq.Context()
397 437 self.receiver = self.context.socket(zmq.PULL)
398 438 self.receiver.bind(self.address)
399 439 monitor = self.receiver.get_monitor_socket()
400 440 self.sender = self.context.socket(zmq.PUB)
401 441 if self.realtime:
402 442 self.sender_web = self.context.socket(zmq.PUB)
403 443 self.sender_web.connect(self.plot_address)
404 444 time.sleep(1)
445
446 if 'server' in self.kwargs:
447 self.sender.bind("ipc:///tmp/{}.plots".format(self.kwargs['server']))
448 else:
405 449 self.sender.bind("ipc:///tmp/zmq.plots")
450
406 451 time.sleep(3)
452
407 453 t = Thread(target=self.event_monitor, args=(monitor,))
408 454 t.start()
409 455
410 456 while True:
411 457 self.dataOut = self.receiver.recv_pyobj()
412 458 # print '[Receiving] {} - {}'.format(self.dataOut.type,
413 459 # self.dataOut.datatime.ctime())
414 460
415 461 self.update()
416 462
417 463 if self.dataOut.firstdata is True:
418 464 self.data['STARTED'] = True
419 465
420
421 466 if self.dataOut.finished is True:
422 467 self.send(self.data)
423 468 self.connections -= 1
424 469 if self.connections == 0 and self.started:
425 470 self.ended = True
426 471 self.data['ENDED'] = True
427 472 self.send(self.data)
428 473 self.setup()
429 474 self.started = False
430 475 else:
431 476 if self.realtime:
432 477 self.send(self.data)
433 478 self.sender_web.send_string(json.dumps(self.data_web))
434 479 else:
435 480 self.sendData(self.send, self.data)
436 481 self.started = True
437 482
438 483 self.data['STARTED'] = False
439 484 return
440 485
441 486 def sendToWeb(self):
442 487
443 488 if not self.isWebConfig:
444 489 context = zmq.Context()
445 490 sender_web_config = context.socket(zmq.PUB)
446 491 if 'tcp://' in self.plot_address:
447 492 dum, address, port = self.plot_address.split(':')
448 493 conf_address = '{}:{}:{}'.format(dum, address, int(port)+1)
449 494 else:
450 495 conf_address = self.plot_address + '.config'
451 496 sender_web_config.bind(conf_address)
452 497 time.sleep(1)
453 498 for kwargs in self.operationKwargs.values():
454 499 if 'plot' in kwargs:
455 500 print '[Sending] Config data to web for {}'.format(kwargs['code'].upper())
456 501 sender_web_config.send_string(json.dumps(kwargs))
457 502 self.isWebConfig = True
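PublishData and PlotterReceiver above exchange whole dataOut objects over a ZeroMQ PUSH/PULL pair (the receiver binds, the sender connects, both defaulting to an ipc:///tmp/<server> endpoint) using send_pyobj/recv_pyobj. A stripped-down sketch of that transport outside schainpy; the endpoint name and payload are illustrative, and in practice the two sides run in separate processes.

import time
import zmq

address = 'ipc:///tmp/zmq.pipe'   # same naming scheme as the default above
context = zmq.Context()

# receiver side (role of PlotterReceiver)
pull = context.socket(zmq.PULL)
pull.bind(address)

# sender side (role of PublishData)
push = context.socket(zmq.PUSH)
push.connect(address)
time.sleep(0.5)                   # give the ipc connection time to settle
push.send_pyobj({'type': 'rti', 'utctime': time.time()})

data = pull.recv_pyobj()
print '[Receiving]', data['type']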
@@ -1,78 +1,80
1 1 #!/usr/bin/env python
2 2 '''
3 3 Created on Jul 7, 2014
4 4
5 5 @author: roj-idl71
6 6 '''
7 7 import os, sys
8 8
9 9 from schainpy.controller import Project
10 10
11 11 if __name__ == '__main__':
12 12 desc = "Second Test"
13 13
14 14 controllerObj = Project()
15 15 controllerObj.setup(id='191', name='test01', description=desc)
16 16
17 proc1 = controllerObj.addProcUnit(name='ReceiverData')
17 proc1 = controllerObj.addProcUnit(name='PlotterReceiver')
18 18 # proc1.addParameter(name='realtime', value='0', format='bool')
19 19 #proc1.addParameter(name='plottypes', value='rti,coh,phase,snr,dop', format='str')
20 20 #proc1.addParameter(name='plottypes', value='rti,coh,phase,snr', format='str')
21 21 proc1.addParameter(name='plottypes', value='dop', format='str')
22 22
23 proc1.addParameter(name='throttle', value='10', format='int')
23 #proc1.addParameter(name='throttle', value='10', format='int')
24
25 proc1.addParameter(name='interactive', value='0', format='bool') # TODO: ask about this
24 26 #proc1.addParameter(name='server', value='tcp://10.10.10.82:7000', format='str')
25 27 ## TODO Add the plot publishing server address as a variable
26 28
27 29 """
28 30 op1 = proc1.addOperation(name='PlotRTIData', optype='other')
29 31 op1.addParameter(name='wintitle', value='HF System', format='str')
30 32 op1.addParameter(name='save', value='/home/ci-81/Pictures', format='str')
31 33 op1.addParameter(name='show', value='0', format='bool')
32 34 op1.addParameter(name='zmin', value='-110', format='float')
33 35 op1.addParameter(name='zmax', value='-50', format='float')
34 36 op1.addParameter(name='colormap', value='jet', format='str')
35 37 #
36 38 op2 = proc1.addOperation(name='PlotCOHData', optype='other')
37 39 op2.addParameter(name='wintitle', value='HF System', format='str')
38 40 op2.addParameter(name='zmin', value='0.001', format='float')
39 41 op2.addParameter(name='zmax', value='1', format='float')
40 42 op2.addParameter(name='save', value='/home/ci-81/Pictures', format='str')
41 43 op2.addParameter(name='colormap', value='jet', format='str')
42 44 op2.addParameter(name='show', value='0', format='bool')
43 45 # #
44 46
45 47 op6 = proc1.addOperation(name='PlotPHASEData', optype='other')
46 48 op6.addParameter(name='wintitle', value='HF System', format='str')
47 49 op6.addParameter(name='save', value='/home/ci-81/Pictures', format='str')
48 50 op6.addParameter(name='show', value='1', format='bool')
49 51 #
50 52
51 53 # proc2 = controllerObj.addProcUnit(name='ReceiverData')
52 54 # proc2.addParameter(name='server', value='juanca', format='str')
53 55 # proc2.addParameter(name='plottypes', value='snr,dop', format='str')
54 56 #
55 57
56 58 op3 = proc1.addOperation(name='PlotSNRData', optype='other')
57 59 op3.addParameter(name='wintitle', value='HF System SNR0', format='str')
58 60 op3.addParameter(name='save', value='/home/ci-81/Pictures', format='str')
59 61 op3.addParameter(name='show', value='0', format='bool')
60 62 op3.addParameter(name='zmin', value='-10', format='int')
61 63 op3.addParameter(name='zmax', value='30', format='int')
62 64 op3.addParameter(name='SNRthresh', value='0', format='float')
63 65 """
64 66 #
65 67 op5 = proc1.addOperation(name='PlotDOPData', optype='other')
66 68 op5.addParameter(name='wintitle', value='HF System DOP', format='str')
67 69 op5.addParameter(name='save', value='/home/ci-81/Pictures', format='str')
68 70 op5.addParameter(name='show', value='1', format='bool')
69 71 op5.addParameter(name='zmin', value='-120', format='float')
70 72 op5.addParameter(name='zmax', value='120', format='float')
71 73 op5.addParameter(name='colormap', value='RdBu_r', format='str')
72 74 """
73 75 op4 = proc1.addOperation(name='PlotSNRData1', optype='other')
74 76 op4.addParameter(name='wintitle', value='HF System SNR1', format='str')
75 77 op4.addParameter(name='save', value='/home/ci-81/Pictures', format='str')
76 78 op4.addParameter(name='show', value='0', format='bool')
77 79 """
78 80 controllerObj.start()
@@ -1,1 +1,1
1 <Project description="HF_EXAMPLE" id="191" name="test01"><ReadUnit datatype="SpectraReader" id="1911" inputId="0" name="SpectraReader"><Operation id="19111" name="run" priority="1" type="self"><Parameter format="str" id="191111" name="datatype" value="SpectraReader" /><Parameter format="str" id="191112" name="path" value="/media/ci-81/Huancayo/DATA/hfradar_2016/pdata/sp1_f1" /><Parameter format="date" id="191113" name="startDate" value="2016/04/23" /><Parameter format="date" id="191114" name="endDate" value="2016/04/23" /><Parameter format="time" id="191115" name="startTime" value="00:00:00" /><Parameter format="time" id="191116" name="endTime" value="23:59:59" /><Parameter format="int" id="191118" name="cursor" value="6" /><Parameter format="int" id="191119" name="skip" value="16" /><Parameter format="int" id="191120" name="delay" value="10" /><Parameter format="int" id="191121" name="walk" value="1" /><Parameter format="int" id="191122" name="online" value="0" /></Operation></ReadUnit><ProcUnit datatype="ParametersProc" id="1913" inputId="1911" name="ParametersProc"><Operation id="19131" name="run" priority="1" type="self" /><Operation id="19132" name="SpectralMoments" priority="2" type="other" /><Operation id="19133" name="PublishData" priority="3" type="other"><Parameter format="int" id="191331" name="zeromq" value="1" /></Operation></ProcUnit><ProcUnit datatype="Spectra" id="1912" inputId="1911" name="SpectraProc"><Operation id="19121" name="run" priority="1" type="self" /><Operation id="19122" name="removeInterference" priority="2" type="self" /></ProcUnit></Project> No newline at end of file
1 <Project description="HF_EXAMPLE" id="191" name="test01"><ReadUnit datatype="SpectraReader" id="1911" inputId="0" name="SpectraReader"><Operation id="19111" name="run" priority="1" type="self"><Parameter format="str" id="191111" name="datatype" value="SpectraReader" /><Parameter format="str" id="191112" name="path" value="/media/ci-81/Huancayo/DATA/hfradar_2016/pdata/sp1_f1" /><Parameter format="date" id="191113" name="startDate" value="2016/04/27" /><Parameter format="date" id="191114" name="endDate" value="2016/04/27" /><Parameter format="time" id="191115" name="startTime" value="00:00:00" /><Parameter format="time" id="191116" name="endTime" value="23:59:59" /><Parameter format="int" id="191118" name="cursor" value="0" /><Parameter format="int" id="191119" name="skip" value="0" /><Parameter format="int" id="191120" name="delay" value="10" /><Parameter format="int" id="191121" name="walk" value="1" /><Parameter format="int" id="191122" name="online" value="0" /></Operation></ReadUnit><ProcUnit datatype="ParametersProc" id="1913" inputId="1911" name="ParametersProc"><Operation id="19131" name="run" priority="1" type="self" /><Operation id="19132" name="SpectralMoments" priority="2" type="other" /><Operation id="19133" name="PublishData" priority="3" type="other"><Parameter format="int" id="191331" name="zeromq" value="1" /></Operation></ProcUnit><ProcUnit datatype="Spectra" id="1912" inputId="1911" name="SpectraProc"><Operation id="19121" name="run" priority="1" type="self" /><Operation id="19122" name="removeInterference" priority="2" type="self" /></ProcUnit></Project> No newline at end of file
@@ -1,49 +1,57
1 '''
1 """.
2
2 3 Created on Jul 16, 2014
3 4
4 5 @author: Miguel Urco
5 '''
6 """
6 7
7 8 from schainpy import __version__
8 9 from setuptools import setup, Extension
9 10
10 11 setup(name="schainpy",
11 12 version=__version__,
12 13 description="Python tools to read, write and process Jicamarca data",
13 14 author="Miguel Urco",
14 15 author_email="miguel.urco@jro.igp.gob.pe",
15 16 url="http://jro.igp.gob.pe",
16 17 packages = {'schainpy',
17 18 'schainpy.model',
18 19 'schainpy.model.data',
19 20 'schainpy.model.graphics',
20 21 'schainpy.model.io',
21 22 'schainpy.model.proc',
22 23 'schainpy.model.serializer',
23 24 'schainpy.model.utils',
24 25 'schainpy.gui',
25 26 'schainpy.gui.figures',
26 27 'schainpy.gui.viewcontroller',
27 28 'schainpy.gui.viewer',
28 29 'schainpy.gui.viewer.windows'},
29 30 ext_package='schainpy',
30 31 py_modules=[''],
31 32 package_data={'': ['schain.conf.template'],
32 33 'schainpy.gui.figures': ['*.png','*.jpg'],
33 34 },
34 35 include_package_data=False,
35 scripts =['schainpy/gui/schainGUI',
36 'schainpy/scripts/schain'],
36 entry_points={
37 'console_scripts': [
38 'schain = schaincli.cli:main',
39 ],
40 },
41 scripts=['schainpy/gui/schainGUI'],
37 42 ext_modules=[Extension("cSchain", ["schainpy/model/proc/extensions.c"])],
38 43 install_requires=[
39 44 "scipy >= 0.14.0",
40 45 "h5py >= 2.2.1",
41 46 "matplotlib >= 1.4.2",
42 47 "pyfits >= 3.4",
43 48 "numpy >= 1.11.2",
44 49 "paramiko >= 2.1.2",
45 50 "paho-mqtt >= 1.2",
46 51 "zmq",
47 "fuzzywuzzy"
52 "fuzzywuzzy",
53 "click",
54 "colorama",
55 "python-Levenshtein"
48 56 ],
49 57 )
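The new entry_points block expects a schaincli package exposing cli.main; that module is not part of this changeset, so the following is only a guess at its minimal shape built on the click dependency added above. The package layout, command name and subcommand are hypothetical.

# schaincli/cli.py -- hypothetical skeleton matching 'schain = schaincli.cli:main'
import click

@click.group()
def main():
    """Command line interface for the signal chain."""

@main.command()
@click.argument('script', type=click.Path(exists=True))
def run(script):
    """Run an experiment script (hypothetical subcommand)."""
    click.echo('Running %s' % script)

if __name__ == '__main__':
    main()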