new jars_server.py
Juan C. Espinoza - r323:767ecbca18d6
@@ -1,773 +1,799 @@
1 '''
1 '''
2 Created on Jan 5, 2016
2 Created on Jan 5, 2016
3 -
3 +Modified on Jan 24, 2019
4 -@author: Juan C. Espinoza
4 +@authors: Juan C. Espinoza, Fiorella Quino, John Rojas
5
5
6 '''
6 '''
7
7
8 import os
8 import os
9 import math
9 import math
10 import json
10 import json
11 import requests
11 import requests
12 import time
12 import time
13
13
14 from threading import Thread
14 from threading import Thread
15 from subprocess import Popen, PIPE
15 from subprocess import Popen, PIPE
16 from collections import deque
16 from collections import deque
17 from datetime import datetime, timedelta
17 from datetime import datetime, timedelta
18
18
19 from flask import Flask, jsonify, request, send_file
19 from flask import Flask, jsonify, request, send_file
20
20
21 -PATH = 'F:\SIR_DATA'
21 +PATH = 'G:\SIR_DATA'
22 EXE = 'C:\JROAdquisicion\src\JROAcquisitionSoftware\Release\JROAcquisitionSoftware.exe'
22 EXE = 'C:\JROAdquisicion\src\JROAcquisitionSoftware\Release\JROAcquisitionSoftware.exe'
23 -IPHOST='10.10.10.165'
23 +IPHOST='10.10.10.X'
24 OPT = '--jars' #'--cbsim'
24 OPT = '--jars' #'--cbsim'
25 PROC = False
25 PROC = False
26 OUT = None
26 OUT = None
27 LOGGING = False
27 LOGGING = False
28 global EXPNAME
28 global EXPNAME
29
29
30 DECODE_TYPE = {1:'DECODING_TIME_DOMAIN',2:'DECODING_FREQ_DOMAIN',3:'DECODING_INV_FREQ_DOMAIN'}
30 DECODE_TYPE = {1:'DECODING_TIME_DOMAIN',2:'DECODING_FREQ_DOMAIN',3:'DECODING_INV_FREQ_DOMAIN'}
31
31
32 app = Flask(__name__)
32 app = Flask(__name__)
33
33
34 class StdoutReader(object):
34 class StdoutReader(object):
35 '''
35 '''
36 Class to manage stdout of JARS acquisition program
36 Class to manage stdout of JARS acquisition program
37 '''
37 '''
38
38
39 def __init__(self, stream, name):
39 def __init__(self, stream, name):
40 '''
40 '''
41 stream: the stream to read from.
41 stream: the stream to read from.
42 Usually a process' stdout or stderr.
42 Usually a process' stdout or stderr.
43 '''
43 '''
44
44
45 self._s = stream
45 self._s = stream
46 self._q = deque()
46 self._q = deque()
47 self._f = open(os.path.join(PATH, name, 'Restarting Report.txt'), 'ab')
47 self._f = open(os.path.join(PATH, name, 'Restarting Report.txt'), 'ab')
48 if LOGGING:
48 if LOGGING:
49 self._l = open(os.path.join(PATH, name, '{}.log'.format(name)), 'ab')
49 self._l = open(os.path.join(PATH, name, '{}.log'.format(name)), 'ab')
50
50
51 def update_queue(stream, queue):
51 def update_queue(stream, queue):
52 '''
52 '''
53 Collect lines from 'stream' and put them in 'queue'.
53 Collect lines from 'stream' and put them in 'queue'.
54 '''
54 '''
55
55
56 restart_dict = {}
56 restart_dict = {}
57 restart_num = 0
57 restart_num = 0
58 str_format = '%Y-%m-%d %H:%M:%S'
58 str_format = '%Y-%m-%d %H:%M:%S'
59 delta_time = timedelta(0,120,0)
59 delta_time = timedelta(0,120,0)
60 while True:
60 while True:
61 raw = stream.readline()
61 raw = stream.readline()
62 line = raw.rstrip()
62 line = raw.rstrip()
63 now = datetime.now()
63 now = datetime.now()
64 now_str = now.strftime(str_format)
64 now_str = now.strftime(str_format)
65 restart_dict[str(restart_num)] = now_str
65 restart_dict[str(restart_num)] = now_str
66 max_num = 13
66 max_num = 13
67 if line:
67 if line:
68 queue.append(line)
68 queue.append(line)
69 if LOGGING:
69 if LOGGING:
70 self._l.write('{}'.format(raw))
70 self._l.write('{}'.format(raw))
71 print line
71 print line
72 -if 'Block' not in line:
72 +if 'Error... restarting' in line or 'Trying restart' in line:
73 self._f.write('{} at {}\n'.format(line,
73 self._f.write('{} at {}\n'.format(line,
74 datetime.now().ctime()))
74 datetime.now().ctime()))
75
75
76 restart_num = restart_num + 1
76 restart_num = restart_num + 1
77 if restart_num > max_num:
77 if restart_num > max_num:
78 date1 = datetime.strptime(restart_dict['1'], str_format)
78 date1 = datetime.strptime(restart_dict['1'], str_format)
79 date2 = datetime.strptime(restart_dict[str(max_num-1)], str_format)
79 date2 = datetime.strptime(restart_dict[str(max_num-1)], str_format)
80 if (date2 - date1) < delta_time:
80 if (date2 - date1) < delta_time:
81 print str(max_num)+' restarts en menos de 2min'#RESTART
81 print str(max_num)+' restarts en menos de 2min'#RESTART
82 restart_num = 0
82 restart_num = 0
83 restart_dict = {}
83 restart_dict = {}
84 restart()
84 restart()
85 else:
85 else:
86 restart_num = 0
86 restart_num = 0
87 restart_dict = {}
87 restart_dict = {}
88 print 'NO'
88 print 'NO'
89
89
90
90
91 self._t = Thread(target=update_queue, args=(self._s, self._q))
91 self._t = Thread(target=update_queue, args=(self._s, self._q))
92 self._t.daemon = True
92 self._t.daemon = True
93 self._t.start()
93 self._t.start()
94
94
95 def readline(self):
95 def readline(self):
96 '''
96 '''
97 Return last line output
97 Return last line output
98 '''
98 '''
99 try:
99 try:
100 line = self._q.pop()
100 line = self._q.pop()
101 self._q.clear()
101 self._q.clear()
102 return line
102 return line
103 except IndexError:
103 except IndexError:
104 return None
104 return None
105
105
106 def save(self):
106 def save(self):
107 '''
107 '''
108 Save logging files
108 Save logging files
109 '''
109 '''
110 self._f.close()
110 self._f.close()
111 if LOGGING:
111 if LOGGING:
112 self._l.close()
112 self._l.close()
113
113
114 def parse_line(n, data, lines):
114 def parse_line(n, data, lines):
115
115
116 line_text = ''
116 line_text = ''
117 line_type = data['lines']['byId'][lines[n]]['line_type']
117 line_type = data['lines']['byId'][lines[n]]['line_type']
118 num = n+1
118 num = n+1
119 if line_type == 'windows':
119 if line_type == 'windows':
120 if num == 7:
120 if num == 7:
121 reference = data['lines']['byId'][lines[n]]['params']['TX_ref']
121 reference = data['lines']['byId'][lines[n]]['params']['TX_ref']
122 windows = data['lines']['byId'][lines[n]]['params']['params']
122 windows = data['lines']['byId'][lines[n]]['params']['params']
123 if windows:
123 if windows:
124 dh = str(float(windows[0]['resolution']))
124 dh = str(float(windows[0]['resolution']))
125 else:
125 else:
126 dh = ''
126 dh = ''
127
127
128 line_text = 'Sampling Windows={}\n'.format(len(windows))
128 line_text = 'Sampling Windows={}\n'.format(len(windows))
129
129
130 cnt = 0
130 cnt = 0
131 for window in windows:
131 for window in windows:
132 line_text += ('H0({cnt})={first_height}\n'
132 line_text += ('H0({cnt})={first_height}\n'
133 'NSA({cnt})={number_of_samples}\n'
133 'NSA({cnt})={number_of_samples}\n'
134 'DH({cnt})={dh}\n'.format(
134 'DH({cnt})={dh}\n'.format(
135 cnt=cnt,
135 cnt=cnt,
136 first_height=window['first_height'],
136 first_height=window['first_height'],
137 number_of_samples=int(window['number_of_samples']),
137 number_of_samples=int(window['number_of_samples']),
138 dh=dh
138 dh=dh
139 )
139 )
140 )
140 )
141 cnt += 1
141 cnt += 1
142
142
143 else:
143 else:
144 reference = data['lines']['byId'][lines[n]]['params']['TX_ref']
144 reference = data['lines']['byId'][lines[n]]['params']['TX_ref']
145 windows = data['lines']['byId'][lines[n]]['params']['params']
145 windows = data['lines']['byId'][lines[n]]['params']['params']
146 if windows:
146 if windows:
147 dh = str(float(windows[0]['resolution']))
147 dh = str(float(windows[0]['resolution']))
148 else:
148 else:
149 dh = ''
149 dh = ''
150
150
151 line_text = 'Sampling Windows (Line {})={}\n'.format(num, len(windows))
151 line_text = 'Sampling Windows (Line {})={}\n'.format(num, len(windows))
152
152
153 cnt = 0
153 cnt = 0
154 for window in windows:
154 for window in windows:
155 line_text += ('L{num}_H0({cnt})={first_height}\n'
155 line_text += ('L{num}_H0({cnt})={first_height}\n'
156 'L{num}_NSA({cnt})={number_of_samples}\n'
156 'L{num}_NSA({cnt})={number_of_samples}\n'
157 'L{num}_DH({cnt})={dh}\n'.format(
157 'L{num}_DH({cnt})={dh}\n'.format(
158 num=num,
158 num=num,
159 cnt=cnt,
159 cnt=cnt,
160 first_height=window['first_height'],
160 first_height=window['first_height'],
161 number_of_samples=int(window['number_of_samples']),
161 number_of_samples=int(window['number_of_samples']),
162 dh=dh
162 dh=dh
163 )
163 )
164 )
164 )
165 cnt += 1
165 cnt += 1
166
166
167 line_text += 'L{}_REFERENCE={}\n'.format(
167 line_text += 'L{}_REFERENCE={}\n'.format(
168 num,
168 num,
169 data['lines']['byId'][reference]['name']
169 data['lines']['byId'][reference]['name']
170 )
170 )
171
171
172 elif line_type == 'sync':
172 elif line_type == 'sync':
173 line_text = 'Line{}=Synchro\n'.format(num)
173 line_text = 'Line{}=Synchro\n'.format(num)
174
174
175 elif line_type == 'flip':
175 elif line_type == 'flip':
176 line_text = 'L{}_FLIP={}\n'.format(
176 line_text = 'L{}_FLIP={}\n'.format(
177 num,
177 num,
178 data['lines']['byId'][lines[n]]['params']['number_of_flips']
178 data['lines']['byId'][lines[n]]['params']['number_of_flips']
179 )
179 )
180
180
181 elif line_type == 'prog_pulses':
181 elif line_type == 'prog_pulses':
182 periodic = data['lines']['byId'][lines[n]]['params']['periodic']
182 periodic = data['lines']['byId'][lines[n]]['params']['periodic']
183 if periodic == '0':
183 if periodic == '0':
184 periodic = 'NO'
184 periodic = 'NO'
185 else:
185 else:
186 periodic = 'YES'
186 periodic = 'YES'
187
187
188 portions = data['lines']['byId'][lines[n]]['params']['params']
188 portions = data['lines']['byId'][lines[n]]['params']['params']
189 line_text = 'L{} Number Of Portions={}\n'.format(num, len(portions))
189 line_text = 'L{} Number Of Portions={}\n'.format(num, len(portions))
190
190
191 for i, portion in enumerate(portions):
191 for i, portion in enumerate(portions):
192 line_text += 'PORTION_BEGIN({cnt})={begin}\nPORTION_END({cnt})={end}\n'.format(
192 line_text += 'PORTION_BEGIN({cnt})={begin}\nPORTION_END({cnt})={end}\n'.format(
193 cnt=i,
193 cnt=i,
194 begin=int(portion['begin']),
194 begin=int(portion['begin']),
195 end=int(portion['end']),
195 end=int(portion['end']),
196 )
196 )
197
197
198 line_text += 'L{} Portions IPP Periodic={}\n'.format(num, periodic)
198 line_text += 'L{} Portions IPP Periodic={}\n'.format(num, periodic)
199
199
200 elif line_type == 'none':
200 elif line_type == 'none':
201 line_text = ''
201 line_text = ''
202
202
203 else:
203 else:
204 reference = data['lines']['byId'][lines[n]]['params']['TX_ref']
204 reference = data['lines']['byId'][lines[n]]['params']['TX_ref']
205 code_type = data['lines']['byId'][lines[n]]['params']['code']
205 code_type = data['lines']['byId'][lines[n]]['params']['code']
206 codes = data['lines']['byId'][lines[n]]['params']['codes']
206 codes = data['lines']['byId'][lines[n]]['params']['codes']
207
207
208 if num == 4:
208 if num == 4:
209 line_text = 'Code Type={}\n'.format(code_type)
209 line_text = 'Code Type={}\n'.format(code_type)
210 line_text += 'Number of Codes={}\nCode Width={}\n'.format(
210 line_text += 'Number of Codes={}\nCode Width={}\n'.format(
211 len(codes),
211 len(codes),
212 len(codes[0])
212 len(codes[0])
213 )
213 )
214 cnt = 0
214 cnt = 0
215 for code in codes:
215 for code in codes:
216 line_text += 'COD({})={}\n'.format(cnt, code)
216 line_text += 'COD({})={}\n'.format(cnt, code)
217 cnt += 1
217 cnt += 1
218 else:
218 else:
219 line_text = 'Code Type (Line {})={}\n'.format(num, code_type)
219 line_text = 'Code Type (Line {})={}\n'.format(num, code_type)
220 line_text += 'Number of Codes (Line {})={}\nCode Width (Line {})={}\n'.format(
220 line_text += 'Number of Codes (Line {})={}\nCode Width (Line {})={}\n'.format(
221 num,
221 num,
222 len(codes),
222 len(codes),
223 num,
223 num,
224 len(codes[0])
224 len(codes[0])
225 )
225 )
226 cnt = 0
226 cnt = 0
227 for code in codes:
227 for code in codes:
228 line_text += 'L{}_COD({})={}\n'.format(num,cnt, code)
228 line_text += 'L{}_COD({})={}\n'.format(num,cnt, code)
229 cnt += 1
229 cnt += 1
230
230
231 line_text += 'L{}_REFERENCE={}\n'.format(
231 line_text += 'L{}_REFERENCE={}\n'.format(
232 num,
232 num,
233 data['lines']['byId'][reference]['name']
233 data['lines']['byId'][reference]['name']
234 )
234 )
235
235
236 return line_text
236 return line_text
237
237
238 def create_jarsfiles(json_data):
238 def create_jarsfiles(json_data):
239 """
239 """
240 Function to create *.racp and *.jars files with json_data
240 Function to create *.racp and *.jars files with json_data
241 """
241 """
242 global EXPNAME
242 global EXPNAME
243
243
244 data = json.loads(json_data)
244 data = json.loads(json_data)
245 exp_id = data['experiments']['allIds'][0]
245 exp_id = data['experiments']['allIds'][0]
246 experiment = data['experiments']['byId'][exp_id]
246 experiment = data['experiments']['byId'][exp_id]
247 name = experiment['name']
247 name = experiment['name']
248 EXPNAME = name
248 EXPNAME = name
249 folder_name = os.path.join(PATH, name)
249 folder_name = os.path.join(PATH, name)
250 print 'Experiment: ' + name + ' received...'
250 print 'Experiment: ' + name + ' received...'
251 if not os.path.exists(folder_name):
251 if not os.path.exists(folder_name):
252 os.makedirs(folder_name)
252 os.makedirs(folder_name)
253 if not os.path.exists(folder_name+'/DATA'):
253 if not os.path.exists(folder_name+'/DATA'):
254 os.mkdir(folder_name+'/DATA')
254 os.mkdir(folder_name+'/DATA')
255
255
256 try:
256 try:
257 json_file = open(folder_name+'/'+name+'_jars.json', 'w')
257 json_file = open(folder_name+'/'+name+'_jars.json', 'w')
258 except:
258 except:
259 return 0, 'Error creating .json file'
259 return 0, 'Error creating .json file'
260
260
261 json_file.write(json_data)
261 json_file.write(json_data)
262 json_file.close()
262 json_file.close()
263
263
264 try:
264 try:
265 racp_file = open(folder_name+'/'+name+'_jars.racp', 'w')
265 racp_file = open(folder_name+'/'+name+'_jars.racp', 'w')
266 except:
266 except:
267 return 0, 'Error creating .racp file'
267 return 0, 'Error creating .racp file'
268
268
269 conf_ids = data['configurations']['allIds']
269 conf_ids = data['configurations']['allIds']
270
270
271 rcs = [pk for pk in conf_ids \
271 rcs = [pk for pk in conf_ids \
272 if data['configurations']['byId'][pk]['device_type'] == 'rc']
272 if data['configurations']['byId'][pk]['device_type'] == 'rc']
273 if len(rcs) == 1:
273 if len(rcs) == 1:
274 rc_id = rcs[0]
274 rc_id = rcs[0]
275 rc_mix_id = 0
275 rc_mix_id = 0
276 else:
276 else:
277 rcs = [pk for pk in conf_ids \
277 rcs = [pk for pk in conf_ids \
278 if data['configurations']['byId'][pk]['device_type'] == 'rc' and data['configurations']['byId'][pk]['mix'] == True]
278 if data['configurations']['byId'][pk]['device_type'] == 'rc' and data['configurations']['byId'][pk]['mix'] == True]
279 rc_mix_id = rcs[0]
279 rc_mix_id = rcs[0]
280 mix_parameters = data['configurations']['byId'][rc_mix_id]['parameters'].split('-')
280 mix_parameters = data['configurations']['byId'][rc_mix_id]['parameters'].split('-')
281 rc_id = mix_parameters[0].split('|')[0]
281 rc_id = mix_parameters[0].split('|')[0]
282
282
283 jars_id = [pk for pk in conf_ids \
283 jars_id = [pk for pk in conf_ids \
284 if data['configurations']['byId'][pk]['device_type'] == 'jars'][0]
284 if data['configurations']['byId'][pk]['device_type'] == 'jars'][0]
285
285
286 rc = data['configurations']['byId'][rc_id]
286 rc = data['configurations']['byId'][rc_id]
287 jars = data['configurations']['byId'][jars_id]
287 jars = data['configurations']['byId'][jars_id]
288
288
289 if rc_mix_id <> 0:
289 if rc_mix_id <> 0:
290 rc_mix = data['configurations']['byId'][rc_mix_id]
290 rc_mix = data['configurations']['byId'][rc_mix_id]
291 mix_text = '*******Mixed Experiment*******************\n'
291 mix_text = '*******Mixed Experiment*******************\n'
292 mix_text += 'Number of Experiments={}\n'.format(len(mix_parameters))
292 mix_text += 'Number of Experiments={}\n'.format(len(mix_parameters))
293 for i,param in enumerate(mix_parameters):
293 for i,param in enumerate(mix_parameters):
294 pk, mode, op, delay, mask = param.split('|')
294 pk, mode, op, delay, mask = param.split('|')
295 mix_text += 'EXP({})={}\n'.format(i, data['configurations']['byId'][pk]['name'])
295 mix_text += 'EXP({})={}\n'.format(i, data['configurations']['byId'][pk]['name'])
296 mix_text += 'DELAY({})={}\n'.format(i, delay)
296 mix_text += 'DELAY({})={}\n'.format(i, delay)
297 mix_text += 'RELOJ={}\n'.format(int(data['configurations']['byId'][pk]['clock']))
297 mix_text += 'RELOJ={}\n'.format(int(data['configurations']['byId'][pk]['clock']))
298 mix_text += 'MIXER MODE={}_FLAG\n'.format(op)
298 mix_text += 'MIXER MODE={}_FLAG\n'.format(op)
299 mix_text += 'MIXER MASK={}\n'.format(mask)
299 mix_text += 'MIXER MASK={}\n'.format(mask)
300 mix_text += '*******System parameters******************\n'
300 mix_text += '*******System parameters******************\n'
301 else:
301 else:
302 mix_text = ''
302 mix_text = ''
303
303
304 exp_type = jars['exp_type']
304 exp_type = jars['exp_type']
305 if exp_type == 0:
305 if exp_type == 0:
306 exp_type = 'EXP_RAW_DATA'
306 exp_type = 'EXP_RAW_DATA'
307 else:
307 else:
308 exp_type = 'EXP_PROCESS_SPECTRA'
308 exp_type = 'EXP_PROCESS_SPECTRA'
309
309
310 racp_text = 'EXPERIMENT TYPE={}\nEXPERIMENT NAME={}\nHEADER VERSION=1103\n'.format(
310 racp_text = 'EXPERIMENT TYPE={}\nEXPERIMENT NAME={}\nHEADER VERSION=1103\n'.format(
311 exp_type,
311 exp_type,
312 name
312 name
313 )
313 )
314
314
315 racp_text += '*****Radar Controller Parameters**********\n{}'.format(mix_text)
315 racp_text += '*****Radar Controller Parameters**********\n{}'.format(mix_text)
316 if rc_mix_id == 0:
316 if rc_mix_id == 0:
317 racp_text += 'IPP={}\n'.format(float(rc['ipp']))
317 racp_text += 'IPP={}\n'.format(float(rc['ipp']))
318 racp_text += 'NTX={}\n'.format(rc['ntx'])
318 racp_text += 'NTX={}\n'.format(rc['ntx'])
319 else:
319 else:
320 racp_text += 'IPP={}\n'.format(float(rc_mix['ipp']))
320 racp_text += 'IPP={}\n'.format(float(rc_mix['ipp']))
321 racp_text += 'NTX={}\n'.format(rc_mix['ntx'])
321 racp_text += 'NTX={}\n'.format(rc_mix['ntx'])
322
322
323 racp_text += 'TXA={}\n'.format(
323 racp_text += 'TXA={}\n'.format(
324 data['lines']['byId'][rc['lines'][1]]['params']['pulse_width']
324 data['lines']['byId'][rc['lines'][1]]['params']['pulse_width']
325 )
325 )
326 if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx':
326 if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx':
327 racp_text += 'TXB={}\n'.format(
327 racp_text += 'TXB={}\n'.format(
328 data['lines']['byId'][rc['lines'][2]]['params']['pulse_width']
328 data['lines']['byId'][rc['lines'][2]]['params']['pulse_width']
329 )
329 )
330 idTR = data['lines']['byId'][rc['lines'][0]]['params']['TX_ref']
330 idTR = data['lines']['byId'][rc['lines'][0]]['params']['TX_ref']
331 rangeTR = data['lines']['byId'][rc['lines'][0]]['params']['range']
331 rangeTR = data['lines']['byId'][rc['lines'][0]]['params']['range']
332
332
333 if rangeTR != '0':
333 if rangeTR != '0':
334 racp_text += 'Pulse selection_TR={}\n'.format(rangeTR)
334 racp_text += 'Pulse selection_TR={}\n'.format(rangeTR)
335 elif idTR != '0':
335 elif idTR != '0':
336 racp_text += 'Pulse selection_TR={}\n'.format(
336 racp_text += 'Pulse selection_TR={}\n'.format(
337 data['lines']['byId'][idTR]['name'][-1]
337 data['lines']['byId'][idTR]['name'][-1]
338 )
338 )
339 rangeTXA = data['lines']['byId'][rc['lines'][1]]['params']['range']
339 rangeTXA = data['lines']['byId'][rc['lines'][1]]['params']['range']
340 if rangeTXA != '0':
340 if rangeTXA != '0':
341 racp_text += 'Pulse selection_TXA={}\n'.format(rangeTXA)
341 racp_text += 'Pulse selection_TXA={}\n'.format(rangeTXA)
342 if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx':
342 if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx':
343 rangeTXB = data['lines']['byId'][rc['lines'][2]]['params']['range']
343 rangeTXB = data['lines']['byId'][rc['lines'][2]]['params']['range']
344 if rangeTXB != '0':
344 if rangeTXB != '0':
345 racp_text += 'Pulse selection_TXB={}\n'.format(rangeTXB)
345 racp_text += 'Pulse selection_TXB={}\n'.format(rangeTXB)
346 for n in range(3, 6):
346 for n in range(3, 6):
347 racp_text += parse_line(n, data, rc['lines'])
347 racp_text += parse_line(n, data, rc['lines'])
348
348
349 if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx':
349 if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx':
350 taus = data['lines']['byId'][rc['lines'][2]]['params']['delays'].split(',')
350 taus = data['lines']['byId'][rc['lines'][2]]['params']['delays'].split(',')
351 if taus != '0':
351 if taus != '0':
352 racp_text += 'Number of Taus={}\n'.format(len(taus))
352 racp_text += 'Number of Taus={}\n'.format(len(taus))
353 for n, tau in enumerate(taus):
353 for n, tau in enumerate(taus):
354 racp_text += 'TAU({})={}\n'.format(n, tau)
354 racp_text += 'TAU({})={}\n'.format(n, tau)
355
355
356 racp_text += parse_line(6, data, rc['lines'])
356 racp_text += parse_line(6, data, rc['lines'])
357 racp_text += 'SAMPLING REFERENCE=MIDDLE OF FIRST SUB-BAUD\n'
357 racp_text += 'SAMPLING REFERENCE=MIDDLE OF FIRST SUB-BAUD\n'
358 racp_text += 'RELOJ={}\n'.format(int(rc['clock']))
358 racp_text += 'RELOJ={}\n'.format(int(rc['clock']))
359 racp_text += 'CLOCK DIVIDER={}\n'.format(int(rc['clock_divider']))
359 racp_text += 'CLOCK DIVIDER={}\n'.format(int(rc['clock_divider']))
360 racp_text += 'TR_BEFORE={}\n'.format(rc['time_before'])
360 racp_text += 'TR_BEFORE={}\n'.format(rc['time_before'])
361 racp_text += 'TR_AFTER={}\n'.format(rc['time_after'])
361 racp_text += 'TR_AFTER={}\n'.format(rc['time_after'])
362 racp_text += 'WINDOW IN LINE 5&6=NO\n'
362 racp_text += 'WINDOW IN LINE 5&6=NO\n'
363 racp_text += '******System Parameters*******************\n'
363 racp_text += '******System Parameters*******************\n'
364 racp_text += 'Number of Cards={}\n'.format(jars['cards_number'])
364 racp_text += 'Number of Cards={}\n'.format(jars['cards_number'])
365
365
366 for i in range(jars['cards_number']):
366 for i in range(jars['cards_number']):
367 racp_text += 'Card({})={}\n'.format(i, i)
367 racp_text += 'Card({})={}\n'.format(i, i)
368
368
369 channels = jars['channels'].split(',')
369 channels = jars['channels'].split(',')
370
370
371 if channels:
371 if channels:
372 racp_text += 'Number of Channels={}\n'.format(len(channels))
372 racp_text += 'Number of Channels={}\n'.format(len(channels))
373 for i, channel in enumerate(channels):
373 for i, channel in enumerate(channels):
374 racp_text += 'Channel({})={}\n'.format(i, channel)
374 racp_text += 'Channel({})={}\n'.format(i, channel)
375
375
376 if exp_type == 'EXP_RAW_DATA':
376 if exp_type == 'EXP_RAW_DATA':
377 racp_text += 'RAW DATA DIRECTORY={}\n'.format(os.path.join(folder_name, 'DATA'))
377 racp_text += 'RAW DATA DIRECTORY={}\n'.format(os.path.join(folder_name, 'DATA'))
378 else:
378 else:
379 racp_text += 'PROCESS DATA DIRECTORY={}\n'.format(os.path.join(folder_name, 'DATA'))
379 racp_text += 'PROCESS DATA DIRECTORY={}\n'.format(os.path.join(folder_name, 'DATA'))
380
380
381 if jars['create_directory']:
381 if jars['create_directory']:
382 racp_text += 'CREATE DIRECTORY PER DAY=YES'+'\n'
382 racp_text += 'CREATE DIRECTORY PER DAY=YES'+'\n'
383 else:
383 else:
384 racp_text += 'CREATE DIRECTORY PER DAY=NO'+'\n'
384 racp_text += 'CREATE DIRECTORY PER DAY=NO'+'\n'
385
385
386 if jars['include_expname']:
386 if jars['include_expname']:
387 racp_text += 'INCLUDE EXPNAME IN DIRECTORY=YES'+'\n'
387 racp_text += 'INCLUDE EXPNAME IN DIRECTORY=YES'+'\n'
388 else:
388 else:
389 racp_text += 'INCLUDE EXPNAME IN DIRECTORY=NO'+'\n'
389 racp_text += 'INCLUDE EXPNAME IN DIRECTORY=NO'+'\n'
390
390
391 racp_text += '******System Parameters*******************\n'
391 racp_text += '******System Parameters*******************\n'
392 racp_text += 'ADC Resolution=8\n'
392 racp_text += 'ADC Resolution=8\n'
393 racp_text += 'PCI DIO BusWidth=32\n'
393 racp_text += 'PCI DIO BusWidth=32\n'
394
394
395 if exp_type == 'EXP_RAW_DATA':
395 if exp_type == 'EXP_RAW_DATA':
396 racp_text += 'RAW DATA BLOCKS={}\n'.format(jars['raw_data_blocks'])
396 racp_text += 'RAW DATA BLOCKS={}\n'.format(jars['raw_data_blocks'])
397 spectra_text = ''
397 spectra_text = ''
398 else:
398 else:
399 -racp_text += 'PROCESS DATA BLOCKS=100\n'
399 +racp_text += 'PROCESS DATA BLOCKS={}\n'.format(jars['raw_data_blocks']) # 26/12/2018
400 spectra_text = '------------------------------------------\n'
400 spectra_text = '------------------------------------------\n'
401
401
402 if jars['fftpoints'] > 1:
402 if jars['fftpoints'] > 1:
403 spectra_text += 'FFTPOINTS={}\n'.format(jars['fftpoints'])
403 spectra_text += 'FFTPOINTS={}\n'.format(jars['fftpoints'])
404
404
405 if jars['incohe_integr']:
405 if jars['incohe_integr']:
406 spectra_text += 'INCOHERENT INTEGRATIONS={}\n'.format(jars['incohe_integr'])
406 spectra_text += 'INCOHERENT INTEGRATIONS={}\n'.format(jars['incohe_integr'])
407
407
408 if jars['save_ch_dc']:
408 if jars['save_ch_dc']:
409 spectra_text += 'SAVE CHANNELS DC=YES\n'
409 spectra_text += 'SAVE CHANNELS DC=YES\n'
410
410
411 dum = jars['spectral']
411 dum = jars['spectral']
412
412
413 if dum.endswith(','):
413 if dum.endswith(','):
414 dum = dum[:-1]
414 dum = dum[:-1]
415 spectral = json.loads('[{}]'.format(dum))
415 spectral = json.loads('[{}]'.format(dum))
416
416
417 if spectral:
417 if spectral:
418 spectra_text += '------------------------------------------\n'
418 spectra_text += '------------------------------------------\n'
419 spectra_text += 'TOTAL SPECTRAL COMBINATIONS={}\n'.format(len(spectral))
419 spectra_text += 'TOTAL SPECTRAL COMBINATIONS={}\n'.format(len(spectral))
420 for i, spc in enumerate(spectral):
420 for i, spc in enumerate(spectral):
421 spectra_text += 'SPEC_COMB({})={},{}\n'.format(i, *spc)
421 spectra_text += 'SPEC_COMB({})={},{}\n'.format(i, *spc)
422
422
423 racp_text += '******Process Parameters******************\n'
423 racp_text += '******Process Parameters******************\n'
424
424
425 data_type = jars['data_type']
425 data_type = jars['data_type']
426
426
427 if data_type == 0:
427 if data_type == 0:
428 racp_text += 'DATATYPE=SHORT\n'
428 racp_text += 'DATATYPE=SHORT\n'
429 elif data_type == 1:
429 elif data_type == 1:
430 racp_text += 'DATATYPE=FLOAT\n'
430 racp_text += 'DATATYPE=FLOAT\n'
431
431
432 racp_text += 'DATA ARRANGE=CONTIGUOUS_CH\n'
432 racp_text += 'DATA ARRANGE=CONTIGUOUS_CH\n'
433
433
434 if jars['cohe_integr'] > 1:
434 if jars['cohe_integr'] > 1:
435 racp_text += 'COHERENT INTEGRATIONS={}\n'.format(jars['cohe_integr'])
435 racp_text += 'COHERENT INTEGRATIONS={}\n'.format(jars['cohe_integr'])
436
436
437 decode_text = ''
437 decode_text = ''
438 decode_data = jars['decode_data']
438 decode_data = jars['decode_data']
439 if decode_data !=0:
439 if decode_data !=0:
440 decode_text = 'DECODE DATA=YES\n'
440 decode_text = 'DECODE DATA=YES\n'
441 decode_text += 'DECODING TYPE={}\n'.format(DECODE_TYPE[decode_data])
441 decode_text += 'DECODING TYPE={}\n'.format(DECODE_TYPE[decode_data])
442 if jars['post_coh_int'] == True:
442 if jars['post_coh_int'] == True:
443 decode_text += 'POST COHERENT INTEGRATIONS=YES\n'
443 decode_text += 'POST COHERENT INTEGRATIONS=YES\n'
444 decode_text += '------------------------------------------\n'
444 decode_text += '------------------------------------------\n'
445
445
446 racp_text += 'COHERENT INTEGRATION STRIDE={}\n'.format(jars['cohe_integr_str'])
446 racp_text += 'COHERENT INTEGRATION STRIDE={}\n'.format(jars['cohe_integr_str'])
447 racp_text += '------------------------------------------\n'
447 racp_text += '------------------------------------------\n'
448 racp_text += 'ACQUIRED PROFILES={}\n'.format(jars['acq_profiles'])
448 racp_text += 'ACQUIRED PROFILES={}\n'.format(jars['acq_profiles'])
449 racp_text += 'PROFILES PER BLOCK={}\n'.format(jars['profiles_block'])
449 racp_text += 'PROFILES PER BLOCK={}\n'.format(jars['profiles_block'])
450 racp_text += spectra_text
450 racp_text += spectra_text
451 racp_text += '------------------------------------------\n'
451 racp_text += '------------------------------------------\n'
452 racp_text += decode_text
452 racp_text += decode_text
453 racp_text += 'BEGIN ON START=NO\n'
453 racp_text += 'BEGIN ON START=NO\n'
454 racp_text += 'BEGIN_TIME={}\n'.format(experiment['start_time'][:-3])
454 racp_text += 'BEGIN_TIME={}\n'.format(experiment['start_time'][:-3])
455 racp_text += 'END_TIME={}\n'.format(experiment['end_time'][:-3])
455 racp_text += 'END_TIME={}\n'.format(experiment['end_time'][:-3])
456 racp_text += 'GENERATE ACQUISITION LINK=YES\n'
456 racp_text += 'GENERATE ACQUISITION LINK=YES\n'
457 racp_text += 'VIEW RAW DATA=YES\n'
457 racp_text += 'VIEW RAW DATA=YES\n'
458 racp_text += 'REFRESH RATE=1\n'
458 racp_text += 'REFRESH RATE=1\n'
459 racp_text += '------------------------------------------\n'
459 racp_text += '------------------------------------------\n'
460 racp_text += 'SEND STATUS TO FTP=YES\n'
460 racp_text += 'SEND STATUS TO FTP=YES\n'
461 racp_text += 'FTP SERVER=jro.igp.gob.pe\n'
461 racp_text += 'FTP SERVER=jro.igp.gob.pe\n'
462 racp_text += 'FTP USER=wmaster\n'
462 racp_text += 'FTP USER=wmaster\n'
463 racp_text += 'FTP PASSWD=PKQLX20\n'
463 racp_text += 'FTP PASSWD=PKQLX20\n'
464 racp_text += 'FTP DIR=/users/database/on-line/\n'
464 racp_text += 'FTP DIR=/users/database/on-line/\n'
465 racp_text += 'FTP FILE=status.txt\n'
465 racp_text += 'FTP FILE=status.txt\n'
466 racp_text += 'FTP INTERVAL={}\n'.format(jars['ftp_interval'])
466 racp_text += 'FTP INTERVAL={}\n'.format(jars['ftp_interval'])
467 racp_text += 'SAVE STATUS AND BLOCK=YES\n'
467 racp_text += 'SAVE STATUS AND BLOCK=YES\n'
468 racp_text += 'GENERATE RTI=YES\n'
468 racp_text += 'GENERATE RTI=YES\n'
469 racp_text += 'RTI Inc.Int.=1\n'
469 racp_text += 'RTI Inc.Int.=1\n'
470 racp_text += 'SEND RTI AND BLOCK=YES\n'
470 racp_text += 'SEND RTI AND BLOCK=YES\n'
471 racp_text += '------------------------------------------\n'
471 racp_text += '------------------------------------------\n'
472 racp_text += 'COMPORT CONFIG=Com1 CBR_9600 TWOSTOPBITS NOPARITY\n'
472 racp_text += 'COMPORT CONFIG=Com1 CBR_9600 TWOSTOPBITS NOPARITY\n'
473 racp_text += 'JAM CONFIGURE FILE=dmasg_pprofiles_pch_64_pdigi_6clk.jam\n'
473 racp_text += 'JAM CONFIGURE FILE=dmasg_pprofiles_pch_64_pdigi_6clk.jam\n'
474 racp_text += 'ACQUISITION SYSTEM=JARS\n'
474 racp_text += 'ACQUISITION SYSTEM=JARS\n'
475 racp_text += '************JARS CONFIGURATION PARAMETERS************\n'
475 racp_text += '************JARS CONFIGURATION PARAMETERS************\n'
476
476
477 #-------------------------JARS FILTER---------------------------------------
477 #-------------------------JARS FILTER---------------------------------------
478 filter_parms = jars['filter_parms']
478 filter_parms = jars['filter_parms']
479 if filter_parms.__class__.__name__ == 'unicode':
479 if filter_parms.__class__.__name__ == 'unicode':
480 filter_parms = eval(filter_parms)
480 filter_parms = eval(filter_parms)
481 elif filter_parms.__class__.__name__ == 'str':
481 elif filter_parms.__class__.__name__ == 'str':
482 filter_parms = eval(filter_parms)
482 filter_parms = eval(filter_parms)
483 if filter_parms.__class__.__name__ == 'str':
483 if filter_parms.__class__.__name__ == 'str':
484 filter_parms = eval(filter_parms)
484 filter_parms = eval(filter_parms)
485 try:
485
486 fclock = float(filter_parms['clock'])
486 try:
487 fch = float(filter_parms['fch'])
487 fclock = float(filter_parms['clock'])
488 m_dds = float(filter_parms['mult'])
488 fch = float(filter_parms['frequency'])
489 M_CIC2 = float(filter_parms['filter_2'])
489 m_dds = float(filter_parms['multiplier'])
490 M_CIC5 = float(filter_parms['filter_5'])
490 M_CIC2 = float(filter_parms['cic_2'])
491 M_RCF = float(filter_parms['filter_fir'])
491 M_CIC5 = float(filter_parms['cic_5'])
492 except:
492 M_RCF = float(filter_parms['fir'])
493 fclock = eval(filter_parms['clock'])
493 except:
494 fch = eval(filter_parms['fch'])
494 fclock = eval(filter_parms['clock'])
495 m_dds = eval(filter_parms['mult'])
495 fch = eval(filter_parms['frequency'])
496 M_CIC2 = eval(filter_parms['filter_2'])
496 m_dds = eval(filter_parms['multiplier'])
497 M_CIC5 = eval(filter_parms['filter_5'])
497 M_CIC2 = eval(filter_parms['cic_2'])
498 M_RCF = eval(filter_parms['filter_fir'])
498 M_CIC5 = eval(filter_parms['cic_5'])
499
499 M_RCF = eval(filter_parms['fir'])
500 filter_text = 'Loading\n'
500
501 filter_text += 'Impulse file found -> C:\jars\F1MHZ_8_MATCH.imp\n'
501 filter_text = 'Loading\n'
502 filter_text += 'Autoscale off\n'
502 filter_text += 'Impulse file found -> C:\jars\F1MHZ_8_MATCH.imp\n'
503 filter_text += 'Initialize Printer Port\n'
503 filter_text += 'Autoscale off\n'
504 filter_text += 'Chip Hardware Reset\n'
504 filter_text += 'Initialize Printer Port\n'
505 filter_text += '300h -> 1\n'
505 filter_text += 'Chip Hardware Reset\n'
506 filter_text += '301h -> 6\n'
506 filter_text += '300h -> 1\n'
507 filter_text += '302h -> 11111111111111111111111111111111\n'
507 filter_text += '301h -> 6\n'
508
508 filter_text += '302h -> 11111111111111111111111111111111\n'
509 if abs(fch) < (fclock/2):
509
510 nco = (2**32)*((fch/fclock))#%1)
510 if abs(fch) < (fclock/2):
511 nco_i = long(nco)
511 nco = (2**32)*((fch/fclock))#%1)
512 else:
512 nco_i = long(nco)
513 nco = (2**32)*(fclock-fch)/(fclock)
513 else:
514 nco_i = long(nco)
514 nco = (2**32)*(fclock-fch)/(fclock)
515
515 nco_i = long(nco)
516 filter_text += '303h -> {}\n'.format(nco_i)
516
517 filter_text += '304h -> 0\n'
517 filter_text += '303h -> {}\n'.format(nco_i)
518
518 filter_text += '304h -> 0\n'
519 input_level = 1
519
520 S_CIC2 = math.ceil(math.log((M_CIC2**2)*input_level)/math.log(2))
520 input_level = 1
521 if S_CIC2 < 0:
521 S_CIC2 = math.ceil(math.log((M_CIC2**2)*input_level)/math.log(2))
522 S_CIC2 = 0
522 if S_CIC2 < 0:
523 if S_CIC2 > 7:
523 S_CIC2 = 0
524 S_CIC2 = 7
524 if S_CIC2 > 7:
525
525 S_CIC2 = 7
526 filter_text += '305h -> {}\n'.format(int(S_CIC2))
526
527 filter_text += '306h -> {}\n'.format(int(M_CIC2-1))
527 S_CIC2 = float(filter_parms['scale_cic_2']) ## 19/12/18
528
528
529 OL_CIC2 = input_level/(2.0**S_CIC2)
529 filter_text += '305h -> {}\n'.format(int(S_CIC2))
530
530 filter_text += '306h -> {}\n'.format(int(M_CIC2-1))
531 S_CIC5 = math.ceil(math.log((M_CIC5**5)*OL_CIC2)/math.log(2))-5
531
532 if S_CIC5 < 0:
532 OL_CIC2 = input_level/(2.0**S_CIC2)
533 S_CIC5 = 0
533
534 if S_CIC5 > 7:
534 S_CIC5 = math.ceil(math.log((M_CIC5**5)*OL_CIC2)/math.log(2))-5
535 S_CIC5 = 7
535 if S_CIC5 < 0:
536
536 S_CIC5 = 0
537 OL_CIC5 = ((M_CIC5**5)/(2**(S_CIC5+5)))*OL_CIC2
537 if S_CIC5 > 7:
538
538 S_CIC5 = 7
539 filter_text += '307h -> {}\n'.format(int(S_CIC5))
539
540 filter_text += '308h -> {}\n'.format(int(M_CIC5-1))
540 OL_CIC5 = ((M_CIC5**5)/(2**(S_CIC5+5)))*OL_CIC2
541
541 S_CIC5 = float(filter_parms['scale_cic_5']) #19/12/18
542 Gain = 1
542
543 S_RCF = int(4.0-math.log(Gain)/math.log(2))
543 filter_text += '307h -> {}\n'.format(int(S_CIC5))
544 if S_RCF < 0:
544 filter_text += '308h -> {}\n'.format(int(M_CIC5-1))
545 S_RCF = 0
545
546 if S_RCF > 7:
546 Gain = 1
547 S_RCF = 7
547 S_RCF = int(4.0-math.log(Gain)/math.log(2))
548
548 if S_RCF < 0:
549 filter_text += '309h -> {}\n'.format(S_RCF)
549 S_RCF = 0
550 filter_text += '30Ah -> {}\n'.format(int(M_RCF-1))
550 if S_RCF > 7:
551
551 S_RCF = 7
552 Offset = 0
552
553 filter_text += '30Bh -> {}\n'.format(Offset)
553 S_RCF = int(float(filter_parms['scale_fir'])) #19/12/18
554
554
555 ntaps = int(M_RCF)
555 filter_text += '309h -> {}\n'.format(S_RCF)
556 filter_text += '30Ch -> {}\n'.format(ntaps-1)
556 filter_text += '30Ah -> {}\n'.format(int(M_RCF-1))
557 filter_text += '30Dh -> 0\n'
557
558
558 Offset = 0
559 fsamp = fclock/(M_CIC2*M_CIC5*M_RCF)
559 filter_text += '30Bh -> {}\n'.format(Offset)
560
560
561 tap = int(2.0*((2**19)-1)/(ntaps*OL_CIC5))
561 ntaps = int(M_RCF)
562 for p in range(0, ntaps):
562 ntaps = int(float(filter_parms['number_taps'])) #19/12/18
563 filter_text += ' {} -> {}\n'.format(p, int(math.ceil(tap)))#filter_text += ' {} -> {}\n'.format(p, int(math.ceil(hn)))
563
564
564 filter_text += '30Ch -> {}\n'.format(ntaps-1)
565 filter_text += 'RCF Gain -> .999996185302734\n'
565 filter_text += '30Dh -> 0\n'
566 filter_text += 'Chip Restarted:\n'
566
567 filter_text += '300h -> 1\n'
567 fsamp = fclock/(M_CIC2*M_CIC5*M_RCF)
568 filter_text += '300h -> 0'
568
569
569 tap = int(2.0*((2**19)-1)/(ntaps*OL_CIC5))
570 filter_name = '{}_{}MHz_clock{}MHz_F{}MHz_{}_{}_{}.jars'.format(
570 #tap = int(filter_parms['taps'].split(',')[0]) #19/12/18
571 abs(fch),
571 tap = filter_parms['taps'].split(',') #19/12/18
572 int((abs(fch)-abs(int(fch)))*1000),
572
573 fclock,
573 numtaps = len(tap) # 23/01/19 incluido para saber la cantidad de coeficientes del filtro
574 round(fsamp,3),
574
575 M_CIC2,
575 for p in range(0, numtaps):#for p in range(0, ntaps):
576 M_CIC5,
576 #filter_text += ' {} -> {}\n'.format(p, int(math.ceil(tap)))#filter_text += ' {} -> {}\n'.format(p, int(math.ceil(hn)))
577 M_RCF
577 filter_text += ' {} -> {}\n'.format(p, int(math.ceil(int(tap[p]))))#filter_text += ' {} -> {}\n'.format(p, int(math.ceil(hn)))
578 )
578
579
579 filter_text += 'RCF Gain -> .999996185302734\n'
580 jars_file = open(os.path.join(folder_name, filter_name), 'wb')
580 filter_text += 'Chip Restarted:\n'
581 jars_file.write(filter_text)
581 filter_text += '300h -> 1\n'
582 jars_file.close()
582 filter_text += '300h -> 0'
583 racp_text += 'JARS_FILTER={}\n'.format(os.path.join(folder_name, filter_name))
583
584 racp_text += 'MARK WIDTH=2\n'
584 if fsamp >= 1: # frecuencia de muestreo mayor a 1MHz [24/01/19]
585 racp_text += 'GENERATE OWN SAMPLING WINDOW=NO\n'
585 filter_name = '{}_{}MHz_clock{}MHz_F{}MHz_{}_{}_{}.jars'.format(
586
586 abs(fch),
587 if jars['save_data']:
587 int((abs(fch)-abs(int(fch)))*1000),
588 racp_text += 'SAVE DATA=YES\n'
588 fclock,
589 else:
589 round(fsamp,3),
590 racp_text += 'SAVE DATA=NO\n'
590 int(M_CIC2),
591
591 int(M_CIC5),
592 racp_text += 'RC_STOP_SEQUENCE=255,0\n'
592 int(M_RCF)
593 racp_text += 'RC_START_SEQUENCE=255,24\n'
593 )
594
594
595 racp_file.write(racp_text)
595 if fsamp < 1: # frecuencia de muestreo menor a 1MHz [24/01/19]
596 racp_file.close()
596 filter_name = '{}_{}MHz_clock{}MHz_F{}KHz_{}_{}_{}.jars'.format(
597
597 abs(fch),
598 return 1, racp_file.name
598 int((abs(fch)-abs(int(fch)))*1000),
599
599 fclock,
600 @app.route('/status/')
600 round(fsamp,3)*1e3,
601 def status():
601 int(M_CIC2),
602 '''
602 int(M_CIC5),
603 0 : Not configured/running
603 int(M_RCF)
604 3 : Running and acquiring data
604 )
605 2 : Configured
605
606 1 : Connected
606 jars_file = open(os.path.join(folder_name, filter_name), 'wb')
607 '''
607 jars_file.write(filter_text)
608
608 jars_file.close()
609 name = request.args.get('name', None)
609 racp_text += 'JARS_FILTER={}\n'.format(os.path.join(folder_name, filter_name))
610 global EXPNAME
610 racp_text += 'MARK WIDTH=2\n'
611 EXPNAME = name
611 racp_text += 'GENERATE OWN SAMPLING WINDOW=NO\n'
612
612
613 if name is None:
613 if jars['save_data']:
614 return jsonify({
614 racp_text += 'SAVE DATA=YES\n'
615 'status': 1,
615 else:
616 'message': 'JARS Connected, missing experiment'
616 racp_text += 'SAVE DATA=NO\n'
617 })
617
618 else:
618 racp_text += 'RC_STOP_SEQUENCE=255,0\n'
619 racp_file = os.path.join(PATH, name, '{}_jars.racp'.format(name))
619 racp_text += 'RC_START_SEQUENCE=255,24\n'
620
620
621 if name and not os.path.exists(racp_file):
621 racp_file.write(racp_text)
622 return jsonify({
622 racp_file.close()
623 'status': 1,
623
624 'message': 'JARS not configured'
624 return 1, racp_file.name
625 })
625
626 elif os.path.exists(racp_file) and hasattr(PROC, 'pid'):
626 @app.route('/status/')
627 if PROC.poll() is None:
627 def status():
628 status = 3
628 '''
629 msg = 'Process: PID={}, OUT={}'.format(
629 0 : Not configured/running
630 PROC.pid,
630 3 : Running and acquiring data
631 OUT.readline()
631 2 : Configured
632 )
632 1 : Connected
633 else:
633 '''
634 status = 2
634
635 msg = 'JARS Configured'
635 name = request.args.get('name', None)
636 else:
636 global EXPNAME
637 status = 2
637 EXPNAME = name
638 msg = 'JARS Configured'
638
639
639 if name is None:
640 return jsonify({
640 return jsonify({
641 'status': status,
641 'status': 1,
642 'message': msg
642 'message': 'JARS Connected, missing experiment'
643 })
643 })
644
644 else:
645 @app.route('/start/', methods=['POST'])
645 racp_file = os.path.join(PATH, name, '{}_jars.racp'.format(name))
646 def start():
646
647 '''
647 if name and not os.path.exists(racp_file):
648 '''
648 return jsonify({
649
649 'status': 1,
650 global PROC
650 'message': 'JARS not configured'
651 global OUT
651 })
652 global EXPNAME
652 elif os.path.exists(racp_file) and hasattr(PROC, 'pid'):
653
653 if PROC.poll() is None:
654 name = request.json['name']
654 status = 3
655 EXPNAME = name
655 msg = 'Process: PID={}, OUT={}'.format(
656 racp_file = os.path.join(PATH, name, '{}_jars.racp'.format(name))
656 PROC.pid,
657 if hasattr(PROC, 'pid') and PROC.poll() is None:
657 OUT.readline()
658 status = 3
658 )
659 msg = 'JARS already running'
659 else:
660 elif os.path.exists(racp_file):
660 status = 2
661 PROC = Popen([EXE, '-rf', racp_file, OPT], stdout=PIPE)
661 msg = 'JARS Configured'
662 OUT = StdoutReader(PROC.stdout, name)
662 else:
663 status = 3
663 status = 2
664 msg = 'JARS starting ok'
664 msg = 'JARS Configured'
665 elif not os.path.exists(racp_file):
665
666 status = 1
666 return jsonify({
667 msg = 'Experiment: {} not configured'.format(name)
667 'status': status,
668
668 'message': msg
669 return jsonify({
669 })
670 'status': status,
670
671 'message': msg
671 @app.route('/start/', methods=['POST'])
672 })
672 def start():
673
673 '''
674 @app.route('/stop/', methods=['POST'])
674 '''
675 def stop():
675
676 '''
676 global PROC
677 '''
677 global OUT
678
678 global EXPNAME
679 global PROC
679
680
680 name = request.json['name']
681 if hasattr(PROC, 'pid'):
681 EXPNAME = name
682 if PROC.poll() is None:
682 racp_file = os.path.join(PATH, name, '{}_jars.racp'.format(name))
683 OUT.save()
683 if hasattr(PROC, 'pid') and PROC.poll() is None:
684 PROC.kill()
684 status = 3
685 status = 2
685 msg = 'JARS already running'
686 msg = 'JARS stopped OK'
686 elif os.path.exists(racp_file):
687 else:
687 PROC = Popen([EXE, '-rf', racp_file, OPT], stdout=PIPE)
688 status = 1
688 OUT = StdoutReader(PROC.stdout, name)
689 msg = 'JARS not running'
689 status = 3
690 else:
690 msg = 'JARS starting ok'
691 status = 1
691 elif not os.path.exists(racp_file):
692 msg = 'JARS not running'
692 status = 1
693
693 msg = 'Experiment: {} not configured'.format(name)
694 return jsonify({
694
695 'status': status,
695 return jsonify({
696 'message': msg
696 'status': status,
697 })
697 'message': msg
698
698 })
699 @app.route('/write/', methods=['POST'])
699
700 def write():
700 @app.route('/stop/', methods=['POST'])
701 '''
701 def stop():
702 '''
702 '''
703 status = 1
703 '''
704 json_data = json.loads(request.json)
704
705 conf_ids = json_data['configurations']['allIds']
705 global PROC
706 for pk in conf_ids:
706
707 if json_data['configurations']['byId'][pk]['device_type'] == 'jars':
707 if hasattr(PROC, 'pid'):
708 data = json_data['configurations']['byId'][pk]['filter_parms']
708 if PROC.poll() is None:
709
709 OUT.save()
710 if request.json:
710 PROC.kill()
711 try:
711 status = 2
712 ret, racp = create_jarsfiles(request.json)
712 msg = 'JARS stopped OK'
713 except Exception as e:
713 else:
714 ret = 0
714 status = 1
715 msg = str(e)
715 msg = 'JARS not running'
716 else:
716 else:
717 msg = 'Missing POST data'
717 status = 1
718
718 msg = 'JARS not running'
719 if ret == 1:
719
720 status = 2
720 return jsonify({
721 msg = 'JARS configured OK'
721 'status': status,
722 else:
722 'message': msg
723 msg = ret
723 })
724
724
725 return jsonify({
725 @app.route('/write/', methods=['POST'])
726 'status': status,
726 def write():
727 'message': msg
727 '''
728 })
728 '''
729
729 status = 1
730
730 json_data = json.loads(request.json)
731 def restart():
731 conf_ids = json_data['configurations']['allIds']
732 '''
732 for pk in conf_ids:
733 '''
733 if json_data['configurations']['byId'][pk]['device_type'] == 'jars':
734
734 data = json_data['configurations']['byId'][pk]['filter_parms']
735 global EXPNAME
735
736 #ip_host = '10.10.10.99'
736 if request.json:
737 port = 5000
737 try:
738 route_stop = 'http://'+IPHOST+':'+str(port)+'/stop/'
738 ret, racp = create_jarsfiles(request.json)
739 stop = requests.post(route_stop, data={})
739 except Exception as e:
740 print 'Restarting...'
740 ret = 0
741 time.sleep(3)
741 msg = str(e)
742 route_start = 'http://'+IPHOST+':'+str(port)+'/start/'
742 else:
743 start = requests.post(route_start, json={'name':EXPNAME})
743 msg = 'Missing POST data'
744
744
745 return
745 if ret == 1:
746
746 status = 2
747 @app.route('/get_log/')
747 msg = 'JARS configured OK'
748 def get_log():
748 else:
749 '''
749 msg = ret
750 This function sends Restarting Report.txt of the Experiment.
750
751 '''
751 return jsonify({
752
752 'status': status,
753 name = request.args.get('name', None)
753 'message': msg
754 global EXPNAME
754 })
755 EXPNAME = name
755
756
756
757 if name is None:
757 def restart():
758 return jsonify({
758 '''
759 'status': 1,
759 '''
760 'message': 'JARS Connected, missing experiment'
760
761 })
761 global EXPNAME
762 else:
762 #ip_host = '10.10.10.99'
763 try:
763 port = 5000
764 rr_file = os.path.join(PATH, name, 'Restarting Report.txt')
764 route_stop = 'http://'+IPHOST+':'+str(port)+'/stop/'
765 return send_file(rr_file, attachment_filename='Restarting Report.txt')
765 stop = requests.post(route_stop, data={})
766 except Exception as e:
766 print 'Restarting...'
767 return jsonify({
767 time.sleep(3)
768 'status': 1,
768 route_start = 'http://'+IPHOST+':'+str(port)+'/start/'
769 'message': str(e)
769 start = requests.post(route_start, json={'name':EXPNAME})
770 })
770
771
771 return
772 if __name__ == '__main__':
772
773 app.run(debug=True, host='0.0.0.0')
773 @app.route('/get_log/')
774 def get_log():
775 '''
776 This function sends Restarting Report.txt of the Experiment.
777 '''
778
779 name = request.args.get('name', None)
780 global EXPNAME
781 EXPNAME = name
782
783 if name is None:
784 return jsonify({
785 'status': 1,
786 'message': 'JARS Connected, missing experiment'
787 })
788 else:
789 try:
790 rr_file = os.path.join(PATH, name, 'Restarting Report.txt')
791 return send_file(rr_file, attachment_filename='Restarting Report.txt')
792 except Exception as e:
793 return jsonify({
794 'status': 1,
795 'message': str(e)
796 })
797
798 if __name__ == '__main__':
799 app.run(debug=True, host='0.0.0.0', port=5000)
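
As a quick sanity check of the DDC register math in create_jarsfiles() above: the NCO tuning word written to register 303h is 2^32 * fch / fclock, and the decimated sample rate used in the .jars file name is fclock / (M_CIC2 * M_CIC5 * M_RCF). A minimal sketch of that arithmetic follows; the clock, frequency and decimation values are illustrative only and are not taken from this diff:

    # Illustrative values only (not from the diff above)
    fclock = 60.0                              # ADC clock in MHz
    fch = 5.0                                  # channel centre frequency in MHz
    M_CIC2, M_CIC5, M_RCF = 10.0, 6.0, 1.0     # decimation factors

    # NCO tuning word (register 303h), the branch taken when abs(fch) < fclock/2
    nco_i = int((2**32) * (fch / fclock))      # -> 357913941

    # Decimated sample rate that ends up in the filter file name
    fsamp = fclock / (M_CIC2 * M_CIC5 * M_RCF) # -> 1.0 MHz, so the "fsamp >= 1" branch applies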
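For completeness, a minimal client-side sketch of how the endpoints defined in this revision can be exercised with requests. The host and experiment name below are placeholders (the server binds 0.0.0.0:5000), and it assumes the experiment has already been configured through /write/:

    import requests

    BASE = 'http://10.10.10.X:5000'   # placeholder host; port matches app.run(..., port=5000)
    EXP = 'my_experiment'             # placeholder experiment name, expected under PATH

    # status(): 1 = connected, 2 = configured, 3 = running (see the docstring above)
    print(requests.get(BASE + '/status/', params={'name': EXP}).json())

    # start(): launches JROAcquisitionSoftware.exe for a configured experiment
    print(requests.post(BASE + '/start/', json={'name': EXP}).json())

    # stop(): kills the acquisition process and saves the restart report
    print(requests.post(BASE + '/stop/').json())

    # get_log(): downloads 'Restarting Report.txt' for the experiment
    print(requests.get(BASE + '/get_log/', params={'name': EXP}).content)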