@@ -1,773 +1,799 | |||
|
1 | ''' | |
|
2 | Created on Jan 5, 2016 | |
|
3 | ||
|
4 | @author: Juan C. Espinoza | |
|
5 | ||
|
6 | ''' | |
|
7 | ||
|
8 | import os | |
|
9 | import math | |
|
10 | import json | |
|
11 | import requests | |
|
12 | import time | |
|
13 | ||
|
14 | from threading import Thread | |
|
15 | from subprocess import Popen, PIPE | |
|
16 | from collections import deque | |
|
17 | from datetime import datetime, timedelta | |
|
18 | ||
|
19 | from flask import Flask, jsonify, request, send_file | |
|
20 | ||
|
21 | PATH = '
|
22 | EXE = 'C:\JROAdquisicion\src\JROAcquisitionSoftware\Release\JROAcquisitionSoftware.exe' | |
|
23 | IPHOST='10.10.10.
|
24 | OPT = '--jars' #'--cbsim' | |
|
25 | PROC = False | |
|
26 | OUT = None | |
|
27 | LOGGING = False | |
|
28 | global EXPNAME | |
|
29 | ||
|
30 | DECODE_TYPE = {1:'DECODING_TIME_DOMAIN',2:'DECODING_FREQ_DOMAIN',3:'DECODING_INV_FREQ_DOMAIN'} | |
|
31 | ||
|
32 | app = Flask(__name__) | |
|
33 | ||
|
34 | class StdoutReader(object): | |
|
35 | ''' | |
|
36 | Class to manage stdout of JARS acquisition program | |
|
37 | ''' | |
|
38 | ||
|
39 | def __init__(self, stream, name): | |
|
40 | ''' | |
|
41 | stream: the stream to read from. | |
|
42 | Usually a process' stdout or stderr. | |
|
43 | ''' | |
|
44 | ||
|
45 | self._s = stream | |
|
46 | self._q = deque() | |
|
47 | self._f = open(os.path.join(PATH, name, 'Restarting Report.txt'), 'ab') | |
|
48 | if LOGGING: | |
|
49 | self._l = open(os.path.join(PATH, name, '{}.log'.format(name)), 'ab') | |
|
50 | ||
|
51 | def update_queue(stream, queue): | |
|
52 | ''' | |
|
53 | Collect lines from 'stream' and put them in 'queue'. | |
|
54 | ''' | |
|
55 | ||
|
56 | restart_dict = {} | |
|
57 | restart_num = 0 | |
|
58 | str_format = '%Y-%m-%d %H:%M:%S' | |
|
59 | delta_time = timedelta(0,120,0) | |
|
60 | while True: | |
|
61 | raw = stream.readline() | |
|
62 | line = raw.rstrip() | |
|
63 | now = datetime.now() | |
|
64 | now_str = now.strftime(str_format) | |
|
65 | restart_dict[str(restart_num)] = now_str | |
|
66 | max_num = 13 | |
|
67 | if line: | |
|
68 | queue.append(line) | |
|
69 | if LOGGING: | |
|
70 | self._l.write('{}'.format(raw)) | |
|
71 | print line | |
|
72 | if '
|
73 | self._f.write('{} at {}\n'.format(line, | |
|
74 | datetime.now().ctime())) | |
|
75 | ||
|
76 | restart_num = restart_num + 1 | |
|
77 | if restart_num > max_num: | |
|
78 | date1 = datetime.strptime(restart_dict['1'], str_format) | |
|
79 | date2 = datetime.strptime(restart_dict[str(max_num-1)], str_format) | |
|
80 | if (date2 - date1) < delta_time: | |
|
81 | print str(max_num)+' restarts en menos de 2min'#RESTART | |
|
82 | restart_num = 0 | |
|
83 | restart_dict = {} | |
|
84 | restart() | |
|
85 | else: | |
|
86 | restart_num = 0 | |
|
87 | restart_dict = {} | |
|
88 | print 'NO' | |
|
89 | ||
|
90 | ||
|
91 | self._t = Thread(target=update_queue, args=(self._s, self._q)) | |
|
92 | self._t.daemon = True | |
|
93 | self._t.start() | |
|
94 | ||
|
95 | def readline(self): | |
|
96 | ''' | |
|
97 | Return last line output | |
|
98 | ''' | |
|
99 | try: | |
|
100 | line = self._q.pop() | |
|
101 | self._q.clear() | |
|
102 | return line | |
|
103 | except IndexError: | |
|
104 | return None | |
|
105 | ||
|
106 | def save(self): | |
|
107 | ''' | |
|
108 | Save logging files | |
|
109 | ''' | |
|
110 | self._f.close() | |
|
111 | if LOGGING: | |
|
112 | self._l.close() | |
|
113 | ||
|
114 | def parse_line(n, data, lines): | |
|
115 | ||
|
116 | line_text = '' | |
|
117 | line_type = data['lines']['byId'][lines[n]]['line_type'] | |
|
118 | num = n+1 | |
|
119 | if line_type == 'windows': | |
|
120 | if num == 7: | |
|
121 | reference = data['lines']['byId'][lines[n]]['params']['TX_ref'] | |
|
122 | windows = data['lines']['byId'][lines[n]]['params']['params'] | |
|
123 | if windows: | |
|
124 | dh = str(float(windows[0]['resolution'])) | |
|
125 | else: | |
|
126 | dh = '' | |
|
127 | ||
|
128 | line_text = 'Sampling Windows={}\n'.format(len(windows)) | |
|
129 | ||
|
130 | cnt = 0 | |
|
131 | for window in windows: | |
|
132 | line_text += ('H0({cnt})={first_height}\n' | |
|
133 | 'NSA({cnt})={number_of_samples}\n' | |
|
134 | 'DH({cnt})={dh}\n'.format( | |
|
135 | cnt=cnt, | |
|
136 | first_height=window['first_height'], | |
|
137 | number_of_samples=int(window['number_of_samples']), | |
|
138 | dh=dh | |
|
139 | ) | |
|
140 | ) | |
|
141 | cnt += 1 | |
|
142 | ||
|
143 | else: | |
|
144 | reference = data['lines']['byId'][lines[n]]['params']['TX_ref'] | |
|
145 | windows = data['lines']['byId'][lines[n]]['params']['params'] | |
|
146 | if windows: | |
|
147 | dh = str(float(windows[0]['resolution'])) | |
|
148 | else: | |
|
149 | dh = '' | |
|
150 | ||
|
151 | line_text = 'Sampling Windows (Line {})={}\n'.format(num, len(windows)) | |
|
152 | ||
|
153 | cnt = 0 | |
|
154 | for window in windows: | |
|
155 | line_text += ('L{num}_H0({cnt})={first_height}\n' | |
|
156 | 'L{num}_NSA({cnt})={number_of_samples}\n' | |
|
157 | 'L{num}_DH({cnt})={dh}\n'.format( | |
|
158 | num=num, | |
|
159 | cnt=cnt, | |
|
160 | first_height=window['first_height'], | |
|
161 | number_of_samples=int(window['number_of_samples']), | |
|
162 | dh=dh | |
|
163 | ) | |
|
164 | ) | |
|
165 | cnt += 1 | |
|
166 | ||
|
167 | line_text += 'L{}_REFERENCE={}\n'.format( | |
|
168 | num, | |
|
169 | data['lines']['byId'][reference]['name'] | |
|
170 | ) | |
|
171 | ||
|
172 | elif line_type == 'sync': | |
|
173 | line_text = 'Line{}=Synchro\n'.format(num) | |
|
174 | ||
|
175 | elif line_type == 'flip': | |
|
176 | line_text = 'L{}_FLIP={}\n'.format( | |
|
177 | num, | |
|
178 | data['lines']['byId'][lines[n]]['params']['number_of_flips'] | |
|
179 | ) | |
|
180 | ||
|
181 | elif line_type == 'prog_pulses': | |
|
182 | periodic = data['lines']['byId'][lines[n]]['params']['periodic'] | |
|
183 | if periodic == '0': | |
|
184 | periodic = 'NO' | |
|
185 | else: | |
|
186 | periodic = 'YES' | |
|
187 | ||
|
188 | portions = data['lines']['byId'][lines[n]]['params']['params'] | |
|
189 | line_text = 'L{} Number Of Portions={}\n'.format(num, len(portions)) | |
|
190 | ||
|
191 | for i, portion in enumerate(portions): | |
|
192 | line_text += 'PORTION_BEGIN({cnt})={begin}\nPORTION_END({cnt})={end}\n'.format( | |
|
193 | cnt=i, | |
|
194 | begin=int(portion['begin']), | |
|
195 | end=int(portion['end']), | |
|
196 | ) | |
|
197 | ||
|
198 | line_text += 'L{} Portions IPP Periodic={}\n'.format(num, periodic) | |
|
199 | ||
|
200 | elif line_type == 'none': | |
|
201 | line_text = '' | |
|
202 | ||
|
203 | else: | |
|
204 | reference = data['lines']['byId'][lines[n]]['params']['TX_ref'] | |
|
205 | code_type = data['lines']['byId'][lines[n]]['params']['code'] | |
|
206 | codes = data['lines']['byId'][lines[n]]['params']['codes'] | |
|
207 | ||
|
208 | if num == 4: | |
|
209 | line_text = 'Code Type={}\n'.format(code_type) | |
|
210 | line_text += 'Number of Codes={}\nCode Width={}\n'.format( | |
|
211 | len(codes), | |
|
212 | len(codes[0]) | |
|
213 | ) | |
|
214 | cnt = 0 | |
|
215 | for code in codes: | |
|
216 | line_text += 'COD({})={}\n'.format(cnt, code) | |
|
217 | cnt += 1 | |
|
218 | else: | |
|
219 | line_text = 'Code Type (Line {})={}\n'.format(num, code_type) | |
|
220 | line_text += 'Number of Codes (Line {})={}\nCode Width (Line {})={}\n'.format( | |
|
221 | num, | |
|
222 | len(codes), | |
|
223 | num, | |
|
224 | len(codes[0]) | |
|
225 | ) | |
|
226 | cnt = 0 | |
|
227 | for code in codes: | |
|
228 | line_text += 'L{}_COD({})={}\n'.format(num,cnt, code) | |
|
229 | cnt += 1 | |
|
230 | ||
|
231 | line_text += 'L{}_REFERENCE={}\n'.format( | |
|
232 | num, | |
|
233 | data['lines']['byId'][reference]['name'] | |
|
234 | ) | |
|
235 | ||
|
236 | return line_text | |
|
237 | ||
|
238 | def create_jarsfiles(json_data): | |
|
239 | """ | |
|
240 | Function to create *.racp and *.jars files with json_data | |
|
241 | """ | |
|
242 | global EXPNAME | |
|
243 | ||
|
244 | data = json.loads(json_data) | |
|
245 | exp_id = data['experiments']['allIds'][0] | |
|
246 | experiment = data['experiments']['byId'][exp_id] | |
|
247 | name = experiment['name'] | |
|
248 | EXPNAME = name | |
|
249 | folder_name = os.path.join(PATH, name) | |
|
250 | print 'Experiment: ' + name + ' received...' | |
|
251 | if not os.path.exists(folder_name): | |
|
252 | os.makedirs(folder_name) | |
|
253 | if not os.path.exists(folder_name+'/DATA'): | |
|
254 | os.mkdir(folder_name+'/DATA') | |
|
255 | ||
|
256 | try:
|
257 | json_file = open(folder_name+'/'+name+'_jars.json', 'w') | |
|
258 | except: | |
|
259 | return 0, 'Error creating .json file' | |
|
260 | ||
|
261 | json_file.write(json_data) | |
|
262 | json_file.close() | |
|
263 | ||
|
264 | try: | |
|
265 | racp_file = open(folder_name+'/'+name+'_jars.racp', 'w') | |
|
266 | except: | |
|
267 | return 0, 'Error creating .racp file' | |
|
268 | ||
|
269 | conf_ids = data['configurations']['allIds'] | |
|
270 | ||
|
271 | rcs = [pk for pk in conf_ids \ | |
|
272 | if data['configurations']['byId'][pk]['device_type'] == 'rc'] | |
|
273 | if len(rcs) == 1: | |
|
274 | rc_id = rcs[0] | |
|
275 | rc_mix_id = 0 | |
|
276 | else: | |
|
277 | rcs = [pk for pk in conf_ids \ | |
|
278 | if data['configurations']['byId'][pk]['device_type'] == 'rc' and data['configurations']['byId'][pk]['mix'] == True] | |
|
279 | rc_mix_id = rcs[0] | |
|
280 | mix_parameters = data['configurations']['byId'][rc_mix_id]['parameters'].split('-') | |
|
281 | rc_id = mix_parameters[0].split('|')[0] | |
|
282 | ||
|
283 | jars_id = [pk for pk in conf_ids \ | |
|
284 | if data['configurations']['byId'][pk]['device_type'] == 'jars'][0] | |
|
285 | ||
|
286 | rc = data['configurations']['byId'][rc_id] | |
|
287 | jars = data['configurations']['byId'][jars_id] | |
|
288 | ||
|
289 | if rc_mix_id <> 0: | |
|
290 | rc_mix = data['configurations']['byId'][rc_mix_id] | |
|
291 | mix_text = '*******Mixed Experiment*******************\n' | |
|
292 | mix_text += 'Number of Experiments={}\n'.format(len(mix_parameters)) | |
|
293 | for i,param in enumerate(mix_parameters): | |
|
294 | pk, mode, op, delay, mask = param.split('|') | |
|
295 | mix_text += 'EXP({})={}\n'.format(i, data['configurations']['byId'][pk]['name']) | |
|
296 | mix_text += 'DELAY({})={}\n'.format(i, delay) | |
|
297 | mix_text += 'RELOJ={}\n'.format(int(data['configurations']['byId'][pk]['clock'])) | |
|
298 | mix_text += 'MIXER MODE={}_FLAG\n'.format(op) | |
|
299 | mix_text += 'MIXER MASK={}\n'.format(mask) | |
|
300 | mix_text += '*******System parameters******************\n' | |
|
301 | else: | |
|
302 | mix_text = '' | |
|
303 | ||
|
304 | exp_type = jars['exp_type'] | |
|
305 | if exp_type == 0: | |
|
306 | exp_type = 'EXP_RAW_DATA' | |
|
307 | else: | |
|
308 | exp_type = 'EXP_PROCESS_SPECTRA' | |
|
309 | ||
|
310 | racp_text = 'EXPERIMENT TYPE={}\nEXPERIMENT NAME={}\nHEADER VERSION=1103\n'.format( | |
|
311 | exp_type, | |
|
312 | name | |
|
313 | ) | |
|
314 | ||
|
315 | racp_text += '*****Radar Controller Parameters**********\n{}'.format(mix_text) | |
|
316 | if rc_mix_id == 0: | |
|
317 | racp_text += 'IPP={}\n'.format(float(rc['ipp'])) | |
|
318 | racp_text += 'NTX={}\n'.format(rc['ntx']) | |
|
319 | else: | |
|
320 | racp_text += 'IPP={}\n'.format(float(rc_mix['ipp'])) | |
|
321 | racp_text += 'NTX={}\n'.format(rc_mix['ntx']) | |
|
322 | ||
|
323 | racp_text += 'TXA={}\n'.format( | |
|
324 | data['lines']['byId'][rc['lines'][1]]['params']['pulse_width'] | |
|
325 | ) | |
|
326 | if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx': | |
|
327 | racp_text += 'TXB={}\n'.format( | |
|
328 | data['lines']['byId'][rc['lines'][2]]['params']['pulse_width'] | |
|
329 | ) | |
|
330 | idTR = data['lines']['byId'][rc['lines'][0]]['params']['TX_ref'] | |
|
331 | rangeTR = data['lines']['byId'][rc['lines'][0]]['params']['range'] | |
|
332 | ||
|
333 | if rangeTR != '0': | |
|
334 | racp_text += 'Pulse selection_TR={}\n'.format(rangeTR) | |
|
335 | elif idTR != '0': | |
|
336 | racp_text += 'Pulse selection_TR={}\n'.format( | |
|
337 | data['lines']['byId'][idTR]['name'][-1] | |
|
338 | ) | |
|
339 | rangeTXA = data['lines']['byId'][rc['lines'][1]]['params']['range'] | |
|
340 | if rangeTXA != '0': | |
|
341 | racp_text += 'Pulse selection_TXA={}\n'.format(rangeTXA) | |
|
342 | if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx': | |
|
343 | rangeTXB = data['lines']['byId'][rc['lines'][2]]['params']['range'] | |
|
344 | if rangeTXB != '0': | |
|
345 | racp_text += 'Pulse selection_TXB={}\n'.format(rangeTXB) | |
|
346 | for n in range(3, 6): | |
|
347 | racp_text += parse_line(n, data, rc['lines']) | |
|
348 | ||
|
349 | if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx': | |
|
350 | taus = data['lines']['byId'][rc['lines'][2]]['params']['delays'].split(',') | |
|
351 | if taus != '0': | |
|
352 | racp_text += 'Number of Taus={}\n'.format(len(taus)) | |
|
353 | for n, tau in enumerate(taus): | |
|
354 | racp_text += 'TAU({})={}\n'.format(n, tau) | |
|
355 | ||
|
356 | racp_text += parse_line(6, data, rc['lines']) | |
|
357 | racp_text += 'SAMPLING REFERENCE=MIDDLE OF FIRST SUB-BAUD\n' | |
|
358 | racp_text += 'RELOJ={}\n'.format(int(rc['clock'])) | |
|
359 | racp_text += 'CLOCK DIVIDER={}\n'.format(int(rc['clock_divider'])) | |
|
360 | racp_text += 'TR_BEFORE={}\n'.format(rc['time_before']) | |
|
361 | racp_text += 'TR_AFTER={}\n'.format(rc['time_after']) | |
|
362 | racp_text += 'WINDOW IN LINE 5&6=NO\n' | |
|
363 | racp_text += '******System Parameters*******************\n' | |
|
364 | racp_text += 'Number of Cards={}\n'.format(jars['cards_number']) | |
|
365 | ||
|
366 | for i in range(jars['cards_number']): | |
|
367 | racp_text += 'Card({})={}\n'.format(i, i) | |
|
368 | ||
|
369 | channels = jars['channels'].split(',') | |
|
370 | ||
|
371 | if channels: | |
|
372 | racp_text += 'Number of Channels={}\n'.format(len(channels)) | |
|
373 | for i, channel in enumerate(channels): | |
|
374 | racp_text += 'Channel({})={}\n'.format(i, channel) | |
|
375 | ||
|
376 | if exp_type == 'EXP_RAW_DATA': | |
|
377 | racp_text += 'RAW DATA DIRECTORY={}\n'.format(os.path.join(folder_name, 'DATA')) | |
|
378 | else: | |
|
379 | racp_text += 'PROCESS DATA DIRECTORY={}\n'.format(os.path.join(folder_name, 'DATA')) | |
|
380 | ||
|
381 | if jars['create_directory']: | |
|
382 | racp_text += 'CREATE DIRECTORY PER DAY=YES'+'\n' | |
|
383 | else: | |
|
384 | racp_text += 'CREATE DIRECTORY PER DAY=NO'+'\n' | |
|
385 | ||
|
386 | if jars['include_expname']: | |
|
387 | racp_text += 'INCLUDE EXPNAME IN DIRECTORY=YES'+'\n' | |
|
388 | else: | |
|
389 | racp_text += 'INCLUDE EXPNAME IN DIRECTORY=NO'+'\n' | |
|
390 | ||
|
391 | racp_text += '******System Parameters*******************\n' | |
|
392 | racp_text += 'ADC Resolution=8\n' | |
|
393 | racp_text += 'PCI DIO BusWidth=32\n' | |
|
394 | ||
|
395 | if exp_type == 'EXP_RAW_DATA': | |
|
396 | racp_text += 'RAW DATA BLOCKS={}\n'.format(jars['raw_data_blocks']) | |
|
397 | spectra_text = '' | |
|
398 | else: | |
|
399 | racp_text += 'PROCESS DATA BLOCKS=
|
400 | spectra_text = '------------------------------------------\n' | |
|
401 | ||
|
402 | if jars['fftpoints'] > 1: | |
|
403 | spectra_text += 'FFTPOINTS={}\n'.format(jars['fftpoints']) | |
|
404 | ||
|
405 | if jars['incohe_integr']: | |
|
406 | spectra_text += 'INCOHERENT INTEGRATIONS={}\n'.format(jars['incohe_integr']) | |
|
407 | ||
|
408 | if jars['save_ch_dc']: | |
|
409 | spectra_text += 'SAVE CHANNELS DC=YES\n' | |
|
410 | ||
|
411 | dum = jars['spectral'] | |
|
412 | ||
|
413 | if dum.endswith(','): | |
|
414 | dum = dum[:-1] | |
|
415 | spectral = json.loads('[{}]'.format(dum)) | |
|
416 | ||
|
417 | if spectral: | |
|
418 | spectra_text += '------------------------------------------\n' | |
|
419 | spectra_text += 'TOTAL SPECTRAL COMBINATIONS={}\n'.format(len(spectral)) | |
|
420 | for i, spc in enumerate(spectral): | |
|
421 | spectra_text += 'SPEC_COMB({})={},{}\n'.format(i, *spc) | |
|
422 | ||
|
423 | racp_text += '******Process Parameters******************\n' | |
|
424 | ||
|
425 | data_type = jars['data_type'] | |
|
426 | ||
|
427 | if data_type == 0: | |
|
428 | racp_text += 'DATATYPE=SHORT\n' | |
|
429 | elif data_type == 1: | |
|
430 | racp_text += 'DATATYPE=FLOAT\n' | |
|
431 | ||
|
432 | racp_text += 'DATA ARRANGE=CONTIGUOUS_CH\n' | |
|
433 | ||
|
434 | if jars['cohe_integr'] > 1: | |
|
435 | racp_text += 'COHERENT INTEGRATIONS={}\n'.format(jars['cohe_integr']) | |
|
436 | ||
|
437 | decode_text = '' | |
|
438 | decode_data = jars['decode_data'] | |
|
439 | if decode_data !=0: | |
|
440 | decode_text = 'DECODE DATA=YES\n' | |
|
441 | decode_text += 'DECODING TYPE={}\n'.format(DECODE_TYPE[decode_data]) | |
|
442 | if jars['post_coh_int'] == True: | |
|
443 | decode_text += 'POST COHERENT INTEGRATIONS=YES\n' | |
|
444 | decode_text += '------------------------------------------\n' | |
|
445 | ||
|
446 | racp_text += 'COHERENT INTEGRATION STRIDE={}\n'.format(jars['cohe_integr_str']) | |
|
447 | racp_text += '------------------------------------------\n' | |
|
448 | racp_text += 'ACQUIRED PROFILES={}\n'.format(jars['acq_profiles']) | |
|
449 | racp_text += 'PROFILES PER BLOCK={}\n'.format(jars['profiles_block']) | |
|
450 | racp_text += spectra_text | |
|
451 | racp_text += '------------------------------------------\n' | |
|
452 | racp_text += decode_text | |
|
453 | racp_text += 'BEGIN ON START=NO\n' | |
|
454 | racp_text += 'BEGIN_TIME={}\n'.format(experiment['start_time'][:-3]) | |
|
455 | racp_text += 'END_TIME={}\n'.format(experiment['end_time'][:-3]) | |
|
456 | racp_text += 'GENERATE ACQUISITION LINK=YES\n' | |
|
457 | racp_text += 'VIEW RAW DATA=YES\n' | |
|
458 | racp_text += 'REFRESH RATE=1\n' | |
|
459 | racp_text += '------------------------------------------\n' | |
|
460 | racp_text += 'SEND STATUS TO FTP=YES\n' | |
|
461 | racp_text += 'FTP SERVER=jro.igp.gob.pe\n' | |
|
462 | racp_text += 'FTP USER=wmaster\n' | |
|
463 | racp_text += 'FTP PASSWD=PKQLX20\n' | |
|
464 | racp_text += 'FTP DIR=/users/database/on-line/\n' | |
|
465 | racp_text += 'FTP FILE=status.txt\n' | |
|
466 | racp_text += 'FTP INTERVAL={}\n'.format(jars['ftp_interval']) | |
|
467 | racp_text += 'SAVE STATUS AND BLOCK=YES\n' | |
|
468 | racp_text += 'GENERATE RTI=YES\n' | |
|
469 | racp_text += 'RTI Inc.Int.=1\n' | |
|
470 | racp_text += 'SEND RTI AND BLOCK=YES\n' | |
|
471 | racp_text += '------------------------------------------\n' | |
|
472 | racp_text += 'COMPORT CONFIG=Com1 CBR_9600 TWOSTOPBITS NOPARITY\n' | |
|
473 | racp_text += 'JAM CONFIGURE FILE=dmasg_pprofiles_pch_64_pdigi_6clk.jam\n' | |
|
474 | racp_text += 'ACQUISITION SYSTEM=JARS\n' | |
|
475 | racp_text += '************JARS CONFIGURATION PARAMETERS************\n' | |
|
476 | ||
|
477 | #-------------------------JARS FILTER--------------------------------------- | |
|
478 | filter_parms = jars['filter_parms'] | |
|
479 | if filter_parms.__class__.__name__ == 'unicode': | |
|
480 | filter_parms = eval(filter_parms) | |
|
481 | elif filter_parms.__class__.__name__ == 'str': | |
|
482 | filter_parms = eval(filter_parms) | |
|
483 | if filter_parms.__class__.__name__ == 'str': | |
|
484 | filter_parms = eval(filter_parms) | |
|
485 | try: | |
|
486 | fclock = float(filter_parms['clock']) | |
|
487 | fc
|
488 |
|
489 |
|
490 | M_CIC
|
491 | M_
|
492 | except: | |
|
493 | fclock = eval(filter_parms['clock']) | |
|
494 | fc
|
495 |
|
496 |
|
497 | M_CIC
|
498 | M_
|
499 | ||
|
500 | filter_text = 'Loading\n' | |
|
501 | filter_text += 'Impulse file found -> C:\jars\F1MHZ_8_MATCH.imp\n' | |
|
502 | filter_text += 'Autoscale off\n' | |
|
503 | filter_text += '
|
504 | filter_text += '
|
505 | filter_text += '
|
506 | filter_text += '30
|
507 | filter_text += '30
|
508 | ||
|
509 | if abs(fch) < (fclock/2): | |
|
510 | nco = (2**32)*((fch/fclock))#%1) | |
|
511 | nco_i = long(nco) | |
|
512 | else: | |
|
513 | nco = (2**32)*(fclock-fch)/(fclock) | |
|
514 | nco_i = long(nco) | |
|
515 | ||
|
516 | filter_text += '303h -> {}\n'.format(nco_i) | |
|
517 | filter_text += '30
|
518 | ||
|
519 | input_level = 1 | |
|
520 | S_CIC2 = math.ceil(math.log((M_CIC2**2)*input_level)/math.log(2)) | |
|
521 | if S_CIC2 < 0: | |
|
522 |
|
523 |
|
524 |
|
525 | ||
|
526 | filter_text += '305h -> {}\n'.format(int(S_CIC2)) | |
|
527 | filter_text += '306h -> {}\n'.format(int(M_CIC2-1)) | |
|
528 | ||
|
529 | OL_CIC2 = input_level/(2.0**S_CIC2) | |
|
530 | ||
|
531 | S_CIC5 = math.ceil(math.log((M_CIC5**5)*OL_CIC2)/math.log(2))-5 | |
|
532 | if S_CIC5 < 0: | |
|
533 | S_CIC5 = 0 | |
|
534 | if S_CIC5 > 7: | |
|
535 |
|
536 | ||
|
537 | OL_CIC5 = ((M_CIC5**5)/(2**(S_CIC5+5)))*OL_CIC2 | |
|
538 | ||
|
539 | filter_text += '307h -> {}\n'.format(int(S_CIC5)) | |
|
540 | filter_text += '308h -> {}\n'.format(int(M_CIC5-1)) | |
|
541 | ||
|
542 | Gain = 1 | |
|
543 | S_RCF = int(4.0-math.log(Gain)/math.log(2)) | |
|
544 | if S_RCF < 0: | |
|
545 | S_RCF = 0 | |
|
546 | if S_RCF > 7: | |
|
547 | S_RCF = 7 | |
|
548 | ||
|
549 | filter_text += '309h -> {}\n'.format(S_RCF) | |
|
550 | filter_text += '30Ah -> {}\n'.format(int(M_RCF-1)) | |
|
551 | ||
|
552 | Offset = 0 | |
|
553 | filter_text += '30Bh -> {}\n'.format(Offset) | |
|
554 | ||
|
555 | ntaps = int(M_RCF) | |
|
556 | filter_text += '30
|
557 | filter_text += '30Dh -> 0\n' | |
|
558 | ||
|
559 | fsamp = fclock/(M_CIC2*M_CIC5*M_RCF) | |
|
560 | ||
|
561 | tap = int(2.0*((2**19)-1)/(ntaps*OL_CIC5)) | |
|
562 | for p in range(0, ntaps): | |
|
563 | filter_text += ' {} -> {}\n'.format(p, int(math.ceil(tap)))#filter_text += ' {} -> {}\n'.format(p, int(math.ceil(hn))) | |
|
564 | ||
|
565 | filter_text += '
|
566 | filter_text += 'Chip Restarted:\n' | |
|
567 | filter_text += '300h -> 1\n' | |
|
568 | filter_text += '300h -> 0' | |
|
569 | ||
|
570 | filter_name = '{}_{}MHz_clock{}MHz_F{}MHz_{}_{}_{}.jars'.format( | |
|
571 | abs(fch), | |
|
572 | int((abs(fch)-abs(int(fch)))*1000), | |
|
573 | fclock, | |
|
574 | round(fsamp,3), | |
|
575 | M_CIC2, | |
|
576 | M_CIC5, | |
|
577 | M_RCF | |
|
578 | ) | |
|
579 | ||
|
580 | jars_file = open(os.path.join(folder_name, filter_name), 'wb') | |
|
581 | jars_file.write(filter_text) | |
|
582 | jars_file.close() | |
|
583 | racp_text += 'JARS_FILTER={}\n'.format(os.path.join(folder_name, filter_name)) | |
|
584 | racp_text += 'MARK WIDTH=2\n' | |
|
585 | racp_text += 'GENERATE OWN SAMPLING WINDOW=NO\n' | |
|
586 | ||
|
587 | if jars['save_data']: | |
|
588 | racp_text += 'SAVE DATA=YES\n' | |
|
589 | else: | |
|
590 | racp_text += 'SAVE DATA=NO\n' | |
|
591 | ||
|
592 | racp_text += 'RC_STOP_SEQUENCE=255,0\n' | |
|
593 | racp_text += 'RC_START_SEQUENCE=255,24\n' | |
|
594 | ||
|
595 | racp_file.write(racp_text) | |
|
596 | racp_file.close() | |
|
597 | ||
|
598 | return 1, racp_file.name | |
|
599 | ||
|
600 | @app.route('/status/') | |
|
601 | def status(): | |
|
602 | ''' | |
|
603 | 0 : Not configured/running | |
|
604 | 3 : Running and acquiring data | |
|
605 | 2 : Configured | |
|
606 | 1 : Connected | |
|
607 | ''' | |
|
608 | ||
|
609 | name = request.args.get('name', None) | |
|
610 | global EXPNAME | |
|
611 | EXPNAME = name | |
|
612 | ||
|
613 | if name is None: | |
|
614 | return jsonify({ | |
|
615 | 'status': 1, | |
|
616 | 'message': 'JARS Connected, missing experiment' | |
|
617 | }) | |
|
618 | else: | |
|
619 | racp_file = os.path.join(PATH, name, '{}_jars.racp'.format(name)) | |
|
620 | ||
|
621 | if name and not os.path.exists(racp_file): | |
|
622 | return jsonify({ | |
|
623 | 'status': 1, | |
|
624 | 'message': 'JARS not configured' | |
|
625 | }) | |
|
626 | elif os.path.exists(racp_file) and hasattr(PROC, 'pid'): | |
|
627 | if PROC.poll() is None: | |
|
628 | status = 3 | |
|
629 | msg = 'Process: PID={}, OUT={}'.format( | |
|
630 | PROC.pid, | |
|
631 | OUT.readline() | |
|
632 | ) | |
|
633 | else: | |
|
634 | status = 2 | |
|
635 | msg = 'JARS Configured' | |
|
636 | else: | |
|
637 | status = 2 | |
|
638 | msg = 'JARS Configured' | |
|
639 | ||
|
640 | return jsonify({ | |
|
641 | 'status':
|
642 | 'message': msg | |
|
643 | }) | |
|
644 | ||
|
645 | @app.route('/start/', methods=['POST']) | |
|
646 | def start(): | |
|
647 | ''' | |
|
648 | ''' | |
|
649 | ||
|
650 | global PROC | |
|
651 | global OUT | |
|
652 | global EXPNAME | |
|
653 | ||
|
654 | name = request.json['name'] | |
|
655 | EXPNAME = name | |
|
656 | racp_file = os.path.join(PATH, name, '{}_jars.racp'.format(name)) | |
|
657 | if hasattr(PROC, 'pid') and PROC.poll() is None: | |
|
658 |
|
659 | msg = 'JARS already running' | |
|
660 | elif os.path.exists(racp_file): | |
|
661 | PROC = Popen([EXE, '-rf', racp_file, OPT], stdout=PIPE) | |
|
662 | OUT = StdoutReader(PROC.stdout, name) | |
|
663 | status =
|
664 | msg = 'JARS
|
665 | elif not os.path.exists(racp_file): | |
|
666 | status = 1 | |
|
667 | msg = 'Experiment: {} not configured'.format(name) | |
|
668 | ||
|
669 | return jsonify({ | |
|
670 | 'status': status, | |
|
671 | 'message': msg | |
|
672 | }) | |
|
673 | ||
|
674 | @app.route('/stop/', methods=['POST']) | |
|
675 | def stop(): | |
|
676 | ''' | |
|
677 | ''' | |
|
678 | ||
|
679 | global PROC | |
|
680 | ||
|
681 | if hasattr(PROC, 'pid'): | |
|
682 | if PROC.poll() is None: | |
|
683 | OUT.save() | |
|
684 | PROC.kill() | |
|
685 | status = 2 | |
|
686 | msg = 'JARS stopped OK' | |
|
687 | else: | |
|
688 | status = 1 | |
|
689 | msg = 'JARS not running' | |
|
690 | else: | |
|
691 | status = 1 | |
|
692 | msg = 'JARS not running' | |
|
693 | ||
|
694 | return jsonify({ | |
|
695 | 'status': status, | |
|
696 | '
|
697 | }) | |
|
698 | ||
|
699 | @app.route('/write/', methods=['POST']) | |
|
700 | def write(): | |
|
701 | ''' | |
|
702 |
|
703 | status = 1 | |
|
704 | json_data = json.loads(request.json) | |
|
705 | conf_ids = json_data['configurations']['allIds'] | |
|
706 | for pk in conf_ids: | |
|
707 | if json_data['configurations']['byId'][pk]['device_type'] == 'jars': | |
|
708 | data = json_data['configurations']['byId'][pk]['filter_parms'] | |
|
709 | ||
|
710 | if request.json: | |
|
711 | try: | |
|
712 | ret, racp = create_jarsfiles(request.json) | |
|
713 | except Exception as e: | |
|
714 |
|
715 | msg =
|
716 | else: | |
|
717 | msg = 'Missing POST data' | |
|
718 | ||
|
719 | if ret == 1: | |
|
720 | status = 2 | |
|
721 | msg = 'JARS configured OK' | |
|
722 | else: | |
|
723 | msg = ret | |
|
724 | ||
|
725 | return jsonify({ | |
|
726 | 'status': status, | |
|
727 | 'message': msg | |
|
728 |
|
729 | ||
|
730 | ||
|
731 | def restart(): | |
|
732 | ''' | |
|
733 | ''' | |
|
734 | ||
|
735 | global EXPNAME | |
|
736 | #ip_host = '10.10.10.99' | |
|
737 | port = 5000 | |
|
738 | route_stop = 'http://'+IPHOST+':'+str(port)+'/stop/' | |
|
739 | stop = requests.post(route_stop, data={}) | |
|
740 | print 'Restarting...' | |
|
741 | time.sleep(3) | |
|
742 | route_start = 'http://'+IPHOST+':'+str(port)+'/start/' | |
|
743 | start = requests.post(route_start, json={'name':EXPNAME}) | |
|
744 | ||
|
745 | return | |
|
746 | ||
|
747 | @app.route('/get_log/') | |
|
748 | def get_log(): | |
|
749 | ''' | |
|
750 | This function sends Restarting Report.txt of the Experiment. | |
|
751 | ''' | |
|
752 | ||
|
753 | name = request.args.get('name', None) | |
|
754 | global EXPNAME | |
|
755 | EXPNAME = name | |
|
756 | ||
|
757 | if name is None: | |
|
758 | return jsonify({ | |
|
759 | 'status': 1, | |
|
760 | 'message': 'JARS Connected, missing experiment' | |
|
761 | }) | |
|
762 | else: | |
|
763 | try: | |
|
764 | rr_file = os.path.join(PATH, name, 'Restarting Report.txt') | |
|
765 | return send_file(rr_file, attachment_filename='Restarting Report.txt') | |
|
766 | except Exception as e: | |
|
767 | return jsonify({ | |
|
768 | 'status': 1, | |
|
769 | 'message': str(e) | |
|
770 | }) | |
|
771 | ||
|
772 | if __name__ == '__main__': | |
|
773 | app.run(debug=True, host='0.0.0.0') | |
|
1 | ''' | |
|
2 | Created on Jan 5, 2016 | |
|
3 | Modified on Jan 24, 2019 | |
|
4 | @authors: Juan C. Espinoza, Fiorella Quino, John Rojas | |
|
5 | ||
|
6 | ''' | |
|
7 | ||
|
8 | import os | |
|
9 | import math | |
|
10 | import json | |
|
11 | import requests | |
|
12 | import time | |
|
13 | ||
|
14 | from threading import Thread | |
|
15 | from subprocess import Popen, PIPE | |
|
16 | from collections import deque | |
|
17 | from datetime import datetime, timedelta | |
|
18 | ||
|
19 | from flask import Flask, jsonify, request, send_file | |
|
20 | ||
|
21 | PATH = 'G:\SIR_DATA' | |
|
22 | EXE = 'C:\JROAdquisicion\src\JROAcquisitionSoftware\Release\JROAcquisitionSoftware.exe' | |
|
23 | IPHOST='10.10.10.X' | |
|
24 | OPT = '--jars' #'--cbsim' | |
|
25 | PROC = False | |
|
26 | OUT = None | |
|
27 | LOGGING = False | |
|
28 | global EXPNAME | |
|
29 | ||
|
30 | DECODE_TYPE = {1:'DECODING_TIME_DOMAIN',2:'DECODING_FREQ_DOMAIN',3:'DECODING_INV_FREQ_DOMAIN'} | |
|
31 | ||
|
32 | app = Flask(__name__) | |
|
33 | ||
|
34 | class StdoutReader(object): | |
|
35 | ''' | |
|
36 | Class to manage stdout of JARS acquisition program | |
|
37 | ''' | |
|
38 | ||
|
39 | def __init__(self, stream, name): | |
|
40 | ''' | |
|
41 | stream: the stream to read from. | |
|
42 | Usually a process' stdout or stderr. | |
|
43 | ''' | |
|
44 | ||
|
45 | self._s = stream | |
|
46 | self._q = deque() | |
|
47 | self._f = open(os.path.join(PATH, name, 'Restarting Report.txt'), 'ab') | |
|
48 | if LOGGING: | |
|
49 | self._l = open(os.path.join(PATH, name, '{}.log'.format(name)), 'ab') | |
|
50 | ||
|
51 | def update_queue(stream, queue): | |
|
52 | ''' | |
|
53 | Collect lines from 'stream' and put them in 'queue'. | |
|
54 | ''' | |
|
55 | ||
|
56 | restart_dict = {} | |
|
57 | restart_num = 0 | |
|
58 | str_format = '%Y-%m-%d %H:%M:%S' | |
|
59 | delta_time = timedelta(0,120,0) | |
|
60 | while True: | |
|
61 | raw = stream.readline() | |
|
62 | line = raw.rstrip() | |
|
63 | now = datetime.now() | |
|
64 | now_str = now.strftime(str_format) | |
|
65 | restart_dict[str(restart_num)] = now_str | |
|
66 | max_num = 13 | |
|
67 | if line: | |
|
68 | queue.append(line) | |
|
69 | if LOGGING: | |
|
70 | self._l.write('{}'.format(raw)) | |
|
71 | print line | |
|
72 | if 'Error... restarting' in line or 'Trying restart' in line: | |
|
73 | self._f.write('{} at {}\n'.format(line, | |
|
74 | datetime.now().ctime())) | |
|
75 | ||
|
76 | restart_num = restart_num + 1 | |
|
77 | if restart_num > max_num: | |
|
78 | date1 = datetime.strptime(restart_dict['1'], str_format) | |
|
79 | date2 = datetime.strptime(restart_dict[str(max_num-1)], str_format) | |
|
80 | if (date2 - date1) < delta_time: | |
|
81 | print str(max_num)+' restarts en menos de 2min'#RESTART | |
|
82 | restart_num = 0 | |
|
83 | restart_dict = {} | |
|
84 | restart() | |
|
85 | else: | |
|
86 | restart_num = 0 | |
|
87 | restart_dict = {} | |
|
88 | print 'NO' | |
|
89 | ||
|
90 | ||
|
91 | self._t = Thread(target=update_queue, args=(self._s, self._q)) | |
|
92 | self._t.daemon = True | |
|
93 | self._t.start() | |
|
94 | ||
|
95 | def readline(self): | |
|
96 | ''' | |
|
97 | Return last line output | |
|
98 | ''' | |
|
99 | try: | |
|
100 | line = self._q.pop() | |
|
101 | self._q.clear() | |
|
102 | return line | |
|
103 | except IndexError: | |
|
104 | return None | |
|
105 | ||
|
106 | def save(self): | |
|
107 | ''' | |
|
108 | Save logging files | |
|
109 | ''' | |
|
110 | self._f.close() | |
|
111 | if LOGGING: | |
|
112 | self._l.close() | |
|
113 | ||
|
114 | def parse_line(n, data, lines): | |
|
115 | ||
|
116 | line_text = '' | |
|
117 | line_type = data['lines']['byId'][lines[n]]['line_type'] | |
|
118 | num = n+1 | |
|
119 | if line_type == 'windows': | |
|
120 | if num == 7: | |
|
121 | reference = data['lines']['byId'][lines[n]]['params']['TX_ref'] | |
|
122 | windows = data['lines']['byId'][lines[n]]['params']['params'] | |
|
123 | if windows: | |
|
124 | dh = str(float(windows[0]['resolution'])) | |
|
125 | else: | |
|
126 | dh = '' | |
|
127 | ||
|
128 | line_text = 'Sampling Windows={}\n'.format(len(windows)) | |
|
129 | ||
|
130 | cnt = 0 | |
|
131 | for window in windows: | |
|
132 | line_text += ('H0({cnt})={first_height}\n' | |
|
133 | 'NSA({cnt})={number_of_samples}\n' | |
|
134 | 'DH({cnt})={dh}\n'.format( | |
|
135 | cnt=cnt, | |
|
136 | first_height=window['first_height'], | |
|
137 | number_of_samples=int(window['number_of_samples']), | |
|
138 | dh=dh | |
|
139 | ) | |
|
140 | ) | |
|
141 | cnt += 1 | |
|
142 | ||
|
143 | else: | |
|
144 | reference = data['lines']['byId'][lines[n]]['params']['TX_ref'] | |
|
145 | windows = data['lines']['byId'][lines[n]]['params']['params'] | |
|
146 | if windows: | |
|
147 | dh = str(float(windows[0]['resolution'])) | |
|
148 | else: | |
|
149 | dh = '' | |
|
150 | ||
|
151 | line_text = 'Sampling Windows (Line {})={}\n'.format(num, len(windows)) | |
|
152 | ||
|
153 | cnt = 0 | |
|
154 | for window in windows: | |
|
155 | line_text += ('L{num}_H0({cnt})={first_height}\n' | |
|
156 | 'L{num}_NSA({cnt})={number_of_samples}\n' | |
|
157 | 'L{num}_DH({cnt})={dh}\n'.format( | |
|
158 | num=num, | |
|
159 | cnt=cnt, | |
|
160 | first_height=window['first_height'], | |
|
161 | number_of_samples=int(window['number_of_samples']), | |
|
162 | dh=dh | |
|
163 | ) | |
|
164 | ) | |
|
165 | cnt += 1 | |
|
166 | ||
|
167 | line_text += 'L{}_REFERENCE={}\n'.format( | |
|
168 | num, | |
|
169 | data['lines']['byId'][reference]['name'] | |
|
170 | ) | |
|
171 | ||
|
172 | elif line_type == 'sync': | |
|
173 | line_text = 'Line{}=Synchro\n'.format(num) | |
|
174 | ||
|
175 | elif line_type == 'flip': | |
|
176 | line_text = 'L{}_FLIP={}\n'.format( | |
|
177 | num, | |
|
178 | data['lines']['byId'][lines[n]]['params']['number_of_flips'] | |
|
179 | ) | |
|
180 | ||
|
181 | elif line_type == 'prog_pulses': | |
|
182 | periodic = data['lines']['byId'][lines[n]]['params']['periodic'] | |
|
183 | if periodic == '0': | |
|
184 | periodic = 'NO' | |
|
185 | else: | |
|
186 | periodic = 'YES' | |
|
187 | ||
|
188 | portions = data['lines']['byId'][lines[n]]['params']['params'] | |
|
189 | line_text = 'L{} Number Of Portions={}\n'.format(num, len(portions)) | |
|
190 | ||
|
191 | for i, portion in enumerate(portions): | |
|
192 | line_text += 'PORTION_BEGIN({cnt})={begin}\nPORTION_END({cnt})={end}\n'.format( | |
|
193 | cnt=i, | |
|
194 | begin=int(portion['begin']), | |
|
195 | end=int(portion['end']), | |
|
196 | ) | |
|
197 | ||
|
198 | line_text += 'L{} Portions IPP Periodic={}\n'.format(num, periodic) | |
|
199 | ||
|
200 | elif line_type == 'none': | |
|
201 | line_text = '' | |
|
202 | ||
|
203 | else: | |
|
204 | reference = data['lines']['byId'][lines[n]]['params']['TX_ref'] | |
|
205 | code_type = data['lines']['byId'][lines[n]]['params']['code'] | |
|
206 | codes = data['lines']['byId'][lines[n]]['params']['codes'] | |
|
207 | ||
|
208 | if num == 4: | |
|
209 | line_text = 'Code Type={}\n'.format(code_type) | |
|
210 | line_text += 'Number of Codes={}\nCode Width={}\n'.format( | |
|
211 | len(codes), | |
|
212 | len(codes[0]) | |
|
213 | ) | |
|
214 | cnt = 0 | |
|
215 | for code in codes: | |
|
216 | line_text += 'COD({})={}\n'.format(cnt, code) | |
|
217 | cnt += 1 | |
|
218 | else: | |
|
219 | line_text = 'Code Type (Line {})={}\n'.format(num, code_type) | |
|
220 | line_text += 'Number of Codes (Line {})={}\nCode Width (Line {})={}\n'.format( | |
|
221 | num, | |
|
222 | len(codes), | |
|
223 | num, | |
|
224 | len(codes[0]) | |
|
225 | ) | |
|
226 | cnt = 0 | |
|
227 | for code in codes: | |
|
228 | line_text += 'L{}_COD({})={}\n'.format(num,cnt, code) | |
|
229 | cnt += 1 | |
|
230 | ||
|
231 | line_text += 'L{}_REFERENCE={}\n'.format( | |
|
232 | num, | |
|
233 | data['lines']['byId'][reference]['name'] | |
|
234 | ) | |
|
235 | ||
|
236 | return line_text | |
|
237 | ||
|
238 | def create_jarsfiles(json_data): | |
|
239 | """ | |
|
240 | Function to create *.racp and *.jars files with json_data | |
|
241 | """ | |
|
242 | global EXPNAME | |
|
243 | ||
|
244 | data = json.loads(json_data) | |
|
245 | exp_id = data['experiments']['allIds'][0] | |
|
246 | experiment = data['experiments']['byId'][exp_id] | |
|
247 | name = experiment['name'] | |
|
248 | EXPNAME = name | |
|
249 | folder_name = os.path.join(PATH, name) | |
|
250 | print 'Experiment: ' + name + ' received...' | |
|
251 | if not os.path.exists(folder_name): | |
|
252 | os.makedirs(folder_name) | |
|
253 | if not os.path.exists(folder_name+'/DATA'): | |
|
254 | os.mkdir(folder_name+'/DATA') | |
|
255 | ||
|
256 | try: | |
|
257 | json_file = open(folder_name+'/'+name+'_jars.json', 'w') | |
|
258 | except: | |
|
259 | return 0, 'Error creating .json file' | |
|
260 | ||
|
261 | json_file.write(json_data) | |
|
262 | json_file.close() | |
|
263 | ||
|
264 | try: | |
|
265 | racp_file = open(folder_name+'/'+name+'_jars.racp', 'w') | |
|
266 | except: | |
|
267 | return 0, 'Error creating .racp file' | |
|
268 | ||
|
269 | conf_ids = data['configurations']['allIds'] | |
|
270 | ||
|
271 | rcs = [pk for pk in conf_ids \ | |
|
272 | if data['configurations']['byId'][pk]['device_type'] == 'rc'] | |
|
273 | if len(rcs) == 1: | |
|
274 | rc_id = rcs[0] | |
|
275 | rc_mix_id = 0 | |
|
276 | else: | |
|
277 | rcs = [pk for pk in conf_ids \ | |
|
278 | if data['configurations']['byId'][pk]['device_type'] == 'rc' and data['configurations']['byId'][pk]['mix'] == True] | |
|
279 | rc_mix_id = rcs[0] | |
|
280 | mix_parameters = data['configurations']['byId'][rc_mix_id]['parameters'].split('-') | |
|
281 | rc_id = mix_parameters[0].split('|')[0] | |
|
282 | ||
|
283 | jars_id = [pk for pk in conf_ids \ | |
|
284 | if data['configurations']['byId'][pk]['device_type'] == 'jars'][0] | |
|
285 | ||
|
286 | rc = data['configurations']['byId'][rc_id] | |
|
287 | jars = data['configurations']['byId'][jars_id] | |
|
288 | ||
|
289 | if rc_mix_id <> 0: | |
|
290 | rc_mix = data['configurations']['byId'][rc_mix_id] | |
|
291 | mix_text = '*******Mixed Experiment*******************\n' | |
|
292 | mix_text += 'Number of Experiments={}\n'.format(len(mix_parameters)) | |
|
293 | for i,param in enumerate(mix_parameters): | |
|
294 | pk, mode, op, delay, mask = param.split('|') | |
|
295 | mix_text += 'EXP({})={}\n'.format(i, data['configurations']['byId'][pk]['name']) | |
|
296 | mix_text += 'DELAY({})={}\n'.format(i, delay) | |
|
297 | mix_text += 'RELOJ={}\n'.format(int(data['configurations']['byId'][pk]['clock'])) | |
|
298 | mix_text += 'MIXER MODE={}_FLAG\n'.format(op) | |
|
299 | mix_text += 'MIXER MASK={}\n'.format(mask) | |
|
300 | mix_text += '*******System parameters******************\n' | |
|
301 | else: | |
|
302 | mix_text = '' | |
|
303 | ||
|
304 | exp_type = jars['exp_type'] | |
|
305 | if exp_type == 0: | |
|
306 | exp_type = 'EXP_RAW_DATA' | |
|
307 | else: | |
|
308 | exp_type = 'EXP_PROCESS_SPECTRA' | |
|
309 | ||
|
310 | racp_text = 'EXPERIMENT TYPE={}\nEXPERIMENT NAME={}\nHEADER VERSION=1103\n'.format( | |
|
311 | exp_type, | |
|
312 | name | |
|
313 | ) | |
|
314 | ||
|
315 | racp_text += '*****Radar Controller Parameters**********\n{}'.format(mix_text) | |
|
316 | if rc_mix_id == 0: | |
|
317 | racp_text += 'IPP={}\n'.format(float(rc['ipp'])) | |
|
318 | racp_text += 'NTX={}\n'.format(rc['ntx']) | |
|
319 | else: | |
|
320 | racp_text += 'IPP={}\n'.format(float(rc_mix['ipp'])) | |
|
321 | racp_text += 'NTX={}\n'.format(rc_mix['ntx']) | |
|
322 | ||
|
323 | racp_text += 'TXA={}\n'.format( | |
|
324 | data['lines']['byId'][rc['lines'][1]]['params']['pulse_width'] | |
|
325 | ) | |
|
326 | if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx': | |
|
327 | racp_text += 'TXB={}\n'.format( | |
|
328 | data['lines']['byId'][rc['lines'][2]]['params']['pulse_width'] | |
|
329 | ) | |
|
330 | idTR = data['lines']['byId'][rc['lines'][0]]['params']['TX_ref'] | |
|
331 | rangeTR = data['lines']['byId'][rc['lines'][0]]['params']['range'] | |
|
332 | ||
|
333 | if rangeTR != '0': | |
|
334 | racp_text += 'Pulse selection_TR={}\n'.format(rangeTR) | |
|
335 | elif idTR != '0': | |
|
336 | racp_text += 'Pulse selection_TR={}\n'.format( | |
|
337 | data['lines']['byId'][idTR]['name'][-1] | |
|
338 | ) | |
|
339 | rangeTXA = data['lines']['byId'][rc['lines'][1]]['params']['range'] | |
|
340 | if rangeTXA != '0': | |
|
341 | racp_text += 'Pulse selection_TXA={}\n'.format(rangeTXA) | |
|
342 | if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx': | |
|
343 | rangeTXB = data['lines']['byId'][rc['lines'][2]]['params']['range'] | |
|
344 | if rangeTXB != '0': | |
|
345 | racp_text += 'Pulse selection_TXB={}\n'.format(rangeTXB) | |
|
346 | for n in range(3, 6): | |
|
347 | racp_text += parse_line(n, data, rc['lines']) | |
|
348 | ||
|
349 | if data['lines']['byId'][rc['lines'][2]]['line_type'] == 'tx': | |
|
350 | taus = data['lines']['byId'][rc['lines'][2]]['params']['delays'].split(',') | |
|
351 | if taus != '0': | |
|
352 | racp_text += 'Number of Taus={}\n'.format(len(taus)) | |
|
353 | for n, tau in enumerate(taus): | |
|
354 | racp_text += 'TAU({})={}\n'.format(n, tau) | |
|
355 | ||
|
356 | racp_text += parse_line(6, data, rc['lines']) | |
|
357 | racp_text += 'SAMPLING REFERENCE=MIDDLE OF FIRST SUB-BAUD\n' | |
|
358 | racp_text += 'RELOJ={}\n'.format(int(rc['clock'])) | |
|
359 | racp_text += 'CLOCK DIVIDER={}\n'.format(int(rc['clock_divider'])) | |
|
360 | racp_text += 'TR_BEFORE={}\n'.format(rc['time_before']) | |
|
361 | racp_text += 'TR_AFTER={}\n'.format(rc['time_after']) | |
|
362 | racp_text += 'WINDOW IN LINE 5&6=NO\n' | |
|
363 | racp_text += '******System Parameters*******************\n' | |
|
364 | racp_text += 'Number of Cards={}\n'.format(jars['cards_number']) | |
|
365 | ||
|
366 | for i in range(jars['cards_number']): | |
|
367 | racp_text += 'Card({})={}\n'.format(i, i) | |
|
368 | ||
|
369 | channels = jars['channels'].split(',') | |
|
370 | ||
|
371 | if channels: | |
|
372 | racp_text += 'Number of Channels={}\n'.format(len(channels)) | |
|
373 | for i, channel in enumerate(channels): | |
|
374 | racp_text += 'Channel({})={}\n'.format(i, channel) | |
|
375 | ||
|
376 | if exp_type == 'EXP_RAW_DATA': | |
|
377 | racp_text += 'RAW DATA DIRECTORY={}\n'.format(os.path.join(folder_name, 'DATA')) | |
|
378 | else: | |
|
379 | racp_text += 'PROCESS DATA DIRECTORY={}\n'.format(os.path.join(folder_name, 'DATA')) | |
|
380 | ||
|
381 | if jars['create_directory']: | |
|
382 | racp_text += 'CREATE DIRECTORY PER DAY=YES'+'\n' | |
|
383 | else: | |
|
384 | racp_text += 'CREATE DIRECTORY PER DAY=NO'+'\n' | |
|
385 | ||
|
386 | if jars['include_expname']: | |
|
387 | racp_text += 'INCLUDE EXPNAME IN DIRECTORY=YES'+'\n' | |
|
388 | else: | |
|
389 | racp_text += 'INCLUDE EXPNAME IN DIRECTORY=NO'+'\n' | |
|
390 | ||
|
391 | racp_text += '******System Parameters*******************\n' | |
|
392 | racp_text += 'ADC Resolution=8\n' | |
|
393 | racp_text += 'PCI DIO BusWidth=32\n' | |
|
394 | ||
|
395 | if exp_type == 'EXP_RAW_DATA': | |
|
396 | racp_text += 'RAW DATA BLOCKS={}\n'.format(jars['raw_data_blocks']) | |
|
397 | spectra_text = '' | |
|
398 | else: | |
|
399 | racp_text += 'PROCESS DATA BLOCKS={}\n'.format(jars['raw_data_blocks']) # 26/12/2018 | |
|
400 | spectra_text = '------------------------------------------\n' | |
|
401 | ||
|
402 | if jars['fftpoints'] > 1: | |
|
403 | spectra_text += 'FFTPOINTS={}\n'.format(jars['fftpoints']) | |
|
404 | ||
|
405 | if jars['incohe_integr']: | |
|
406 | spectra_text += 'INCOHERENT INTEGRATIONS={}\n'.format(jars['incohe_integr']) | |
|
407 | ||
|
408 | if jars['save_ch_dc']: | |
|
409 | spectra_text += 'SAVE CHANNELS DC=YES\n' | |
|
410 | ||
|
411 | dum = jars['spectral'] | |
|
412 | ||
|
413 | if dum.endswith(','): | |
|
414 | dum = dum[:-1] | |
|
415 | spectral = json.loads('[{}]'.format(dum)) | |
|
416 | ||
|
417 | if spectral: | |
|
418 | spectra_text += '------------------------------------------\n' | |
|
419 | spectra_text += 'TOTAL SPECTRAL COMBINATIONS={}\n'.format(len(spectral)) | |
|
420 | for i, spc in enumerate(spectral): | |
|
421 | spectra_text += 'SPEC_COMB({})={},{}\n'.format(i, *spc) | |
|
422 | ||
|
423 | racp_text += '******Process Parameters******************\n' | |
|
424 | ||
|
425 | data_type = jars['data_type'] | |
|
426 | ||
|
427 | if data_type == 0: | |
|
428 | racp_text += 'DATATYPE=SHORT\n' | |
|
429 | elif data_type == 1: | |
|
430 | racp_text += 'DATATYPE=FLOAT\n' | |
|
431 | ||
|
432 | racp_text += 'DATA ARRANGE=CONTIGUOUS_CH\n' | |
|
433 | ||
|
434 | if jars['cohe_integr'] > 1: | |
|
435 | racp_text += 'COHERENT INTEGRATIONS={}\n'.format(jars['cohe_integr']) | |
|
436 | ||
|
437 | decode_text = '' | |
|
438 | decode_data = jars['decode_data'] | |
|
439 | if decode_data !=0: | |
|
440 | decode_text = 'DECODE DATA=YES\n' | |
|
441 | decode_text += 'DECODING TYPE={}\n'.format(DECODE_TYPE[decode_data]) | |
|
442 | if jars['post_coh_int'] == True: | |
|
443 | decode_text += 'POST COHERENT INTEGRATIONS=YES\n' | |
|
444 | decode_text += '------------------------------------------\n' | |
|
445 | ||
|
446 | racp_text += 'COHERENT INTEGRATION STRIDE={}\n'.format(jars['cohe_integr_str']) | |
|
447 | racp_text += '------------------------------------------\n' | |
|
448 | racp_text += 'ACQUIRED PROFILES={}\n'.format(jars['acq_profiles']) | |
|
449 | racp_text += 'PROFILES PER BLOCK={}\n'.format(jars['profiles_block']) | |
|
450 | racp_text += spectra_text | |
|
451 | racp_text += '------------------------------------------\n' | |
|
452 | racp_text += decode_text | |
|
453 | racp_text += 'BEGIN ON START=NO\n' | |
|
454 | racp_text += 'BEGIN_TIME={}\n'.format(experiment['start_time'][:-3]) | |
|
455 | racp_text += 'END_TIME={}\n'.format(experiment['end_time'][:-3]) | |
|
456 | racp_text += 'GENERATE ACQUISITION LINK=YES\n' | |
|
457 | racp_text += 'VIEW RAW DATA=YES\n' | |
|
458 | racp_text += 'REFRESH RATE=1\n' | |
|
459 | racp_text += '------------------------------------------\n' | |
|
460 | racp_text += 'SEND STATUS TO FTP=YES\n' | |
|
461 | racp_text += 'FTP SERVER=jro.igp.gob.pe\n' | |
|
462 | racp_text += 'FTP USER=wmaster\n' | |
|
463 | racp_text += 'FTP PASSWD=PKQLX20\n' | |
|
464 | racp_text += 'FTP DIR=/users/database/on-line/\n' | |
|
465 | racp_text += 'FTP FILE=status.txt\n' | |
|
466 | racp_text += 'FTP INTERVAL={}\n'.format(jars['ftp_interval']) | |
|
467 | racp_text += 'SAVE STATUS AND BLOCK=YES\n' | |
|
468 | racp_text += 'GENERATE RTI=YES\n' | |
|
469 | racp_text += 'RTI Inc.Int.=1\n' | |
|
470 | racp_text += 'SEND RTI AND BLOCK=YES\n' | |
|
471 | racp_text += '------------------------------------------\n' | |
|
472 | racp_text += 'COMPORT CONFIG=Com1 CBR_9600 TWOSTOPBITS NOPARITY\n' | |
|
473 | racp_text += 'JAM CONFIGURE FILE=dmasg_pprofiles_pch_64_pdigi_6clk.jam\n' | |
|
474 | racp_text += 'ACQUISITION SYSTEM=JARS\n' | |
|
475 | racp_text += '************JARS CONFIGURATION PARAMETERS************\n' | |
|
476 | ||
|
477 | #-------------------------JARS FILTER--------------------------------------- | |
|
478 | filter_parms = jars['filter_parms'] | |
|
479 | if filter_parms.__class__.__name__ == 'unicode': | |
|
480 | filter_parms = eval(filter_parms) | |
|
481 | elif filter_parms.__class__.__name__ == 'str': | |
|
482 | filter_parms = eval(filter_parms) | |
|
483 | if filter_parms.__class__.__name__ == 'str': | |
|
484 | filter_parms = eval(filter_parms) | |
|
485 | ||
|
486 | try: | |
|
487 | fclock = float(filter_parms['clock']) | |
|
488 | fch = float(filter_parms['frequency']) | |
|
489 | m_dds = float(filter_parms['multiplier']) | |
|
490 | M_CIC2 = float(filter_parms['cic_2']) | |
|
491 | M_CIC5 = float(filter_parms['cic_5']) | |
|
492 | M_RCF = float(filter_parms['fir']) | |
|
493 | except: | |
|
494 | fclock = eval(filter_parms['clock']) | |
|
495 | fch = eval(filter_parms['frequency']) | |
|
496 | m_dds = eval(filter_parms['multiplier']) | |
|
497 | M_CIC2 = eval(filter_parms['cic_2']) | |
|
498 | M_CIC5 = eval(filter_parms['cic_5']) | |
|
499 | M_RCF = eval(filter_parms['fir']) | |
|
500 | ||
|
501 | filter_text = 'Loading\n' | |
|
502 | filter_text += 'Impulse file found -> C:\jars\F1MHZ_8_MATCH.imp\n' | |
|
503 | filter_text += 'Autoscale off\n' | |
|
504 | filter_text += 'Initialize Printer Port\n' | |
|
505 | filter_text += 'Chip Hardware Reset\n' | |
|
506 | filter_text += '300h -> 1\n' | |
|
507 | filter_text += '301h -> 6\n' | |
|
508 | filter_text += '302h -> 11111111111111111111111111111111\n' | |
|
509 | ||
|
510 | if abs(fch) < (fclock/2): | |
|
511 | nco = (2**32)*((fch/fclock))#%1) | |
|
512 | nco_i = long(nco) | |
|
513 | else: | |
|
514 | nco = (2**32)*(fclock-fch)/(fclock) | |
|
515 | nco_i = long(nco) | |
|
516 | ||
|
517 | filter_text += '303h -> {}\n'.format(nco_i) | |
|
518 | filter_text += '304h -> 0\n' | |
|
519 | ||
|
520 | input_level = 1 | |
|
521 | S_CIC2 = math.ceil(math.log((M_CIC2**2)*input_level)/math.log(2)) | |
|
522 | if S_CIC2 < 0: | |
|
523 | S_CIC2 = 0 | |
|
524 | if S_CIC2 > 7: | |
|
525 | S_CIC2 = 7 | |
|
526 | ||
|
527 | S_CIC2 = float(filter_parms['scale_cic_2']) ## 19/12/18 | |
|
528 | ||
|
529 | filter_text += '305h -> {}\n'.format(int(S_CIC2)) | |
|
530 | filter_text += '306h -> {}\n'.format(int(M_CIC2-1)) | |
|
531 | ||
|
532 | OL_CIC2 = input_level/(2.0**S_CIC2) | |
|
533 | ||
|
534 | S_CIC5 = math.ceil(math.log((M_CIC5**5)*OL_CIC2)/math.log(2))-5 | |
|
535 | if S_CIC5 < 0: | |
|
536 | S_CIC5 = 0 | |
|
537 | if S_CIC5 > 7: | |
|
538 | S_CIC5 = 7 | |
|
539 | ||
|
540 | OL_CIC5 = ((M_CIC5**5)/(2**(S_CIC5+5)))*OL_CIC2 | |
|
541 | S_CIC5 = float(filter_parms['scale_cic_5']) #19/12/18 | |
|
542 | ||
|
543 | filter_text += '307h -> {}\n'.format(int(S_CIC5)) | |
|
544 | filter_text += '308h -> {}\n'.format(int(M_CIC5-1)) | |
|
545 | ||
|
546 | Gain = 1 | |
|
547 | S_RCF = int(4.0-math.log(Gain)/math.log(2)) | |
|
548 | if S_RCF < 0: | |
|
549 | S_RCF = 0 | |
|
550 | if S_RCF > 7: | |
|
551 | S_RCF = 7 | |
|
552 | ||
|
553 | S_RCF = int(float(filter_parms['scale_fir'])) #19/12/18 | |
|
554 | ||
|
555 | filter_text += '309h -> {}\n'.format(S_RCF) | |
|
556 | filter_text += '30Ah -> {}\n'.format(int(M_RCF-1)) | |
|
557 | ||
|
558 | Offset = 0 | |
|
559 | filter_text += '30Bh -> {}\n'.format(Offset) | |
|
560 | ||
|
561 | ntaps = int(M_RCF) | |
|
562 | ntaps = int(float(filter_parms['number_taps'])) #19/12/18 | |
|
563 | ||
|
564 | filter_text += '30Ch -> {}\n'.format(ntaps-1) | |
|
565 | filter_text += '30Dh -> 0\n' | |
|
566 | ||
|
567 | fsamp = fclock/(M_CIC2*M_CIC5*M_RCF) | |
|
568 | ||
|
569 | tap = int(2.0*((2**19)-1)/(ntaps*OL_CIC5)) | |
|
570 | #tap = int(filter_parms['taps'].split(',')[0]) #19/12/18 | |
|
571 | tap = filter_parms['taps'].split(',') #19/12/18 | |
|
572 | ||
|
573 | numtaps = len(tap) # 23/01/19 included to know the number of filter coefficients | |
|
574 | ||
|
575 | for p in range(0, numtaps):#for p in range(0, ntaps): | |
|
576 | #filter_text += ' {} -> {}\n'.format(p, int(math.ceil(tap)))#filter_text += ' {} -> {}\n'.format(p, int(math.ceil(hn))) | |
|
577 | filter_text += ' {} -> {}\n'.format(p, int(math.ceil(int(tap[p]))))#filter_text += ' {} -> {}\n'.format(p, int(math.ceil(hn))) | |
|
578 | ||
|
579 | filter_text += 'RCF Gain -> .999996185302734\n' | |
|
580 | filter_text += 'Chip Restarted:\n' | |
|
581 | filter_text += '300h -> 1\n' | |
|
582 | filter_text += '300h -> 0' | |
|
583 | ||
|
584 | if fsamp >= 1: # sampling rate of 1 MHz or higher [24/01/19] | |
|
585 | filter_name = '{}_{}MHz_clock{}MHz_F{}MHz_{}_{}_{}.jars'.format( | |
|
586 | abs(fch), | |
|
587 | int((abs(fch)-abs(int(fch)))*1000), | |
|
588 | fclock, | |
|
589 | round(fsamp,3), | |
|
590 | int(M_CIC2), | |
|
591 | int(M_CIC5), | |
|
592 | int(M_RCF) | |
|
593 | ) | |
|
594 | ||
|
595 | if fsamp < 1: # sampling rate below 1 MHz [24/01/19] | |
|
596 | filter_name = '{}_{}MHz_clock{}MHz_F{}KHz_{}_{}_{}.jars'.format( | |
|
597 | abs(fch), | |
|
598 | int((abs(fch)-abs(int(fch)))*1000), | |
|
599 | fclock, | |
|
600 | round(fsamp,3)*1e3, | |
|
601 | int(M_CIC2), | |
|
602 | int(M_CIC5), | |
|
603 | int(M_RCF) | |
|
604 | ) | |
|
605 | ||
|
606 | jars_file = open(os.path.join(folder_name, filter_name), 'wb') | |
|
607 | jars_file.write(filter_text) | |
|
608 | jars_file.close() | |
|
609 | racp_text += 'JARS_FILTER={}\n'.format(os.path.join(folder_name, filter_name)) | |
|
610 | racp_text += 'MARK WIDTH=2\n' | |
|
611 | racp_text += 'GENERATE OWN SAMPLING WINDOW=NO\n' | |
|
612 | ||
|
613 | if jars['save_data']: | |
|
614 | racp_text += 'SAVE DATA=YES\n' | |
|
615 | else: | |
|
616 | racp_text += 'SAVE DATA=NO\n' | |
|
617 | ||
|
618 | racp_text += 'RC_STOP_SEQUENCE=255,0\n' | |
|
619 | racp_text += 'RC_START_SEQUENCE=255,24\n' | |
|
620 | ||
|
621 | racp_file.write(racp_text) | |
|
622 | racp_file.close() | |
|
623 | ||
|
624 | return 1, racp_file.name | |
|
625 | ||
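For reference, the register arithmetic that create_jarsfiles() writes into the .jars filter file (the 303h NCO tuning word and the CIC2/CIC5 scale registers) can be followed with concrete numbers. The sketch below is illustrative only: the clock, channel frequency and decimation factors are assumed values, not taken from a real experiment, and in this revision the computed scales are afterwards overridden by the scale_cic_2, scale_cic_5 and scale_fir entries of filter_parms.

# Minimal sketch (not part of this changeset) of the .jars register arithmetic.
# All input values below are assumptions chosen for illustration.
import math

fclock = 60.0                              # ADC clock in MHz (assumed)
fch = 49.92                                # channel frequency in MHz (assumed)
M_CIC2, M_CIC5, M_RCF = 10.0, 6.0, 10.0    # decimation factors (assumed)

# NCO tuning word (register 303h): 32-bit fraction of the clock frequency
if abs(fch) < (fclock / 2):
    nco_i = long((2 ** 32) * (fch / fclock))
else:
    nco_i = long((2 ** 32) * (fclock - fch) / fclock)   # 721554505 for these values

# CIC2 scale (register 305h), clamped to 0..7 as in the script
input_level = 1
S_CIC2 = math.ceil(math.log((M_CIC2 ** 2) * input_level) / math.log(2))
S_CIC2 = min(max(S_CIC2, 0), 7)            # 7 for these values
OL_CIC2 = input_level / (2.0 ** S_CIC2)

# CIC5 scale (register 307h), same clamping
S_CIC5 = math.ceil(math.log((M_CIC5 ** 5) * OL_CIC2) / math.log(2)) - 5
S_CIC5 = min(max(S_CIC5, 0), 7)            # 1 for these values

# Output sample rate after the three decimation stages
fsamp = fclock / (M_CIC2 * M_CIC5 * M_RCF) # 0.1 MHz, so the KHz-named filter file is used

print nco_i, int(S_CIC2), int(S_CIC5), fsamp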
|
626 | @app.route('/status/') | |
|
627 | def status(): | |
|
628 | ''' | |
|
629 | 0 : Not configured/running | |
|
630 | 3 : Running and acquiring data | |
|
631 | 2 : Configured | |
|
632 | 1 : Connected | |
|
633 | ''' | |
|
634 | ||
|
635 | name = request.args.get('name', None) | |
|
636 | global EXPNAME | |
|
637 | EXPNAME = name | |
|
638 | ||
|
639 | if name is None: | |
|
640 | return jsonify({ | |
|
641 | 'status': 1, | |
|
642 | 'message': 'JARS Connected, missing experiment' | |
|
643 | }) | |
|
644 | else: | |
|
645 | racp_file = os.path.join(PATH, name, '{}_jars.racp'.format(name)) | |
|
646 | ||
|
647 | if name and not os.path.exists(racp_file): | |
|
648 | return jsonify({ | |
|
649 | 'status': 1, | |
|
650 | 'message': 'JARS not configured' | |
|
651 | }) | |
|
652 | elif os.path.exists(racp_file) and hasattr(PROC, 'pid'): | |
|
653 | if PROC.poll() is None: | |
|
654 | status = 3 | |
|
655 | msg = 'Process: PID={}, OUT={}'.format( | |
|
656 | PROC.pid, | |
|
657 | OUT.readline() | |
|
658 | ) | |
|
659 | else: | |
|
660 | status = 2 | |
|
661 | msg = 'JARS Configured' | |
|
662 | else: | |
|
663 | status = 2 | |
|
664 | msg = 'JARS Configured' | |
|
665 | ||
|
666 | return jsonify({ | |
|
667 | 'status': status, | |
|
668 | 'message': msg | |
|
669 | }) | |
|
670 | ||
|
671 | @app.route('/start/', methods=['POST']) | |
|
672 | def start(): | |
|
673 | ''' | |
|
674 | ''' | |
|
675 | ||
|
676 | global PROC | |
|
677 | global OUT | |
|
678 | global EXPNAME | |
|
679 | ||
|
680 | name = request.json['name'] | |
|
681 | EXPNAME = name | |
|
682 | racp_file = os.path.join(PATH, name, '{}_jars.racp'.format(name)) | |
|
683 | if hasattr(PROC, 'pid') and PROC.poll() is None: | |
|
684 | status = 3 | |
|
685 | msg = 'JARS already running' | |
|
686 | elif os.path.exists(racp_file): | |
|
687 | PROC = Popen([EXE, '-rf', racp_file, OPT], stdout=PIPE) | |
|
688 | OUT = StdoutReader(PROC.stdout, name) | |
|
689 | status = 3 | |
|
690 | msg = 'JARS starting ok' | |
|
691 | elif not os.path.exists(racp_file): | |
|
692 | status = 1 | |
|
693 | msg = 'Experiment: {} not configured'.format(name) | |
|
694 | ||
|
695 | return jsonify({ | |
|
696 | 'status': status, | |
|
697 | 'message': msg | |
|
698 | }) | |
|
699 | ||
|
700 | @app.route('/stop/', methods=['POST']) | |
|
701 | def stop(): | |
|
702 | ''' | |
|
703 | ''' | |
|
704 | ||
|
705 | global PROC | |
|
706 | ||
|
707 | if hasattr(PROC, 'pid'): | |
|
708 | if PROC.poll() is None: | |
|
709 | OUT.save() | |
|
710 | PROC.kill() | |
|
711 | status = 2 | |
|
712 | msg = 'JARS stopped OK' | |
|
713 | else: | |
|
714 | status = 1 | |
|
715 | msg = 'JARS not running' | |
|
716 | else: | |
|
717 | status = 1 | |
|
718 | msg = 'JARS not running' | |
|
719 | ||
|
720 | return jsonify({ | |
|
721 | 'status': status, | |
|
722 | 'message': msg | |
|
723 | }) | |
|
724 | ||
|
725 | @app.route('/write/', methods=['POST']) | |
|
726 | def write(): | |
|
727 | ''' | |
|
728 | ''' | |
|
729 | status = 1 | |
|
730 | json_data = json.loads(request.json) | |
|
731 | conf_ids = json_data['configurations']['allIds'] | |
|
732 | for pk in conf_ids: | |
|
733 | if json_data['configurations']['byId'][pk]['device_type'] == 'jars': | |
|
734 | data = json_data['configurations']['byId'][pk]['filter_parms'] | |
|
735 | ||
|
736 | if request.json: | |
|
737 | try: | |
|
738 | ret, racp = create_jarsfiles(request.json) | |
|
739 | except Exception as e: | |
|
740 | ret = 0 | |
|
741 | msg = str(e) | |
|
742 | else: | |
|
743 | msg = 'Missing POST data' | |
|
744 | ||
|
745 | if ret == 1: | |
|
746 | status = 2 | |
|
747 | msg = 'JARS configured OK' | |
|
748 | else: | |
|
749 | msg = ret | |
|
750 | ||
|
751 | return jsonify({ | |
|
752 | 'status': status, | |
|
753 | 'message': msg | |
|
754 | }) | |
|
755 | ||
|
756 | ||
|
757 | def restart(): | |
|
758 | ''' | |
|
759 | ''' | |
|
760 | ||
|
761 | global EXPNAME | |
|
762 | #ip_host = '10.10.10.99' | |
|
763 | port = 5000 | |
|
764 | route_stop = 'http://'+IPHOST+':'+str(port)+'/stop/' | |
|
765 | stop = requests.post(route_stop, data={}) | |
|
766 | print 'Restarting...' | |
|
767 | time.sleep(3) | |
|
768 | route_start = 'http://'+IPHOST+':'+str(port)+'/start/' | |
|
769 | start = requests.post(route_start, json={'name':EXPNAME}) | |
|
770 | ||
|
771 | return | |
|
772 | ||
|
773 | @app.route('/get_log/') | |
|
774 | def get_log(): | |
|
775 | ''' | |
|
776 | This function sends Restarting Report.txt of the Experiment. | |
|
777 | ''' | |
|
778 | ||
|
779 | name = request.args.get('name', None) | |
|
780 | global EXPNAME | |
|
781 | EXPNAME = name | |
|
782 | ||
|
783 | if name is None: | |
|
784 | return jsonify({ | |
|
785 | 'status': 1, | |
|
786 | 'message': 'JARS Connected, missing experiment' | |
|
787 | }) | |
|
788 | else: | |
|
789 | try: | |
|
790 | rr_file = os.path.join(PATH, name, 'Restarting Report.txt') | |
|
791 | return send_file(rr_file, attachment_filename='Restarting Report.txt') | |
|
792 | except Exception as e: | |
|
793 | return jsonify({ | |
|
794 | 'status': 1, | |
|
795 | 'message': str(e) | |
|
796 | }) | |
|
797 | ||
|
798 | if __name__ == '__main__': | |
|
799 | app.run(debug=True, host='0.0.0.0', port=5000) |
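Taken together, the script exposes a small HTTP control API: /write/ generates the .racp and .jars files from the experiment JSON, /start/ launches JROAcquisitionSoftware.exe, /status/ reports the state codes listed in status(), /stop/ kills the process, and /get_log/ returns the Restarting Report. A minimal client sketch follows; it is not part of this changeset, and the base URL, experiment name and experiment.json file are placeholders (IPHOST is masked as 10.10.10.X in the source and the server listens on Flask's port 5000).

# Minimal client sketch for the control API above (illustrative assumptions only).
import requests

BASE = 'http://10.10.10.X:5000'   # IPHOST placeholder as it appears in the source
NAME = 'my_experiment'            # hypothetical experiment name

# /write/ expects the experiment JSON as a string: the handler itself calls
# json.loads(request.json) and passes the raw string to create_jarsfiles().
json_string = open('experiment.json').read()
print requests.post(BASE + '/write/', json=json_string).json()

# Start the acquisition from the generated .racp file.
print requests.post(BASE + '/start/', json={'name': NAME}).json()

# Poll the state (1=connected, 2=configured, 3=running, per the status() docstring).
print requests.get(BASE + '/status/', params={'name': NAME}).json()

# Stop the acquisition and download the restart log.
print requests.post(BASE + '/stop/').json()
log = requests.get(BASE + '/get_log/', params={'name': NAME})
open('Restarting Report.txt', 'wb').write(log.content)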