@@ -0,0 +1,200 | |||
|
1 | from django.core.management.base import BaseCommand | |
|
2 | from apps.main.models import Experiment | |
|
3 | from django.shortcuts import get_object_or_404 | |
|
4 | import os, fnmatch | |
|
5 | import time | |
|
6 | from datetime import datetime | |
|
7 | import h5py | |
|
8 | import numpy as np | |
|
9 | ||
|
class Command(BaseCommand):
    """
    Check pedestal acquisition each 10 minutes.
    Example:
        manage.py restart_pedestal
    """

    def handle(self, *args, **options):
        # Delegate to the module-level routine; passing ``self`` gives it
        # access to this command's stdout for progress messages.
        restart_pedestal(self)
|
22 | ||
|
def check_experiment():
    """Return True when at least one Experiment is running (status == 2).

    Uses ``.exists()`` so the database answers with a cheap EXISTS query
    instead of materializing the whole queryset just to measure its length.
    """
    return Experiment.objects.filter(status=2).exists()
|
28 | ||
|
def pedestal_start(self, id_exp):
    """Start the pedestal device of experiment ``id_exp``.

    Only acts when ``id_exp`` is still the currently running experiment
    (status == 2); otherwise it is a no-op.
    """
    running = Experiment.objects.filter(status=2)
    active_id = running[0].pk if len(running) > 0 else False

    if active_id and active_id == id_exp:
        exp = get_object_or_404(Experiment, pk=id_exp)
        # Timestamped run name, e.g. "myexp-R@2022-01-01T10-20-30".
        stamp = datetime.now().strftime('%Y-%m-%dT%H-%M-%S')
        exp.pedestal.start_device(name_experiment='{}-R@{}'.format(exp.name, stamp))
        self.stdout.write(f'"{exp.name}" experiment: Pedestal acquisition was restarted')
|
41 | ||
|
def pedestal_stop(self, id_exp):
    """Stop the pedestal device of experiment ``id_exp``.

    Only acts when ``id_exp`` is still the currently running experiment
    (status == 2); otherwise it is a no-op.
    """
    running = Experiment.objects.filter(status=2)
    active_id = running[0].pk if len(running) > 0 else False

    if active_id and active_id == id_exp:
        exp = get_object_or_404(Experiment, pk=id_exp)
        exp.pedestal.stop_device()
        self.stdout.write(f'"{exp.name}" experiment: Pedestal acquisition "{exp.name}" was stopped')
|
53 | ||
|
def hdf5_list_content(get_file):
    """Return the dataset's values as a flat Python list.

    ``get_file`` is an array-like object (h5py dataset / numpy array) with
    ``ndim`` and ``shape``. Returns False unless it is 1-D with at least
    one element.
    """
    # Validate shape before materializing the data.
    if get_file.ndim != 1 or get_file.shape[0] < 1:
        return False
    # Elements come back as plain Python floats.
    return np.array(get_file).tolist()
|
64 | ||
|
def hdf5_read(file):
    """Read the last 10 speed samples per axis from an HDF5 file.

    Returns {"ele_speed": ..., "azi_speed": ...}; each value is a list of up
    to 10 floats, or False when the dataset is missing or not a 1-D table.
    """
    dataspeed = {"ele_speed": False, "azi_speed": False}

    # Open the file once instead of re-opening it for every dataset.
    with h5py.File(file, 'r') as hdf:
        for key in dataspeed:
            dataset = hdf.get('Data' + '/' + key)
            if dataset is not None:
                content = hdf5_list_content(dataset)
                # hdf5_list_content returns False for non-1-D/empty data;
                # slicing False would raise TypeError, so guard first.
                if content:
                    # Keep only the 10 most recent values.
                    dataspeed[key] = content[-10:]

    return dataspeed
|
76 | ||
|
def count_data(last_position):
    """Count position files in ``last_position`` and pick the second-newest.

    Returns (count, path): ``count`` is the number of "pos@*.h5" entries and
    ``path`` is the second-to-last match in name order (the last fully
    written file), or False when fewer than two matches exist.
    """
    matches = [
        os.path.join(last_position, entry)
        for entry in sorted(os.listdir(last_position))
        if fnmatch.fnmatch(entry, "pos@*.h5")
    ]

    second_newest = matches[-2] if len(matches) > 1 else False
    return len(matches), second_newest
|
95 | ||
|
def response_data(datadir, old_path_datetime, old_position, new_position):
    """Sample the position-file count twice, 65 seconds apart.

    Looks for the newest datetime-named directory under ``datadir/position``
    and counts its "pos@*.h5" files before and after the wait, so the caller
    can tell whether acquisition is still producing files.

    Returns (path_position, path_datetime, old_position, new_position,
    read_files) where ``path_position`` is False when the position directory
    itself is missing and ``read_files`` is the second-newest data file.
    """
    path_position = True
    path_datetime = False
    read_files = False

    rootdir = os.path.join(datadir, 'position')
    if not os.path.isdir(rootdir):
        path_position = False
    else:
        path_datetime = path_data(os.path.join(datadir, 'position'))
        if path_datetime:
            if not old_path_datetime or path_datetime != old_path_datetime:
                old_position, read_files = count_data(path_datetime)
                time.sleep(65)  # wait one acquisition period before recounting
                new_position, read_files = count_data(path_datetime)
            else:
                # Same directory as last call: just wait out the period.
                time.sleep(65)

    return path_position, path_datetime, old_position, new_position, read_files
|
116 | ||
|
def path_data(rootdir):
    """Return the newest subdirectory of ``rootdir`` named like an hourly
    datetime stamp ("%Y-%m-%dT%H-00-00"), or False when none exists.
    """
    candidates = []
    for it in os.scandir(rootdir):
        if it.is_dir():
            try:
                # it.name is the basename regardless of path separator
                # (the original split on "/" which breaks on Windows).
                datetime.strptime(it.name, "%Y-%m-%dT%H-00-00")
            except ValueError:
                # Not a datetime-named directory; skip it.
                continue
            candidates.append(it.path)

    # The stamp format sorts chronologically, so the lexicographic max is
    # the newest. Replaces sorted(reverse=True)[0] under a bare except.
    return max(candidates) if candidates else False
|
132 | ||
|
def check_count(datadir):
    """Check whether position files are accumulating and the pedestal is moving.

    Samples the file count via response_data (which waits 65 s between its
    two counts) and, when the count grew, reads the last speed samples from
    the second-newest HDF5 file.

    Returns (path_position, path_datetime, validation, speed):
    - path_position: False when datadir/position is missing
    - path_datetime: newest datetime-named subdirectory (or False)
    - validation:    True when the file count increased between samples
    - speed:         per-axis flags, True when any recent sample exceeds 0.1
    """
    old_numbers = 0
    new_numbers = 0
    validation = False
    path_datetime = False
    speed = {"ele_speed": False, "azi_speed": False}

    path_position, path_datetime, old_numbers, new_numbers, read_files = response_data(datadir, path_datetime, old_numbers, new_numbers)

    # Up to two attempts: re-sample once if the first pass saw no new files.
    for u in range(2):
        if new_numbers > old_numbers:
            validation = True

            # read_files is the second-newest position file; inspect its speeds.
            data = hdf5_read(read_files)
            for key, value in data.items():
                try:
                    # Axis counts as moving when any recent sample exceeds 0.1.
                    if not max(data[key]) <= 0.1:
                        speed[key] = True
                except:
                    # data[key] may be False (missing/invalid dataset), which
                    # makes max() raise; treat that axis as not moving.
                    pass
            break
        else:
            if u < 1:
                path_position, path_datetime, old_numbers, new_numbers, read_files = response_data(datadir, path_datetime, old_numbers, new_numbers)

    return path_position, path_datetime, validation, speed
|
159 | ||
|
def _cycle_pedestal(self, id_exp):
    """Stop the pedestal, wait for it to settle, then start it again."""
    pedestal_stop(self, id_exp)
    time.sleep(14)
    #pedestal_reset(self, id_exp)
    #time.sleep(2)
    pedestal_start(self, id_exp)

def restart_pedestal(self):
    """Restart the running experiment's pedestal when it looks stalled.

    A stall is either no growth in the position-file count or near-zero
    speed on both axes. ``self`` is the management Command, used for stdout.
    """
    if not check_experiment():
        return

    all_status = Experiment.objects.filter(status=2)
    id_exp = all_status[0].pk
    # Translate the NAS path recorded on the experiment into the path
    # mounted inside this container.
    datadir_exp = all_status[0].reception_rx.datadir
    datadir_exp = datadir_exp.replace(os.environ.get('EXPOSE_NAS', '/DATA_RM/DATA'), '/data')
    datadir_exp = datadir_exp.replace('/rawdata', '')

    path_position, path_datetime, validation, speed = check_count(datadir_exp)

    if not path_position:
        self.stdout.write('No such directory: position, fail!')
        return

    if validation:
        self.stdout.write('Acquisition pedestal is running')
        if speed['ele_speed'] or speed['azi_speed']:
            self.stdout.write('Pedestal speeds on Azimuth and Elevation are running')
        else:
            # Files are growing but neither axis is moving: report and cycle.
            for key, value in speed.items():
                if not value:
                    self.stdout.write(f'Speed on {key} is <= 0.1, retry')
            _cycle_pedestal(self, id_exp)
    else:
        # No file increment: report why, then cycle the device.
        if not path_datetime:
            self.stdout.write('No such directory with datetime format "%Y-%m-%dT%H-00-00", retry!')
        else:
            self.stdout.write('No file increment, retry')
        _cycle_pedestal(self, id_exp)
@@ -1,4 +1,5 | |||
|
1 | 1 | #General settings |
|
2 | TZ=America/Lima | |
|
2 | 3 | LC_ALL=C.UTF-8 |
|
3 | 4 | SIRM_SITE=<SIRM SITE> |
|
4 | 5 | PROC_SITE=<PROC SITE> |
@@ -25,14 +26,14 PGDATA=/var/lib/postgresql/data | |||
|
25 | 26 | #Volumes - path |
|
26 | 27 | EXPOSE_SIRM=./volumes/sirm |
|
27 | 28 | EXPOSE_PROC=./volumes/proc |
|
28 | EXPOSE_SCHAIN=./volumes/schain | |
|
29 | 29 | EXPOSE_CAM=/path/to/cam |
|
30 | EXPOSE_SCHAIN=./volumes/schain | |
|
30 | 31 | EXPOSE_NAS=/path/to/nas_data |
|
31 | 32 | EXPOSE_PGDATA=/path/to/pg_data |
|
32 | 33 | EXPOSE_CERTS=/path/to/certs |
|
33 | 34 | EXPOSE_DHPARAM=/path/to/dhparam |
|
34 | 35 | |
|
35 | 36 | #Superuser settings |
|
36 |
SIRM_USER= |
|
|
37 |
SIRM_PASSWORD= |
|
|
38 |
SIRM_EMAIL= |
|
|
37 | SIRM_USER=***** | |
|
38 | SIRM_PASSWORD=******* | |
|
39 | SIRM_EMAIL=*****@igp.gob.pe No newline at end of file |
@@ -98,6 +98,7 services: | |||
|
98 | 98 | - POSTGRES_PORT_5432_TCP_PORT=${POSTGRES_PORT_5432_TCP_PORT} |
|
99 | 99 | - EXPOSE_NAS=${EXPOSE_NAS} |
|
100 | 100 | - PROC_SITE=${PROC_SITE} |
|
101 | - SCHAIN_SITE=${SCHAIN_SITE} | |
|
101 | 102 | - SIRM_USER=${SIRM_USER} |
|
102 | 103 | - SIRM_PASSWORD=${SIRM_PASSWORD} |
|
103 | 104 | - SIRM_EMAIL=${SIRM_EMAIL} |
@@ -113,10 +114,12 services: | |||
|
113 | 114 | - backend_sirm |
|
114 | 115 | labels: |
|
115 | 116 | ofelia.enabled: "true" |
|
116 |
ofelia.job-exec. |
|
|
117 |
ofelia.job-exec. |
|
|
118 |
ofelia.job-exec.restart- |
|
|
119 |
ofelia.job-exec.restart- |
|
|
117 | ofelia.job-exec.restart-reception.schedule: "0 1/5 * * * *" | |
|
118 | ofelia.job-exec.restart-reception.command: "python manage.py restart_reception" | |
|
119 | ofelia.job-exec.restart-pedestal.schedule: "0 2/10 * * * *" | |
|
120 | ofelia.job-exec.restart-pedestal.command: "python manage.py restart_pedestal" | |
|
121 | ofelia.job-exec.restart-experiment.schedule: "0 0 5 * * *" | |
|
122 | ofelia.job-exec.restart-experiment.command: "python manage.py restart_experiment" | |
|
120 | 123 | logging: |
|
121 | 124 | driver: "json-file" |
|
122 | 125 | options: |
@@ -12,4 +12,5 idna==3.3 | |||
|
12 | 12 | urllib3==1.26.9 |
|
13 | 13 | charset-normalizer==2.0.12 |
|
14 | 14 | certifi==2021.10.8 |
|
15 | soupsieve==2.3.1 No newline at end of file | |
|
15 | soupsieve==2.3.1 | |
|
16 | h5py==3.7.0 No newline at end of file |
@@ -5,9 +5,9 import time | |||
|
5 | 5 | |
|
6 | 6 | class Command(BaseCommand): |
|
7 | 7 | """ |
|
8 |
Restart experiment e |
|
|
8 | Restart experiment every night at 05:00 am. | |
|
9 | 9 | Example: |
|
10 |
manage.py restart_exp |
|
|
10 | manage.py restart_experiment | |
|
11 | 11 | """ |
|
12 | 12 | def handle(self, *args, **options): |
|
13 | 13 | restart_experiment(self) |
@@ -4,15 +4,20 from django.shortcuts import get_object_or_404 | |||
|
4 | 4 | import os, fnmatch |
|
5 | 5 | import time |
|
6 | 6 | from datetime import datetime |
|
7 | import requests | |
|
7 | 8 | |
|
8 | 9 | class Command(BaseCommand): |
|
9 | 10 | """ |
|
10 |
Check data acquisition each |
|
|
11 | Check data acquisition each 05 minutes. | |
|
11 | 12 | Example: |
|
12 |
manage.py |
|
|
13 | manage.py restart_reception | |
|
13 | 14 | """ |
|
14 | 15 | def handle(self, *args, **options): |
|
15 |
|
|
|
16 | #start = time.time() | |
|
17 | time.sleep(15) | |
|
18 | restart_acquisition(self) | |
|
19 | #end = time.time() | |
|
20 | #self.stdout.write(f'TIME: "{end - start}"') | |
|
16 | 21 | |
|
17 | 22 | def check_experiment(): |
|
18 | 23 | if len(Experiment.objects.filter(status=2)) > 0: |
@@ -32,6 +37,11 def acquisition_start(self, id_exp): | |||
|
32 | 37 | name = '{}-R@{}'.format(exp.name, datetime.now().strftime('%Y-%m-%dT%H-%M-%S')) |
|
33 | 38 | exp.reception_rx.start_device(name_experiment = name, restart = True) |
|
34 | 39 | self.stdout.write(f'"{exp.name}" experiment: Data acquisition was restarted') |
|
40 | self.stdout.write(f'Restarting schain...') | |
|
41 | ||
|
42 | r = requests.get('http://'+os.environ.get('SCHAIN_SITE', 'sophy-schain')+'/stop') | |
|
43 | time.sleep(1) | |
|
44 | r = requests.post('http://'+os.environ.get('SCHAIN_SITE', 'sophy-schain')+'/start', json={'name': exp.name}) | |
|
35 | 45 | |
|
36 | 46 | def acquisition_stop(self, id_exp): |
|
37 | 47 | all_status = Experiment.objects.filter(status=2) |
@@ -45,18 +55,36 def acquisition_stop(self, id_exp): | |||
|
45 | 55 | exp.reception_rx.stop_device() |
|
46 | 56 | self.stdout.write(f'"{exp.name}" experiment: Data acquisition "{exp.name}" was stopped') |
|
47 | 57 | |
|
48 |
def count_data( |
|
|
58 | def count_data(last_channel): | |
|
49 | 59 | pattern = "rf@*.h5" |
|
50 | rawdata = {'ch0': 0, 'ch1': 0} | |
|
60 | count = 0 | |
|
61 | list_channel = os.listdir(last_channel) | |
|
62 | ||
|
63 | for entry in sorted(list_channel): | |
|
64 | if fnmatch.fnmatch(entry, pattern): | |
|
65 | count = count + 1 | |
|
66 | return count | |
|
51 | 67 | |
|
52 | for key, value in rawdata.items(): | |
|
53 | last_channel = path_data(os.path.join(datadir, key)) | |
|
54 | if last_channel: | |
|
55 | list_channel = os.listdir(last_channel) | |
|
56 | for entry in sorted(list_channel): | |
|
57 | if fnmatch.fnmatch(entry, pattern): | |
|
58 | rawdata[key] = rawdata[key] + 1 | |
|
59 | return rawdata | |
|
68 | def response_data(datadir, old_channel, old_rawdata, new_rawdata, search): | |
|
69 | path_channels = {'ch0': True, 'ch1': True} | |
|
70 | channel = {'ch0': False, 'ch1': False} | |
|
71 | ||
|
72 | for key, value in path_channels.items(): | |
|
73 | rootdir = os.path.join(datadir, key) | |
|
74 | if os.path.isdir(rootdir): | |
|
75 | channel[key] = path_data(os.path.join(datadir, key)) | |
|
76 | if key in search: | |
|
77 | if channel[key]: | |
|
78 | if not old_channel[key] or channel[key] != old_channel[key]: | |
|
79 | old_rawdata[key] = count_data(channel[key]) | |
|
80 | time.sleep(1) | |
|
81 | new_rawdata[key] = count_data(channel[key]) | |
|
82 | else: | |
|
83 | time.sleep(1) | |
|
84 | else: | |
|
85 | path_channels[key] = False | |
|
86 | ||
|
87 | return path_channels, channel, old_rawdata, new_rawdata | |
|
60 | 88 | |
|
61 | 89 | def path_data(rootdir): |
|
62 | 90 | list_=[] |
@@ -74,35 +102,52 def path_data(rootdir): | |||
|
74 | 102 | except: |
|
75 | 103 | return False |
|
76 | 104 | |
|
77 |
def check_count(datadir |
|
|
78 | diff = {} | |
|
79 | numbers = count_data(datadir) | |
|
105 | def check_count(datadir): | |
|
106 | old_numbers = {'ch0': 0, 'ch1': 0} | |
|
107 | new_numbers = {'ch0': 0, 'ch1': 0} | |
|
108 | validation = {'ch0': False, 'ch1': False} | |
|
109 | channel = {'ch0': False, 'ch1': False} | |
|
80 | 110 | |
|
81 | for key, value in numbers.items(): | |
|
82 | if old_numbers[key] > numbers[key]: | |
|
83 | old_numbers[key] = 0 | |
|
84 | diff[key] = numbers[key] - old_numbers[key] | |
|
85 | return numbers, diff | |
|
111 | path_channels, channel, old_numbers, new_numbers = response_data(datadir, channel, old_numbers, new_numbers, ['ch0', 'ch1']) | |
|
86 | 112 | |
|
87 | def restart_experiment(self): | |
|
88 | old_numbers={'ch0': 0, 'ch1': 0} | |
|
89 | for count in range(5): | |
|
90 | time.sleep(1) | |
|
91 | if check_experiment(): | |
|
92 | all_status = Experiment.objects.filter(status=2) | |
|
93 | id_exp = all_status[0].pk | |
|
94 | datadir_exp = all_status[0].reception_rx.datadir | |
|
95 | datadir_exp = datadir_exp.replace(os.environ.get('EXPOSE_NAS', '/DATA_RM/DATA'), '/data') | |
|
96 | ||
|
97 | old_numbers, diff = check_count(datadir_exp, old_numbers) | |
|
98 | if diff['ch0'] > 0 and diff['ch1'] > 0: | |
|
99 | self.stdout.write(f'Data acquisition is running') | |
|
113 | for key, value in validation.items(): | |
|
114 | for _ in range(5): | |
|
115 | if new_numbers[key] > old_numbers[key]: | |
|
116 | validation[key] = True | |
|
117 | break | |
|
100 | 118 | else: |
|
101 | count = count + 1 | |
|
102 | if count == 5: | |
|
103 | acquisition_stop(self, id_exp) | |
|
104 | time.sleep(1) | |
|
105 | acquisition_start(self, id_exp) | |
|
106 | old_numbers={'ch0': 0, 'ch1': 0} | |
|
119 | path_channels, channel, old_numbers, new_numbers = response_data(datadir, channel, old_numbers, new_numbers, [key]) | |
|
120 | ||
|
121 | return path_channels, channel, validation | |
|
122 | ||
|
123 | def restart_acquisition(self): | |
|
124 | if check_experiment(): | |
|
125 | all_status = Experiment.objects.filter(status=2) | |
|
126 | id_exp = all_status[0].pk | |
|
127 | datadir_exp = all_status[0].reception_rx.datadir | |
|
128 | datadir_exp = datadir_exp.replace(os.environ.get('EXPOSE_NAS', '/DATA_RM/DATA'), '/data') | |
|
129 | ||
|
130 | path_channels, channel, validation = check_count(datadir_exp) | |
|
131 | ||
|
132 | if path_channels['ch0'] and path_channels['ch1']: | |
|
133 | # Execute the process | |
|
134 | if validation['ch0'] and validation['ch1']: | |
|
135 | self.stdout.write(f'Data acquisition is running') | |
|
136 | else: | |
|
137 | if not channel['ch0'] or not channel['ch1']: | |
|
138 | for key, value in channel.items(): | |
|
139 | if not value: | |
|
140 | self.stdout.write(f'No such directory with datetime format "%Y-%m-%dT%H-00-00": channel["{key}"], retry!') | |
|
107 | 141 | else: |
|
108 | self.stdout.write(f'An error ocurred while trying to read data acquisition, Retry!') No newline at end of file | |
|
142 | for key, value in validation.items(): | |
|
143 | if not value: | |
|
144 | self.stdout.write(f'No file increment: channel["{key}"]') | |
|
145 | ||
|
146 | acquisition_stop(self, id_exp) | |
|
147 | time.sleep(3) | |
|
148 | acquisition_start(self, id_exp) | |
|
149 | ||
|
150 | else: | |
|
151 | for key, value in path_channels.items(): | |
|
152 | if not value: | |
|
153 | self.stdout.write(f'No such directory: channel["{key}"], fail!') No newline at end of file |
General Comments 0
You need to be logged in to leave comments.
Login now