Update restart_reception and Add restart_pedestal
eynilupu
r443:8c926b45a918
@@ -0,0 +1,200
1 from django.core.management.base import BaseCommand
2 from apps.main.models import Experiment
3 from django.shortcuts import get_object_or_404
4 import os, fnmatch
5 import time
6 from datetime import datetime
7 import h5py
8 import numpy as np
9
10 class Command(BaseCommand):
11 """
12 Check pedestal acquisition every 10 minutes.
13 Example:
14 manage.py restart_pedestal
15 """
16 def handle(self, *args, **options):
17 #start = time.time()
18 #time.sleep(1)
19 restart_pedestal(self)
20 #end = time.time()
21 #self.stdout.write(f'TIME: "{end - start}"')
22
23 def check_experiment():
24 if len(Experiment.objects.filter(status=2)) > 0:
25 return True
26 else:
27 return False
28
29 def pedestal_start(self, id_exp):
30 all_status = Experiment.objects.filter(status=2)
31 check_id = False
32
33 if len(all_status) > 0:
34 check_id = all_status[0].pk
35
36 if check_id and check_id == id_exp:
37 exp = get_object_or_404(Experiment, pk=id_exp)
38 name = '{}-R@{}'.format(exp.name, datetime.now().strftime('%Y-%m-%dT%H-%M-%S'))
39 exp.pedestal.start_device(name_experiment=name)
40 self.stdout.write(f'"{exp.name}" experiment: Pedestal acquisition was restarted')
41
42 def pedestal_stop(self, id_exp):
43 all_status = Experiment.objects.filter(status=2)
44 check_id = False
45
46 if len(all_status) > 0:
47 check_id = all_status[0].pk
48
49 if check_id and check_id == id_exp:
50 exp = get_object_or_404(Experiment, pk=id_exp)
51 exp.pedestal.stop_device()
52 self.stdout.write(f'"{exp.name}" experiment: Pedestal acquisition was stopped')
53
54 def hdf5_list_content(get_file):
55 table_records = np.array(get_file).tolist()
56 table_dim = get_file.ndim
57 table_rows = get_file.shape[0]
58
59 if table_dim == 1 and table_rows >= 1:
60 #type(table_records[0]) -> float
61 return table_records
62 else:
63 return False
64
65 def hdf5_read(file):
66 dataspeed = {"ele_speed": False, "azi_speed": False}
67
68 for key, value in dataspeed.items():
69 with h5py.File(file, 'r') as hdf:
70 get = hdf.get('Data'+'/'+key)
71 if get is not None:
72 # keep the last 10 samples; fall back to an empty list if the dataset is not 1-D
73 dataspeed[key] = (hdf5_list_content(get) or [])[-10:]
74
75 return dataspeed
76
77 def count_data(last_position):
78 pattern = "pos@*.h5"
79 count = 0
80 list_data = []
81
82 list_position = os.listdir(last_position)
83
84 for entry in sorted(list_position):
85 if fnmatch.fnmatch(entry, pattern):
86 count = count + 1
87 list_data.append(os.path.join(last_position, entry))
88
89 if len(list_data) > 1:
90 list_data = list_data[-2]
91 else:
92 list_data = False
93
94 return count, list_data
95
96 def response_data(datadir, old_path_datetime, old_position, new_position):
97 path_position = True
98 path_datetime = False
99 read_files = False
100
101 rootdir = os.path.join(datadir, 'position')
102 if os.path.isdir(rootdir):
103 path_datetime = path_data(os.path.join(datadir, 'position'))
104
105 if path_datetime:
106 if not old_path_datetime or path_datetime != old_path_datetime:
107 old_position, read_files = count_data(path_datetime)
108 time.sleep(65)
109 new_position, read_files = count_data(path_datetime)
110 else:
111 time.sleep(65)
112 else:
113 path_position = False
114
115 return path_position, path_datetime, old_position, new_position, read_files
116
117 def path_data(rootdir):
118 list_=[]
119 for it in os.scandir(rootdir):
120 if it.is_dir():
121 try:
122 datetime.strptime(it.path.split("/")[-1], "%Y-%m-%dT%H-00-00")
123 list_.append(it.path)
124 except ValueError:
125 pass
126
127 list_ = sorted(list_, reverse=True)
128 try:
129 return list_[0]
130 except IndexError:
131 return False
132
133 def check_count(datadir):
134 old_numbers = 0
135 new_numbers = 0
136 validation = False
137 path_datetime = False
138 speed = {"ele_speed": False, "azi_speed": False}
139
140 path_position, path_datetime, old_numbers, new_numbers, read_files = response_data(datadir, path_datetime, old_numbers, new_numbers)
141
142 for u in range(2):
143 if new_numbers > old_numbers:
144 validation = True
145
146 data = hdf5_read(read_files)
147 for key, value in data.items():
148 try:
149 if max(data[key]) > 0.1:
150 speed[key] = True
151 except (TypeError, ValueError):
152 pass
153 break
154 else:
155 if u < 1:
156 path_position, path_datetime, old_numbers, new_numbers, read_files = response_data(datadir, path_datetime, old_numbers, new_numbers)
157
158 return path_position, path_datetime, validation, speed
159
160 def restart_pedestal(self):
161 if check_experiment():
162
163 all_status = Experiment.objects.filter(status=2)
164 id_exp = all_status[0].pk
165 datadir_exp = all_status[0].reception_rx.datadir
166 datadir_exp = datadir_exp.replace(os.environ.get('EXPOSE_NAS', '/DATA_RM/DATA'), '/data')
167 datadir_exp = datadir_exp.replace('/rawdata', '')
168
169 path_position, path_datetime, validation, speed = check_count(datadir_exp)
170 if path_position:
171 # Execute the process
172 if validation:
173 self.stdout.write('Pedestal acquisition is running')
174 if speed['ele_speed'] or speed['azi_speed']:
175 self.stdout.write('Pedestal is moving: azimuth and/or elevation speed is above 0.1')
176 else:
177 for key, value in speed.items():
178 if not value:
179 self.stdout.write(f'Speed on {key} is <= 0.1, retry')
180
181 pedestal_stop(self, id_exp)
182 time.sleep(14)
183 #pedestal_reset(self, id_exp)
184 #time.sleep(2)
185 pedestal_start(self, id_exp)
186
187 else:
188 if not path_datetime:
189 self.stdout.write(f'No such directory with datetime format "%Y-%m-%dT%H-00-00", retry!')
190 else:
191 self.stdout.write(f'No file increment, retry')
192
193 pedestal_stop(self, id_exp)
194 time.sleep(14)
195 #pedestal_reset(self, id_exp)
196 #time.sleep(2)
197 pedestal_start(self, id_exp)
198
199 else:
200 self.stdout.write(f'No such directory: position, fail!')
\ No newline at end of file
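For reference, a minimal standalone sketch (not part of the commit) of the speed check this command performs: open the newest pos@*.h5 position file and test whether the last 10 samples of the 1-D Data/ele_speed and Data/azi_speed datasets exceed the 0.1 threshold. The /data/position path is illustrative.

import glob
import os

import h5py

hours = sorted(glob.glob('/data/position/*'))                  # "%Y-%m-%dT%H-00-00" folders
files = sorted(glob.glob(os.path.join(hours[-1], 'pos@*.h5')))
last_file = files[-2] if len(files) > 1 else files[-1]         # second-to-last file, as in count_data()

with h5py.File(last_file, 'r') as hdf:
    for key in ('ele_speed', 'azi_speed'):
        dataset = hdf.get('Data/' + key)
        if dataset is not None:
            moving = max(dataset[-10:]) > 0.1                  # same 0.1 threshold as check_count()
            print(key, 'moving' if moving else 'stalled (<= 0.1)')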
@@ -1,4 +1,5
 #General settings
+TZ=America/Lima
 LC_ALL=C.UTF-8
 SIRM_SITE=<SIRM SITE>
 PROC_SITE=<PROC SITE>
@@ -25,14 +26,14 PGDATA=/var/lib/postgresql/data
 #Volumes - path
 EXPOSE_SIRM=./volumes/sirm
 EXPOSE_PROC=./volumes/proc
-EXPOSE_SCHAIN=./volumes/schain
 EXPOSE_CAM=/path/to/cam
+EXPOSE_SCHAIN=./volumes/schain
 EXPOSE_NAS=/path/to/nas_data
 EXPOSE_PGDATA=/path/to/pg_data
 EXPOSE_CERTS=/path/to/certs
 EXPOSE_DHPARAM=/path/to/dhparam

 #Superuser settings
-SIRM_USER=admin
-SIRM_PASSWORD=soporte
-SIRM_EMAIL=admin@igp.gob.pe
\ No newline at end of file
+SIRM_USER=*****
+SIRM_PASSWORD=*******
+SIRM_EMAIL=*****@igp.gob.pe
\ No newline at end of file
@@ -98,6 +98,7 services:
 - POSTGRES_PORT_5432_TCP_PORT=${POSTGRES_PORT_5432_TCP_PORT}
 - EXPOSE_NAS=${EXPOSE_NAS}
 - PROC_SITE=${PROC_SITE}
+- SCHAIN_SITE=${SCHAIN_SITE}
 - SIRM_USER=${SIRM_USER}
 - SIRM_PASSWORD=${SIRM_PASSWORD}
 - SIRM_EMAIL=${SIRM_EMAIL}
@@ -113,10 +114,12 services:
 - backend_sirm
 labels:
 ofelia.enabled: "true"
-ofelia.job-exec.adq-exp.schedule: "@every 10s"
-ofelia.job-exec.adq-exp.command: "python manage.py adq_exp"
-ofelia.job-exec.restart-exp.schedule: "0 0 0/17 ? * *"
-ofelia.job-exec.restart-exp.command: "python manage.py restart_exp"
+ofelia.job-exec.restart-reception.schedule: "0 1/5 * * * *"
+ofelia.job-exec.restart-reception.command: "python manage.py restart_reception"
+ofelia.job-exec.restart-pedestal.schedule: "0 2/10 * * * *"
+ofelia.job-exec.restart-pedestal.command: "python manage.py restart_pedestal"
+ofelia.job-exec.restart-experiment.schedule: "0 0 5 * * *"
+ofelia.job-exec.restart-experiment.command: "python manage.py restart_experiment"
 logging:
 driver: "json-file"
 options:
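Note: the ofelia schedules above are Go cron expressions with a leading seconds field, so "0 1/5 * * * *" runs restart_reception at second 0 every 5 minutes (starting at minute 1), "0 2/10 * * * *" runs restart_pedestal every 10 minutes (starting at minute 2), and "0 0 5 * * *" runs restart_experiment once a day at 05:00.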
@@ -12,4 +12,5 idna==3.3
 urllib3==1.26.9
 charset-normalizer==2.0.12
 certifi==2021.10.8
-soupsieve==2.3.1
\ No newline at end of file
+soupsieve==2.3.1
+h5py==3.7.0
\ No newline at end of file
@@ -5,9 +5,9 import time

 class Command(BaseCommand):
 """
-Restart experiment each 06 hours.
+Restart experiment every night at 05:00 am.
 Example:
-manage.py restart_exp --pk=1
+manage.py restart_experiment
 """
 def handle(self, *args, **options):
 restart_experiment(self)
@@ -4,15 +4,20 from django.shortcuts import get_object_or_404
 import os, fnmatch
 import time
 from datetime import datetime
+import requests

 class Command(BaseCommand):
 """
-Check data acquisition each 10 seconds.
+Check data acquisition every 5 minutes.
 Example:
-manage.py adq_exp
+manage.py restart_reception
 """
 def handle(self, *args, **options):
-restart_experiment(self)
+#start = time.time()
+time.sleep(15)
+restart_acquisition(self)
+#end = time.time()
+#self.stdout.write(f'TIME: "{end - start}"')

 def check_experiment():
 if len(Experiment.objects.filter(status=2)) > 0:
@@ -32,6 +37,11 def acquisition_start(self, id_exp):
 name = '{}-R@{}'.format(exp.name, datetime.now().strftime('%Y-%m-%dT%H-%M-%S'))
 exp.reception_rx.start_device(name_experiment = name, restart = True)
 self.stdout.write(f'"{exp.name}" experiment: Data acquisition was restarted')
+self.stdout.write(f'Restarting schain...')
+
+r = requests.get('http://'+os.environ.get('SCHAIN_SITE', 'sophy-schain')+'/stop')
+time.sleep(1)
+r = requests.post('http://'+os.environ.get('SCHAIN_SITE', 'sophy-schain')+'/start', json={'name': exp.name})

 def acquisition_stop(self, id_exp):
 all_status = Experiment.objects.filter(status=2)
@@ -45,18 +55,36 def acquisition_stop(self, id_exp):
 exp.reception_rx.stop_device()
 self.stdout.write(f'"{exp.name}" experiment: Data acquisition "{exp.name}" was stopped')

-def count_data(datadir):
+def count_data(last_channel):
 pattern = "rf@*.h5"
-rawdata = {'ch0': 0, 'ch1': 0}
+count = 0
+list_channel = os.listdir(last_channel)
+
+for entry in sorted(list_channel):
+if fnmatch.fnmatch(entry, pattern):
+count = count + 1
+return count

-for key, value in rawdata.items():
-last_channel = path_data(os.path.join(datadir, key))
-if last_channel:
-list_channel = os.listdir(last_channel)
-for entry in sorted(list_channel):
-if fnmatch.fnmatch(entry, pattern):
-rawdata[key] = rawdata[key] + 1
-return rawdata
+def response_data(datadir, old_channel, old_rawdata, new_rawdata, search):
+path_channels = {'ch0': True, 'ch1': True}
+channel = {'ch0': False, 'ch1': False}
+
+for key, value in path_channels.items():
+rootdir = os.path.join(datadir, key)
+if os.path.isdir(rootdir):
+channel[key] = path_data(os.path.join(datadir, key))
+if key in search:
+if channel[key]:
+if not old_channel[key] or channel[key] != old_channel[key]:
+old_rawdata[key] = count_data(channel[key])
+time.sleep(1)
+new_rawdata[key] = count_data(channel[key])
+else:
+time.sleep(1)
+else:
+path_channels[key] = False
+
+return path_channels, channel, old_rawdata, new_rawdata

 def path_data(rootdir):
 list_=[]
@@ -74,35 +102,52 def path_data(rootdir):
 except:
 return False

-def check_count(datadir, old_numbers):
-diff = {}
-numbers = count_data(datadir)
+def check_count(datadir):
+old_numbers = {'ch0': 0, 'ch1': 0}
+new_numbers = {'ch0': 0, 'ch1': 0}
+validation = {'ch0': False, 'ch1': False}
+channel = {'ch0': False, 'ch1': False}

-for key, value in numbers.items():
-if old_numbers[key] > numbers[key]:
-old_numbers[key] = 0
-diff[key] = numbers[key] - old_numbers[key]
-return numbers, diff
+path_channels, channel, old_numbers, new_numbers = response_data(datadir, channel, old_numbers, new_numbers, ['ch0', 'ch1'])

-def restart_experiment(self):
-old_numbers={'ch0': 0, 'ch1': 0}
-for count in range(5):
-time.sleep(1)
-if check_experiment():
-all_status = Experiment.objects.filter(status=2)
-id_exp = all_status[0].pk
-datadir_exp = all_status[0].reception_rx.datadir
-datadir_exp = datadir_exp.replace(os.environ.get('EXPOSE_NAS', '/DATA_RM/DATA'), '/data')
-
-old_numbers, diff = check_count(datadir_exp, old_numbers)
-if diff['ch0'] > 0 and diff['ch1'] > 0:
-self.stdout.write(f'Data acquisition is running')
-else:
-count = count + 1
-if count == 5:
-acquisition_stop(self, id_exp)
-time.sleep(1)
-acquisition_start(self, id_exp)
-old_numbers={'ch0': 0, 'ch1': 0}
-else:
-self.stdout.write(f'An error ocurred while trying to read data acquisition, Retry!')
\ No newline at end of file
+for key, value in validation.items():
+for _ in range(5):
+if new_numbers[key] > old_numbers[key]:
+validation[key] = True
+break
+else:
+path_channels, channel, old_numbers, new_numbers = response_data(datadir, channel, old_numbers, new_numbers, [key])
+
+return path_channels, channel, validation
+
+def restart_acquisition(self):
+if check_experiment():
+all_status = Experiment.objects.filter(status=2)
+id_exp = all_status[0].pk
+datadir_exp = all_status[0].reception_rx.datadir
+datadir_exp = datadir_exp.replace(os.environ.get('EXPOSE_NAS', '/DATA_RM/DATA'), '/data')
+
+path_channels, channel, validation = check_count(datadir_exp)
+
+if path_channels['ch0'] and path_channels['ch1']:
+# Execute the process
+if validation['ch0'] and validation['ch1']:
+self.stdout.write(f'Data acquisition is running')
+else:
+if not channel['ch0'] or not channel['ch1']:
+for key, value in channel.items():
+if not value:
+self.stdout.write(f'No such directory with datetime format "%Y-%m-%dT%H-00-00": channel["{key}"], retry!')
+else:
+for key, value in validation.items():
+if not value:
+self.stdout.write(f'No file increment: channel["{key}"]')
+
+acquisition_stop(self, id_exp)
+time.sleep(3)
+acquisition_start(self, id_exp)
+
+else:
+for key, value in path_channels.items():
+if not value:
+self.stdout.write(f'No such directory: channel["{key}"], fail!')
\ No newline at end of file
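For context, the schain restart that this commit adds to acquisition_start() boils down to two HTTP calls against the processing container. A minimal standalone sketch, with 'my-experiment' as a placeholder experiment name:

import os
import time

import requests

schain = 'http://' + os.environ.get('SCHAIN_SITE', 'sophy-schain')
requests.get(schain + '/stop')                                     # stop the current processing chain
time.sleep(1)
requests.post(schain + '/start', json={'name': 'my-experiment'})   # start it again for the experiment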