Update restart_reception and Add restart_pedestal
eynilupu
r443:8c926b45a918
New file: the restart_pedestal management command (200 lines added):
from django.core.management.base import BaseCommand
from apps.main.models import Experiment
from django.shortcuts import get_object_or_404
import os, fnmatch
import time
from datetime import datetime
import h5py
import numpy as np

class Command(BaseCommand):
    """
    Check pedestal acquisition every 10 minutes.
    Example:
        manage.py restart_pedestal
    """
    def handle(self, *args, **options):
        #start = time.time()
        #time.sleep(1)
        restart_pedestal(self)
        #end = time.time()
        #self.stdout.write(f'TIME: "{end - start}"')
def check_experiment():
    # At least one experiment is currently running (status=2)?
    if len(Experiment.objects.filter(status=2)) > 0:
        return True
    else:
        return False

def pedestal_start(self, id_exp):
    # Only act if the running experiment is still the one we were asked to restart
    all_status = Experiment.objects.filter(status=2)
    check_id = False

    if len(all_status) > 0:
        check_id = all_status[0].pk

    if check_id and check_id == id_exp:
        exp = get_object_or_404(Experiment, pk=id_exp)
        name = '{}-R@{}'.format(exp.name, datetime.now().strftime('%Y-%m-%dT%H-%M-%S'))
        exp.pedestal.start_device(name_experiment=name)
        self.stdout.write(f'"{exp.name}" experiment: Pedestal acquisition was restarted')

def pedestal_stop(self, id_exp):
    all_status = Experiment.objects.filter(status=2)
    check_id = False

    if len(all_status) > 0:
        check_id = all_status[0].pk

    if check_id and check_id == id_exp:
        exp = get_object_or_404(Experiment, pk=id_exp)
        exp.pedestal.stop_device()
        self.stdout.write(f'"{exp.name}" experiment: Pedestal acquisition was stopped')

def hdf5_list_content(get_file):
    # Return the dataset as a flat list of floats, or False if it is not 1-D
    table_records = np.array(get_file).tolist()
    table_dim = get_file.ndim
    table_rows = get_file.shape[0]

    if table_dim == 1 and table_rows >= 1:
        return table_records
    else:
        return False
def hdf5_read(file):
    # Last 10 samples of each pedestal speed, or False if the dataset is missing
    dataspeed = {"ele_speed": False, "azi_speed": False}

    with h5py.File(file, 'r') as hdf:
        for key in dataspeed:
            get = hdf.get('Data/' + key)
            if get is not None:
                content = hdf5_list_content(get)
                if content:
                    # keep only the 10 most recent values
                    dataspeed[key] = content[-10:]

    return dataspeed

def count_data(last_position):
    # Count pos@*.h5 files and return the next-to-last one
    # (the newest file may still be being written)
    pattern = "pos@*.h5"
    count = 0
    list_data = []

    list_position = os.listdir(last_position)

    for entry in sorted(list_position):
        if fnmatch.fnmatch(entry, pattern):
            count = count + 1
            list_data.append(os.path.join(last_position, entry))

    if len(list_data) > 1:
        list_data = list_data[-2]
    else:
        list_data = False

    return count, list_data

def response_data(datadir, old_path_datetime, old_position, new_position):
    # Sample the position-file count twice, 65 seconds apart, in the newest hour directory
    path_position = True
    path_datetime = False
    read_files = False

    rootdir = os.path.join(datadir, 'position')
    if os.path.isdir(rootdir):
        path_datetime = path_data(rootdir)

        if path_datetime:
            if not old_path_datetime or path_datetime != old_path_datetime:
                old_position, read_files = count_data(path_datetime)
            time.sleep(65)
            new_position, read_files = count_data(path_datetime)
        else:
            time.sleep(65)
    else:
        path_position = False

    return path_position, path_datetime, old_position, new_position, read_files

def path_data(rootdir):
    # Newest subdirectory named like "%Y-%m-%dT%H-00-00", or False if none exists
    list_ = []
    for it in os.scandir(rootdir):
        if it.is_dir():
            try:
                datetime.strptime(it.path.split("/")[-1], "%Y-%m-%dT%H-00-00")
                list_.append(it.path)
            except ValueError:
                pass

    list_ = sorted(list_, reverse=True)
    try:
        return list_[0]
    except IndexError:
        return False
def check_count(datadir):
    old_numbers = 0
    new_numbers = 0
    validation = False
    path_datetime = False
    speed = {"ele_speed": False, "azi_speed": False}

    path_position, path_datetime, old_numbers, new_numbers, read_files = response_data(datadir, path_datetime, old_numbers, new_numbers)

    # Up to two attempts: the file count must grow, and at least one of the
    # last 10 speed samples per axis must exceed 0.1
    for u in range(2):
        if new_numbers > old_numbers:
            validation = True

            if read_files:
                data = hdf5_read(read_files)
                for key, value in data.items():
                    try:
                        if max(value) > 0.1:
                            speed[key] = True
                    except (TypeError, ValueError):
                        pass
            break
        else:
            if u < 1:
                path_position, path_datetime, old_numbers, new_numbers, read_files = response_data(datadir, path_datetime, old_numbers, new_numbers)

    return path_position, path_datetime, validation, speed

def restart_pedestal(self):
    if check_experiment():

        all_status = Experiment.objects.filter(status=2)
        id_exp = all_status[0].pk
        # Map the NAS path of the running experiment to the container-side mount
        datadir_exp = all_status[0].reception_rx.datadir
        datadir_exp = datadir_exp.replace(os.environ.get('EXPOSE_NAS', '/DATA_RM/DATA'), '/data')
        datadir_exp = datadir_exp.replace('/rawdata', '')

        path_position, path_datetime, validation, speed = check_count(datadir_exp)
        if path_position:
            # Execute the process
            if validation:
                self.stdout.write('Pedestal acquisition is running')
                if speed['ele_speed'] or speed['azi_speed']:
                    self.stdout.write('Pedestal speeds on azimuth and elevation are OK')
                else:
                    for key, value in speed.items():
                        if not value:
                            self.stdout.write(f'Speed on {key} is <= 0.1, retry')

                    pedestal_stop(self, id_exp)
                    time.sleep(14)
                    #pedestal_reset(self, id_exp)
                    #time.sleep(2)
                    pedestal_start(self, id_exp)

            else:
                if not path_datetime:
                    self.stdout.write('No such directory with datetime format "%Y-%m-%dT%H-00-00", retry!')
                else:
                    self.stdout.write('No file increment, retry')

                pedestal_stop(self, id_exp)
                time.sleep(14)
                #pedestal_reset(self, id_exp)
                #time.sleep(2)
                pedestal_start(self, id_exp)

        else:
            self.stdout.write('No such directory: position, fail!')
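
For local testing, the directory tree the command polls can be faked. A minimal sketch follows; the test path, file names and speed values are illustrative assumptions, not taken from the repository:

import os
from datetime import datetime
import h5py
import numpy as np

# Hypothetical test location standing in for <datadir>/position/<hour>
datadir = '/tmp/sophy_test'
hour_dir = os.path.join(datadir, 'position',
                        datetime.now().strftime('%Y-%m-%dT%H-00-00'))
os.makedirs(hour_dir, exist_ok=True)

# At least two files, so count_data() has a next-to-last file to hand back
for fname in ('pos@000001.h5', 'pos@000002.h5'):
    with h5py.File(os.path.join(hour_dir, fname), 'w') as hdf:
        grp = hdf.create_group('Data')
        # 1-D float datasets, as hdf5_list_content() expects; values above
        # 0.1 so the speed check in check_count() passes
        grp.create_dataset('ele_speed', data=np.full(20, 0.5))
        grp.create_dataset('azi_speed', data=np.full(20, 0.5))

Note that check_count() only validates if the number of pos@*.h5 files grows during its 65-second wait, so a live writer (or a third file added mid-run) is needed to exercise the success path.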
Environment settings (.env sample):

@@ -1,38 +1,39 @@
 #General settings
+TZ=America/Lima
 LC_ALL=C.UTF-8
 SIRM_SITE=<SIRM SITE>
 PROC_SITE=<PROC SITE>
 CAM_SITE=<CAM SITE>
 SCHAIN_SITE=<SCHAIN SITE>
 GENERAL_PORT=<GENERAL PORT>
 BROKER_URL=<BROKER SITE>
 SOPHY_TOPIC=<SOPHY TOPIC>
 TXA_SITE=<IP TXA>
 TXB_SITE=<IP TXB>
 SIRM_MAX_UPLOAD_SIZE_MB=<SIZE MB>

 #Pedestal - az offset
 AZ_OFFSET=<AZ OFFSET>

 #Postgres settings
 POSTGRES_PORT_5432_TCP_ADDR=sirm-postgres
 POSTGRES_PORT_5432_TCP_PORT=5432
 DB_NAME=radarsys
 DB_USER=docker
 DB_PASSWORD=docker
 PGDATA=/var/lib/postgresql/data

 #Volumes - path
 EXPOSE_SIRM=./volumes/sirm
 EXPOSE_PROC=./volumes/proc
-EXPOSE_SCHAIN=./volumes/schain
 EXPOSE_CAM=/path/to/cam
+EXPOSE_SCHAIN=./volumes/schain
 EXPOSE_NAS=/path/to/nas_data
 EXPOSE_PGDATA=/path/to/pg_data
 EXPOSE_CERTS=/path/to/certs
 EXPOSE_DHPARAM=/path/to/dhparam

 #Superuser settings
-SIRM_USER=admin
-SIRM_PASSWORD=soporte
-SIRM_EMAIL=admin@igp.gob.pe
\ No newline at end of file
+SIRM_USER=*****
+SIRM_PASSWORD=*******
+SIRM_EMAIL=*****@igp.gob.pe
\ No newline at end of file
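
Two of these values matter directly to the new management commands: EXPOSE_NAS must be the host path of the NAS mount, because restart_reception and restart_pedestal rebuild container-side paths by replacing that prefix with /data (the sirm_nas mount point), so a mismatch makes every directory check fail. The new TZ value is consumed by the sirm-schain service below, and the superuser defaults are now masked placeholders rather than real credentials.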
docker-compose.yml:

@@ -1,269 +1,272 @@
 # docker-compose up -d --build
 version: '3'

 volumes:
   sirm_web:
     name: sirm_web
     driver: local
     driver_opts:
       type: "none"
       o: "bind"
       device: "${EXPOSE_SIRM}"
   sirm_pgdata:
     name: sirm_pgdata
     driver: local
     driver_opts:
       type: "none"
       o: "bind"
       device: "${EXPOSE_PGDATA}"
   sirm_certs:
     name: sirm_certs
     driver: local
     driver_opts:
       type: "none"
       o: "bind"
       device: "${EXPOSE_CERTS}"
   sirm_dhparam:
     name: sirm_dhparam
     driver: local
     driver_opts:
       type: "none"
       o: "bind"
       device: "${EXPOSE_DHPARAM}"
   sirm_proc:
     name: sirm_proc
     driver: local
     driver_opts:
       type: "none"
       o: "bind"
       device: "${EXPOSE_PROC}"
   sirm_nas:
     name: sirm_nas
     driver: local
     driver_opts:
       type: "none"
       o: "bind"
       device: "${EXPOSE_NAS}"
   sirm_cam:
     name: sirm_cam
     driver: local
     driver_opts:
       type: "none"
       o: "bind"
       device: "${EXPOSE_CAM}"
   sirm_schain:
     name: sirm_schain
     driver: local
     driver_opts:
       type: "none"
       o: "bind"
       device: "${EXPOSE_SCHAIN}"

 services:
   sirm-nginx-proxy:
     container_name: sirm-nginx-proxy
     restart: always
     build:
       context: ./images/
       dockerfile: nginx-proxy/Dockerfile
       args:
         - SIRM_MAX_UPLOAD_SIZE_MB=${SIRM_MAX_UPLOAD_SIZE_MB}
     depends_on:
       - sirm-web
     networks:
       - frontend_sirm
       - backend_sirm
     ports:
       - 0.0.0.0:${GENERAL_PORT}:80
     volumes:
       - /var/run/docker.sock:/tmp/docker.sock:ro
       - sirm_certs:/etc/nginx/certs:ro
       - sirm_dhparam:/etc/nginx/dhparam
     logging:
       driver: "json-file"
       options:
         max-size: "12m"

   sirm-web:
     container_name: 'sirm-web'
     restart: always
     build:
       context: .
     environment:
       - LC_ALL=${LC_ALL}
       - DB_USER=${DB_USER}
       - DB_NAME=${DB_NAME}
       - DB_PASSWORD=${DB_PASSWORD}
       - POSTGRES_PORT_5432_TCP_ADDR=${POSTGRES_PORT_5432_TCP_ADDR}
       - POSTGRES_PORT_5432_TCP_PORT=${POSTGRES_PORT_5432_TCP_PORT}
       - EXPOSE_NAS=${EXPOSE_NAS}
       - PROC_SITE=${PROC_SITE}
+      - SCHAIN_SITE=${SCHAIN_SITE}
       - SIRM_USER=${SIRM_USER}
       - SIRM_PASSWORD=${SIRM_PASSWORD}
       - SIRM_EMAIL=${SIRM_EMAIL}
       - AZ_OFFSET=${AZ_OFFSET}
       - VIRTUAL_HOST=${SIRM_SITE}
     volumes:
       - 'sirm_web:/workspace/sirm'
       - 'sirm_nas:/data'
     depends_on:
       - sirm-postgres
     networks:
       - frontend_sirm
       - backend_sirm
     labels:
       ofelia.enabled: "true"
-      ofelia.job-exec.adq-exp.schedule: "@every 10s"
-      ofelia.job-exec.adq-exp.command: "python manage.py adq_exp"
-      ofelia.job-exec.restart-exp.schedule: "0 0 0/17 ? * *"
-      ofelia.job-exec.restart-exp.command: "python manage.py restart_exp"
+      ofelia.job-exec.restart-reception.schedule: "0 1/5 * * * *"
+      ofelia.job-exec.restart-reception.command: "python manage.py restart_reception"
+      ofelia.job-exec.restart-pedestal.schedule: "0 2/10 * * * *"
+      ofelia.job-exec.restart-pedestal.command: "python manage.py restart_pedestal"
+      ofelia.job-exec.restart-experiment.schedule: "0 0 5 * * *"
+      ofelia.job-exec.restart-experiment.command: "python manage.py restart_experiment"
     logging:
       driver: "json-file"
       options:
         max-size: "12m"

   sirm-job:
     container_name: 'sirm-job'
     image: mcuadros/ofelia:latest
     depends_on:
       - sirm-web
     networks:
       - frontend_sirm
       - backend_sirm
     command: daemon --docker
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock:ro
     logging:
       driver: "json-file"
       options:
         max-size: "12m"

   sirm-postgres:
     container_name: 'sirm-postgres'
     restart: always
     build:
       context: ./images/
       dockerfile: postgres/Dockerfile
       args:
         - PGDATA=${PGDATA}
     environment:
       - LC_ALL=${LC_ALL}
       - DB_USER=${DB_USER}
       - DB_NAME=${DB_NAME}
       - DB_PASSWORD=${DB_PASSWORD}
       - POSTGRES_PORT_5432_TCP_ADDR=${POSTGRES_PORT_5432_TCP_ADDR}
       - POSTGRES_PORT_5432_TCP_PORT=${POSTGRES_PORT_5432_TCP_PORT}
     volumes:
       - sirm_pgdata:/var/lib/postgresql/data
     networks:
       - backend_sirm
     logging:
       driver: "json-file"
       options:
         max-size: "12m"

   sirm-proc:
     container_name: 'sirm-proc'
     restart: always
     build:
       context: ./volumes/proc/
     environment:
       - BROKER_URL=${BROKER_URL}
       - SOPHY_TOPIC=${SOPHY_TOPIC}
       - TXA_SITE=${TXA_SITE}
       - TXB_SITE=${TXB_SITE}
       - SCHAIN_SITE=${SCHAIN_SITE}
       - VIRTUAL_HOST=${PROC_SITE}
     volumes:
       - 'sirm_proc:/app'
       - 'sirm_nas:/data'
     networks:
       - frontend_sirm
     logging:
       driver: "json-file"
       options:
         max-size: "12m"

   sirm-monitor:
     container_name: 'sirm-monitor'
     restart: always
     image: 'sirm_sirm-proc'
     command: ["python", "monitor.py"]
     environment:
       - BROKER_URL=${BROKER_URL}
       - TXA_SITE=${TXA_SITE}
       - TXB_SITE=${TXB_SITE}
     volumes:
       - 'sirm_proc:/app'
       - 'sirm_nas:/data'
     networks:
       - frontend_sirm
     depends_on:
       - sirm-proc
     logging:
       driver: "json-file"
       options:
         max-size: "12m"

   sirm-acq:
     container_name: 'sirm-acq'
     restart: always
     image: 'sirm_sirm-proc'
     command: ["python", "acq.py"]
     environment:
       - BROKER_URL=${BROKER_URL}
       - TXA_SITE=${TXA_SITE}
       - TXB_SITE=${TXB_SITE}
       - PROC_SITE=${PROC_SITE}
     volumes:
       - 'sirm_proc:/app'
       - 'sirm_nas:/data'
     networks:
       - frontend_sirm
     depends_on:
       - sirm-proc
     logging:
       driver: "json-file"
       options:
         max-size: "12m"

   sirm-cam:
     container_name: 'sirm-cam'
     image: bkjaya1952/ivms4200-v2.8.2.2_ml-linux
     restart: always
     environment:
       - VIRTUAL_HOST=${CAM_SITE}
     volumes:
       - 'sirm_cam:/root/.wine/drive_c/iVMS-4200'
     networks:
       - frontend_sirm
     logging:
       driver: "json-file"
       options:
         max-size: "12m"

   sirm-schain:
     container_name: 'sirm-schain'
     restart: always
     build:
       context: ./volumes/schain/
     environment:
       - BROKER_URL=${BROKER_URL}
       - BACKEND=Agg
       - TZ=${TZ}
       - VIRTUAL_HOST=${SCHAIN_SITE}
     volumes:
       - 'sirm_nas:/data'
       - 'sirm_schain:/app'
     networks:
       - frontend_sirm
     logging:
       driver: "json-file"
       options:
         max-size: "12m"

 networks:
   frontend_sirm:
     name: frontend_sirm
   backend_sirm:
-    name: backend_sirm
\ No newline at end of file
+    name: backend_sirm
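
The ofelia job labels use a six-field cron syntax with a leading seconds field: "0 1/5 * * * *" fires restart_reception at second 0 of minutes 1, 6, 11, and so on; "0 2/10 * * * *" fires restart_pedestal at minutes 2, 12, 22, ...; and "0 0 5 * * *" fires restart_experiment once a day at 05:00, matching the commands' docstrings. The minute offsets appear chosen so the reception and pedestal checks never start in the same minute.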
requirements.txt:

@@ -1,15 +1,16 @@
 Django==4.0.3
 django-bootstrap4==22.1
 psycopg2-binary==2.9.3
 django-polymorphic==3.1.0
 gunicorn==20.1.0
 requests==2.27.1
 backports.zoneinfo==0.2.1
 asgiref==3.5.0
 sqlparse==0.4.2
 beautifulsoup4==4.10.0
 idna==3.3
 urllib3==1.26.9
 charset-normalizer==2.0.12
 certifi==2021.10.8
-soupsieve==2.3.1
\ No newline at end of file
+soupsieve==2.3.1
+h5py==3.7.0
\ No newline at end of file
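
The only new pin is h5py, which restart_pedestal needs to read the Data/ele_speed and Data/azi_speed datasets out of the pos@*.h5 position files; numpy, also imported by that command, comes in as an h5py dependency.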
The restart_experiment management command (previously restart_exp):

@@ -1,43 +1,43 @@
 from django.core.management.base import BaseCommand
 from apps.main.models import Experiment
 from django.shortcuts import get_object_or_404
 import time

 class Command(BaseCommand):
     """
-    Restart experiment each 06 hours.
+    Restart experiment every night at 05:00 am.
     Example:
-        manage.py restart_exp --pk=1
+        manage.py restart_experiment
     """
     def handle(self, *args, **options):
         restart_experiment(self)

 def check_experiment():
     if len(Experiment.objects.filter(status=2)) > 0:
         return True
     else:
         return False

 def experiment_start(self, id_exp):
     exp = get_object_or_404(Experiment, pk=id_exp)
     if exp.status != 2 and exp.mode_stop == 'res':
         exp.status = exp.start()
         exp.save()
         self.stdout.write(f'Experiment "{exp.name}" was restarted')

 def experiment_stop(self, id_exp):
     exp = get_object_or_404(Experiment, pk=id_exp)
     if exp.status == 2 or exp.status == 4 or exp.status == 5:
         exp.status = exp.stop()
         exp.mode_stop = 'res'
         exp.save()
         self.stdout.write(f'Experiment "{exp.name}" was stopped')

 def restart_experiment(self):
     if check_experiment():
         all_status = Experiment.objects.filter(status=2)
         id_exp = all_status[0].pk

         experiment_stop(self, id_exp)
         time.sleep(15)
-        experiment_start(self, id_exp)
\ No newline at end of file
+        experiment_start(self, id_exp)
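
Only the docstring changes here: it now documents the nightly 05:00 run wired up by the restart-experiment label in docker-compose, and the example drops the old restart_exp --pk form, which no longer applied since the command acts on whichever experiment is currently running (status=2).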
The restart_reception management command (previously adq_exp):

@@ -1,108 +1,153 @@
 from django.core.management.base import BaseCommand
 from apps.main.models import Experiment
 from django.shortcuts import get_object_or_404
 import os, fnmatch
 import time
 from datetime import datetime
+import requests

 class Command(BaseCommand):
     """
-    Check data acquisition each 10 seconds.
+    Check data acquisition every 5 minutes.
     Example:
-        manage.py adq_exp
+        manage.py restart_reception
     """
     def handle(self, *args, **options):
-        restart_experiment(self)
+        #start = time.time()
+        time.sleep(15)
+        restart_acquisition(self)
+        #end = time.time()
+        #self.stdout.write(f'TIME: "{end - start}"')

 def check_experiment():
     if len(Experiment.objects.filter(status=2)) > 0:
         return True
     else:
         return False

 def acquisition_start(self, id_exp):
     all_status = Experiment.objects.filter(status=2)
     check_id = False

     if len(all_status) > 0:
         check_id = all_status[0].pk

     if check_id and check_id == id_exp:
         exp = get_object_or_404(Experiment, pk=id_exp)
         name = '{}-R@{}'.format(exp.name, datetime.now().strftime('%Y-%m-%dT%H-%M-%S'))
         exp.reception_rx.start_device(name_experiment = name, restart = True)
         self.stdout.write(f'"{exp.name}" experiment: Data acquisition was restarted')
+        self.stdout.write(f'Restarting schain...')
+
+        r = requests.get('http://'+os.environ.get('SCHAIN_SITE', 'sophy-schain')+'/stop')
+        time.sleep(1)
+        r = requests.post('http://'+os.environ.get('SCHAIN_SITE', 'sophy-schain')+'/start', json={'name': exp.name})

 def acquisition_stop(self, id_exp):
     all_status = Experiment.objects.filter(status=2)
     check_id = False

     if len(all_status) > 0:
         check_id = all_status[0].pk

     if check_id and check_id == id_exp:
         exp = get_object_or_404(Experiment, pk=id_exp)
         exp.reception_rx.stop_device()
         self.stdout.write(f'"{exp.name}" experiment: Data acquisition was stopped')

-def count_data(datadir):
+def count_data(last_channel):
     pattern = "rf@*.h5"
-    rawdata = {'ch0': 0, 'ch1': 0}
+    count = 0
+    list_channel = os.listdir(last_channel)
+
+    for entry in sorted(list_channel):
+        if fnmatch.fnmatch(entry, pattern):
+            count = count + 1
+    return count

-    for key, value in rawdata.items():
-        last_channel = path_data(os.path.join(datadir, key))
-        if last_channel:
-            list_channel = os.listdir(last_channel)
-            for entry in sorted(list_channel):
-                if fnmatch.fnmatch(entry, pattern):
-                    rawdata[key] = rawdata[key] + 1
-    return rawdata
+def response_data(datadir, old_channel, old_rawdata, new_rawdata, search):
+    path_channels = {'ch0': True, 'ch1': True}
+    channel = {'ch0': False, 'ch1': False}
+
+    for key, value in path_channels.items():
+        rootdir = os.path.join(datadir, key)
+        if os.path.isdir(rootdir):
+            channel[key] = path_data(os.path.join(datadir, key))
+            if key in search:
+                if channel[key]:
+                    if not old_channel[key] or channel[key] != old_channel[key]:
+                        old_rawdata[key] = count_data(channel[key])
+                    time.sleep(1)
+                    new_rawdata[key] = count_data(channel[key])
+                else:
+                    time.sleep(1)
+        else:
+            path_channels[key] = False
+
+    return path_channels, channel, old_rawdata, new_rawdata

 def path_data(rootdir):
     list_=[]
     for it in os.scandir(rootdir):
         if it.is_dir():
             try:
                 datetime.strptime(it.path.split("/")[-1], "%Y-%m-%dT%H-00-00")
                 list_.append(it.path)
             except ValueError:
                 pass

     list_ = sorted(list_, reverse=True)
     try:
         return list_[0]
     except:
         return False

-def check_count(datadir, old_numbers):
-    diff = {}
-    numbers = count_data(datadir)
+def check_count(datadir):
+    old_numbers = {'ch0': 0, 'ch1': 0}
+    new_numbers = {'ch0': 0, 'ch1': 0}
+    validation = {'ch0': False, 'ch1': False}
+    channel = {'ch0': False, 'ch1': False}

-    for key, value in numbers.items():
-        if old_numbers[key] > numbers[key]:
-            old_numbers[key] = 0
-        diff[key] = numbers[key] - old_numbers[key]
-    return numbers, diff
+    path_channels, channel, old_numbers, new_numbers = response_data(datadir, channel, old_numbers, new_numbers, ['ch0', 'ch1'])

-def restart_experiment(self):
-    old_numbers={'ch0': 0, 'ch1': 0}
-    for count in range(5):
-        time.sleep(1)
-        if check_experiment():
-            all_status = Experiment.objects.filter(status=2)
-            id_exp = all_status[0].pk
-            datadir_exp = all_status[0].reception_rx.datadir
-            datadir_exp = datadir_exp.replace(os.environ.get('EXPOSE_NAS', '/DATA_RM/DATA'), '/data')
-
-            old_numbers, diff = check_count(datadir_exp, old_numbers)
-            if diff['ch0'] > 0 and diff['ch1'] > 0:
-                self.stdout.write(f'Data acquisition is running')
-            else:
-                count = count + 1
-                if count == 5:
-                    acquisition_stop(self, id_exp)
-                    time.sleep(1)
-                    acquisition_start(self, id_exp)
-                    old_numbers={'ch0': 0, 'ch1': 0}
-        else:
-            self.stdout.write(f'An error ocurred while trying to read data acquisition, Retry!')
\ No newline at end of file
+    for key, value in validation.items():
+        for _ in range(5):
+            if new_numbers[key] > old_numbers[key]:
+                validation[key] = True
+                break
+            else:
+                path_channels, channel, old_numbers, new_numbers = response_data(datadir, channel, old_numbers, new_numbers, [key])
+
+    return path_channels, channel, validation
+
+def restart_acquisition(self):
+    if check_experiment():
+        all_status = Experiment.objects.filter(status=2)
+        id_exp = all_status[0].pk
+        datadir_exp = all_status[0].reception_rx.datadir
+        datadir_exp = datadir_exp.replace(os.environ.get('EXPOSE_NAS', '/DATA_RM/DATA'), '/data')
+
+        path_channels, channel, validation = check_count(datadir_exp)
+
+        if path_channels['ch0'] and path_channels['ch1']:
+            # Execute the process
+            if validation['ch0'] and validation['ch1']:
+                self.stdout.write(f'Data acquisition is running')
+            else:
+                if not channel['ch0'] or not channel['ch1']:
+                    for key, value in channel.items():
+                        if not value:
+                            self.stdout.write(f'No such directory with datetime format "%Y-%m-%dT%H-00-00": channel["{key}"], retry!')
+                else:
+                    for key, value in validation.items():
+                        if not value:
+                            self.stdout.write(f'No file increment: channel["{key}"]')
+
+                acquisition_stop(self, id_exp)
+                time.sleep(3)
+                acquisition_start(self, id_exp)
+
+        else:
+            for key, value in path_channels.items():
+                if not value:
+                    self.stdout.write(f'No such directory: channel["{key}"], fail!')
\ No newline at end of file
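
The reception side can be exercised the same way as the pedestal check; a minimal sketch of the per-channel layout restart_reception polls (the test path and file names are illustrative assumptions):

import os
from datetime import datetime

datadir = '/tmp/sophy_test/rawdata'   # hypothetical test location
hour = datetime.now().strftime('%Y-%m-%dT%H-00-00')
for ch in ('ch0', 'ch1'):
    hour_dir = os.path.join(datadir, ch, hour)
    os.makedirs(hour_dir, exist_ok=True)
    # count_data() only counts names matching rf@*.h5, so empty files are
    # enough to drive the counter; validation requires the per-channel count
    # to grow between the two polls response_data() takes 1 second apart
    open(os.path.join(hour_dir, 'rf@000001.h5'), 'wb').close()

Note also that a restart now bounces schain as well: acquisition_start() calls the /stop and /start endpoints on SCHAIN_SITE after restarting the receiver, which is why SCHAIN_SITE was added to the sirm-web environment in docker-compose.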