restart_pedestal.py
from django.core.management.base import BaseCommand
from apps.main.models import Experiment
from django.shortcuts import get_object_or_404
import os
import fnmatch
import time
from datetime import datetime
import h5py
import numpy as np

class Command(BaseCommand):
    """
    Check pedestal acquisition every 10 minutes.
    Example:
        manage.py restart_pedestal
    """

    def handle(self, *args, **options):
        # start = time.time()
        # time.sleep(1)
        restart_pedestal(self)
        # end = time.time()
        # self.stdout.write(f'TIME: "{end - start}"')

def check_experiment():
    # True when at least one experiment is running (status=2)
    return Experiment.objects.filter(status=2).exists()

def pedestal_start(self, id_exp):
    # Restart acquisition only if id_exp is still the active (status=2) experiment
    all_status = Experiment.objects.filter(status=2)
    check_id = False
    if len(all_status) > 0:
        check_id = all_status[0].pk
    if check_id and check_id == id_exp:
        exp = get_object_or_404(Experiment, pk=id_exp)
        name = '{}-R@{}'.format(exp.name, datetime.now().strftime('%Y-%m-%dT%H-%M-%S'))
        exp.pedestal.start_device(name_experiment=name)
        self.stdout.write(f'"{exp.name}" experiment: Pedestal acquisition was restarted')

def pedestal_reset(self, id_exp):
    all_status = Experiment.objects.filter(status=2)
    check_id = False
    if len(all_status) > 0:
        check_id = all_status[0].pk
    if check_id and check_id == id_exp:
        exp = get_object_or_404(Experiment, pk=id_exp)
        exp.pedestal.reset_device()
        self.stdout.write(f'"{exp.name}" experiment: Pedestal acquisition is resetting')

def pedestal_stop(self, id_exp):
    all_status = Experiment.objects.filter(status=2)
    check_id = False
    if len(all_status) > 0:
        check_id = all_status[0].pk
    if check_id and check_id == id_exp:
        exp = get_object_or_404(Experiment, pk=id_exp)
        exp.pedestal.stop_device()
        self.stdout.write(f'"{exp.name}" experiment: Pedestal acquisition was stopped')

def hdf5_list_content(get_file):
    table_records = np.array(get_file).tolist()
    table_dim = get_file.ndim
    table_rows = get_file.shape[0]
    if table_dim == 1 and table_rows >= 1:
        # type(table_records[0]) -> float
        return table_records
    else:
        return False
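
# Note (inferred from hdf5_read/hdf5_list_content, not documented in the original
# source): each pos@*.h5 file is expected to expose 1-D datasets at 'Data/ele_speed'
# and 'Data/azi_speed'; only the last 10 samples of each are inspected to decide
# whether the pedestal is actually moving.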
def hdf5_read(file):
    dataspeed = {"ele_speed": False, "azi_speed": False}
    with h5py.File(file, 'r') as hdf:
        for key in dataspeed:
            get = hdf.get('Data' + '/' + key)
            if get is not None:
                content = hdf5_list_content(get)
                if content:
                    # keep only the last 10 values of each speed dataset
                    dataspeed[key] = content[-10:]
    return dataspeed

def count_data(last_position):
    pattern = "pos@*.h5"
    count = 0
    list_data = []
    list_position = os.listdir(last_position)
    for entry in sorted(list_position):
        if fnmatch.fnmatch(entry, pattern):
            count = count + 1
            list_data.append(os.path.join(last_position, entry))
    if len(list_data) > 1:
        # return the second-to-last matching file when more than one exists
        list_data = list_data[-2]
    else:
        list_data = False
    return count, list_data

def response_data(datadir, old_path_datetime, old_position, new_position):
    path_position = True
    path_datetime = False
    read_files = False
    rootdir = os.path.join(datadir, 'position')
    if os.path.isdir(rootdir):
        path_datetime = path_data(rootdir)
        if path_datetime:
            if not old_path_datetime or path_datetime != old_path_datetime:
                # count the pos@*.h5 files, wait, then count again
                old_position, read_files = count_data(path_datetime)
                time.sleep(65)
                new_position, read_files = count_data(path_datetime)
            else:
                time.sleep(65)
    else:
        path_position = False
    return path_position, path_datetime, old_position, new_position, read_files

def path_data(rootdir):
    list_ = []
    for it in os.scandir(rootdir):
        if it.is_dir():
            try:
                # keep only folders named with the "%Y-%m-%dT%H-00-00" datetime format
                datetime.strptime(it.path.split("/")[-1], "%Y-%m-%dT%H-00-00")
                list_.append(it.path)
            except ValueError:
                pass
    list_ = sorted(list_, reverse=True)
    try:
        # newest datetime folder first
        return list_[0]
    except IndexError:
        return False

def check_count(datadir):
    old_numbers = 0
    new_numbers = 0
    validation = False
    path_datetime = False
    speed = {"ele_speed": False, "azi_speed": False}
    path_position, path_datetime, old_numbers, new_numbers, read_files = response_data(datadir, path_datetime, old_numbers, new_numbers)
    for u in range(2):
        if new_numbers > old_numbers:
            validation = True
            data = hdf5_read(read_files)
            for key, value in data.items():
                try:
                    if max(value) > 0.1:
                        speed[key] = True
                except TypeError:
                    # value is False when the dataset was missing or not 1-D
                    pass
            break
        else:
            if u < 1:
                path_position, path_datetime, old_numbers, new_numbers, read_files = response_data(datadir, path_datetime, old_numbers, new_numbers)
    return path_position, path_datetime, validation, speed
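
# Summary of the decision flow implemented below (descriptive comment added for
# readability; it restates the code and is not part of the original source):
#   1. Act only while an experiment with status=2 is running.
#   2. Map the experiment datadir onto the local /data mount and look for new
#      pos@*.h5 files under <datadir>/position/<YYYY-mm-ddTHH-00-00>/.
#   3. If new files arrive but both speeds stay <= 0.1, stop and start the
#      pedestal; if no new files arrive (or no datetime folder exists), reset,
#      then stop and start it.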
def restart_pedestal(self):
    if check_experiment():
        all_status = Experiment.objects.filter(status=2)
        id_exp = all_status[0].pk
        datadir_exp = all_status[0].reception_rx.datadir
        datadir_exp = datadir_exp.replace(os.environ.get('EXPOSE_NAS', '/DATA_RM/DATA'), '/data')
        datadir_exp = datadir_exp.replace('/rawdata', '')
        path_position, path_datetime, validation, speed = check_count(datadir_exp)
        if path_position:
            # Execute the process
            if validation:
                self.stdout.write('Pedestal acquisition is running')
                if speed['ele_speed'] or speed['azi_speed']:
                    self.stdout.write('Pedestal speed on Azimuth and/or Elevation is running')
                else:
                    for key, value in speed.items():
                        if not value:
                            self.stdout.write(f'Speed on {key} is <= 0.1, retry')
                    pedestal_stop(self, id_exp)
                    time.sleep(14)
                    pedestal_start(self, id_exp)
            else:
                if not path_datetime:
                    self.stdout.write('No such directory with datetime format "%Y-%m-%dT%H-00-00", retry!')
                else:
                    self.stdout.write('No file increment, retry')
                pedestal_reset(self, id_exp)
                time.sleep(14)
                pedestal_stop(self, id_exp)
                time.sleep(14)
                pedestal_start(self, id_exp)
        else:
            self.stdout.write('No such directory: position, fail!')
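
# Scheduling sketch (an assumption, not part of the original file): the command is
# meant to run every 10 minutes, which would typically be done from cron on the host
# that runs the Django project, e.g.:
#
#   */10 * * * *  cd /path/to/project && python manage.py restart_pedestal
#
# The /path/to/project placeholder is hypothetical; adjust it to the deployment.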