generalConfig, config: use pathlib

- switch to pathlib
- represent multiple confdirs as list of Paths internally, not string with pathsep

Change-Id: I1418e561641e27cd904af0762be056cd66ee1919
Reviewed-on: https://forge.frm2.tum.de/review/c/secop/frappy/+/34464
Reviewed-by: Enrico Faulhaber <enrico.faulhaber@frm2.tum.de>
Tested-by: Jenkins Automated Tests <pedersen+jenkins@frm2.tum.de>
Reviewed-by: Alexander Zaft <a.zaft@fz-juelich.de>
Reviewed-by: Markus Zolliker <markus.zolliker@psi.ch>

parent fe0aa3d7d5
commit 13db0d6bc6
@@ -20,6 +20,7 @@
 # *****************************************************************************

 import os
+from pathlib import Path
 import re
 from collections import Counter

@@ -128,8 +129,7 @@ class Config(dict):


 def process_file(filename, log):
-    with open(filename, 'rb') as f:
-        config_text = f.read()
+    config_text = filename.read_bytes()
     node = NodeCollector()
     mods = Collector(Mod)
     ns = {'Node': node.add, 'Mod': mods.add, 'Param': Param, 'Command': Param, 'Group': Group}
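As an aside (not part of the commit): Path.read_bytes() is the pathlib shorthand for the removed open/read pair, and process_file() now receives a Path. A minimal runnable sketch, with an invented file name and content:

    from pathlib import Path
    import os
    import tempfile

    # create a throw-away file to read back (name and content are invented)
    fd, name = tempfile.mkstemp(suffix='_cfg.py')
    os.write(fd, b"Node('demo', 'a demo node')\n")
    os.close(fd)

    cfgfile = Path(name)
    # the new one-liner in process_file() ...
    data_new = cfgfile.read_bytes()
    # ... reads the same bytes as the removed open()/read() pair
    with open(cfgfile, 'rb') as f:
        data_old = f.read()
    assert data_new == data_old
    cfgfile.unlink()  # clean up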
@@ -149,22 +149,21 @@ def process_file(filename, log):
 def to_config_path(cfgfile, log):
     candidates = [cfgfile + e for e in ['_cfg.py', '.py', '']]
     if os.sep in cfgfile:  # specified as full path
-        filename = cfgfile if os.path.exists(cfgfile) else None
+        file = Path(cfgfile) if Path(cfgfile).exists() else None
     else:
-        for filename in [os.path.join(d, candidate)
-                         for d in generalConfig.confdir.split(os.pathsep)
+        for file in [Path(d) / candidate
+                     for d in generalConfig.confdir
                      for candidate in candidates]:
-            if os.path.exists(filename):
+            if file.exists():
                 break
         else:
-            filename = None
-    if filename is None:
+            file = None
+    if file is None:
         raise ConfigError(f"Couldn't find cfg file {cfgfile!r} in {generalConfig.confdir}")
-    if not filename.endswith('_cfg.py'):
-        log.warning("Config files should end in '_cfg.py': %s", os.path.basename(filename))
-    log.debug('Using config file %s for %s', filename, cfgfile)
-    return filename
+    if not file.name.endswith('_cfg.py'):
+        log.warning("Config files should end in '_cfg.py': %s", file.name)
+    log.debug('Using config file %s for %s', file, cfgfile)
+    return file


 def load_config(cfgfiles, log):
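For illustration only, the lookup above rewritten as a standalone function: generalConfig.confdir is now a list of Path objects to iterate, rather than a pathsep-joined string to split. This is a sketch of the same search logic, not the frappy code itself; the directory names are invented:

    from pathlib import Path

    def find_cfg(cfgfile, confdirs):
        """Return the first existing candidate path, or None.

        confdirs is a list of Path objects, as generalConfig.confdir now is.
        """
        candidates = [cfgfile + e for e in ['_cfg.py', '.py', '']]
        for file in [Path(d) / candidate for d in confdirs for candidate in candidates]:
            if file.exists():
                return file
        return None

    # hypothetical directories, e.g. a local override dir checked before the global one
    print(find_cfg('demo', [Path('~/frappy/cfg').expanduser(), Path('/etc/frappy')]))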
@@ -31,6 +31,7 @@ import threading
 import traceback
 from configparser import ConfigParser
 from os import environ, path
+from pathlib import Path


 SECoP_DEFAULT_PORT = 10767
@@ -70,28 +71,32 @@ class GeneralConfig:
         """
         cfg = {}
         mandatory = 'piddir', 'logdir', 'confdir'
-        repodir = path.abspath(path.join(path.dirname(__file__), '..', '..'))
+        repodir = Path(__file__).parents[2].expanduser().resolve()
         # create default paths
-        if (path.splitext(sys.executable)[1] == ".exe"
-                and not path.basename(sys.executable).startswith('python')):
+        if (Path(sys.executable).suffix == ".exe"
+                and not Path(sys.executable).name.startswith('python')):
             # special MS windows environment
-            self.update_defaults(piddir='./', logdir='./log', confdir='./')
+            self.update_defaults(piddir=Path('./'), logdir=Path('./log'), confdir=Path('./'))
         elif path.exists(path.join(repodir, 'cfg')):
             # running from git repo
-            self.set_default('confdir', path.join(repodir, 'cfg'))
+            self.set_default('confdir', repodir / 'cfg')
             # take logdir and piddir from <repodir>/cfg/generalConfig.cfg
         else:
             # running on installed system (typically with systemd)
-            self.update_defaults(piddir='/var/run/frappy', logdir='/var/log', confdir='/etc/frappy')
+            self.update_defaults(
+                piddir=Path('/var/run/frappy'),
+                logdir=Path('/var/log'),
+                confdir=Path('/etc/frappy')
+            )
         if configfile is None:
             configfile = environ.get('FRAPPY_CONFIG_FILE')
             if configfile:
-                configfile = path.expanduser(configfile)
-                if not path.exists(configfile):
+                configfile = Path(configfile).expanduser()
+                if not configfile.exists():
                     raise FileNotFoundError(configfile)
             else:
-                configfile = path.join(self['confdir'], 'generalConfig.cfg')
-                if not path.exists(configfile):
+                configfile = self['confdir'] / 'generalConfig.cfg'
+                if not configfile.exists():
                     configfile = None
         if configfile:
             parser = ConfigParser()
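Side note on the pathlib accessors used above: Path(__file__).parents[2] walks two directory levels up like the former join(dirname(__file__), '..', '..'), and .suffix / .name replace splitext / basename in the Windows check. A small sketch; the module location below is made up:

    import sys
    from pathlib import Path

    repofile = Path('/opt/frappy/frappy/lib/__init__.py')  # invented location of the module
    print(repofile.parents[2])            # /opt/frappy -- two levels above the file

    exe = Path(sys.executable)
    print(exe.suffix)                     # '' on Unix, '.exe' on a Windows install
    print(exe.name.startswith('python'))  # True for a plain CPython interpreter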
@@ -100,16 +105,16 @@ class GeneralConfig:
             # only the FRAPPY section is relevant, other sections might be used by others
             for key, value in parser['FRAPPY'].items():
                 if value.startswith('./'):
-                    cfg[key] = path.abspath(path.join(repodir, value))
+                    cfg[key] = (repodir / value).absolute()
                 else:
                     # expand ~ to username, also in path lists separated with ':'
                     cfg[key] = ':'.join(path.expanduser(v) for v in value.split(':'))
             if cfg.get('confdir') is None:
-                cfg['confdir'] = path.dirname(configfile)
+                cfg['confdir'] = configfile.parent
         for key in mandatory:
             env = environ.get(f'FRAPPY_{key.upper()}')
             if env is not None:
-                cfg[key] = env
+                cfg[key] = Path(env)
         missing_keys = [
             key for key in mandatory
             if cfg.get(key) is None and self.defaults.get(key) is None
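For context, a hedged sketch of how a './'-relative value from the [FRAPPY] section becomes an absolute Path anchored at the repository directory. The config snippet and paths are invented, and the non-'./' branch is simplified compared to the code above:

    from configparser import ConfigParser
    from pathlib import Path

    repodir = Path('/opt/frappy')          # stand-in for the detected repository root

    parser = ConfigParser()
    parser.read_string("""
    [FRAPPY]
    logdir = ./log
    piddir = /var/run/frappy
    """)

    cfg = {}
    for key, value in parser['FRAPPY'].items():
        if value.startswith('./'):
            cfg[key] = (repodir / value).absolute()   # ./log -> /opt/frappy/log on POSIX
        else:
            cfg[key] = value                          # kept as given (expansion handled elsewhere)

    print(cfg)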
@@ -119,6 +124,8 @@ class GeneralConfig:
                 raise KeyError(f"missing value for {' and '.join(missing_keys)} in {configfile}")
             raise KeyError('missing %s'
                           % ' and '.join('FRAPPY_%s' % k.upper() for k in missing_keys))
+        if isinstance(cfg['confdir'], Path):
+            cfg['confdir'] = [cfg['confdir']]
         # this is not customizable
         cfg['basedir'] = repodir
         self._config = cfg
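The two added lines make sure cfg['confdir'] is handed out as a list of Paths, so the config lookup and the sea integration can always iterate it. A simplified stand-in for that normalization (the string branch is an assumption shown only for comparison, not the frappy code):

    from pathlib import Path

    def normalize_confdir(confdir):
        """Always return a list of Path objects (simplified stand-in)."""
        if isinstance(confdir, Path):
            return [confdir]
        if isinstance(confdir, str):
            # e.g. a ':'-separated value, as the old string representation used
            return [Path(p).expanduser() for p in confdir.split(':')]
        return list(confdir)

    print(normalize_confdir(Path('/etc/frappy')))          # [PosixPath('/etc/frappy')]
    print(normalize_confdir('~/frappy/cfg:/etc/frappy'))   # two Path entries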
@@ -75,9 +75,9 @@ class PersistentMixin(Module):

     def __init__(self, name, logger, cfgdict, srv):
         super().__init__(name, logger, cfgdict, srv)
-        persistentdir = os.path.join(generalConfig.logdir, 'persistent')
+        persistentdir = generalConfig.logdir / 'persistent'
         os.makedirs(persistentdir, exist_ok=True)
-        self.persistentFile = os.path.join(persistentdir, f'{self.secNode.equipment_id}.{self.name}.json')
+        self.persistentFile = persistentdir / f'{self.secNode.equipment_id}.{self.name}.json'
         self.initData = {}  # "factory" settings
         loaded = self.loadPersistentData()
         for pname, pobj in self.parameters.items():
@@ -147,10 +147,10 @@ class PersistentMixin(Module):
                 if getattr(v, 'persistent', False)}
         if data != self.persistentData:
             self.persistentData = data
-            persistentdir = os.path.dirname(self.persistentFile)
-            tmpfile = self.persistentFile + '.tmp'
-            if not os.path.isdir(persistentdir):
-                os.makedirs(persistentdir, exist_ok=True)
+            persistentdir = self.persistentFile.parent
+            tmpfile = self.persistentFile.parent / (self.persistentFile.name + '.tmp')
+            if not persistentdir.is_dir():
+                persistentdir.mkdir(parents=True, exist_ok=True)
             try:
                 with open(tmpfile, 'w', encoding='utf-8') as f:
                     json.dump(self.persistentData, f, indent=2)
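Aside: building the temp name as parent / (name + '.tmp') keeps the existing .json part, which with_suffix('.tmp') would drop, and mkdir(parents=True, exist_ok=True) replaces os.makedirs(..., exist_ok=True). A throw-away sketch; the locations below are invented:

    import tempfile
    from pathlib import Path

    base = Path(tempfile.mkdtemp())                       # throw-away directory for the demo
    persistent_file = base / 'persistent' / 'equip.mod.json'

    # appending keeps the .json part, as in the commit:
    tmpfile = persistent_file.parent / (persistent_file.name + '.tmp')
    print(tmpfile.name)                                   # equip.mod.json.tmp

    # with_suffix() would replace the last suffix instead of appending:
    print(persistent_file.with_suffix('.tmp').name)       # equip.mod.tmp

    # directory creation, pathlib style
    persistent_file.parent.mkdir(parents=True, exist_ok=True)
    tmpfile.write_text('{}\n', encoding='utf-8')          # placeholder content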
@@ -112,7 +112,7 @@ class Server:
             raise ConfigError('No interface specified in configuration or arguments!')

         self._cfgfiles = cfgfiles
-        self._pidfile = os.path.join(generalConfig.piddir, name + '.pid')
+        self._pidfile = generalConfig.piddir / (name + '.pid')
         signal.signal(signal.SIGINT, self.signal_handler)
         signal.signal(signal.SIGTERM, self.signal_handler)

@@ -127,9 +127,9 @@ class Server:
     def start(self):
         if not DaemonContext:
            raise ConfigError('can not daemonize, as python-daemon is not installed')
-        piddir = os.path.dirname(self._pidfile)
-        if not os.path.isdir(piddir):
-            os.makedirs(piddir)
+        piddir = self._pidfile.parent
+        if not piddir.is_dir():
+            piddir.mkdir(parents=True)
         pidfile = pidlockfile.TimeoutPIDLockFile(self._pidfile)

         if pidfile.is_locked():
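The same pattern for the pid file: composed with /, directory created with Path.mkdir (note: without exist_ok, mirroring the previous bare os.makedirs(piddir)). A sketch with invented names; the os.fspath() call at the end just shows how to hand a Path to code that still expects a string:

    import os
    import tempfile
    from pathlib import Path

    piddir = Path(tempfile.mkdtemp()) / 'pid'     # stand-in for generalConfig.piddir
    name = 'demo'

    pidfile = piddir / (name + '.pid')            # replaces os.path.join(piddir, name + '.pid')
    if not pidfile.parent.is_dir():
        pidfile.parent.mkdir(parents=True)        # no exist_ok, like the bare os.makedirs()
    pidfile.write_text('12345\n', encoding='utf-8')

    print(os.fspath(pidfile))                     # plain string for string-only consumers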
@@ -34,7 +34,7 @@ import json
 import threading
 import time
 import os
-from os.path import expanduser, join, exists
+from pathlib import Path

 from frappy.client import ProxyClient
 from frappy.datatypes import ArrayOf, BoolType, \
@@ -72,19 +72,21 @@ SERVICE_NAMES = {
     'addon': 'addons',
 }

-SEA_DIR = expanduser('~/sea')
+SEA_DIR = Path('~/sea').expanduser()
 seaconfdir = os.environ.get('FRAPPY_SEA_DIR')
-if seaconfdir is None or not exists(seaconfdir):
-    for confdir in generalConfig.confdir.split(os.pathsep):
-        seaconfdir = join(confdir, 'sea')
-        if exists(seaconfdir):
+if seaconfdir is None or not Path(seaconfdir).expanduser().absolute().exists():
+    for confdir in generalConfig.confdir:
+        seaconfdir = confdir / 'sea'
+        if seaconfdir.exists():
             break
+else:
+    seaconfdir = Path(seaconfdir).expanduser().absolute()


 def get_sea_port(instance):
     for filename in ('sea_%s.tcl' % instance, 'sea.tcl'):
         try:
-            with open(join(SEA_DIR, filename), encoding='utf-8') as f:
+            with open(SEA_DIR / filename, encoding='utf-8') as f:
                 for line in f:
                     linesplit = line.split()
                     if len(linesplit) == 3:
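A standalone sketch of the same module-level decision: take FRAPPY_SEA_DIR if it points to an existing directory (the added else-branch normalizes it to an absolute Path), otherwise look for a sea subdirectory in each entry of the confdir list. The environment variable name comes from the source; the directory list is invented:

    import os
    from pathlib import Path

    confdirs = [Path('~/frappy/cfg').expanduser(), Path('/etc/frappy')]  # stand-in list

    seaconfdir = os.environ.get('FRAPPY_SEA_DIR')
    if seaconfdir is None or not Path(seaconfdir).expanduser().absolute().exists():
        # fall back to the first <confdir>/sea that exists
        for confdir in confdirs:
            seaconfdir = confdir / 'sea'
            if seaconfdir.exists():
                break
    else:
        seaconfdir = Path(seaconfdir).expanduser().absolute()

    print(seaconfdir)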
@@ -375,16 +377,16 @@ class SeaConfigCreator(SeaClient):
             stripped, _, ext = filename.rpartition('.')
             service = SERVICE_NAMES[ext]
             seaconn = 'sea_' + service
-            cfgfile = join(seaconfdir, stripped + '_cfg.py')
-            with open(cfgfile, 'w', encoding='utf-8') as fp:
+            cfgfile = seaconfdir / (stripped + '_cfg.py')
+            with cfgfile.open('w', encoding='utf-8') as fp:
                 fp.write(CFG_HEADER % {'config': filename, 'seaconn': seaconn, 'service': service,
                                        'nodedescr': description.get(filename, filename)})
                 for obj in descr:
                     fp.write(CFG_MODULE % {'modcls': modcls[obj], 'module': obj, 'seaconn': seaconn})
             content = json.dumps(descr).replace('}, {', '},\n{').replace('[{', '[\n{').replace('}]}, ', '}]},\n\n')
             result.append('%s\n' % cfgfile)
-            with open(join(seaconfdir, filename + '.json'), 'w', encoding='utf-8') as fp:
-                fp.write(content + '\n')
+            fpath = seaconfdir / (filename + '.json')
+            fpath.write_text(content + '\n', encoding='utf-8')
             result.append('%s: %s' % (filename, ','.join(n for n in descr)))
         raise SystemExit('; '.join(result))

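Aside illustrating the two write styles used above: Path.open() for the incrementally written cfg file and Path.write_text() for the one-shot json dump. Directory and file names below are invented:

    import json
    import tempfile
    from pathlib import Path

    seaconfdir = Path(tempfile.mkdtemp())        # stand-in for the detected sea config dir
    stripped, filename = 'demo', 'demo.config'
    descr = {'tt': {'path': ''}}

    cfgfile = seaconfdir / (stripped + '_cfg.py')
    with cfgfile.open('w', encoding='utf-8') as fp:   # Path.open() instead of open(join(...))
        fp.write('# generated demo cfg\n')

    fpath = seaconfdir / (filename + '.json')
    fpath.write_text(json.dumps(descr) + '\n', encoding='utf-8')  # one-shot write, no handle kept

    print(sorted(p.name for p in seaconfdir.iterdir()))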
@@ -482,7 +484,7 @@ class SeaModule(Module):
         cfgdict['description'] = '%s@%s%s' % (
             name, json_file, '' if rel_paths == '.' else f' (rel_paths={rel_paths})')

-        with open(join(seaconfdir, json_file), encoding='utf-8') as fp:
+        with open(seaconfdir / json_file, encoding='utf-8') as fp:
             content = json.load(fp)
         descr = content[sea_object]
         if rel_paths == '*' or not rel_paths:
@@ -64,7 +64,7 @@ def direc(tmp_path_factory):
     ff.touch()
     fff.touch()
     pyfile.write_text(PY_FILE)
-    generalConfig.testinit(confdir=f'{a}:{b}', piddir=str(d))
+    generalConfig.testinit(confdir=[a, b], piddir=d)
     return d


@@ -81,7 +81,7 @@ files = [('config', 'a/config_cfg.py'),

 @pytest.mark.parametrize('file, res', files)
 def test_to_cfg_path(log, direc, file, res):
-    assert to_config_path(file, log).endswith(res)
+    assert str(to_config_path(file, log)).endswith(res)


 def test_cfg_not_existing(direc, log):
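The test change follows from to_config_path() now returning a Path, which has no endswith() method; a tiny sketch of the comparison options (the path is invented):

    from pathlib import PurePosixPath

    result = PurePosixPath('/tmp/conf/a/config_cfg.py')   # stand-in for the returned Path
    expected_tail = 'a/config_cfg.py'

    # Path objects have no endswith(); compare via str() or the parts tuple
    assert str(result).endswith(expected_tail)
    assert result.parts[-2:] == ('a', 'config_cfg.py')
    print('ok')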
@@ -132,7 +132,7 @@ def do_asserts(ret):


 def test_process_file(direc, log):
-    ret = process_file(str(direc / 'a' / 'pyfile_cfg.py'), log)
+    ret = process_file(direc / 'a' / 'pyfile_cfg.py', log)
     do_asserts(ret)


@@ -22,6 +22,7 @@
 import json
 import os
 from os.path import join
+from pathlib import Path
 import pytest
 from frappy.config import Param
 from frappy.core import Module, ScaledInteger, IntRange, StringType, StructOf
@@ -74,7 +75,7 @@ save_tests = [
 ]
 @pytest.mark.parametrize('cfg, data', save_tests)
 def test_save(tmpdir, cfg, data):
-    generalConfig.logdir = tmpdir
+    generalConfig.logdir = Path(tmpdir)

     cfg['description'] = ''
     m = Mod('m', logger, cfg, ServerStub('savetest'))
@@ -101,7 +102,7 @@ load_tests = [
 ]
 @pytest.mark.parametrize('cfg, data, written', load_tests)
 def test_load(tmpdir, cfg, data, written):
-    generalConfig.logdir = tmpdir
+    generalConfig.logdir = Path(tmpdir)

     os.makedirs(join(tmpdir, 'persistent'), exist_ok=True)
     with open(join(tmpdir, 'persistent', 'loadtest.m.json'), 'w', encoding='utf-8') as f:
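Aside: pytest's tmpdir fixture is a py.path.local, while the persistent code now works on pathlib.Path objects, hence the Path(tmpdir) wrapping in the tests. A pytest-free sketch of the same interop (the file name is taken from the test, the content is invented):

    import tempfile
    from pathlib import Path

    tmpdir = tempfile.mkdtemp()               # stand-in for pytest's tmpdir fixture

    logdir = Path(tmpdir)                     # what the tests now assign to generalConfig.logdir
    persistentdir = logdir / 'persistent'     # works because logdir is a Path, not a plain string
    persistentdir.mkdir(parents=True, exist_ok=True)

    (persistentdir / 'loadtest.m.json').write_text('{"value": 100}\n', encoding='utf-8')
    print(sorted(p.name for p in persistentdir.iterdir()))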