forked from archiver_config/sf_databuffer
started bufferutils command
bufferutils.py  220 lines  (new file)
@@ -0,0 +1,220 @@
import argparse
import json
import logging
import re
import sys
from io import StringIO
from pathlib import Path

import requests

base_directory = Path(".")
upload_url = "https://dispatcher-api.psi.ch/sf/configuration/upload"
# upload_url = "http://localhost:1234"


def _remove_comments(text):
    """Remove C-style comments.

    text: blob of text with comments (can include newlines)
    returns: text with comments removed

    # Stolen from https://www.saltycrane.com/blog/2007/11/remove-c-comments-python/
    """
    pattern = r"""
                            ## --------- COMMENT ---------
          /\*               ## Start of /* ... */ comment
          [^*]*\*+          ## Non-* followed by 1-or-more *'s
          (                 ##
            [^/*][^*]*\*+   ##
          )*                ## 0-or-more things which don't start with /
                            ##   but do end with '*'
          /                 ## End of /* ... */ comment
        |                   ## -OR- various things which aren't comments:
          (                 ##
                            ## ------ " ... " STRING ------
            "               ## Start of " ... " string
            (               ##
              \\.           ## Escaped char
            |               ## -OR-
              [^"\\]        ## Non "\ characters
            )*              ##
            "               ## End of " ... " string
          |                 ## -OR-
                            ##
                            ## ------ ' ... ' STRING ------
            '               ## Start of ' ... ' string
            (               ##
              \\.           ## Escaped char
            |               ## -OR-
              [^'\\]        ## Non '\ characters
            )*              ##
            '               ## End of ' ... ' string
          |                 ## -OR-
                            ##
                            ## ------ ANYTHING ELSE -------
            .               ## Any other char
            [^/"'\\]*       ## Chars which don't start a comment, string
          )                 ## or escape
        """
    regex = re.compile(pattern, re.VERBOSE | re.MULTILINE | re.DOTALL)
    non_comments = [m.group(2) for m in regex.finditer(text) if m.group(2)]
    return "".join(non_comments)


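# Illustrative example of the comment stripping above (not taken from the original
# sources; input and output are hypothetical): string literals are preserved even
# when they contain comment markers.
#   _remove_comments('{"a": 1 /* comment */, "b": "/*not a comment*/"}')
#   returns '{"a": 1 , "b": "/*not a comment*/"}'
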
def _load_file(file_path):
    with open(file_path) as file_h:
        text = file_h.read()
    text = _remove_comments(text)
    config = json.loads(text)
    return config


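# What such a config file is assumed to look like (hypothetical example, not a
# file from this repository): JSON plus /* ... */ comments, which
# _remove_comments strips before json.loads:
#
#   /* beamline cameras */
#   {
#       "sources": [
#           {"stream": "tcp://example-host:9000", "labels": ["example-label"]}
#       ]
#   }
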
def remove_labels(sources):
    """
    Remove labels from the source config
    :param sources: source config
    :return: source config without labels
    """
    new_sources = []
    for source in sources["sources"]:
        # note: pop() removes the key from the original source dict in place
        source.pop('labels', None)
        new_sources.append(source)
    return {"sources": new_sources}


def remove_labeled_source(sources, label):
    """
    Remove source(s) with a specific label
    :param sources: source config
    :param label: label to filter out
    :return: source config without the source(s) carrying the given label
    """
    return {"sources": [x for x in sources["sources"]
                        if "labels" not in x or label not in x["labels"]]}


def get_labels(sources):
    """
    Retrieve all labels used in the source configurations
    :param sources: source config
    :return: set of labels in use
    """
    return {item for x in sources["sources"] if "labels" in x for item in x["labels"]}


def get_labeled_sources(sources, label):
    """
    Get source(s) with the given label
    :param sources: source config
    :param label: label to look for
    :return: list of source configs that contain the label
    """
    return [x for x in sources["sources"] if "labels" in x and label in x["labels"]]


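# How the label helpers compose (hypothetical data, for illustration only):
#   sources = {"sources": [{"stream": "tcp://example:9000", "labels": ["camera"]},
#                          {"stream": "tcp://example:9001"}]}
#   get_labels(sources)                      -> {"camera"}
#   get_labeled_sources(sources, "camera")   -> [the first entry]
#   remove_labeled_source(sources, "camera") -> {"sources": [the second entry]}
#   remove_labels(sources)                   -> both entries, with "labels" stripped
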
def read_files(files_dir, file_type):
    """
    Read sources or policies files
    :param files_dir: directory containing the config files
    :param file_type: "sources" or "policies"
    :return: dict with a single file_type key holding the merged entries
    """
    entries = []
    for file in files_dir.iterdir():
        logging.info(f"Read file: {file}")
        config = _load_file(file)
        entries.extend(config[file_type])
    return {file_type: entries}


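# Assumed on-disk layout, relative to base_directory (inferred from main() below):
#   sources/    one or more files, each a commented-JSON {"sources": [...]}
#   policies/   one or more files, each a commented-JSON {"policies": [...]}
# read_files() merges the per-file lists into a single dict per file_type.
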
def upload_sources_and_policies(sources, policies):
    """
    Upload sources and policies definition to the data/imagebuffer
    :param sources: sources to upload
    :param policies: policies to upload
    :return:
    """
    upload_files = [("files", ("all.sources", StringIO(json.dumps(sources)))),
                    ("files", ("all.policies", StringIO(json.dumps(policies))))]

    response = requests.post(upload_url, files=upload_files)

    if response.ok:
        print("Upload completed successfully!")
        # print(response.text)
    else:
        print(f"Something went wrong! ({response.status_code} {response.reason})")


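# The request above is a multipart/form-data POST with two parts, both under the
# field name "files". A roughly equivalent command line (sketch only, not
# verified against the dispatcher API) would be:
#   curl -F "files=@all.sources" -F "files=@all.policies" \
#        https://dispatcher-api.psi.ch/sf/configuration/upload
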
def main():
    parser = argparse.ArgumentParser(description="Utility commands to work with the databuffer",
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     epilog="--------\n\n"
                                            "Examples:\n"
                                            "bufferutils upload\n"
                                            "bufferutils restart --label <label>\n")
    subparsers = parser.add_subparsers(title='command',
                                       description='valid commands',
                                       dest='command',
                                       help='command to execute')

    parser_upload = subparsers.add_parser('upload',
                                          help="upload configuration",
                                          formatter_class=argparse.RawTextHelpFormatter)

    parser_restart = subparsers.add_parser('restart',
                                           help="restart a source",
                                           formatter_class=argparse.RawTextHelpFormatter)
    parser_restart.add_argument('-l',
                                '--label',
                                default=None,
                                help="label that identifies the source(s) to restart")

    parser_list = subparsers.add_parser('list',
                                        help="list",
                                        formatter_class=argparse.RawTextHelpFormatter)
    parser_list.add_argument('--label',
                             action="store_true",
                             help="list labels")

    arguments = parser.parse_args()

    #
    # UPLOAD
    if arguments.command == 'upload':
        print("upload")
        sources = read_files(base_directory / Path("sources"), "sources")
        policies = read_files(base_directory / Path("policies"), "policies")

        # Just to make sure that the additional labels entry does not break the backend - remove it
        # sources = remove_labels(sources)

        upload_sources_and_policies(sources, policies)

    #
    # RESTART
    elif arguments.command == 'restart':
        print("restart")
        # restart is not implemented yet

    #
    # LIST
    elif arguments.command == 'list':
        if arguments.label:
            sources = read_files(base_directory / Path("sources"), "sources")
            for label in get_labels(sources):
                print(label)
        else:
            print("Not yet implemented")
            parser_list.print_usage()

    else:
        parser.print_usage()
        return -1


if __name__ == '__main__':
    sys.exit(main())
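
# Typical invocations (assuming the script is run directly; the --help epilog
# above lists the same commands under the name "bufferutils"):
#   python bufferutils.py upload
#   python bufferutils.py list --label
#   python bufferutils.py restart --label <label>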