Relative CLI datetime, cache clear api, work on cache read

This commit is contained in:
Dominik Werder
2021-05-27 12:07:44 +02:00
parent b3b2b3e4f7
commit 7aecf59195
11 changed files with 409 additions and 232 deletions

View File

@@ -10,7 +10,7 @@
</head>
<body>
<h1>Retrieval 4.0 Documentation</h1>
<h1>Retrieval Documentation</h1>
<h2>HTTP API documentation</h2>
@@ -27,172 +27,73 @@
<p><strong>URL:</strong> https://data-api.psi.ch/api/4/binned</p>
<p><strong>Query parameters:</strong></p>
<ul>
<li>channel_backend</li>
<li>channel_name</li>
<li>beg_date</li>
<li>end_date</li>
<li>bin_count</li>
<li>channel_backend (e.g. "sf-databuffer")</li>
<li>channel_name (e.g. "SLAAR-LSCP4-LAS6891:CH7:1")</li>
<li>beg_date (e.g. "2021-05-26T07:10:00.000Z")</li>
<li>end_date (e.g. "2021-05-26T07:16:00.000Z")</li>
<li>bin_count (e.g. "6")</li>
</ul>
<p><strong>Request header:</strong> "Accept" must be "application/json"</p>
<p><strong>Example:</strong></p>
<pre>http://sf-daqbuf-21:8380/api/4/binned?channel_backend=sf-databuffer&channel_name=SLAAR-LSCP4-LAS6891:CH7:1&beg_date=2021-05-21T00:00:00.000Z&end_date=2021-05-21T02:00:00.000Z&bin_count=20</pre>
<p><strong>Result body example:</strong></p>
<pre>
[
{
"backend": "sf-databuffer",
"channels": [
"SARES20-LSCP9:CH0:2",
"SARES20-LSCP9:CH0:1"
]
},
{
"backend": "hipa-archive",
"channels": [],
"error": {
"code": "Error" // can be: "Error" | "Timeout" (more to be added in the future)
}
}
]
</pre>
<h4>CURL example:</h4>
<pre>
curl -H 'Accept: application/json' 'http://sf-daqbuf-21:8380/api/4/binned?channel_backend=sf-databuffer&channel_name=SLAAR-LSCP4-LAS6891:CH7:1&beg_date=2021-05-21T00:00:00.000Z&end_date=2021-05-21T02:00:00.000Z&bin_count=20'
curl -H 'Accept: application/json' 'http://sf-daqbuf-21:8380/api/4/binned?channel_backend=sf-databuffer
&channel_name=SLAAR-LSCP4-LAS6891:CH7:1&beg_date=2021-05-25T00:00:00.000Z&end_date=2021-05-26T00:00:00.000Z&bin_count=3'
</pre>
<p>Answer:</p>
<h4>Partial result</h4>
<p>Note the keys <strong>continue_at</strong> and <strong>missing_bins</strong>.</p>
<pre>
{
"counts": [
458,
459,
458,
459,
459,
458,
459,
458,
459,
459,
458,
459,
458,
459,
458,
459,
459,
458,
459,
458,
459,
458,
459,
459
],
"ts_bin_edges": [
"2021-05-21T00:00:00.000Z",
"2021-05-21T00:05:00.000Z",
"2021-05-21T00:10:00.000Z",
"2021-05-21T00:15:00.000Z",
"2021-05-21T00:20:00.000Z",
"2021-05-21T00:25:00.000Z",
"2021-05-21T00:30:00.000Z",
"2021-05-21T00:35:00.000Z",
"2021-05-21T00:40:00.000Z",
"2021-05-21T00:45:00.000Z",
"2021-05-21T00:50:00.000Z",
"2021-05-21T00:55:00.000Z",
"2021-05-21T01:00:00.000Z",
"2021-05-21T01:05:00.000Z",
"2021-05-21T01:10:00.000Z",
"2021-05-21T01:15:00.000Z",
"2021-05-21T01:20:00.000Z",
"2021-05-21T01:25:00.000Z",
"2021-05-21T01:30:00.000Z",
"2021-05-21T01:35:00.000Z",
"2021-05-21T01:40:00.000Z",
"2021-05-21T01:45:00.000Z",
"2021-05-21T01:50:00.000Z",
"2021-05-21T01:55:00.000Z",
"2021-05-21T02:00:00.000Z"
]
"continue_at": "2021-05-25T16:00:00.000Z",
"missing_bins": 2,
"avgs": [
340.87640380859375,
340.7442321777344,
340.58685302734375,
341.04608154296875
],
"counts": [
143076,
143077,
143076,
143076
],
"maxs": [
452,
452,
459,
458
],
"mins": [
231,
240,
239,
239
],
"ts_bin_edges": [
"2021-05-25T00:00:00.000Z",
"2021-05-25T04:00:00.000Z",
"2021-05-25T08:00:00.000Z",
"2021-05-25T12:00:00.000Z",
"2021-05-25T16:00:00.000Z"
]
}
</pre>
<h4>Complete result</h4>
<p>A complete result will not have a <strong>continue_at</strong> key.</p>
<h4>Finalised range</h4>
<p>If the server can determine that no more data will be added to the requested time range
then it will add the flag <strong>finalised_range</strong> to the response.</p>
<a name="channel-search-configs"></a>
<h2>Channel Search, with return of configuration information</h2>
<p><strong>Method:</strong> POST</p>
<p><strong>URL:</strong> https://data-api.psi.ch/api/1/channels/config</p>
<p><strong>Request body:</strong> JSON with search parameters</p>
<p><strong>Request body outline:</strong></p>
<pre>
{
"regex": "[Optional: Regular expression to search in channel name]",
"sourceRegex": "[Optional: Search in sourcename of the channel]",
"descriptionRegex": "[Optional: Search in the channel's description]",
"backends": ["gls-archive", "hipa-archive", "sf-databuffer"]
}
</pre>
<p><strong>Result body example:</strong></p>
<p>Assuming that "hipa-archive" would be unavailable:</p>
<pre>
[
{
"backend": "sf-databuffer",
"channels": [
{
"backend": "sf-databuffer",
"description": "",
"name": "SARES20-LSCP9:CH0:2",
"shape": [
512
],
"source": "tcp://SARES20-CVME-01:9999",
"type": "Float32",
"unit": ""
},
{
"backend": "sf-databuffer",
"description": "",
"name": "SARES20-LSCP9:CH0:1",
"shape": [
512
],
"source": "tcp://SARES20-CVME-01:9999",
"type": "Int16",
"unit": ""
}
]
},
{
"backend": "hipa-archive",
"channels": [],
"error": {
"code": "Error" // can be: "Error" | "Timeout" (more to be added in the future)
}
}
]
</pre>
<p>Notes:</p>
<p>The search constraints are AND'ed together.</p>
<p>If some backend responds with an error, that error is indicated by the error key in the affected backend (see example above).</p>
<h4>CURL example:</h4>
<pre>
QUERY='{ "regex": "LSCP9:CH0", "backends": ["sf-databuffer"] }'
curl -H 'Content-Type: application/json' -H 'Accept: application/json' -d "$QUERY" https://data-api.psi.ch/api/1/channels/config
</pre>
<h2>Feedback and comments</h2>
<p>Feedback is very much appreciated:</p>
<h2>Feedback and comments very much appreciated!</h2>
<p>dominik.werder@psi.ch</p>
<p>Alternatively, please assign me a JIRA ticket.</p>
<div id="footer"></div>
</body>
</html>

View File

@@ -24,29 +24,6 @@ function load_status_main(ev) {
.then(kk => {
const js = kk[0];
const ts2 = kk[1];
if (false) {
const response = document.getElementById("response");
// Different ways to do the same thing:
//response.querySelectorAll("*").forEach(n => n.remove());
//response.innerHTML = "";
response.textContent = "";
while (response.firstChild) {
response.removeChild(response.lastChild);
response.lastChild.remove();
}
response.replaceChildren();
//response.replaceChild();
//JSON.stringify(js, null, 2);
//for (let machine of js) {
// console.log(typeof(machine));
//}
const dat2 = js.hosts;
sort_default(dat2);
response.appendChild(render_retrieval_metrics_as_table(dat2));
response.appendChild(render_host_memory_as_table(dat2));
//response.appendChild(render_host_memStd_as_table(dat2));
response.appendChild(render_host_bufferPools_as_table(dat2));
}
{
let b = document.getElementById("load_status");
b.innerHTML = "Loaded (" + (ts2 - ts1) + " ms)";
@@ -60,7 +37,78 @@ function load_status_main(ev) {
});
}
var g_config = {
// Remove every child node of an element.
// NOTE(review): the original referenced an undefined global `response` and
// called `replaceChild()` with no arguments, which throws a TypeError.
// The target now defaults to the #response element so any existing
// zero-argument call keeps working.
function clear_element(el = document.getElementById("response")) {
  if (el == null) return;
  // replaceChildren() with no arguments removes all children in one call;
  // the other methods the original listed (querySelectorAll + remove,
  // innerHTML = "") are equivalent but redundant.
  el.replaceChildren();
}
// Sort the hosts array in place: primary key `inst`, secondary key `host`.
// hosts: array of objects carrying string-comparable `inst` and `host`.
function sort_default(hosts) {
  hosts.sort((a, b) => {
    if (a.inst < b.inst) return -1;
    if (a.inst > b.inst) return +1;
    if (a.host < b.host) return -1;
    if (a.host > b.host) return +1;
    // The original comparator fell through and returned undefined for
    // equal keys; the sort comparator contract requires 0 here.
    return 0;
  });
}
// Pretty-print a JSON-serializable value into the #response element.
// js: the parsed response payload.
function show_json_response(js) {
  const response = document.getElementById("response");
  // Assigning textContent replaces all existing child nodes, so the
  // original's preceding `textContent = ""` plus removeChild loop was
  // dead code and has been dropped.
  response.textContent = JSON.stringify(js, null, 2);
}
// Click handler for the "Clear Cache" button: calls the clear_cache
// endpoint, renders the JSON response, and reports the elapsed time.
// ev: click event; ev.target must carry a data-btn-label attribute.
function clear_cache_all(ev) {
  const ts1 = Date.now();
  const btn = ev.target;
  btn.classList.remove("loaded");
  btn.classList.add("loading");
  btn.value = btn.dataset.btnLabel + " (loading)";
  // NOTE(review): the original built an unused `body` object; GET requests
  // carry no body, so it has been removed together with the commented-out
  // `body:` entry.
  const fetch_init = {
    method: "get",
    /*headers: {
    retrieval_instance: document.getElementById("retrieval_instance").value,
    },*/
  };
  fetch(g_config.api_base + "gather/clear_cache", fetch_init)
    .then(x => Promise.all([x.json(), Date.now()]))
    .then(g_config.ui_delay_test)
    .then(g_config.ui_delay_blink)
    .then(kk => {
      const js = kk[0];
      const ts2 = kk[1];
      show_json_response(js);
      const status = document.getElementById("load_status");
      status.innerHTML = "Loaded (" + (ts2 - ts1) + " ms)";
    })
    .catch(err => {
      // Surface fetch/parse failures instead of failing silently.
      const status = document.getElementById("load_status");
      status.innerHTML = "Error: " + err;
    })
    .finally(() => {
      // Restore the button on success AND failure; the original only did
      // this in the success path, so a network error left the button
      // stuck at "(loading)" forever.
      btn.classList.remove("loading");
      btn.classList.add("loaded");
      btn.setAttribute("value", btn.dataset.btnLabel);
    });
}
const g_config = {
api_base: "http://localhost:8059/api/4/",
ui_delay_test: x => x,
ui_delay_blink: x => new Promise(resolve => setTimeout(() => resolve(x), 50)),
@@ -72,6 +120,8 @@ function config_for_test() {
}
// One-time UI setup, run from the window "load" handler.
function init() {
  // keydown event..
  const clear_btn = document.getElementById("btn_clear_cache");
  clear_btn.addEventListener("click", clear_cache_all);
}
window.addEventListener("load", ev => {
@@ -79,7 +129,7 @@ window.addEventListener("load", ev => {
config_for_test();
}
init();
const init_load_ele = document.getElementById("btn_load");
const init_load_ele = document.getElementById("none");
if (init_load_ele != null) {
init_load_ele.click();
}

View File

@@ -1,5 +1,5 @@
<!doctype html>
<html>
<html lang="en">
<head>
<meta charset="utf-8"/>
<title>Main Status</title>
@@ -8,11 +8,15 @@
<script src="script.js" type="text/javascript"></script>
</head>
<body>
<h1>Retrieval - Main Status</h1>
<p>Event handlers are attached in the window "load" event handler.</p>
<p class="buttonrow">
<input type="button" id="btn_load" data-btn-label="Reload Overview" value="Reload" onclick="load_status_main(event)"/>
<input id="retrieval_instance" type="text" value="main">
<input type="button" id="btn_load" data-btn-label="Reload" value="Reload"/>
<input type="button" id="btn_clear_cache" data-btn-label="Clear Cache" value="Clear Cache"/>
<!--<input id="retrieval_instance" type="text" value="main">-->
</p>
<p id="load_status"></p>

View File

@@ -23,7 +23,7 @@ p {
body {
font-family: monospace;
font-size: 80%;
font-size: 100%;
line-height: 1.4;
color: #000;
}
@@ -72,12 +72,12 @@ code#output {
}
p#load_status {
margin-top: 10em;
margin-top: 0em;
}
p.buttonrow {
position: fixed;
margin-top: 20px;
--position: fixed;
--margin-top: 20px;
}
div#footer {