From 894079d9361b2f51fdded774f3f04c1709ef0d88 Mon Sep 17 00:00:00 2001
From: Dominik Werder
Date: Thu, 27 May 2021 17:26:36 +0200
Subject: [PATCH] WIP rename query parameter names

---
 disk/src/binned.rs                      |  7 ++---
 disk/src/cache.rs                       | 34 ++++++++++++-------------
 httpret/static/documentation/index.html | 26 +++++++++----------
 retrieval/src/client.rs                 |  6 ++---
 4 files changed, 37 insertions(+), 36 deletions(-)

diff --git a/disk/src/binned.rs b/disk/src/binned.rs
index 7b505bb..2333299 100644
--- a/disk/src/binned.rs
+++ b/disk/src/binned.rs
@@ -82,16 +82,17 @@ impl Collectable for MinMaxAvgScalarBinBatch {
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct MinMaxAvgScalarBinBatchCollectedJsonResult {
+    #[serde(rename = "tsBinEdges")]
     ts_bin_edges: Vec,
     counts: Vec,
     mins: Vec,
     maxs: Vec,
     avgs: Vec,
-    #[serde(skip_serializing_if = "Bool::is_false")]
+    #[serde(skip_serializing_if = "Bool::is_false", rename = "finalisedRange")]
     finalised_range: bool,
-    #[serde(skip_serializing_if = "Zero::is_zero")]
+    #[serde(skip_serializing_if = "Zero::is_zero", rename = "missingBins")]
     missing_bins: u32,
-    #[serde(skip_serializing_if = "Option::is_none")]
+    #[serde(skip_serializing_if = "Option::is_none", rename = "continueAt")]
     continue_at: Option,
 }
 
diff --git a/disk/src/cache.rs b/disk/src/cache.rs
index dfd4076..f99bd2f 100644
--- a/disk/src/cache.rs
+++ b/disk/src/cache.rs
@@ -47,7 +47,7 @@ impl CacheUsage {
     }
 
     pub fn from_params(params: &BTreeMap) -> Result {
-        let ret = params.get("cache_usage").map_or(Ok::<_, Error>(CacheUsage::Use), |k| {
+        let ret = params.get("cacheUsage").map_or(Ok::<_, Error>(CacheUsage::Use), |k| {
             if k == "use" {
                 Ok(CacheUsage::Use)
             } else if k == "ignore" {
@@ -55,7 +55,7 @@ impl CacheUsage {
             } else if k == "recreate" {
                 Ok(CacheUsage::Recreate)
             } else {
-                Err(Error::with_msg(format!("unexpected cache_usage {:?}", k)))?
+                Err(Error::with_msg(format!("unexpected cacheUsage {:?}", k)))?
             }
         })?;
         Ok(ret)
@@ -95,35 +95,35 @@ pub struct BinnedQuery {
 impl BinnedQuery {
     pub fn from_request(req: &http::request::Parts) -> Result {
         let params = netpod::query_params(req.uri.query());
-        let beg_date = params.get("beg_date").ok_or(Error::with_msg("missing beg_date"))?;
-        let end_date = params.get("end_date").ok_or(Error::with_msg("missing end_date"))?;
-        let disk_stats_every = params.get("disk_stats_every_kb").map_or("2000", |k| k);
+        let beg_date = params.get("begDate").ok_or(Error::with_msg("missing begDate"))?;
+        let end_date = params.get("endDate").ok_or(Error::with_msg("missing endDate"))?;
+        let disk_stats_every = params.get("diskStatsEveryKb").map_or("2000", |k| k);
         let disk_stats_every = disk_stats_every
             .parse()
-            .map_err(|e| Error::with_msg(format!("can not parse disk_stats_every_kb {:?}", e)))?;
+            .map_err(|e| Error::with_msg(format!("can not parse diskStatsEveryKb {:?}", e)))?;
         let ret = BinnedQuery {
             range: NanoRange {
                 beg: beg_date.parse::>()?.to_nanos(),
                 end: end_date.parse::>()?.to_nanos(),
             },
             bin_count: params
-                .get("bin_count")
-                .ok_or(Error::with_msg("missing bin_count"))?
+                .get("binCount")
+                .ok_or(Error::with_msg("missing binCount"))?
                 .parse()
-                .map_err(|e| Error::with_msg(format!("can not parse bin_count {:?}", e)))?,
+                .map_err(|e| Error::with_msg(format!("can not parse binCount {:?}", e)))?,
             agg_kind: params
-                .get("agg_kind")
+                .get("aggKind")
                 .map_or("DimXBins1", |k| k)
                 .parse()
-                .map_err(|e| Error::with_msg(format!("can not parse agg_kind {:?}", e)))?,
+                .map_err(|e| Error::with_msg(format!("can not parse aggKind {:?}", e)))?,
             channel: channel_from_params(&params)?,
             cache_usage: CacheUsage::from_params(&params)?,
             disk_stats_every: ByteSize::kb(disk_stats_every),
             report_error: params
-                .get("report_error")
+                .get("reportError")
                 .map_or("false", |k| k)
                 .parse()
-                .map_err(|e| Error::with_msg(format!("can not parse report_error {:?}", e)))?,
+                .map_err(|e| Error::with_msg(format!("can not parse reportError {:?}", e)))?,
         };
         info!("BinnedQuery::from_request {:?}", ret);
         Ok(ret)
@@ -251,12 +251,12 @@ impl PreBinnedQuery {
 fn channel_from_params(params: &BTreeMap) -> Result {
     let ret = Channel {
         backend: params
-            .get("channel_backend")
-            .ok_or(Error::with_msg("missing channel_backend"))?
+            .get("channelBackend")
+            .ok_or(Error::with_msg("missing channelBackend"))?
             .into(),
         name: params
-            .get("channel_name")
-            .ok_or(Error::with_msg("missing channel_name"))?
+            .get("channelName")
+            .ok_or(Error::with_msg("missing channelName"))?
             .into(),
     };
     Ok(ret)
diff --git a/httpret/static/documentation/index.html b/httpret/static/documentation/index.html
index 6717bf8..8c726b2 100644
--- a/httpret/static/documentation/index.html
+++ b/httpret/static/documentation/index.html
@@ -29,31 +29,31 @@

URL: http://sf-daqbuf-21:8380/api/4/binned

Query parameters:

-  • channel_backend (e.g. "sf-databuffer")
-  • channel_name (e.g. "SLAAR-LSCP4-LAS6891:CH7:1")
-  • beg_date (e.g. "2021-05-26T07:10:00.000Z")
-  • end_date (e.g. "2021-05-26T07:16:00.000Z")
-  • bin_count (e.g. "6")
+  • channelBackend (e.g. "sf-databuffer")
+  • channelName (e.g. "SLAAR-LSCP4-LAS6891:CH7:1")
+  • begDate (e.g. "2021-05-26T07:10:00.000Z")
+  • endDate (e.g. "2021-05-26T07:16:00.000Z")
+  • binCount (e.g. "6")

Request header: "Accept" must be "application/json"

CURL example:

-curl -H 'Accept: application/json' 'http://sf-daqbuf-21:8380/api/4/binned?channel_backend=sf-databuffer
-  &channel_name=SLAAR-LSCP4-LAS6891:CH7:1&beg_date=2021-05-25T00:00:00.000Z&end_date=2021-05-26T00:00:00.000Z&bin_count=3'
+curl -H 'Accept: application/json' 'http://sf-daqbuf-21:8380/api/4/binned?channelBackend=sf-databuffer
+  &channelName=SLAAR-LSCP4-LAS6891:CH7:1&begDate=2021-05-25T00:00:00.000Z&endDate=2021-05-26T00:00:00.000Z&binCount=3'
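The same request can be issued from Rust. Below is a minimal sketch, not part of this repository, assuming the reqwest and tokio crates and reusing the host and channel from the curl example above:

    use std::error::Error;

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn Error>> {
        // Same query as the curl example above, using the renamed camelCase parameters.
        let url = concat!(
            "http://sf-daqbuf-21:8380/api/4/binned?channelBackend=sf-databuffer",
            "&channelName=SLAAR-LSCP4-LAS6891:CH7:1",
            "&begDate=2021-05-25T00:00:00.000Z&endDate=2021-05-26T00:00:00.000Z&binCount=3"
        );
        let body = reqwest::Client::new()
            .get(url)
            .header("Accept", "application/json")
            .send()
            .await?
            .text()
            .await?;
        println!("{}", body);
        Ok(())
    }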
 

Partial result

If the requested range takes a long time to retrieve, a partial result with at least one bin is returned.

The partial result will contain the necessary information to send another request with a range that starts with the first missing bin.

-This information is provided by the continue_at and missing_bins fields.
+This information is provided by the continueAt and missingBins fields.

This enables the user agent to start the presentation to the user while updating the UI as new bins are retrieved.

Example:

 {
-  "continue_at": "2021-05-25T16:00:00.000Z",
-  "missing_bins": 2,
+  "continueAt": "2021-05-25T16:00:00.000Z",
+  "missingBins": 2,
   "avgs": [
     340.87640380859375,
     340.7442321777344,
@@ -78,7 +78,7 @@ starts with the first missing bin.
     239,
     239
   ],
-  "ts_bin_edges": [
+  "tsBinEdges": [
     "2021-05-25T00:00:00.000Z",
     "2021-05-25T04:00:00.000Z",
     "2021-05-25T08:00:00.000Z",
@@ -89,11 +89,11 @@ starts with the first missing bin.

Complete result

-A complete result will not have a continue_at key.
+A complete result will not have a continueAt key.
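A sketch of how a client might page through a long range: request the bins, and while the response carries a continueAt key, repeat the request with begDate set to that value. The BinnedResponse struct, the fixed channel, and the crates used (reqwest with its json feature, serde, tokio) are illustrative assumptions, not part of this API:

    use serde::Deserialize;

    // Illustrative view of the response; only the fields needed for paging.
    #[derive(Deserialize)]
    struct BinnedResponse {
        #[serde(rename = "continueAt", default)]
        continue_at: Option<String>,
        #[serde(rename = "missingBins", default)]
        missing_bins: u32,
    }

    // Request one page of bins; begDate moves forward on every partial result.
    async fn fetch_binned(beg_date: &str, end_date: &str) -> Result<BinnedResponse, reqwest::Error> {
        let url = format!(
            "http://sf-daqbuf-21:8380/api/4/binned?channelBackend=sf-databuffer\
             &channelName=SLAAR-LSCP4-LAS6891:CH7:1&begDate={}&endDate={}&binCount=3",
            beg_date, end_date
        );
        reqwest::Client::new()
            .get(url)
            .header("Accept", "application/json")
            .send()
            .await?
            .json::<BinnedResponse>()
            .await
    }

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        let mut beg = "2021-05-25T00:00:00.000Z".to_string();
        let end = "2021-05-26T00:00:00.000Z";
        loop {
            let page = fetch_binned(&beg, end).await?;
            println!("bins still missing: {}", page.missing_bins);
            match page.continue_at {
                Some(next) => beg = next, // partial result: continue at the first missing bin
                None => break,            // complete result: no continueAt key
            }
        }
        Ok(())
    }

Each iteration can merge the newly returned bins into the UI before the next request is sent.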

Finalised range

If the server can determine that no more data will be added to the requested time range
-then it will add the flag finalised_range: true to the response.
+then it will add the flag finalisedRange: true to the response.
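On the client side, the renamed keys can be mapped back to snake_case struct fields with the same serde rename attributes the server uses. A sketch assuming serde and serde_json; only the key names come from this documentation, the field types are assumptions for illustration:

    use serde::Deserialize;

    // Illustrative client-side mirror of the response keys; field types are guesses.
    #[derive(Debug, Deserialize)]
    struct BinnedJson {
        #[serde(rename = "tsBinEdges")]
        ts_bin_edges: Vec<String>,
        counts: Vec<u64>,
        avgs: Vec<f64>,
        #[serde(rename = "finalisedRange", default)]
        finalised_range: bool,
        #[serde(rename = "missingBins", default)]
        missing_bins: u32,
        #[serde(rename = "continueAt", default)]
        continue_at: Option<String>,
    }

    fn main() -> Result<(), serde_json::Error> {
        // A finalised, complete response: finalisedRange is present, continueAt is not.
        let body = r#"{
            "tsBinEdges": ["2021-05-25T00:00:00.000Z", "2021-05-25T04:00:00.000Z"],
            "counts": [239],
            "avgs": [340.87640380859375],
            "finalisedRange": true
        }"#;
        let parsed: BinnedJson = serde_json::from_str(body)?;
        assert!(parsed.finalised_range && parsed.continue_at.is_none());
        assert_eq!(parsed.missing_bins, 0);
        println!("{:?}", parsed);
        Ok(())
    }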

diff --git a/retrieval/src/client.rs b/retrieval/src/client.rs
index 26df6a8..89e2163 100644
--- a/retrieval/src/client.rs
+++ b/retrieval/src/client.rs
@@ -55,9 +55,9 @@ pub async fn get_binned(
     let date_fmt = "%Y-%m-%dT%H:%M:%S.%3fZ";
     let uri = format!(
         concat!(
-            "http://{}:{}/api/4/binned?channel_backend={}&channel_name={}",
-            "&beg_date={}&end_date={}&bin_count={}&cache_usage={}",
-            "&disk_stats_every_kb={}&report_error=true",
+            "http://{}:{}/api/4/binned?channelBackend={}&channelName={}",
+            "&begDate={}&endDate={}&binCount={}&cacheUsage={}",
+            "&diskStatsEveryKb={}&reportError=true",
         ),
         host,
        port,
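For reference, a sketch of the URL shape that the renamed format string in retrieval/src/client.rs produces once the placeholders are filled in. The host, port, channel and range values below are stand-ins; cacheUsage and diskStatsEveryKb are shown with the defaults the server applies ("use" and 2000):

    fn main() {
        // Stand-in values; in client.rs these come from the function arguments.
        let (host, port) = ("sf-daqbuf-21", 8380);
        let (backend, name) = ("sf-databuffer", "SLAAR-LSCP4-LAS6891:CH7:1");
        let (beg, end) = ("2021-05-25T00:00:00.000Z", "2021-05-26T00:00:00.000Z");
        let (bins, cache, disk_stats_kb) = (3, "use", 2000);
        let uri = format!(
            concat!(
                "http://{}:{}/api/4/binned?channelBackend={}&channelName={}",
                "&begDate={}&endDate={}&binCount={}&cacheUsage={}",
                "&diskStatsEveryKb={}&reportError=true",
            ),
            host, port, backend, name, beg, end, bins, cache, disk_stats_kb
        );
        println!("{}", uri);
        // Prints: http://sf-daqbuf-21:8380/api/4/binned?channelBackend=sf-databuffer&channelName=...
    }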