WIP rename query parameter names

This commit is contained in:
Dominik Werder
2021-05-27 17:26:36 +02:00
parent c9cbb20341
commit 894079d936
4 changed files with 37 additions and 36 deletions

View File

@@ -82,16 +82,17 @@ impl Collectable for MinMaxAvgScalarBinBatch {
#[derive(Debug, Serialize, Deserialize)]
pub struct MinMaxAvgScalarBinBatchCollectedJsonResult {
#[serde(rename = "tsBinEdges")]
ts_bin_edges: Vec<IsoDateTime>,
counts: Vec<u64>,
mins: Vec<f32>,
maxs: Vec<f32>,
avgs: Vec<f32>,
#[serde(skip_serializing_if = "Bool::is_false")]
#[serde(skip_serializing_if = "Bool::is_false", rename = "finalisedRange")]
finalised_range: bool,
#[serde(skip_serializing_if = "Zero::is_zero")]
#[serde(skip_serializing_if = "Zero::is_zero", rename = "missingBins")]
missing_bins: u32,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(skip_serializing_if = "Option::is_none", rename = "continueAt")]
continue_at: Option<IsoDateTime>,
}

View File

@@ -47,7 +47,7 @@ impl CacheUsage {
}
pub fn from_params(params: &BTreeMap<String, String>) -> Result<Self, Error> {
let ret = params.get("cache_usage").map_or(Ok::<_, Error>(CacheUsage::Use), |k| {
let ret = params.get("cacheUsage").map_or(Ok::<_, Error>(CacheUsage::Use), |k| {
if k == "use" {
Ok(CacheUsage::Use)
} else if k == "ignore" {
@@ -55,7 +55,7 @@ impl CacheUsage {
} else if k == "recreate" {
Ok(CacheUsage::Recreate)
} else {
Err(Error::with_msg(format!("unexpected cache_usage {:?}", k)))?
Err(Error::with_msg(format!("unexpected cacheUsage {:?}", k)))?
}
})?;
Ok(ret)
@@ -95,35 +95,35 @@ pub struct BinnedQuery {
impl BinnedQuery {
pub fn from_request(req: &http::request::Parts) -> Result<Self, Error> {
let params = netpod::query_params(req.uri.query());
let beg_date = params.get("beg_date").ok_or(Error::with_msg("missing beg_date"))?;
let end_date = params.get("end_date").ok_or(Error::with_msg("missing end_date"))?;
let disk_stats_every = params.get("disk_stats_every_kb").map_or("2000", |k| k);
let beg_date = params.get("begDate").ok_or(Error::with_msg("missing begDate"))?;
let end_date = params.get("endDate").ok_or(Error::with_msg("missing endDate"))?;
let disk_stats_every = params.get("diskStatsEveryKb").map_or("2000", |k| k);
let disk_stats_every = disk_stats_every
.parse()
.map_err(|e| Error::with_msg(format!("can not parse disk_stats_every_kb {:?}", e)))?;
.map_err(|e| Error::with_msg(format!("can not parse diskStatsEveryKb {:?}", e)))?;
let ret = BinnedQuery {
range: NanoRange {
beg: beg_date.parse::<DateTime<Utc>>()?.to_nanos(),
end: end_date.parse::<DateTime<Utc>>()?.to_nanos(),
},
bin_count: params
.get("bin_count")
.ok_or(Error::with_msg("missing bin_count"))?
.get("binCount")
.ok_or(Error::with_msg("missing binCount"))?
.parse()
.map_err(|e| Error::with_msg(format!("can not parse bin_count {:?}", e)))?,
.map_err(|e| Error::with_msg(format!("can not parse binCount {:?}", e)))?,
agg_kind: params
.get("agg_kind")
.get("aggKind")
.map_or("DimXBins1", |k| k)
.parse()
.map_err(|e| Error::with_msg(format!("can not parse agg_kind {:?}", e)))?,
.map_err(|e| Error::with_msg(format!("can not parse aggKind {:?}", e)))?,
channel: channel_from_params(&params)?,
cache_usage: CacheUsage::from_params(&params)?,
disk_stats_every: ByteSize::kb(disk_stats_every),
report_error: params
.get("report_error")
.get("reportError")
.map_or("false", |k| k)
.parse()
.map_err(|e| Error::with_msg(format!("can not parse report_error {:?}", e)))?,
.map_err(|e| Error::with_msg(format!("can not parse reportError {:?}", e)))?,
};
info!("BinnedQuery::from_request {:?}", ret);
Ok(ret)
@@ -251,12 +251,12 @@ impl PreBinnedQuery {
fn channel_from_params(params: &BTreeMap<String, String>) -> Result<Channel, Error> {
let ret = Channel {
backend: params
.get("channel_backend")
.ok_or(Error::with_msg("missing channel_backend"))?
.get("channelBackend")
.ok_or(Error::with_msg("missing channelBackend"))?
.into(),
name: params
.get("channel_name")
.ok_or(Error::with_msg("missing channel_name"))?
.get("channelName")
.ok_or(Error::with_msg("missing channelName"))?
.into(),
};
Ok(ret)

View File

@@ -29,31 +29,31 @@
<p><strong>URL:</strong> http://sf-daqbuf-21:8380/api/4/binned</p>
<p><strong>Query parameters:</strong></p>
<ul>
<li>channel_backend (e.g. "sf-databuffer")</li>
<li>channel_name (e.g. "SLAAR-LSCP4-LAS6891:CH7:1")</li>
<li>beg_date (e.g. "2021-05-26T07:10:00.000Z")</li>
<li>end_date (e.g. "2021-05-26T07:16:00.000Z")</li>
<li>bin_count (e.g. "6")</li>
<li>channelBackend (e.g. "sf-databuffer")</li>
<li>channelName (e.g. "SLAAR-LSCP4-LAS6891:CH7:1")</li>
<li>begDate (e.g. "2021-05-26T07:10:00.000Z")</li>
<li>endDate (e.g. "2021-05-26T07:16:00.000Z")</li>
<li>binCount (e.g. "6")</li>
</ul>
<p><strong>Request header:</strong> "Accept" must be "application/json"</p>
<h4>CURL example:</h4>
<pre>
curl -H 'Accept: application/json' 'http://sf-daqbuf-21:8380/api/4/binned?channel_backend=sf-databuffer
&channel_name=SLAAR-LSCP4-LAS6891:CH7:1&beg_date=2021-05-25T00:00:00.000Z&end_date=2021-05-26T00:00:00.000Z&bin_count=3'
curl -H 'Accept: application/json' 'http://sf-daqbuf-21:8380/api/4/binned?channelBackend=sf-databuffer
&channelName=SLAAR-LSCP4-LAS6891:CH7:1&begDate=2021-05-25T00:00:00.000Z&endDate=2021-05-26T00:00:00.000Z&binCount=3'
</pre>
<h4>Partial result</h4>
<p>If the requested range takes a long time to retrieve, then a partial result with at least one bin is returned.</p>
<p>The partial result will contain the necessary information to send another request with a range that
starts with the first missing bin.</p>
<p>This information is provided by the <strong>continue_at</strong> and <strong>missing_bins</strong> fields.</p>
<p>This information is provided by the <strong>continueAt</strong> and <strong>missingBins</strong> fields.</p>
<p>This enables the user agent to begin presenting data to the user while updating the UI as new bins are retrieved.</p>
<p>Example:</p>
<pre>
{
"continue_at": "2021-05-25T16:00:00.000Z",
"missing_bins": 2,
"continueAt": "2021-05-25T16:00:00.000Z",
"missingBins": 2,
"avgs": [
340.87640380859375,
340.7442321777344,
@@ -78,7 +78,7 @@ starts with the first missing bin.</p>
239,
239
],
"ts_bin_edges": [
"tsBinEdges": [
"2021-05-25T00:00:00.000Z",
"2021-05-25T04:00:00.000Z",
"2021-05-25T08:00:00.000Z",
@@ -89,11 +89,11 @@ starts with the first missing bin.</p>
</pre>
<h4>Complete result</h4>
<p>A complete result will not have a <strong>continue_at</strong> key.</p>
<p>A complete result will not have a <strong>continueAt</strong> key.</p>
<h4>Finalised range</h4>
<p>If the server can determine that no more data will be added to the requested time range
then it will add the flag <strong>finalised_range: true</strong> to the response.</p>
then it will add the flag <strong>finalisedRange: true</strong> to the response.</p>
<a id="search-channel"></a>

View File

@@ -55,9 +55,9 @@ pub async fn get_binned(
let date_fmt = "%Y-%m-%dT%H:%M:%S.%3fZ";
let uri = format!(
concat!(
"http://{}:{}/api/4/binned?channel_backend={}&channel_name={}",
"&beg_date={}&end_date={}&bin_count={}&cache_usage={}",
"&disk_stats_every_kb={}&report_error=true",
"http://{}:{}/api/4/binned?channelBackend={}&channelName={}",
"&begDate={}&endDate={}&binCount={}&cacheUsage={}",
"&diskStatsEveryKb={}&reportError=true",
),
host,
port,