Factor out stream kind dependence into type parameter

Dominik Werder
2021-05-21 10:47:42 +02:00
parent 61564f502e
commit a959250af9
3 changed files with 279 additions and 29 deletions

View File

@@ -1,19 +1,20 @@
-use crate::agg::binnedt::{AggregatableTdim, AggregatorTdim};
+use crate::agg::binnedt::{AggregatableTdim, AggregatorTdim, IntoBinnedT};
 use crate::agg::scalarbinbatch::{MinMaxAvgScalarBinBatch, MinMaxAvgScalarBinBatchAggregator};
 use crate::agg::streams::{Collectable, Collected, StreamItem, ToJsonResult};
 use crate::agg::AggregatableXdim1Bin;
-use crate::binned::scalar::binned_scalar_stream;
+use crate::binned::scalar::{adapter_to_stream_item, binned_stream};
 use crate::binnedstream::{BinnedScalarStreamFromPreBinnedPatches, BinnedStream};
-use crate::cache::BinnedQuery;
+use crate::cache::{BinnedQuery, MergedFromRemotes};
 use crate::channelconfig::{extract_matching_config_entry, read_local_config};
 use crate::frame::makeframe::make_frame;
+use crate::raw::EventsQuery;
 use bytes::Bytes;
 use chrono::{TimeZone, Utc};
 use err::Error;
 use futures_core::Stream;
 use futures_util::StreamExt;
 use netpod::log::*;
-use netpod::{AggKind, BinnedRange, NodeConfigCached};
+use netpod::{AggKind, BinnedRange, NodeConfigCached, PerfOpts, PreBinnedPatchIterator, PreBinnedPatchRange};
 use num_traits::Zero;
 use serde::{Deserialize, Serialize, Serializer};
 use std::pin::Pin;
@@ -233,12 +234,12 @@ pub async fn binned_bytes_for_http(
     info!("binned_bytes_for_http found config entry {:?}", entry);
     match query.agg_kind() {
         AggKind::DimXBins1 => {
-            let res = binned_scalar_stream(node_config, query).await?;
+            let res = binned_stream(node_config, query, BinnedStreamKindScalar::new()).await?;
             let ret = BinnedBytesForHttpStream::new(res.binned_stream);
             Ok(Box::pin(ret))
         }
         AggKind::DimXBinsN(_) => {
-            let res = binned_scalar_stream(node_config, query).await?;
+            let res = binned_stream(node_config, query, BinnedStreamKindScalar::new()).await?;
             let ret = BinnedBytesForHttpStream::new(res.binned_stream);
             Ok(Box::pin(ret))
         }
@@ -387,8 +388,78 @@ pub async fn binned_json(node_config: &NodeConfigCached, query: &BinnedQuery) ->
     // TODO create the matching stream based on AggKind and ConfigEntry.
-    let t = binned_scalar_stream(node_config, query).await?;
+    let t = binned_stream(node_config, query, BinnedStreamKindScalar::new()).await?;
     let collected = collect_all(t.binned_stream, t.range.count as u32).await?;
-    let ret = collected.to_json_result();
+    let ret = collected.to_json_result()?;
     Ok(serde_json::to_value(ret)?)
 }
+
+pub trait BinnedStreamKind {
+    type BinnedStreamItem: MakeBytesFrame;
+    type BinnedStreamType: Stream + Send + 'static;
+
+    fn new_binned_from_prebinned(
+        query: &BinnedQuery,
+        range: BinnedRange,
+        pre_range: PreBinnedPatchRange,
+        node_config: &NodeConfigCached,
+    ) -> Result<Self::BinnedStreamType, Error>;
+
+    fn new_binned_from_merged(
+        evq: EventsQuery,
+        perf_opts: PerfOpts,
+        range: BinnedRange,
+        node_config: &NodeConfigCached,
+    ) -> Result<Self::BinnedStreamType, Error>;
+}
+
+pub struct BinnedStreamKindScalar {}
+
+pub struct BinnedStreamKindWave {}
+
+impl BinnedStreamKindScalar {
+    pub fn new() -> Self {
+        Self {}
+    }
+}
+
+impl BinnedStreamKindWave {
+    pub fn new() -> Self {
+        Self {}
+    }
+}
+
+impl BinnedStreamKind for BinnedStreamKindScalar {
+    type BinnedStreamItem = Result<StreamItem<BinnedScalarStreamItem>, Error>;
+    type BinnedStreamType = BinnedStream<Self::BinnedStreamItem>;
+
+    fn new_binned_from_prebinned(
+        query: &BinnedQuery,
+        range: BinnedRange,
+        pre_range: PreBinnedPatchRange,
+        node_config: &NodeConfigCached,
+    ) -> Result<Self::BinnedStreamType, Error> {
+        let s = BinnedScalarStreamFromPreBinnedPatches::new(
+            PreBinnedPatchIterator::from_range(pre_range),
+            query.channel().clone(),
+            range.clone(),
+            query.agg_kind().clone(),
+            query.cache_usage().clone(),
+            node_config,
+            query.disk_stats_every().clone(),
+        )?;
+        Ok(BinnedStream::new(Box::pin(s))?)
+    }
+
+    fn new_binned_from_merged(
+        evq: EventsQuery,
+        perf_opts: PerfOpts,
+        range: BinnedRange,
+        node_config: &NodeConfigCached,
+    ) -> Result<Self::BinnedStreamType, Error> {
+        let s = MergedFromRemotes::new(evq, perf_opts, node_config.node_config.cluster.clone())
+            .into_binned_t(range.clone())
+            .map(adapter_to_stream_item);
+        Ok(BinnedStream::new(Box::pin(s))?)
+    }
+}
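
Aside: the shape of this refactor, reduced to a self-contained sketch. The names below are toy stand-ins (not the crate's real types); the point is only that the stream kind becomes a type parameter with associated types, so the driver logic is written once and monomorphized per kind.

// Toy model of the pattern introduced above; `Scalar`/`Wave` stand in for
// BinnedStreamKindScalar/BinnedStreamKindWave, which carry far more detail.
trait StreamKind {
    type Item;
    fn make_items(&self) -> Vec<Self::Item>;
}

struct Scalar;
struct Wave;

impl StreamKind for Scalar {
    type Item = f32;
    fn make_items(&self) -> Vec<f32> {
        vec![1.0, 2.0]
    }
}

impl StreamKind for Wave {
    type Item = Vec<f32>;
    fn make_items(&self) -> Vec<Vec<f32>> {
        vec![vec![1.0, 2.0]]
    }
}

// Written once, monomorphized per kind, like `binned_stream<BK>` below.
fn driver<K: StreamKind>(kind: K) -> Vec<K::Item> {
    kind.make_items()
}

fn main() {
    println!("{:?}", driver(Scalar));
    println!("{:?}", driver(Wave));
}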

View File

@@ -1,14 +1,14 @@
 use crate::agg::binnedt::IntoBinnedT;
 use crate::agg::scalarbinbatch::MinMaxAvgScalarBinBatchStreamItem;
 use crate::agg::streams::StreamItem;
-use crate::binned::{BinnedScalarStreamItem, BinnedStreamRes};
-use crate::binnedstream::{BinnedScalarStreamFromPreBinnedPatches, BinnedStream};
-use crate::cache::{BinnedQuery, MergedFromRemotes};
+use crate::binned::{BinnedScalarStreamItem, BinnedStreamKind, BinnedStreamRes};
+use crate::binnedstream::BinnedStream;
+use crate::cache::BinnedQuery;
 use crate::raw::EventsQuery;
 use err::Error;
 use futures_core::Stream;
 use futures_util::StreamExt;
 use netpod::log::*;
-use netpod::{BinnedRange, NodeConfigCached, PerfOpts, PreBinnedPatchIterator, PreBinnedPatchRange};
+use netpod::{BinnedRange, NodeConfigCached, PerfOpts, PreBinnedPatchRange};

 pub fn adapter_to_stream_item(
     k: Result<StreamItem<MinMaxAvgScalarBinBatchStreamItem>, Error>,
@@ -30,10 +30,14 @@ pub fn adapter_to_stream_item(
     }
 }

-pub async fn binned_scalar_stream(
+pub async fn binned_stream<BK>(
     node_config: &NodeConfigCached,
     query: &BinnedQuery,
-) -> Result<BinnedStreamRes<Result<StreamItem<BinnedScalarStreamItem>, Error>>, Error> {
+    stream_kind: BK,
+) -> Result<BinnedStreamRes<<BK::BinnedStreamType as Stream>::Item>, Error>
+where
+    BK: BinnedStreamKind,
+{
     if query.channel().backend != node_config.node.backend {
         let err = Error::with_msg(format!(
             "backend mismatch node: {} requested: {}",
@@ -57,16 +61,8 @@ pub async fn binned_scalar_stream(
         );
         return Err(Error::with_msg(msg));
     }
-    let s1 = BinnedScalarStreamFromPreBinnedPatches::new(
-        PreBinnedPatchIterator::from_range(pre_range),
-        query.channel().clone(),
-        range.clone(),
-        query.agg_kind().clone(),
-        query.cache_usage().clone(),
-        node_config,
-        query.disk_stats_every().clone(),
-    )?;
-    let s = BinnedStream::new(Box::pin(s1))?;
+    let s = BK::new_binned_from_prebinned(query, range.clone(), pre_range, node_config)?;
+    let s = BinnedStream::new(Box::pin(s))?;
     let ret = BinnedStreamRes {
         binned_stream: s,
         range,
@@ -84,9 +80,7 @@ pub async fn binned_scalar_stream(
         agg_kind: query.agg_kind().clone(),
     };
     // TODO do I need to set up more transformations or binning to deliver the requested data?
-    let s = MergedFromRemotes::new(evq, perf_opts, node_config.node_config.cluster.clone());
-    let s = s.into_binned_t(range.clone());
-    let s = s.map(adapter_to_stream_item);
+    let s = BK::new_binned_from_merged(evq, perf_opts, range.clone(), node_config)?;
     let s = BinnedStream::new(Box::pin(s))?;
     let ret = BinnedStreamRes {
         binned_stream: s,
View File

@@ -3,11 +3,196 @@
<head>
<meta charset="utf-8"/>
<title>Retrieval Documentation</title>
<meta name="keywords" content="PSI, DAQ, Databuffer">
<meta name="author" content="Dominik Werder">
<link rel="shortcut icon" href="about:blank"/>
<link rel="stylesheet" href="style.css"/>
</head>
<body>
<h1>Retrieval 4.0 Documentation</h1>
<p>Some docs to be shown here...</p>
<h2>HTTP API documentation</h2>
<p>This API follows the common convention that adding a key to a JSON object is not considered a breaking change.</p>
<p>Currently available:</p>
<ul>
<li><a href="#query-binned">Query binned data</a></li>
<li><a href="#channel-search-configs">Channel search, with return of configuration information</a></li>
</ul>
<a id="query-binned"></a>
<h2>Query binned data</h2>
<p><strong>Method:</strong> GET</p>
<p><strong>URL:</strong> https://data-api.psi.ch/api/4/binned</p>
<p><strong>Query parameters:</strong></p>
<ul>
<li>channel_backend</li>
<li>channel_name</li>
<li>beg_date</li>
<li>end_date</li>
<li>bin_count</li>
</ul>
<p><strong>Request header:</strong> "Accept" must be "application/json"</p>
<p><strong>Example:</strong></p>
<pre>http://sf-daqbuf-21:8380/api/4/binned?channel_backend=sf-databuffer&channel_name=SLAAR-LSCP4-LAS6891:CH7:1&beg_date=2021-05-21T00:00:00.000Z&end_date=2021-05-21T02:00:00.000Z&bin_count=20</pre>
<p><strong>Result body example</strong> (abbreviated; a full answer is shown in the CURL example below):</p>
<pre>
{
    "counts": [458, 459, 458, ...],
    "ts_bin_edges": [
        "2021-05-21T00:00:00.000Z",
        "2021-05-21T00:05:00.000Z",
        ...
    ]
}
</pre>
<h4>CURL example:</h4>
<pre>
curl -H 'Accept: application/json' 'http://sf-daqbuf-21:8380/api/4/binned?channel_backend=sf-databuffer&channel_name=SLAAR-LSCP4-LAS6891:CH7:1&beg_date=2021-05-21T00:00:00.000Z&end_date=2021-05-21T02:00:00.000Z&bin_count=20'
</pre>
<p>Answer:</p>
<pre>
{
"counts": [
458,
459,
458,
459,
459,
458,
459,
458,
459,
459,
458,
459,
458,
459,
458,
459,
459,
458,
459,
458,
459,
458,
459,
459
],
"ts_bin_edges": [
"2021-05-21T00:00:00.000Z",
"2021-05-21T00:05:00.000Z",
"2021-05-21T00:10:00.000Z",
"2021-05-21T00:15:00.000Z",
"2021-05-21T00:20:00.000Z",
"2021-05-21T00:25:00.000Z",
"2021-05-21T00:30:00.000Z",
"2021-05-21T00:35:00.000Z",
"2021-05-21T00:40:00.000Z",
"2021-05-21T00:45:00.000Z",
"2021-05-21T00:50:00.000Z",
"2021-05-21T00:55:00.000Z",
"2021-05-21T01:00:00.000Z",
"2021-05-21T01:05:00.000Z",
"2021-05-21T01:10:00.000Z",
"2021-05-21T01:15:00.000Z",
"2021-05-21T01:20:00.000Z",
"2021-05-21T01:25:00.000Z",
"2021-05-21T01:30:00.000Z",
"2021-05-21T01:35:00.000Z",
"2021-05-21T01:40:00.000Z",
"2021-05-21T01:45:00.000Z",
"2021-05-21T01:50:00.000Z",
"2021-05-21T01:55:00.000Z",
"2021-05-21T02:00:00.000Z"
]
}
</pre>
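<p>For client code, a minimal Rust sketch of the same request. This assumes the reqwest and serde_json crates with the "blocking" and "json" features; they are not part of this service, just one way to call it:</p>
<pre>
// Hedged sketch: fetch binned data and print the JSON body.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = reqwest::blocking::Client::new();
    let body: serde_json::Value = client
        .get("https://data-api.psi.ch/api/4/binned")
        .query(&[
            ("channel_backend", "sf-databuffer"),
            ("channel_name", "SLAAR-LSCP4-LAS6891:CH7:1"),
            ("beg_date", "2021-05-21T00:00:00.000Z"),
            ("end_date", "2021-05-21T02:00:00.000Z"),
            ("bin_count", "20"),
        ])
        .header("Accept", "application/json")
        .send()?
        .error_for_status()?
        .json()?;
    println!("{}", serde_json::to_string_pretty(&body)?);
    Ok(())
}
</pre>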
<a id="channel-search-configs"></a>
<h2>Channel Search, with return of configuration information</h2>
<p><strong>Method:</strong> POST</p>
<p><strong>URL:</strong> https://data-api.psi.ch/api/1/channels/config</p>
<p><strong>Request body:</strong> JSON with search parameters</p>
<p><strong>Request body outline:</strong></p>
<pre>
{
"regex": "[Optional: Regular expression to search in channel name]",
"sourceRegex": "[Optional: Search in sourcename of the channel]",
"descriptionRegex": "[Optional: Search in the channel's description]",
"backends": ["gls-archive", "hipa-archive", "sf-databuffer"]
}
</pre>
<p><strong>Result body example:</strong></p>
<p>Assuming that "hipa-archive" is unavailable:</p>
<pre>
[
{
"backend": "sf-databuffer",
"channels": [
{
"backend": "sf-databuffer",
"description": "",
"name": "SARES20-LSCP9:CH0:2",
"shape": [
512
],
"source": "tcp://SARES20-CVME-01:9999",
"type": "Float32",
"unit": ""
},
{
"backend": "sf-databuffer",
"description": "",
"name": "SARES20-LSCP9:CH0:1",
"shape": [
512
],
"source": "tcp://SARES20-CVME-01:9999",
"type": "Int16",
"unit": ""
}
]
},
{
"backend": "hipa-archive",
"channels": [],
"error": {
"code": "Error" // can be: "Error" | "Timeout" (more to be added in the future)
}
}
]
</pre>
<p>Notes:</p>
<p>The search constraints are AND'ed together.</p>
<p>If a backend responds with an error, this is indicated by the "error" key in that backend's entry (see the example above).</p>
<h4>CURL example:</h4>
<pre>
QUERY='{ "regex": "LSCP9:CH0", "backends": ["sf-databuffer"] }'
curl -H 'Content-Type: application/json' -H 'Accept: application/json' -d "$QUERY" https://data-api.psi.ch/api/1/channels/config
</pre>
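<p>The same search from Rust, as a hedged sketch (again assuming reqwest with the "blocking" and "json" features, and serde_json):</p>
<pre>
// Hedged sketch: POST the search parameters and print the matching configs.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let query = serde_json::json!({
        "regex": "LSCP9:CH0",
        "backends": ["sf-databuffer"]
    });
    let results: serde_json::Value = reqwest::blocking::Client::new()
        .post("https://data-api.psi.ch/api/1/channels/config")
        .header("Accept", "application/json")
        .json(&query)
        .send()?
        .error_for_status()?
        .json()?;
    println!("{}", serde_json::to_string_pretty(&results)?);
    Ok(())
}
</pre>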
<h2>Feedback and comments</h2>
<p>Feedback is very much appreciated:</p>
<p>dominik.werder@psi.ch</p>
<p>or assign me a JIRA ticket.</p>
<div id="footer"></div>
</body>
</html>