This commit is contained in:
Dominik Werder
2023-04-25 09:08:14 +02:00
parent 95af6c359c
commit 498ff3612b
36 changed files with 1500 additions and 260 deletions

View File

@@ -1,5 +1,5 @@
#[cfg(test)]
mod api1_parse;
mod data_api_python;
use crate::nodes::require_test_hosts_running;
use crate::test::api1::api1_parse::Api1Frame;
@@ -8,10 +8,14 @@ use futures_util::Future;
use httpclient::http_post;
use httpret::api1::Api1ScalarType;
use netpod::log::*;
use netpod::query::api1::{Api1Query, Api1Range, ChannelTuple};
use netpod::query::api1::Api1Query;
use netpod::query::api1::Api1Range;
use netpod::query::api1::ChannelTuple;
use std::fmt;
use url::Url;
const TEST_BACKEND: &str = "testbackend-00";
fn testrun<T, F>(fut: F) -> Result<T, Error>
where
F: Future<Output = Result<T, Error>>,
@@ -52,7 +56,7 @@ fn events_f64_plain() -> Result<(), Error> {
let accept = "application/octet-stream";
let range = Api1Range::new("1970-01-01T00:00:00Z".try_into()?, "1970-01-01T00:01:00Z".try_into()?)?;
// TODO the channel list needs to get pre-processed to check for backend prefix!
let ch = ChannelTuple::new("test-disk-databuffer".into(), "scalar-i32-be".into());
let ch = ChannelTuple::new(TEST_BACKEND.into(), "scalar-i32-be".into());
let qu = Api1Query::new(range, vec![ch]);
let body = serde_json::to_string(&qu)?;
let buf = http_post(url, accept, body.into()).await?;

View File

@@ -0,0 +1,83 @@
use crate::err::ErrConv;
use crate::nodes::require_test_hosts_running;
use chrono::Utc;
use err::Error;
use http::StatusCode;
use hyper::Body;
use netpod::log::*;
use netpod::range::evrange::NanoRange;
use netpod::Channel;
use netpod::Cluster;
use netpod::HostPort;
use netpod::APP_JSON;
use url::Url;
const TEST_BACKEND: &str = "testbackend-00";
/// Fetch the complete response body for an api/1 (python-client style) query.
///
/// Non-streaming: the entire payload is buffered in memory, so this is only
/// meant for basic test cases whose data fits comfortably in RAM.
async fn fetch_data_api_python_blob(
    channels: Vec<Channel>,
    beg_date: &str,
    end_date: &str,
    cluster: &Cluster,
) -> Result<Vec<u8>, Error> {
    let ts_start = Utc::now();
    // Queries always go to the first node of the test cluster.
    let node0 = &cluster.nodes[0];
    let beg_date = beg_date.parse()?;
    let end_date = end_date.parse()?;
    let _range = NanoRange::from_date_time(beg_date, end_date);
    // Re-format the parsed timestamps into the exact shape the api/1 endpoint expects.
    let start_date = beg_date.format("%Y-%m-%dT%H:%M:%S%.3fZ").to_string();
    let end_date = end_date.format("%Y-%m-%dT%H:%M:%S%.3fZ").to_string();
    let channel_names: Vec<_> = channels.iter().map(|x| x.name()).collect();
    let query = serde_json::json!({
        "range": {
            "type": "date",
            "startDate": start_date,
            "endDate": end_date,
        },
        "channels": channel_names,
    });
    let query_str = serde_json::to_string_pretty(&query)?;
    let hp = HostPort::from_node(node0);
    let url = Url::parse(&format!("http://{}:{}/api/1/query", hp.host, hp.port))?;
    info!("http get {}", url);
    let req = hyper::Request::builder()
        .method(http::Method::POST)
        .uri(url.to_string())
        .header(http::header::CONTENT_TYPE, APP_JSON)
        //.header(http::header::ACCEPT, APP_JSON)
        .body(Body::from(query_str))
        .ec()?;
    let client = hyper::Client::new();
    let res = client.request(req).await.ec()?;
    if res.status() != StatusCode::OK {
        error!("client response {:?}", res);
        return Err(Error::with_msg_no_trace(format!("bad result {res:?}")));
    }
    // Collect the whole body at once; fine for the small test payloads this serves.
    let buf = hyper::body::to_bytes(res.into_body()).await.ec()?;
    let ts_end = chrono::Utc::now();
    let ms = ts_end.signed_duration_since(ts_start).num_milliseconds() as u64;
    // TODO add timeout
    info!("time {} ms body len {}", ms, buf.len());
    Ok(buf.into())
}
/// Smoke test: fetch a scalar i32 test channel through the api/1 blob fetch
/// against the locally running test hosts. Only checks that the request
/// completes without error.
#[test]
fn api3_hdf_dim0_00() -> Result<(), Error> {
    let fut = async {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let channel = Channel {
            backend: TEST_BACKEND.into(),
            name: "test-gen-i32-dim0-v00".into(),
            series: None,
        };
        // Response body is intentionally unused; this test only exercises the round trip.
        let _jsv = fetch_data_api_python_blob(
            vec![channel],
            "1970-01-01T00:20:04.000Z",
            "1970-01-01T00:21:10.000Z",
            cluster,
        )
        .await?;
        Ok(())
    };
    taskrun::run(fut)
}

View File

@@ -19,6 +19,8 @@ use query::api4::binned::BinnedQuery;
use serde_json::Value as JsonValue;
use url::Url;
const TEST_BACKEND: &str = "testbackend-00";
pub fn make_query<S: Into<String>>(
name: S,
beg_date: &str,
@@ -26,7 +28,7 @@ pub fn make_query<S: Into<String>>(
bin_count_min: u32,
) -> Result<BinnedQuery, Error> {
let channel = Channel {
backend: "test-inmem".into(),
backend: TEST_BACKEND.into(),
name: name.into(),
series: None,
};
@@ -44,8 +46,9 @@ fn binned_d0_json_00() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = get_binned_json(
Channel {
backend: "test-disk-databuffer".into(),
name: "scalar-i32-be".into(),
backend: TEST_BACKEND.into(),
//name: "scalar-i32-be".into(),
name: "test-gen-i32-dim0-v01".into(),
series: None,
},
"1970-01-01T00:20:04.000Z",
@@ -55,19 +58,36 @@ fn binned_d0_json_00() -> Result<(), Error> {
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1200);
assert_eq!(res.range_final(), true);
assert_eq!(res.len(), 8);
assert_eq!(res.ts1_off_ms()[0], 0);
assert_eq!(res.ts2_off_ms()[0], 5000);
assert_eq!(res.counts()[0], 5);
assert_eq!(res.counts()[1], 10);
assert_eq!(res.counts()[7], 7);
assert_eq!(res.mins()[0], 2405);
assert_eq!(res.maxs()[0], 2409);
assert_eq!(res.mins()[1], 2410);
assert_eq!(res.maxs()[1], 2419);
assert_eq!(res.ts_anchor_sec(), 1200);
{
let a1: Vec<_> = res.ts1_off_ms().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..8).into_iter().map(|x| 5000 * x).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.ts2_off_ms().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..8).into_iter().map(|x| 5000 + 5000 * x).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.counts().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..8).into_iter().map(|_| 10).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.mins().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..8).into_iter().map(|x| 2400 + 10 * x).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.maxs().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..8).into_iter().map(|x| 2409 + 10 * x).collect();
assert_eq!(a1, a2);
}
Ok(())
};
taskrun::run(fut)
@@ -80,8 +100,8 @@ fn binned_d0_json_01() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = get_binned_json(
Channel {
backend: "test-disk-databuffer".into(),
name: "scalar-i32-be".into(),
backend: TEST_BACKEND.into(),
name: "test-gen-i32-dim0-v01".into(),
series: None,
},
"1970-01-01T00:20:10.000Z",
@@ -91,11 +111,37 @@ fn binned_d0_json_01() -> Result<(), Error> {
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1200);
assert_eq!(res.len(), 13);
assert_eq!(res.range_final(), true);
assert_eq!(res.len(), 13);
assert_eq!(res.ts_anchor_sec(), 1200);
let nb = res.len();
{
let a1: Vec<_> = res.ts1_off_ms().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|x| 300 * 1000 * x).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.ts2_off_ms().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|x| 300 * 1000 * (1 + x)).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.counts().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|_| 600).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.mins().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|x| 2400 + 600 * x).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.maxs().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|x| 2999 + 600 * x).collect();
assert_eq!(a1, a2);
}
Ok(())
};
taskrun::run(fut)
@@ -108,8 +154,8 @@ fn binned_d0_json_02() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = get_binned_json(
Channel {
backend: "test-disk-databuffer".into(),
name: "wave-f64-be-n21".into(),
backend: TEST_BACKEND.into(),
name: "test-gen-f64-dim1-v00".into(),
series: None,
},
"1970-01-01T00:20:10.000Z",
@@ -119,7 +165,7 @@ fn binned_d0_json_02() -> Result<(), Error> {
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: items_2::binsdim0::BinsDim0CollectedResult<f64> = serde_json::from_value(jsv)?;
let res: BinsDim0CollectedResult<f64> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1200);
assert_eq!(res.len(), 13);
@@ -136,7 +182,7 @@ fn binned_d0_json_03() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = get_binned_json(
Channel {
backend: "test-disk-databuffer".into(),
backend: TEST_BACKEND.into(),
name: "wave-f64-be-n21".into(),
series: None,
},
@@ -148,7 +194,7 @@ fn binned_d0_json_03() -> Result<(), Error> {
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: items_2::binsdim0::BinsDim0CollectedResult<f64> = serde_json::from_value(jsv)?;
let res: BinsDim0CollectedResult<f64> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1200);
assert_eq!(res.len(), 4);
@@ -168,7 +214,7 @@ fn binned_d0_json_04() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = get_binned_json(
Channel {
backend: "test-disk-databuffer".into(),
backend: TEST_BACKEND.into(),
name: "const-regular-scalar-i32-be".into(),
series: None,
},
@@ -180,7 +226,7 @@ fn binned_d0_json_04() -> Result<(), Error> {
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1200);
assert_eq!(res.len(), 17);
@@ -199,7 +245,7 @@ fn binned_d0_json_05() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = get_binned_json(
Channel {
backend: "test-disk-databuffer".into(),
backend: TEST_BACKEND.into(),
name: "const-regular-scalar-i32-be".into(),
series: None,
},
@@ -211,7 +257,7 @@ fn binned_d0_json_05() -> Result<(), Error> {
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 0);
// TODO make disk parse faster and avoid timeout
@@ -230,7 +276,7 @@ fn binned_d0_json_06() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = get_binned_json(
Channel {
backend: "test-disk-databuffer".into(),
backend: TEST_BACKEND.into(),
name: "const-regular-scalar-i32-be".into(),
series: None,
},
@@ -242,7 +288,7 @@ fn binned_d0_json_06() -> Result<(), Error> {
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1210);
assert_eq!(res.len(), 20);
@@ -260,7 +306,7 @@ fn binned_d0_json_07() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = get_binned_json(
Channel {
backend: "test-disk-databuffer".into(),
backend: TEST_BACKEND.into(),
name: "const-regular-scalar-i32-be".into(),
series: None,
},
@@ -272,7 +318,7 @@ fn binned_d0_json_07() -> Result<(), Error> {
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1200);
assert_eq!(res.len(), 11);
@@ -300,6 +346,7 @@ fn binned_inmem_d0_json_00() -> Result<(), Error> {
assert_eq!(res.ts_anchor_sec(), 1200);
assert_eq!(res.len(), 14);
assert_eq!(res.range_final(), true);
assert_eq!(res.timed_out(), false);
{
let v1: Vec<_> = res.counts().iter().map(|x| *x).collect();
assert_eq!(&v1, &[5; 14]);
@@ -326,9 +373,10 @@ fn binned_inmem_d0_json_00() -> Result<(), Error> {
}
{
let v1: Vec<_> = res.avgs().iter().map(|x| *x).collect();
let v2: Vec<_> = (0..14).into_iter().map(|x| 1204 + 5 * x).collect();
//assert_eq!(f32_cmp_near(res.avgs()[0], 42.0), true);
//assert_eq!(&v1, &v2);
let v2: Vec<_> = (0..14).into_iter().map(|x| 1202. + 5. * x as f32).collect();
for (a, b) in v1.into_iter().zip(v2.into_iter()) {
assert_eq!(f32_cmp_near(a, b), true);
}
}
Ok(())
};

View File

@@ -18,6 +18,8 @@ use query::api4::events::PlainEventsQuery;
use serde_json::Value as JsonValue;
use url::Url;
const BACKEND: &str = "testbackend-00";
#[test]
fn events_plain_json_00() -> Result<(), Error> {
let fut = async {
@@ -25,7 +27,7 @@ fn events_plain_json_00() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = events_plain_json(
Channel {
backend: "test-inmem".into(),
backend: BACKEND.into(),
name: "inmem-d0-i32".into(),
series: None,
},
@@ -55,7 +57,7 @@ fn events_plain_json_01() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = events_plain_json(
Channel {
backend: "test-disk-databuffer".into(),
backend: BACKEND.into(),
name: "scalar-i32-be".into(),
series: None,
},
@@ -83,7 +85,7 @@ fn events_plain_json_02_range_incomplete() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = events_plain_json(
Channel {
backend: "test-disk-databuffer".into(),
backend: BACKEND.into(),
name: "scalar-i32-be".into(),
series: None,
},

View File

@@ -9,9 +9,11 @@ use netpod::range::evrange::NanoRange;
use netpod::Channel;
use query::api4::events::PlainEventsQuery;
const BACKEND: &str = "testbackend-00";
pub fn make_query<S: Into<String>>(name: S, beg_date: &str, end_date: &str) -> Result<PlainEventsQuery, Error> {
let channel = Channel {
backend: "test-inmem".into(),
backend: BACKEND.into(),
name: name.into(),
series: None,
};

View File

@@ -27,6 +27,8 @@ use streams::frames::inmem::InMemoryFrameAsyncReadStream;
use tokio::io::AsyncRead;
use url::Url;
const TEST_BACKEND: &str = "testbackend-00";
#[test]
fn get_binned_binary() {
taskrun::run(get_binned_binary_inner()).unwrap();
@@ -105,7 +107,7 @@ where
let node0 = &cluster.nodes[0];
let beg_date = beg_date.parse()?;
let end_date = end_date.parse()?;
let channel_backend = "testbackend";
let channel_backend = TEST_BACKEND;
let perf_opts = PerfOpts::default();
let channel = Channel {
backend: channel_backend.into(),

View File

@@ -27,10 +27,12 @@ use streams::frames::inmem::InMemoryFrameAsyncReadStream;
use tokio::io::AsyncRead;
use url::Url;
const TEST_BACKEND: &str = "testbackend-00";
fn ch_adhoc(name: &str) -> Channel {
Channel {
series: None,
backend: "test-disk-databuffer".into(),
backend: TEST_BACKEND.into(),
name: name.into(),
}
}
@@ -38,7 +40,7 @@ fn ch_adhoc(name: &str) -> Channel {
pub fn ch_gen(name: &str) -> Channel {
Channel {
series: None,
backend: "test-disk-databuffer".into(),
backend: TEST_BACKEND.into(),
name: name.into(),
}
}
@@ -78,7 +80,7 @@ async fn get_plain_events_binary(
let node0 = &cluster.nodes[0];
let beg_date: DateTime<Utc> = beg_date.parse()?;
let end_date: DateTime<Utc> = end_date.parse()?;
let channel_backend = "testbackend";
let channel_backend = TEST_BACKEND;
let perf_opts = PerfOpts::default();
let channel = Channel {
backend: channel_backend.into(),

View File

@@ -16,6 +16,8 @@ use query::api4::binned::BinnedQuery;
use std::time::Duration;
use url::Url;
const TEST_BACKEND: &str = "testbackend-00";
#[test]
fn time_weighted_json_03() -> Result<(), Error> {
async fn inner() -> Result<(), Error> {
@@ -103,7 +105,7 @@ async fn get_json_common(
let node0 = &cluster.nodes[0];
let beg_date: DateTime<Utc> = beg_date.parse()?;
let end_date: DateTime<Utc> = end_date.parse()?;
let channel_backend = "testbackend";
let channel_backend = TEST_BACKEND;
let channel = Channel {
backend: channel_backend.into(),
name: channel_name.into(),