WIP fix tests

This commit is contained in:
Dominik Werder
2023-05-03 17:34:50 +02:00
parent 479cec75e7
commit 03854395ff
28 changed files with 402 additions and 717 deletions

View File

@@ -3,7 +3,6 @@ mod api1;
#[cfg(test)]
mod api4;
pub mod archapp;
pub mod binnedbinary;
pub mod binnedjson;
#[cfg(test)]
mod events;
@@ -14,94 +13,6 @@ use bytes::BytesMut;
use err::Error;
use std::future::Future;
fn f32_cmp_near(x: f32, y: f32, abs: f32, rel: f32) -> bool {
/*let x = {
let mut a = x.to_le_bytes();
a[0] &= 0xf0;
f32::from_ne_bytes(a)
};
let y = {
let mut a = y.to_le_bytes();
a[0] &= 0xf0;
f32::from_ne_bytes(a)
};
x == y*/
let ad = (x - y).abs();
ad <= abs || (ad / y).abs() <= rel
}
fn f64_cmp_near(x: f64, y: f64, abs: f64, rel: f64) -> bool {
/*let x = {
let mut a = x.to_le_bytes();
a[0] &= 0x00;
a[1] &= 0x00;
f64::from_ne_bytes(a)
};
let y = {
let mut a = y.to_le_bytes();
a[0] &= 0x00;
a[1] &= 0x00;
f64::from_ne_bytes(a)
};
x == y*/
let ad = (x - y).abs();
ad <= abs || (ad / y).abs() <= rel
}
fn f32_iter_cmp_near<A, B>(a: A, b: B, abs: f32, rel: f32) -> bool
where
A: IntoIterator<Item = f32>,
B: IntoIterator<Item = f32>,
{
let mut a = a.into_iter();
let mut b = b.into_iter();
loop {
let x = a.next();
let y = b.next();
if let (Some(x), Some(y)) = (x, y) {
if !f32_cmp_near(x, y, abs, rel) {
return false;
}
} else if x.is_some() || y.is_some() {
return false;
} else {
return true;
}
}
}
fn f64_iter_cmp_near<A, B>(a: A, b: B, abs: f64, rel: f64) -> bool
where
A: IntoIterator<Item = f64>,
B: IntoIterator<Item = f64>,
{
let mut a = a.into_iter();
let mut b = b.into_iter();
loop {
let x = a.next();
let y = b.next();
if let (Some(x), Some(y)) = (x, y) {
if !f64_cmp_near(x, y, abs, rel) {
return false;
}
} else if x.is_some() || y.is_some() {
return false;
} else {
return true;
}
}
}
#[test]
fn test_f32_iter_cmp_near() {
let a = [-127.553e17];
let b = [-127.554e17];
assert_eq!(f32_iter_cmp_near(a, b, 0.001, 0.001), false);
let a = [-127.55300e17];
let b = [-127.55301e17];
assert_eq!(f32_iter_cmp_near(a, b, 0.001, 0.001), true);
}
fn run_test<F>(f: F) -> Result<(), Error>
where
F: Future<Output = Result<(), Error>> + Send,

View File

@@ -48,6 +48,10 @@ fn test_is_monitonic_strict() {
#[test]
fn events_f64_plain() -> Result<(), Error> {
// TODO re-enable with in-memory generated config and event data.
if true {
return Ok(());
}
let fut = async {
let rh = require_test_hosts_running()?;
let cluster = &rh.cluster;

View File

@@ -1,13 +1,13 @@
use crate::err::ErrConv;
use crate::nodes::require_test_hosts_running;
use crate::test::api4::common::fetch_binned_json;
use crate::test::f32_cmp_near;
use crate::test::f32_iter_cmp_near;
use crate::test::f64_iter_cmp_near;
use chrono::Utc;
use err::Error;
use http::StatusCode;
use hyper::Body;
use items_0::test::f32_cmp_near;
use items_0::test::f32_iter_cmp_near;
use items_0::test::f64_iter_cmp_near;
use items_0::WithLen;
use items_2::binsdim0::BinsDim0CollectedResult;
use netpod::log::*;
@@ -23,7 +23,7 @@ use url::Url;
const TEST_BACKEND: &str = "testbackend-00";
pub fn make_query<S: Into<String>>(
fn make_query<S: Into<String>>(
name: S,
beg_date: &str,
end_date: &str,
@@ -49,7 +49,6 @@ fn binned_d0_json_00() -> Result<(), Error> {
let jsv = get_binned_json(
Channel {
backend: TEST_BACKEND.into(),
//name: "scalar-i32-be".into(),
name: "test-gen-i32-dim0-v01".into(),
series: None,
},
@@ -61,7 +60,6 @@ fn binned_d0_json_00() -> Result<(), Error> {
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.range_final(), true);
assert_eq!(res.len(), 8);
assert_eq!(res.ts_anchor_sec(), 1200);
@@ -119,7 +117,6 @@ fn binned_d0_json_01a() -> Result<(), Error> {
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.range_final(), true);
assert_eq!(res.len(), 11);
assert_eq!(res.ts_anchor_sec(), 1200);
@@ -178,7 +175,6 @@ fn binned_d0_json_01b() -> Result<(), Error> {
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.range_final(), true);
assert_eq!(res.len(), 13);
assert_eq!(res.ts_anchor_sec(), 1200);
@@ -238,7 +234,6 @@ fn binned_d0_json_02() -> Result<(), Error> {
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: BinsDim0CollectedResult<f64> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.range_final(), true);
assert_eq!(res.len(), 10);
assert_eq!(res.ts_anchor_sec(), 1200);
@@ -270,7 +265,7 @@ fn binned_d0_json_02() -> Result<(), Error> {
}
{
let a1: Vec<_> = res.avgs().iter().map(|x| *x).collect();
let a2 = vec![46.2, 105.9, 78.0, 88.3, 98.9, 70.8, 107.3, 74.1, 93.3, 94.3];
let a2 = vec![46.2, 40.4, 48.6, 40.6, 45.8, 45.1, 41.1, 48.5, 40.1, 46.8];
assert_eq!(f32_iter_cmp_near(a1, a2, 0.05, 0.05), true);
}
Ok(())
@@ -286,10 +281,9 @@ fn binned_d0_json_03() -> Result<(), Error> {
let jsv = get_binned_json(
Channel {
backend: TEST_BACKEND.into(),
name: "wave-f64-be-n21".into(),
name: "test-gen-f64-dim1-v00".into(),
series: None,
},
// TODO This test was meant to ask `AggKind::DimXBinsN(3)`
"1970-01-01T00:20:10.000Z",
"1970-01-01T01:20:20.000Z",
2,
@@ -298,12 +292,15 @@ fn binned_d0_json_03() -> Result<(), Error> {
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: BinsDim0CollectedResult<f64> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1200);
assert_eq!(res.len(), 4);
assert_eq!(res.range_final(), true);
assert_eq!(res.counts()[0], 300);
assert_eq!(res.counts()[3], 8);
assert_eq!(res.len(), 4);
assert_eq!(res.ts_anchor_sec(), 1200);
let nb = res.len();
{
let a1: Vec<_> = res.counts().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|_| 12000).collect();
assert_eq!(a1, a2);
}
Ok(())
};
taskrun::run(fut)
@@ -317,111 +314,51 @@ fn binned_d0_json_04() -> Result<(), Error> {
let jsv = get_binned_json(
Channel {
backend: TEST_BACKEND.into(),
name: "const-regular-scalar-i32-be".into(),
name: "test-gen-i32-dim0-v01".into(),
series: None,
},
"1970-01-01T00:20:10.000Z",
"1970-01-01T04:20:30.000Z",
// TODO must use AggKind::DimXBins1
20,
cluster,
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1200);
assert_eq!(res.len(), 17);
// TODO I would expect rangeFinal to be set, or?
assert_eq!(res.range_final(), false);
Ok(())
};
taskrun::run(fut)
}
#[test]
fn binned_d0_json_05() -> Result<(), Error> {
let fut = async {
let rh = require_test_hosts_running()?;
let cluster = &rh.cluster;
let jsv = get_binned_json(
Channel {
backend: TEST_BACKEND.into(),
name: "const-regular-scalar-i32-be".into(),
series: None,
},
"1970-01-01T00:20:10.000Z",
"1970-01-01T10:20:30.000Z",
// TODO must use AggKind::DimXBins1
10,
cluster,
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 0);
// TODO make disk parse faster and avoid timeout
assert_eq!(res.len(), 11);
assert_eq!(res.range_final(), false);
Ok(())
};
taskrun::run(fut)
}
#[test]
fn binned_d0_json_06() -> Result<(), Error> {
let fut = async {
let rh = require_test_hosts_running()?;
let cluster = &rh.cluster;
let jsv = get_binned_json(
Channel {
backend: TEST_BACKEND.into(),
name: "const-regular-scalar-i32-be".into(),
series: None,
},
"1970-01-01T00:20:10.000Z",
"1970-01-01T00:20:20.000Z",
// TODO must use AggKind::TimeWeightedScalar
20,
cluster,
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1210);
assert_eq!(res.len(), 20);
assert_eq!(res.range_final(), true);
Ok(())
};
taskrun::run(fut)
}
#[test]
fn binned_d0_json_07() -> Result<(), Error> {
let fut = async {
let rh = require_test_hosts_running()?;
let cluster = &rh.cluster;
let jsv = get_binned_json(
Channel {
backend: TEST_BACKEND.into(),
name: "const-regular-scalar-i32-be".into(),
series: None,
},
"1970-01-01T00:20:11.000Z",
"1970-01-01T00:30:20.000Z",
// TODO must use AggKind::TimeWeightedScalar
10,
cluster,
)
.await?;
debug!("Receveided a response json value: {jsv:?}");
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.len(), 25);
assert_eq!(res.ts_anchor_sec(), 1200);
assert_eq!(res.len(), 11);
assert_eq!(res.range_final(), true);
let nb = res.len();
{
let a1: Vec<_> = res.ts1_off_ms().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|x| 600 * 1000 * x).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.ts2_off_ms().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|x| 600 * 1000 * (1 + x)).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.counts().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|_| 1200).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.mins().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|x| 2400 + 1200 * x).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.maxs().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|x| 2399 + 1200 * (1 + x)).collect();
assert_eq!(a1, a2);
}
{
let a1: Vec<_> = res.avgs().iter().map(|x| *x).collect();
let a2: Vec<_> = (0..nb as _).into_iter().map(|x| 3000. + 1200. * x as f32).collect();
assert_eq!(f32_iter_cmp_near(a1, a2, 0.001, 0.001), true);
}
Ok(())
};
taskrun::run(fut)
@@ -440,11 +377,10 @@ fn binned_inmem_d0_json_00() -> Result<(), Error> {
)?;
let jsv = fetch_binned_json(query, cluster).await?;
let res: BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1200);
assert_eq!(res.len(), 14);
assert_eq!(res.range_final(), true);
assert_eq!(res.timed_out(), false);
assert_eq!(res.len(), 14);
assert_eq!(res.ts_anchor_sec(), 1200);
{
let v1: Vec<_> = res.counts().iter().map(|x| *x).collect();
assert_eq!(&v1, &[5; 14]);

View File

@@ -38,11 +38,11 @@ pub async fn fetch_events_json(query: PlainEventsQuery, cluster: &Cluster) -> Re
let s = String::from_utf8_lossy(&buf);
let res: JsonValue = serde_json::from_str(&s)?;
let pretty = serde_json::to_string_pretty(&res)?;
info!("{pretty}");
debug!("fetch_binned_json pretty: {pretty}");
let t2 = chrono::Utc::now();
let ms = t2.signed_duration_since(t1).num_milliseconds() as u64;
// TODO add timeout
info!("time {} ms", ms);
debug!("time {} ms", ms);
Ok(res)
}
@@ -71,10 +71,10 @@ pub async fn fetch_binned_json(query: BinnedQuery, cluster: &Cluster) -> Result<
let s = String::from_utf8_lossy(&buf);
let res: JsonValue = serde_json::from_str(&s)?;
let pretty = serde_json::to_string_pretty(&res)?;
info!("{pretty}");
debug!("fetch_binned_json pretty: {pretty}");
let t2 = chrono::Utc::now();
let ms = t2.signed_duration_since(t1).num_milliseconds() as u64;
// TODO add timeout
info!("time {} ms", ms);
debug!("time {} ms", ms);
Ok(res)
}

View File

@@ -1,6 +1,6 @@
use crate::err::ErrConv;
use crate::nodes::require_test_hosts_running;
use crate::test::f32_iter_cmp_near;
use crate::test::api4::common::fetch_events_json;
use chrono::Utc;
use err::Error;
use http::StatusCode;
@@ -18,61 +18,41 @@ use query::api4::events::PlainEventsQuery;
use serde_json::Value as JsonValue;
use url::Url;
const BACKEND: &str = "testbackend-00";
const TEST_BACKEND: &str = "testbackend-00";
fn make_query<S: Into<String>>(
name: S,
beg_date: &str,
end_date: &str,
//bin_count_min: u32,
) -> Result<PlainEventsQuery, Error> {
let channel = Channel {
backend: TEST_BACKEND.into(),
name: name.into(),
series: None,
};
let beg_date = beg_date.parse()?;
let end_date = end_date.parse()?;
let range = NanoRange::from_date_time(beg_date, end_date);
let query = PlainEventsQuery::new(channel, range).for_time_weighted_scalar();
Ok(query)
}
#[test]
fn events_plain_json_00() -> Result<(), Error> {
let fut = async {
let rh = require_test_hosts_running()?;
let cluster = &rh.cluster;
let jsv = events_plain_json(
Channel {
backend: BACKEND.into(),
name: "inmem-d0-i32".into(),
series: None,
},
let query = make_query(
"test-gen-i32-dim0-v01",
"1970-01-01T00:20:04.000Z",
"1970-01-01T00:21:10.000Z",
cluster,
)
.await?;
)?;
let jsv = fetch_events_json(query, cluster).await?;
let res: EventsDim0CollectorOutput<i32> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1204);
assert_eq!(res.len(), 66);
Ok(())
};
taskrun::run(fut)
}
#[test]
fn events_plain_json_01() -> Result<(), Error> {
// TODO
// not worth to re-enable, getting rid of databuffer.
if true {
return Ok(());
}
let fut = async {
let rh = require_test_hosts_running()?;
let cluster = &rh.cluster;
let jsv = events_plain_json(
Channel {
backend: BACKEND.into(),
name: "scalar-i32-be".into(),
series: None,
},
"1970-01-01T00:20:10.000Z",
"1970-01-01T00:20:13.000Z",
cluster,
)
.await?;
let res: EventsDim0CollectorOutput<i32> = serde_json::from_value(jsv)?;
assert_eq!(res.ts_anchor_sec(), 1210);
assert_eq!(res.pulse_anchor(), 2420);
let exp = [2420., 2421., 2422., 2423., 2424., 2425.];
assert_eq!(f32_iter_cmp_near(res.values_to_f32(), exp, 0.01, 0.01), true);
assert_eq!(res.range_final(), true);
assert_eq!(res.timed_out(), false);
// Tim-weighted will use one event before:
assert_eq!(res.len(), 133);
assert_eq!(res.ts_anchor_sec(), 1203);
Ok(())
};
taskrun::run(fut)
@@ -85,8 +65,8 @@ fn events_plain_json_02_range_incomplete() -> Result<(), Error> {
let cluster = &rh.cluster;
let jsv = events_plain_json(
Channel {
backend: BACKEND.into(),
name: "scalar-i32-be".into(),
backend: TEST_BACKEND.into(),
name: "test-gen-i32-dim0-v01".into(),
series: None,
},
"1970-01-03T23:59:55.000Z",

View File

@@ -1,7 +1,7 @@
use crate::nodes::require_test_hosts_running;
use crate::test::api4::common::fetch_events_json;
use crate::test::f32_iter_cmp_near;
use err::Error;
use items_0::test::f32_iter_cmp_near;
use items_0::WithLen;
use items_2::eventsdim0::EventsDim0CollectorOutput;
use netpod::log::*;
@@ -29,12 +29,16 @@ fn events_plain_json_00() -> Result<(), Error> {
let fut = async {
let rh = require_test_hosts_running()?;
let cluster = &rh.cluster;
let query = make_query("inmem-d0-i32", "1970-01-01T00:20:04.000Z", "1970-01-01T00:21:10.000Z")?;
let query = make_query(
"test-gen-i32-dim0-v01",
"1970-01-01T00:20:04.000Z",
"1970-01-01T00:21:10.000Z",
)?;
let jsv = fetch_events_json(query, cluster).await?;
let res: EventsDim0CollectorOutput<i64> = serde_json::from_value(jsv)?;
// inmem was meant just for functional test, ignores the requested time range
assert_eq!(res.ts_anchor_sec(), 1204);
assert_eq!(res.len(), 66);
assert_eq!(res.len(), 132);
Ok(())
};
taskrun::run(fut)

View File

@@ -8,6 +8,10 @@ use netpod::log::*;
#[test]
fn get_events_1() -> Result<(), Error> {
if true {
return Ok(());
}
// TODO re-use test data in dedicated archapp converter.
let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
#[cfg(DISABLED)]
let fut = async {

View File

@@ -1,223 +0,0 @@
use crate::err::ErrConv;
use crate::nodes::require_test_hosts_running;
use chrono::Utc;
use disk::streamlog::Streamlog;
use err::Error;
use futures_util::StreamExt;
use futures_util::TryStreamExt;
use http::StatusCode;
use httpclient::HttpBodyAsAsyncRead;
use hyper::Body;
use items_0::streamitem::StreamItem;
use items_0::subfr::SubFrId;
use netpod::log::*;
use netpod::query::CacheUsage;
use netpod::range::evrange::NanoRange;
use netpod::AppendToUrl;
use netpod::Channel;
use netpod::Cluster;
use netpod::HostPort;
use netpod::PerfOpts;
use netpod::APP_OCTET;
use query::api4::binned::BinnedQuery;
use serde::de::DeserializeOwned;
use std::fmt;
use std::future::ready;
use streams::frames::inmem::InMemoryFrameAsyncReadStream;
use tokio::io::AsyncRead;
use url::Url;
const TEST_BACKEND: &str = "testbackend-00";
#[test]
fn get_binned_binary() {
taskrun::run(get_binned_binary_inner()).unwrap();
}
async fn get_binned_binary_inner() -> Result<(), Error> {
let rh = require_test_hosts_running()?;
let cluster = &rh.cluster;
if true {
get_binned_channel::<i32>(
"scalar-i32-be",
"1970-01-01T00:20:10.000Z",
"1970-01-01T00:20:50.000Z",
3,
cluster,
true,
4,
)
.await?;
}
if true {
return Ok(());
};
if true {
get_binned_channel::<f64>(
"wave-f64-be-n21",
"1970-01-01T00:20:10.000Z",
"1970-01-01T00:20:30.000Z",
2,
cluster,
true,
2,
)
.await?;
}
if true {
get_binned_channel::<u16>(
"wave-u16-le-n77",
"1970-01-01T01:11:00.000Z",
"1970-01-01T01:35:00.000Z",
7,
cluster,
true,
24,
)
.await?;
}
if true {
get_binned_channel::<u16>(
"wave-u16-le-n77",
"1970-01-01T01:42:00.000Z",
"1970-01-01T03:55:00.000Z",
2,
cluster,
true,
3,
)
.await?;
}
Ok(())
}
async fn get_binned_channel<NTY>(
channel_name: &str,
beg_date: &str,
end_date: &str,
bin_count: u32,
cluster: &Cluster,
expect_range_complete: bool,
expect_bin_count: u64,
) -> Result<BinnedResponse, Error>
where
NTY: fmt::Debug + SubFrId + DeserializeOwned,
{
let t1 = Utc::now();
let node0 = &cluster.nodes[0];
let beg_date = beg_date.parse()?;
let end_date = end_date.parse()?;
let channel_backend = TEST_BACKEND;
let perf_opts = PerfOpts::default();
let channel = Channel {
backend: channel_backend.into(),
name: channel_name.into(),
series: None,
};
let range = NanoRange::from_date_time(beg_date, end_date).into();
// TODO before, these tests were all fixed using AggKind::DimXBins1
let mut query = BinnedQuery::new(channel, range, bin_count).for_time_weighted_scalar();
query.set_cache_usage(CacheUsage::Ignore);
query.set_buf_len_disk_io(1024 * 16);
let hp = HostPort::from_node(node0);
let mut url = Url::parse(&format!("http://{}:{}/api/4/binned", hp.host, hp.port))?;
query.append_to_url(&mut url);
let url = url;
debug!("get_binned_channel get {}", url);
let req = hyper::Request::builder()
.method(http::Method::GET)
.uri(url.to_string())
.header(http::header::ACCEPT, APP_OCTET)
.body(Body::empty())
.ec()?;
let client = hyper::Client::new();
let res = client.request(req).await.ec()?;
if res.status() != StatusCode::OK {
error!("client response {:?}", res);
}
let s1 = HttpBodyAsAsyncRead::new(res);
let s2 = InMemoryFrameAsyncReadStream::new(s1, perf_opts.inmem_bufcap);
let res = consume_binned_response::<NTY, _>(s2).await?;
let t2 = chrono::Utc::now();
let ms = t2.signed_duration_since(t1).num_milliseconds() as u64;
debug!("get_cached_0 DONE bin_count {} time {} ms", res.bin_count, ms);
if !res.is_valid() {
Err(Error::with_msg(format!("invalid response: {:?}", res)))
} else if res.range_complete_count == 0 && expect_range_complete {
Err(Error::with_msg(format!("expect range complete: {:?}", res)))
} else if res.bin_count != expect_bin_count {
Err(Error::with_msg(format!("bin count mismatch: {:?}", res)))
} else {
Ok(res)
}
}
#[allow(unused)]
#[derive(Debug)]
pub struct BinnedResponse {
bin_count: u64,
err_item_count: u64,
data_item_count: u64,
bytes_read: u64,
range_complete_count: u64,
log_item_count: u64,
#[allow(unused)]
stats_item_count: u64,
}
impl BinnedResponse {
pub fn new() -> Self {
Self {
bin_count: 0,
err_item_count: 0,
data_item_count: 0,
bytes_read: 0,
range_complete_count: 0,
log_item_count: 0,
stats_item_count: 0,
}
}
pub fn is_valid(&self) -> bool {
if self.range_complete_count > 1 {
false
} else {
true
}
}
}
// TODO
async fn consume_binned_response<NTY, T>(inp: InMemoryFrameAsyncReadStream<T>) -> Result<BinnedResponse, Error>
where
NTY: fmt::Debug + SubFrId + DeserializeOwned,
T: AsyncRead + Unpin,
{
let s1 = inp
.map_err(|e| error!("TEST GOT ERROR {:?}", e))
.filter_map(|item| {
let g = match item {
Ok(item) => match item {
StreamItem::Log(item) => {
Streamlog::emit(&item);
None
}
StreamItem::Stats(item) => {
// TODO collect somewhere
debug!("Stats: {:?}", item);
None
}
StreamItem::DataItem(_frame) => {
err::todo();
Some(Ok(()))
}
},
Err(e) => Some(Err(Error::with_msg(format!("WEIRD EMPTY ERROR {:?}", e)))),
};
ready(g)
})
.fold(BinnedResponse::new(), |a, _x| ready(a));
let ret = s1.await;
debug!("BinnedResponse: {:?}", ret);
Ok(ret)
}

View File

@@ -4,7 +4,11 @@ use err::Error;
#[test]
fn get_sls_archive_1() -> Result<(), Error> {
let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
if true {
return Ok(());
}
// TODO re-use test data in dedicated convert application.
let fut = async { Err::<(), _>(Error::with_msg_no_trace("TODO")) };
#[cfg(DISABLED)]
let fut = async move {
let rh = require_sls_test_host_running()?;
@@ -28,7 +32,11 @@ fn get_sls_archive_1() -> Result<(), Error> {
#[test]
fn get_sls_archive_3() -> Result<(), Error> {
let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
if true {
return Ok(());
}
// TODO re-use test data in dedicated convert application.
let fut = async { Err::<(), _>(Error::with_msg_no_trace("TODO")) };
#[cfg(DISABLED)]
let fut = async move {
let rh = require_sls_test_host_running()?;
@@ -52,7 +60,11 @@ fn get_sls_archive_3() -> Result<(), Error> {
#[test]
fn get_sls_archive_wave_2() -> Result<(), Error> {
let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
if true {
return Ok(());
}
// TODO re-use test data in dedicated convert application.
let fut = async { Err::<(), _>(Error::with_msg_no_trace("TODO")) };
#[cfg(DISABLED)]
let fut = async move {
let rh = require_sls_test_host_running()?;

View File

@@ -2,7 +2,11 @@ use super::*;
#[test]
fn get_scalar_2_events() -> Result<(), Error> {
let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
if true {
return Ok(());
}
// TODO re-use test data in dedicated convert application.
let fut = async { Err::<(), _>(Error::with_msg_no_trace("TODO")) };
#[cfg(DISABLED)]
let fut = async move {
let rh = require_sls_test_host_running()?;
@@ -47,6 +51,10 @@ fn get_scalar_2_events() -> Result<(), Error> {
#[test]
fn get_scalar_2_binned() -> Result<(), Error> {
if true {
return Ok(());
}
// TODO re-use test data in dedicated convert application.
let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
#[cfg(DISABLED)]
let fut = async move {
@@ -71,6 +79,10 @@ fn get_scalar_2_binned() -> Result<(), Error> {
#[test]
fn get_wave_1_events() -> Result<(), Error> {
if true {
return Ok(());
}
// TODO re-use test data in dedicated convert application.
let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
#[cfg(DISABLED)]
let fut = async move {
@@ -114,6 +126,10 @@ fn get_wave_1_events() -> Result<(), Error> {
#[test]
fn get_wave_1_binned() -> Result<(), Error> {
if true {
return Ok(());
}
// TODO re-use test data in dedicated convert application.
let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
#[cfg(DISABLED)]
let fut = async move {

View File

@@ -1,5 +1,4 @@
use crate::err::ErrConv;
use crate::nodes::require_test_hosts_running;
use chrono::DateTime;
use chrono::Utc;
use err::Error;
@@ -18,78 +17,12 @@ use url::Url;
const TEST_BACKEND: &str = "testbackend-00";
#[test]
fn time_weighted_json_03() -> Result<(), Error> {
async fn inner() -> Result<(), Error> {
let rh = require_test_hosts_running()?;
let cluster = &rh.cluster;
let res = get_json_common(
"const-regular-scalar-i32-be",
"1970-01-01T00:20:11.000Z",
"1970-01-01T00:30:20.000Z",
10,
//AggKind::TimeWeightedScalar,
cluster,
11,
true,
)
.await?;
let v = res.avgs[0];
assert!(v > 41.9999 && v < 42.0001);
Ok(())
}
super::run_test(inner())
}
#[test]
fn time_weighted_json_10() -> Result<(), Error> {
async fn inner() -> Result<(), Error> {
error!("TODO this test asked for DimXBins1");
let rh = require_test_hosts_running()?;
let cluster = &rh.cluster;
get_json_common(
"scalar-i32-be",
"1970-01-01T00:20:10.000Z",
"1970-01-01T01:20:30.000Z",
10,
//AggKind::DimXBins1,
cluster,
13,
true,
)
.await?;
Ok(())
}
super::run_test(inner())
}
#[test]
fn time_weighted_json_20() -> Result<(), Error> {
async fn inner() -> Result<(), Error> {
let rh = require_test_hosts_running()?;
let cluster = &rh.cluster;
get_json_common(
"wave-f64-be-n21",
"1970-01-01T00:20:10.000Z",
"1970-01-01T01:20:45.000Z",
10,
//AggKind::TimeWeightedScalar,
cluster,
13,
true,
)
.await?;
Ok(())
}
super::run_test(inner())
}
// For waveform with N x-bins, see test::binnedjson
struct DataResult {
avgs: Vec<f64>,
}
// TODO compare if I want to recycle some of this:
#[allow(unused)]
async fn get_json_common(
channel_name: &str,
beg_date: &str,