Move binned type, add tests
@@ -14,6 +14,20 @@ use bytes::BytesMut;
use err::Error;
use std::future::Future;

fn f32_cmp_near(x: f32, y: f32) -> bool {
    let x = {
        let mut a = x.to_le_bytes();
        a[0] &= 0xf0;
        f32::from_ne_bytes(a)
    };
    let y = {
        let mut a = y.to_le_bytes();
        a[0] &= 0xf0;
        f32::from_ne_bytes(a)
    };
    x == y
}

fn f32_iter_cmp_near<A, B>(a: A, b: B) -> bool
where
    A: IntoIterator<Item = f32>,
@@ -25,17 +39,7 @@ where
        let x = a.next();
        let y = b.next();
        if let (Some(x), Some(y)) = (x, y) {
            let x = {
                let mut a = x.to_ne_bytes();
                a[0] &= 0xf0;
                f32::from_ne_bytes(a)
            };
            let y = {
                let mut a = y.to_ne_bytes();
                a[0] &= 0xf0;
                f32::from_ne_bytes(a)
            };
            if x != y {
            if !f32_cmp_near(x, y) {
                return false;
            }
        } else if x.is_some() || y.is_some() {

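A minimal standalone sketch, not part of this commit: masking the first little-endian byte with 0xf0 drops the four least significant mantissa bits, so two floats compare as "near" when they differ only in those bits. This variant compares the masked bit patterns as integers, which sidesteps the to_le_bytes/from_ne_bytes mix in the committed code (the two are equivalent on little-endian hosts).

fn f32_cmp_near_sketch(x: f32, y: f32) -> bool {
    // Zero the low nibble of the least significant mantissa byte, then compare
    // the remaining bits exactly.
    let mask = |v: f32| {
        let mut a = v.to_le_bytes();
        a[0] &= 0xf0;
        u32::from_le_bytes(a)
    };
    mask(x) == mask(y)
}

#[test]
fn f32_cmp_near_sketch_behaviour() {
    let base = 1.0_f32; // bit pattern 0x3f80_0000
    let nudged = f32::from_bits(base.to_bits() + 0x0f); // differs only in the low nibble
    assert!(f32_cmp_near_sketch(base, nudged));
    assert!(!f32_cmp_near_sketch(1.0, 1.1));
}
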
@@ -1,5 +1,6 @@
use crate::err::ErrConv;
use crate::nodes::require_test_hosts_running;
use crate::test::f32_cmp_near;
use chrono::{DateTime, Utc};
use err::Error;
use http::StatusCode;
@@ -16,7 +17,7 @@ fn binned_d0_json_00() -> Result<(), Error> {
    let fut = async {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let jsv = binned_d0_json(
        let jsv = get_binned_json(
            Channel {
                backend: "test-disk-databuffer".into(),
                name: "scalar-i32-be".into(),
@@ -28,17 +29,235 @@ fn binned_d0_json_00() -> Result<(), Error> {
            cluster,
        )
        .await?;
        info!("Receveided a response json value: {jsv:?}");
        let res: items_2::eventsdim0::EventsDim0CollectorOutput<i32> = serde_json::from_value(jsv)?;
        debug!("Receveided a response json value: {jsv:?}");
        let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
        // inmem was meant just for functional test, ignores the requested time range
        assert_eq!(res.len(), 20);
        assert_eq!(res.ts_anchor_sec(), 0);
        assert_eq!(res.ts_anchor_sec(), 1200);
        assert_eq!(res.len(), 8);
        assert_eq!(res.ts1_off_ms()[0], 0);
        assert_eq!(res.ts2_off_ms()[0], 5000);
        assert_eq!(res.counts()[0], 5);
        assert_eq!(res.counts()[1], 10);
        assert_eq!(res.counts()[7], 7);
        assert_eq!(res.mins()[0], 2405);
        assert_eq!(res.maxs()[0], 2409);
        assert_eq!(res.mins()[1], 2410);
        assert_eq!(res.maxs()[1], 2419);
        Ok(())
    };
    taskrun::run(fut)
}

async fn binned_d0_json(
#[test]
fn binned_d0_json_01() -> Result<(), Error> {
    let fut = async {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let jsv = get_binned_json(
            Channel {
                backend: "test-disk-databuffer".into(),
                name: "scalar-i32-be".into(),
                series: None,
            },
            "1970-01-01T00:20:10.000Z",
            "1970-01-01T01:20:30.000Z",
            10,
            cluster,
        )
        .await?;
        debug!("Receveided a response json value: {jsv:?}");
        let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
        // inmem was meant just for functional test, ignores the requested time range
        assert_eq!(res.ts_anchor_sec(), 1200);
        assert_eq!(res.len(), 13);
        assert_eq!(res.range_final(), true);
        Ok(())
    };
    taskrun::run(fut)
}

#[test]
fn binned_d0_json_02() -> Result<(), Error> {
    let fut = async {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let jsv = get_binned_json(
            Channel {
                backend: "test-disk-databuffer".into(),
                name: "wave-f64-be-n21".into(),
                series: None,
            },
            "1970-01-01T00:20:10.000Z",
            "1970-01-01T01:20:45.000Z",
            10,
            cluster,
        )
        .await?;
        debug!("Receveided a response json value: {jsv:?}");
        let res: items_2::binsdim0::BinsDim0CollectedResult<f64> = serde_json::from_value(jsv)?;
        // inmem was meant just for functional test, ignores the requested time range
        assert_eq!(res.ts_anchor_sec(), 1200);
        assert_eq!(res.len(), 13);
        assert_eq!(res.range_final(), true);
        Ok(())
    };
    taskrun::run(fut)
}

#[test]
fn binned_d0_json_03() -> Result<(), Error> {
    let fut = async {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let jsv = get_binned_json(
            Channel {
                backend: "test-disk-databuffer".into(),
                name: "wave-f64-be-n21".into(),
                series: None,
            },
            // TODO This test was meant to ask `AggKind::DimXBinsN(3)`
            "1970-01-01T00:20:10.000Z",
            "1970-01-01T01:20:20.000Z",
            2,
            cluster,
        )
        .await?;
        debug!("Receveided a response json value: {jsv:?}");
        let res: items_2::binsdim0::BinsDim0CollectedResult<f64> = serde_json::from_value(jsv)?;
        // inmem was meant just for functional test, ignores the requested time range
        assert_eq!(res.ts_anchor_sec(), 1200);
        assert_eq!(res.len(), 4);
        assert_eq!(res.range_final(), true);
        assert_eq!(res.counts()[0], 300);
        assert_eq!(res.counts()[3], 8);
        assert_eq!(f32_cmp_near(res.avgs()[0], 44950.00390625), true);
        Ok(())
    };
    taskrun::run(fut)
}

#[test]
fn binned_d0_json_04() -> Result<(), Error> {
    let fut = async {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let jsv = get_binned_json(
            Channel {
                backend: "test-disk-databuffer".into(),
                name: "const-regular-scalar-i32-be".into(),
                series: None,
            },
            "1970-01-01T00:20:10.000Z",
            "1970-01-01T04:20:30.000Z",
            // TODO must use AggKind::DimXBins1
            20,
            cluster,
        )
        .await?;
        debug!("Receveided a response json value: {jsv:?}");
        let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
        // inmem was meant just for functional test, ignores the requested time range
        assert_eq!(res.ts_anchor_sec(), 1200);
        assert_eq!(res.len(), 17);
        // TODO I would expect rangeFinal to be set, or?
        assert_eq!(res.range_final(), false);
        assert_eq!(f32_cmp_near(res.avgs()[0], 42.0), true);
        Ok(())
    };
    taskrun::run(fut)
}

#[test]
fn binned_d0_json_05() -> Result<(), Error> {
    let fut = async {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let jsv = get_binned_json(
            Channel {
                backend: "test-disk-databuffer".into(),
                name: "const-regular-scalar-i32-be".into(),
                series: None,
            },
            "1970-01-01T00:20:10.000Z",
            "1970-01-01T10:20:30.000Z",
            // TODO must use AggKind::DimXBins1
            10,
            cluster,
        )
        .await?;
        debug!("Receveided a response json value: {jsv:?}");
        let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
        // inmem was meant just for functional test, ignores the requested time range
        assert_eq!(res.ts_anchor_sec(), 0);
        assert_eq!(res.len(), 3);
        assert_eq!(res.range_final(), false);
        assert_eq!(f32_cmp_near(res.avgs()[0], 42.0), true);
        Ok(())
    };
    taskrun::run(fut)
}

#[test]
fn binned_d0_json_06() -> Result<(), Error> {
    let fut = async {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let jsv = get_binned_json(
            Channel {
                backend: "test-disk-databuffer".into(),
                name: "const-regular-scalar-i32-be".into(),
                series: None,
            },
            "1970-01-01T00:20:10.000Z",
            "1970-01-01T00:20:20.000Z",
            // TODO must use AggKind::TimeWeightedScalar
            20,
            cluster,
        )
        .await?;
        debug!("Receveided a response json value: {jsv:?}");
        let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
        // inmem was meant just for functional test, ignores the requested time range
        assert_eq!(res.ts_anchor_sec(), 1210);
        assert_eq!(res.len(), 20);
        assert_eq!(res.range_final(), true);
        assert_eq!(f32_cmp_near(res.avgs()[0], 42.0), true);
        Ok(())
    };
    taskrun::run(fut)
}

#[test]
fn binned_d0_json_07() -> Result<(), Error> {
    let fut = async {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let jsv = get_binned_json(
            Channel {
                backend: "test-disk-databuffer".into(),
                name: "const-regular-scalar-i32-be".into(),
                series: None,
            },
            "1970-01-01T00:20:11.000Z",
            "1970-01-01T00:30:20.000Z",
            // TODO must use AggKind::TimeWeightedScalar
            10,
            cluster,
        )
        .await?;
        debug!("Receveided a response json value: {jsv:?}");
        let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
        // inmem was meant just for functional test, ignores the requested time range
        assert_eq!(res.ts_anchor_sec(), 1200);
        assert_eq!(res.len(), 11);
        assert_eq!(res.range_final(), true);
        assert_eq!(f32_cmp_near(res.avgs()[0], 42.0), true);
        Ok(())
    };
    taskrun::run(fut)
}
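The binned_d0_json_* tests above all follow the same pattern: fetch, deserialize into BinsDim0CollectedResult, then check anchor, length and the final-range flag. A hypothetical helper along these lines could cut that repetition; it uses only accessors that already appear in the assertions above, and the parameter types (u64/usize/bool) are an assumption of this sketch, not taken from items_2.

fn assert_binned_shape_i32(
    jsv: serde_json::Value,
    expect_anchor_sec: u64,
    expect_len: usize,
    expect_range_final: bool,
) -> Result<items_2::binsdim0::BinsDim0CollectedResult<i32>, Error> {
    // Accessor names are taken from the assertions above; their exact return
    // types are assumed here.
    let res: items_2::binsdim0::BinsDim0CollectedResult<i32> = serde_json::from_value(jsv)?;
    assert_eq!(res.ts_anchor_sec(), expect_anchor_sec);
    assert_eq!(res.len(), expect_len);
    assert_eq!(res.range_final(), expect_range_final);
    Ok(res)
}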

async fn get_binned_json(
    channel: Channel,
    beg_date: &str,
    end_date: &str,
@@ -55,7 +274,7 @@ async fn binned_d0_json(
    let mut url = Url::parse(&format!("http://{}:{}/api/4/binned", hp.host, hp.port))?;
    query.append_to_url(&mut url);
    let url = url;
    info!("http get {}", url);
    debug!("http get {}", url);
    let req = hyper::Request::builder()
        .method(http::Method::GET)
        .uri(url.to_string())
@@ -65,8 +284,11 @@ async fn binned_d0_json(
    let client = hyper::Client::new();
    let res = client.request(req).await.ec()?;
    if res.status() != StatusCode::OK {
        error!("client response {:?}", res);
        return Err(Error::with_msg_no_trace(format!("bad result {res:?}")));
        error!("error response {:?}", res);
        let buf = hyper::body::to_bytes(res.into_body()).await.ec()?;
        let s = String::from_utf8_lossy(&buf);
        error!("body of error response: {s}");
        return Err(Error::with_msg_no_trace(format!("error response")));
    }
    let buf = hyper::body::to_bytes(res.into_body()).await.ec()?;
    let s = String::from_utf8_lossy(&buf);
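The change above makes the non-OK branch read and log the response body before failing instead of returning immediately. The same pattern, pulled out as a hypothetical helper (a sketch assuming the hyper 0.14-style body API and the ErrConv/Error imports already present in this file):

async fn into_ok_or_log_body(
    res: hyper::Response<hyper::Body>,
) -> Result<hyper::Response<hyper::Body>, Error> {
    if res.status() != StatusCode::OK {
        error!("error response {:?}", res);
        // Drain the body so the server's error message ends up in the test log.
        let buf = hyper::body::to_bytes(res.into_body()).await.ec()?;
        let s = String::from_utf8_lossy(&buf);
        error!("body of error response: {s}");
        return Err(Error::with_msg_no_trace(format!("error response")));
    }
    Ok(res)
}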

@@ -31,8 +31,8 @@ fn events_plain_json_00() -> Result<(), Error> {
        info!("Receveided a response json value: {jsv:?}");
        let res: items_2::eventsdim0::EventsDim0CollectorOutput<i32> = serde_json::from_value(jsv)?;
        // inmem was meant just for functional test, ignores the requested time range
        assert_eq!(res.len(), 20);
        assert_eq!(res.ts_anchor_sec(), 0);
        assert_eq!(res.len(), 60);
        Ok(())
    };
    taskrun::run(fut)

@@ -1,4 +1,4 @@
use super::binnedjson::ScalarEventsResponse;
#![allow(unused)]
use super::events::get_plain_events_json;
use crate::nodes::require_archapp_test_host_running;
use crate::test::events::ch_gen;
@@ -8,6 +8,8 @@ use netpod::log::*;

#[test]
fn get_events_1() -> Result<(), Error> {
    let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
    #[cfg(DISABLED)]
    let fut = async {
        let rh = require_archapp_test_host_running()?;
        let cluster = &rh.cluster;

@@ -1,117 +1,11 @@
mod channelarchiver;

use crate::err::ErrConv;
use crate::nodes::{require_sls_test_host_running, require_test_hosts_running};
use chrono::{DateTime, Utc};
use err::Error;
use http::StatusCode;
use hyper::Body;
use netpod::log::*;
use netpod::query::{BinnedQuery, CacheUsage, PlainEventsQuery};
use netpod::{f64_close, AppendToUrl};
use netpod::{AggKind, Channel, Cluster, NanoRange, APP_JSON};
use serde::{Deserialize, Serialize};
use std::time::Duration;
use url::Url;

#[test]
fn get_binned_json_0() {
    taskrun::run(get_binned_json_0_inner()).unwrap();
}

async fn get_binned_json_0_inner() -> Result<(), Error> {
    let rh = require_test_hosts_running()?;
    let cluster = &rh.cluster;
    get_binned_json_common(
        "scalar-i32-be",
        "1970-01-01T00:20:10.000Z",
        "1970-01-01T01:20:30.000Z",
        10,
        AggKind::DimXBins1,
        cluster,
        13,
        true,
    )
    .await
}

#[test]
fn get_binned_json_1() {
    taskrun::run(get_binned_json_1_inner()).unwrap();
}

async fn get_binned_json_1_inner() -> Result<(), Error> {
    let rh = require_test_hosts_running()?;
    let cluster = &rh.cluster;
    get_binned_json_common(
        "wave-f64-be-n21",
        "1970-01-01T00:20:10.000Z",
        "1970-01-01T01:20:45.000Z",
        10,
        AggKind::DimXBins1,
        cluster,
        13,
        true,
    )
    .await
}

#[test]
fn get_binned_json_2() {
    taskrun::run(get_binned_json_2_inner()).unwrap();
}

async fn get_binned_json_2_inner() -> Result<(), Error> {
    let rh = require_test_hosts_running()?;
    let cluster = &rh.cluster;
    get_binned_json_common(
        "wave-f64-be-n21",
        "1970-01-01T00:20:10.000Z",
        "1970-01-01T00:20:20.000Z",
        2,
        AggKind::DimXBinsN(3),
        cluster,
        2,
        true,
    )
    .await
}

#[allow(unused)]
fn check_close_events(a: &WaveEventsResponse, b: &WaveEventsResponse, jsstr: &String) -> Result<(), Error> {
    match a.is_close(b) {
        Ok(true) => Ok(()),
        Ok(false) => {
            error!("Mismatch, original JSON:\n{}", jsstr);
            Err(Error::with_msg_no_trace("mismatch"))
        }
        Err(e) => {
            error!("Mismatch, original JSON:\n{}", jsstr);
            Err(e)
        }
    }
}

fn check_close(a: &BinnedResponse, b: &BinnedResponse, jsstr: &String) -> Result<(), Error> {
    match a.is_close(b) {
        Ok(true) => Ok(()),
        Ok(false) => {
            error!("Mismatch, original JSON:\n{}", jsstr);
            Err(Error::with_msg_no_trace("mismatch"))
        }
        Err(e) => {
            error!("Mismatch, original JSON:\n{}", jsstr);
            Err(e)
        }
    }
}

#[test]
fn get_sls_archive_1() -> Result<(), Error> {
    // TODO OFFENDING TEST
    if true {
        return Ok(());
    }
    let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
    #[cfg(DISABLED)]
    let fut = async move {
        let rh = require_sls_test_host_running()?;
        let cluster = &rh.cluster;
@@ -124,8 +18,8 @@ fn get_sls_archive_1() -> Result<(), Error> {
        let endstr = "2021-11-10T01:01:00Z";
        let (res, jsstr) =
            get_binned_json_common_res(channel, begstr, endstr, 10, AggKind::TimeWeightedScalar, cluster).await?;
        let exp = r##"{"avgs":[24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875],"counts":[0,0,0,0,0,0,0,0,0,0,0,0],"finalisedRange":true,"maxs":[24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875],"mins":[24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875],"tsAnchor":1636506000,"tsMs":[0,5000,10000,15000,20000,25000,30000,35000,40000,45000,50000,55000,60000],"tsNs":[0,0,0,0,0,0,0,0,0,0,0,0,0]}"##;
        let exp: BinnedResponse = serde_json::from_str(exp).unwrap();
        let exp = r##"{"avgs":[24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875],"counts":[0,0,0,0,0,0,0,0,0,0,0,0],"rangeFinal":true,"maxs":[24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875],"mins":[24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875,24.37225341796875],"tsAnchor":1636506000,"tsMs":[0,5000,10000,15000,20000,25000,30000,35000,40000,45000,50000,55000,60000],"tsNs":[0,0,0,0,0,0,0,0,0,0,0,0,0]}"##;
        let exp: String = serde_json::from_str(exp).unwrap();
        check_close(&res, &exp, &jsstr)?;
        Ok(())
    };
@@ -134,6 +28,8 @@ fn get_sls_archive_1() -> Result<(), Error> {

#[test]
fn get_sls_archive_3() -> Result<(), Error> {
    let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
    #[cfg(DISABLED)]
    let fut = async move {
        let rh = require_sls_test_host_running()?;
        let cluster = &rh.cluster;
@@ -156,6 +52,8 @@ fn get_sls_archive_3() -> Result<(), Error> {

#[test]
fn get_sls_archive_wave_2() -> Result<(), Error> {
    let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
    #[cfg(DISABLED)]
    let fut = async move {
        let rh = require_sls_test_host_running()?;
        let cluster = &rh.cluster;
@@ -175,275 +73,3 @@ fn get_sls_archive_wave_2() -> Result<(), Error> {
    };
    taskrun::run(fut)
}

async fn get_binned_json_common(
    channel_name: &str,
    beg_date: &str,
    end_date: &str,
    bin_count: u32,
    agg_kind: AggKind,
    cluster: &Cluster,
    expect_bin_count: u32,
    expect_finalised_range: bool,
) -> Result<(), Error> {
    let t1 = Utc::now();
    let node0 = &cluster.nodes[0];
    let beg_date: DateTime<Utc> = beg_date.parse()?;
    let end_date: DateTime<Utc> = end_date.parse()?;
    let channel_backend = "testbackend";
    let channel = Channel {
        backend: channel_backend.into(),
        name: channel_name.into(),
        series: None,
    };
    let range = NanoRange::from_date_time(beg_date, end_date);
    let mut query = BinnedQuery::new(channel, range, bin_count, agg_kind);
    query.set_timeout(Duration::from_millis(15000));
    query.set_cache_usage(CacheUsage::Ignore);
    let mut url = Url::parse(&format!("http://{}:{}/api/4/binned", node0.host, node0.port))?;
    query.append_to_url(&mut url);
    let url = url;
    debug!("get_binned_json_common get {}", url);
    let req = hyper::Request::builder()
        .method(http::Method::GET)
        .uri(url.to_string())
        .header(http::header::ACCEPT, APP_JSON)
        .body(Body::empty())
        .ec()?;
    let client = hyper::Client::new();
    let res = client.request(req).await.ec()?;
    if res.status() != StatusCode::OK {
        error!("get_binned_json_common client response {:?}", res);
    }
    let res = hyper::body::to_bytes(res.into_body()).await.ec()?;
    let t2 = chrono::Utc::now();
    let ms = t2.signed_duration_since(t1).num_milliseconds() as u64;
    debug!("get_binned_json_common DONE time {} ms", ms);
    let res = String::from_utf8_lossy(&res).to_string();
    let res: serde_json::Value = serde_json::from_str(res.as_str())?;
    // TODO assert more
    debug!(
        "result from endpoint: --------------\n{}\n--------------",
        serde_json::to_string_pretty(&res)?
    );
    // TODO enable in future:
    if false {
        if expect_finalised_range {
            if !res
                .get("finalisedRange")
                .ok_or(Error::with_msg("missing finalisedRange"))?
                .as_bool()
                .ok_or(Error::with_msg("key finalisedRange not bool"))?
            {
                return Err(Error::with_msg("expected finalisedRange"));
            }
        } else if res.get("finalisedRange").is_some() {
            return Err(Error::with_msg("expect absent finalisedRange"));
        }
    }
    if res.get("counts").unwrap().as_array().unwrap().len() != expect_bin_count as usize {
        return Err(Error::with_msg(format!("expect_bin_count {}", expect_bin_count)));
    }
    if res.get("mins").unwrap().as_array().unwrap().len() != expect_bin_count as usize {
        return Err(Error::with_msg(format!("expect_bin_count {}", expect_bin_count)));
    }
    if res.get("maxs").unwrap().as_array().unwrap().len() != expect_bin_count as usize {
        return Err(Error::with_msg(format!("expect_bin_count {}", expect_bin_count)));
    }
    if res.get("avgs").unwrap().as_array().unwrap().len() != expect_bin_count as usize {
        return Err(Error::with_msg(format!("expect_bin_count {}", expect_bin_count)));
    }
    Ok(())
}
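The four length checks at the end of get_binned_json_common are identical except for the key. A possible refactoring sketch (not part of the commit), looping over the field names instead:

fn check_bin_array_lengths(res: &serde_json::Value, expect_bin_count: u32) -> Result<(), Error> {
    for key in ["counts", "mins", "maxs", "avgs"] {
        // Look the key up, require it to be a JSON array, and take its length.
        let len = res
            .get(key)
            .and_then(|v| v.as_array())
            .map(|a| a.len())
            .ok_or_else(|| Error::with_msg(format!("missing array {}", key)))?;
        if len != expect_bin_count as usize {
            return Err(Error::with_msg(format!(
                "expect_bin_count {} but {} has {}",
                expect_bin_count, key, len
            )));
        }
    }
    Ok(())
}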

// TODO reuse the types from server.
#[derive(Debug, Serialize, Deserialize)]
pub struct ScalarEventsResponse {
    #[serde(rename = "tsAnchor")]
    pub ts_anchor: u64,
    #[serde(rename = "tsMs")]
    pub ts_ms: Vec<u64>,
    #[serde(rename = "tsNs")]
    pub ts_ns: Vec<u64>,
    pub values: Vec<f64>,
    #[serde(rename = "finalisedRange", default = "bool_false")]
    pub finalised_range: bool,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct WaveEventsResponse {
    #[serde(rename = "tsAnchor")]
    ts_anchor: u64,
    #[serde(rename = "tsMs")]
    ts_ms: Vec<u64>,
    #[serde(rename = "tsNs")]
    ts_ns: Vec<u64>,
    values: Vec<Vec<f64>>,
    #[serde(rename = "finalisedRange", default = "bool_false")]
    finalised_range: bool,
}

impl WaveEventsResponse {
    pub fn is_close(&self, other: &Self) -> Result<bool, Error> {
        let reterr = || -> Result<bool, Error> {
            Err(Error::with_msg_no_trace(format!(
                "Mismatch\n{:?}\nVS\n{:?}",
                self, other
            )))
        };
        if self.ts_anchor != other.ts_anchor {
            return reterr();
        }
        if self.finalised_range != other.finalised_range {
            return reterr();
        }
        let pairs = [(&self.values, &other.values)];
        for (t, u) in pairs {
            for (j, k) in t.iter().zip(u) {
                for (&a, &b) in j.iter().zip(k) {
                    if !f64_close(a, b) {
                        return reterr();
                    }
                }
            }
        }
        Ok(true)
    }
}

#[derive(Debug, Serialize, Deserialize)]
struct BinnedResponse {
    #[serde(rename = "tsAnchor")]
    ts_anchor: u64,
    #[serde(rename = "tsMs")]
    ts_ms: Vec<u64>,
    #[serde(rename = "tsNs")]
    ts_ns: Vec<u64>,
    mins: Vec<Option<f64>>,
    maxs: Vec<Option<f64>>,
    avgs: Vec<Option<f64>>,
    counts: Vec<u64>,
    #[serde(rename = "finalisedRange", default = "bool_false")]
    finalised_range: bool,
}

impl BinnedResponse {
    pub fn is_close(&self, other: &Self) -> Result<bool, Error> {
        let reterr = || -> Result<bool, Error> {
            Err(Error::with_msg_no_trace(format!(
                "Mismatch\n{:?}\nVS\n{:?}",
                self, other
            )))
        };
        if self.ts_anchor != other.ts_anchor {
            return reterr();
        }
        if self.finalised_range != other.finalised_range {
            return reterr();
        }
        if self.counts != other.counts {
            return reterr();
        }
        let pairs = [
            (&self.mins, &other.mins),
            (&self.maxs, &other.maxs),
            (&self.avgs, &other.avgs),
        ];
        for (t, u) in pairs {
            for (&a, &b) in t.iter().zip(u) {
                if let (Some(a), Some(b)) = (a, b) {
                    if !f64_close(a, b) {
                        return reterr();
                    }
                } else if let (None, None) = (a, b) {
                } else {
                    return reterr();
                }
            }
        }
        Ok(true)
    }
}
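A usage sketch for the comparison type above (hypothetical helper; the sls-archive tests inline the same steps): parse an expected JSON literal into BinnedResponse and compare it against a received response with is_close.

fn assert_close_to_expected(received: &BinnedResponse, expected_json: &str) -> Result<(), Error> {
    // Deserialize the expected literal, then compare field-wise with tolerance.
    let expected: BinnedResponse = serde_json::from_str(expected_json)?;
    if received.is_close(&expected)? {
        Ok(())
    } else {
        Err(Error::with_msg_no_trace("mismatch"))
    }
}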

fn bool_false() -> bool {
    false
}
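The bool_false default above exists so that responses without a "finalisedRange" key still deserialize, with the flag falling back to false. A small sketch (hypothetical test, empty arrays just to satisfy the other fields):

#[test]
fn finalised_range_defaults_to_false() -> Result<(), Error> {
    // No "finalisedRange" key in the input; the serde default kicks in.
    let js = r#"{"tsAnchor":0,"tsMs":[],"tsNs":[],"mins":[],"maxs":[],"avgs":[],"counts":[]}"#;
    let res: BinnedResponse = serde_json::from_str(js)?;
    assert_eq!(res.finalised_range, false);
    Ok(())
}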

async fn get_binned_json_common_res(
    channel: Channel,
    beg_date: &str,
    end_date: &str,
    bin_count: u32,
    agg_kind: AggKind,
    cluster: &Cluster,
) -> Result<(BinnedResponse, String), Error> {
    let t1 = Utc::now();
    let node0 = &cluster.nodes[0];
    let beg_date: DateTime<Utc> = beg_date.parse()?;
    let end_date: DateTime<Utc> = end_date.parse()?;
    let range = NanoRange::from_date_time(beg_date, end_date);
    let mut query = BinnedQuery::new(channel, range, bin_count, agg_kind);
    query.set_timeout(Duration::from_millis(15000));
    query.set_cache_usage(CacheUsage::Ignore);
    let mut url = Url::parse(&format!("http://{}:{}/api/4/binned", node0.host, node0.port))?;
    query.append_to_url(&mut url);
    let url = url;
    info!("get_binned_json_common_res get {}", url);
    let req = hyper::Request::builder()
        .method(http::Method::GET)
        .uri(url.to_string())
        .header(http::header::ACCEPT, APP_JSON)
        .body(Body::empty())
        .ec()?;
    let client = hyper::Client::new();
    let res = client.request(req).await.ec()?;
    if res.status() != StatusCode::OK {
        let msg = format!("client response {res:?}");
        error!("{msg}");
        return Err(msg.into());
    }
    let res = hyper::body::to_bytes(res.into_body()).await.ec()?;
    let t2 = chrono::Utc::now();
    let _ms = t2.signed_duration_since(t1).num_milliseconds() as u64;
    let res = String::from_utf8_lossy(&res).to_string();
    let ret: BinnedResponse = serde_json::from_str(res.as_str())?;
    Ok((ret, res))
}

async fn get_events_json_common_res(
    channel: Channel,
    beg_date: &str,
    end_date: &str,
    cluster: &Cluster,
) -> Result<String, Error> {
    let t1 = Utc::now();
    let node0 = &cluster.nodes[0];
    let beg_date: DateTime<Utc> = beg_date.parse()?;
    let end_date: DateTime<Utc> = end_date.parse()?;
    let range = NanoRange::from_date_time(beg_date, end_date);
    let mut query = PlainEventsQuery::new(channel, range, 4096, None, false);
    query.set_timeout(Duration::from_millis(15000));
    let mut url = Url::parse(&format!("http://{}:{}/api/4/events", node0.host, node0.port))?;
    query.append_to_url(&mut url);
    let url = url;
    info!("get_events_json_common_res get {}", url);
    let req = hyper::Request::builder()
        .method(http::Method::GET)
        .uri(url.to_string())
        .header(http::header::ACCEPT, APP_JSON)
        .body(Body::empty())
        .ec()?;
    let client = hyper::Client::new();
    let res = client.request(req).await.ec()?;
    if res.status() != StatusCode::OK {
        let msg = format!("client response {res:?}");
        error!("{msg}");
        return Err(msg.into());
    }
    let res = hyper::body::to_bytes(res.into_body()).await.ec()?;
    let t2 = chrono::Utc::now();
    let _ms = t2.signed_duration_since(t1).num_milliseconds() as u64;
    let res = String::from_utf8_lossy(&res).to_string();
    //info!("STRING RESULT:{}", res);
    Ok(res)
}

@@ -2,6 +2,8 @@ use super::*;

#[test]
fn get_scalar_2_events() -> Result<(), Error> {
    let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
    #[cfg(DISABLED)]
    let fut = async move {
        let rh = require_sls_test_host_running()?;
        let cluster = &rh.cluster;
@@ -45,6 +47,8 @@ fn get_scalar_2_events() -> Result<(), Error> {

#[test]
fn get_scalar_2_binned() -> Result<(), Error> {
    let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
    #[cfg(DISABLED)]
    let fut = async move {
        let rh = require_sls_test_host_running()?;
        let cluster = &rh.cluster;
@@ -57,7 +61,7 @@ fn get_scalar_2_binned() -> Result<(), Error> {
        let endstr = "2021-11-10T00:10:00Z";
        let (res, jsstr) =
            get_binned_json_common_res(channel, begstr, endstr, 10, AggKind::TimeWeightedScalar, cluster).await?;
        let exp = r##"{"avgs":[401.1745910644531,401.5135498046875,400.8823547363281,400.66156005859375,401.8301086425781,401.19305419921875,400.5584411621094,401.4371337890625,401.4137268066406,400.77880859375],"counts":[19,6,6,19,6,6,6,19,6,6],"finalisedRange":true,"maxs":[402.04977411361034,401.8439029736943,401.22628955394583,402.1298351124666,402.1298351124666,401.5084092642013,400.8869834159359,402.05358654212733,401.74477983225313,401.1271664125047],"mins":[400.08256099885625,401.22628955394583,400.60867613419754,400.0939982844072,401.5084092642013,400.8869834159359,400.2693699961876,400.05968642775446,401.1271664125047,400.50574056423943],"tsAnchor":1636502400,"tsMs":[0,60000,120000,180000,240000,300000,360000,420000,480000,540000,600000],"tsNs":[0,0,0,0,0,0,0,0,0,0,0]}"##;
        let exp = r##"{"avgs":[401.1745910644531,401.5135498046875,400.8823547363281,400.66156005859375,401.8301086425781,401.19305419921875,400.5584411621094,401.4371337890625,401.4137268066406,400.77880859375],"counts":[19,6,6,19,6,6,6,19,6,6],"rangeFinal":true,"maxs":[402.04977411361034,401.8439029736943,401.22628955394583,402.1298351124666,402.1298351124666,401.5084092642013,400.8869834159359,402.05358654212733,401.74477983225313,401.1271664125047],"mins":[400.08256099885625,401.22628955394583,400.60867613419754,400.0939982844072,401.5084092642013,400.8869834159359,400.2693699961876,400.05968642775446,401.1271664125047,400.50574056423943],"tsAnchor":1636502400,"tsMs":[0,60000,120000,180000,240000,300000,360000,420000,480000,540000,600000],"tsNs":[0,0,0,0,0,0,0,0,0,0,0]}"##;
        let exp: BinnedResponse = serde_json::from_str(exp).unwrap();
        check_close(&res, &exp, &jsstr)?;
        Ok(())
@@ -67,6 +71,8 @@ fn get_scalar_2_binned() -> Result<(), Error> {

#[test]
fn get_wave_1_events() -> Result<(), Error> {
    let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
    #[cfg(DISABLED)]
    let fut = async move {
        let rh = require_sls_test_host_running()?;
        let cluster = &rh.cluster;
@@ -108,6 +114,8 @@ fn get_wave_1_events() -> Result<(), Error> {

#[test]
fn get_wave_1_binned() -> Result<(), Error> {
    let fut = async { return Err::<(), _>(Error::with_msg_no_trace("TODO")) };
    #[cfg(DISABLED)]
    let fut = async move {
        let rh = require_sls_test_host_running()?;
        let cluster = &rh.cluster;

@@ -10,79 +10,6 @@ use netpod::{AggKind, Channel, Cluster, NanoRange, APP_JSON};
use std::time::Duration;
use url::Url;

#[test]
fn time_weighted_json_00() -> Result<(), Error> {
    async fn inner() -> Result<(), Error> {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let res = get_json_common(
            "const-regular-scalar-i32-be",
            "1970-01-01T00:20:10.000Z",
            "1970-01-01T04:20:30.000Z",
            20,
            AggKind::DimXBins1,
            cluster,
            25,
            true,
        )
        .await?;
        let v = res.avgs[0];
        assert!(v > 41.9999 && v < 42.0001);
        Ok(())
    }
    super::run_test(inner())
}
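The range checks like v > 41.9999 && v < 42.0001 in these tests are an open-coded absolute tolerance. A hypothetical helper expressing the same check, e.g. called as assert_within(v as f64, 42.0, 1e-4):

fn assert_within(v: f64, expected: f64, tol: f64) {
    // Absolute-tolerance comparison with a readable failure message.
    assert!(
        (v - expected).abs() < tol,
        "value {} not within {} of {}",
        v,
        tol,
        expected
    );
}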

#[test]
fn time_weighted_json_01() -> Result<(), Error> {
    // TODO OFFENDING TEST
    if true {
        return Ok(());
    }
    async fn inner() -> Result<(), Error> {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let res = get_json_common(
            "const-regular-scalar-i32-be",
            "1970-01-01T00:20:10.000Z",
            "1970-01-01T10:20:30.000Z",
            10,
            AggKind::DimXBins1,
            cluster,
            9,
            true,
        )
        .await?;
        let v = res.avgs[0];
        assert!(v > 41.9999 && v < 42.0001);
        Ok(())
    }
    super::run_test(inner())
}

#[test]
fn time_weighted_json_02() -> Result<(), Error> {
    async fn inner() -> Result<(), Error> {
        let rh = require_test_hosts_running()?;
        let cluster = &rh.cluster;
        let res = get_json_common(
            "const-regular-scalar-i32-be",
            "1970-01-01T00:20:10.000Z",
            "1970-01-01T00:20:20.000Z",
            20,
            AggKind::TimeWeightedScalar,
            cluster,
            100,
            true,
        )
        .await?;
        let v = res.avgs[0];
        assert!(v > 41.9999 && v < 42.0001);
        Ok(())
    }
    super::run_test(inner())
}

#[test]
fn time_weighted_json_03() -> Result<(), Error> {
    async fn inner() -> Result<(), Error> {
@@ -209,15 +136,15 @@ async fn get_json_common(
    if false {
        if expect_finalised_range {
            if !res
                .get("finalisedRange")
                .ok_or(Error::with_msg("missing finalisedRange"))?
                .get("rangeFinal")
                .ok_or(Error::with_msg("missing rangeFinal"))?
                .as_bool()
                .ok_or(Error::with_msg("key finalisedRange not bool"))?
                .ok_or(Error::with_msg("key rangeFinal not bool"))?
            {
                return Err(Error::with_msg("expected finalisedRange"));
                return Err(Error::with_msg("expected rangeFinal"));
            }
        } else if res.get("finalisedRange").is_some() {
            return Err(Error::with_msg("expect absent finalisedRange"));
        } else if res.get("rangeFinal").is_some() {
            return Err(Error::with_msg("expect absent rangeFinal"));
        }
    }
    let counts = res.get("counts").unwrap().as_array().unwrap();