Forward non-200 status in proxy. Start with event stats reader

This commit is contained in:
Dominik Werder
2022-02-07 21:35:25 +01:00
parent bcd3273dea
commit a9f9d1ada6
35 changed files with 913 additions and 122 deletions

View File

@@ -1,6 +1,7 @@
use items::numops::NumOps;
use items::scalarevents::ScalarEvents;
use items::waveevents::WaveEvents;
use items::EventsNodeProcessor;
use items::{numops::NumOps, statsevents::StatsEvents};
use netpod::{AggKind, Shape};
use std::marker::PhantomData;
@@ -23,3 +24,38 @@ where
inp
}
}
/// Identity processor for scalar statistics events.
///
/// Stats events carry already-computed statistics, so this processor
/// forwards its input unchanged; no aggregation step is applied.
pub struct Stats1Scalar {}

impl EventsNodeProcessor for Stats1Scalar {
    type Input = StatsEvents;
    type Output = StatsEvents;

    /// Shape and aggregation kind do not influence the identity transform,
    /// so both are accepted and ignored.
    fn create(_shape: Shape, _agg_kind: AggKind) -> Self {
        Stats1Scalar {}
    }

    /// Hand the incoming stats events back untouched.
    fn process(&self, inp: Self::Input) -> Self::Output {
        inp
    }
}
/// Processor meant to reduce waveform events into statistics events.
///
/// The `PhantomData` marker ties the processor to its numeric element
/// type `NTY` without storing any data of that type.
///
/// NOTE(review): `process` is not implemented yet — it delegates to the
/// project's runtime placeholder `err::todoval()`, which is expected to
/// abort if this path is ever exercised.
pub struct Stats1Wave<NTY> {
    _m1: PhantomData<NTY>,
}

impl<NTY> EventsNodeProcessor for Stats1Wave<NTY>
where
    NTY: NumOps,
{
    type Input = WaveEvents<NTY>;
    type Output = StatsEvents;

    /// Shape and aggregation kind are accepted for trait conformity but
    /// carry no state into the processor.
    fn create(_shape: Shape, _agg_kind: AggKind) -> Self {
        Stats1Wave { _m1: PhantomData }
    }

    /// Not implemented: stats extraction for wave shapes is still pending.
    fn process(&self, _inp: Self::Input) -> Self::Output {
        err::todoval()
    }
}

View File

@@ -11,6 +11,7 @@ pub fn make_test_node(id: u32) -> Node {
listen: "0.0.0.0".into(),
port: 8800 + id as u16,
port_raw: 8800 + id as u16 + 100,
// TODO use a common function to supply the tmp path.
data_base_path: format!("../tmpdata/node{:02}", id).into(),
cache_base_path: format!("../tmpdata/node{:02}", id).into(),
ksprefix: "ks".into(),

View File

@@ -107,7 +107,7 @@ where
continue 'outer;
} else {
let msg =
format!("PreBinnedValueFetchedStream got non-OK result from sub request: {res:?}");
format!("PreBinnedValueFetchedStream non-OK result from sub request: {res:?}");
error!("{msg}");
let e = Error::with_msg_no_trace(msg);
self.errored = true;

View File

@@ -81,6 +81,10 @@ where
AggKind::Plain => {
panic!();
}
AggKind::Stats1 => {
// Currently not meant to be binned.
panic!();
}
}
}
Shape::Wave(n) => {
@@ -112,6 +116,10 @@ where
AggKind::Plain => {
panic!();
}
AggKind::Stats1 => {
// Currently not meant to be binned.
panic!();
}
}
}
Shape::Image(..) => {

View File

@@ -9,8 +9,8 @@ use err::Error;
use futures_core::Stream;
use futures_util::future::FutureExt;
use futures_util::StreamExt;
use items::scalarevents::ScalarEvents;
use items::numops::{BoolNum, NumOps};
use items::scalarevents::ScalarEvents;
use items::streams::{Collectable, Collector};
use items::{
Clearable, EventsNodeProcessor, Framable, FrameType, PushableIndex, RangeCompletableItem, Sitemty, StreamItem,
@@ -99,55 +99,55 @@ where
{
match shape {
Shape::Scalar => {
//
let evs = EventValuesDim0Case::new();
match agg_kind {
AggKind::EventBlobs => panic!(),
AggKind::Plain => {
let evs = EventValuesDim0Case::new();
let events_node_proc = <<EventValuesDim0Case<NTY> as EventValueShape<NTY, END>>::NumXAggPlain as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::TimeWeightedScalar => {
let evs = EventValuesDim0Case::new();
let events_node_proc = <<EventValuesDim0Case<NTY> as EventValueShape<NTY, END>>::NumXAggToSingleBin as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::DimXBins1 => {
let evs = EventValuesDim0Case::new();
let events_node_proc = <<EventValuesDim0Case<NTY> as EventValueShape<NTY, END>>::NumXAggToSingleBin as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::DimXBinsN(_) => {
let evs = EventValuesDim0Case::new();
let events_node_proc = <<EventValuesDim0Case<NTY> as EventValueShape<NTY, END>>::NumXAggToNBins as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::Stats1 => {
let events_node_proc = <<EventValuesDim0Case<NTY> as EventValueShape<NTY, END>>::NumXAggToStats1 as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
}
}
Shape::Wave(n) => {
//
let evs = EventValuesDim1Case::new(n);
match agg_kind {
AggKind::EventBlobs => panic!(),
AggKind::Plain => {
let evs = EventValuesDim1Case::new(n);
let events_node_proc = <<EventValuesDim1Case<NTY> as EventValueShape<NTY, END>>::NumXAggPlain as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::TimeWeightedScalar => {
let evs = EventValuesDim1Case::new(n);
let events_node_proc = <<EventValuesDim1Case<NTY> as EventValueShape<NTY, END>>::NumXAggToSingleBin as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::DimXBins1 => {
let evs = EventValuesDim1Case::new(n);
let events_node_proc = <<EventValuesDim1Case<NTY> as EventValueShape<NTY, END>>::NumXAggToSingleBin as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::DimXBinsN(_) => {
let evs = EventValuesDim1Case::new(n);
let events_node_proc = <<EventValuesDim1Case<NTY> as EventValueShape<NTY, END>>::NumXAggToNBins as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::Stats1 => {
let events_node_proc = <<EventValuesDim1Case<NTY> as EventValueShape<NTY, END>>::NumXAggToStats1 as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
}
}
Shape::Image(..) => {

View File

@@ -5,9 +5,9 @@ use err::Error;
use futures_core::Stream;
use futures_util::StreamExt;
use items::eventsitem::EventsItem;
use items::scalarevents::ScalarEvents;
use items::numops::{BoolNum, NumOps};
use items::plainevents::{PlainEvents, ScalarPlainEvents};
use items::scalarevents::ScalarEvents;
use items::waveevents::{WaveEvents, WaveNBinner, WavePlainProc, WaveXBinner};
use items::{Appendable, EventAppendable, EventsNodeProcessor, RangeCompletableItem, Sitemty, StreamItem};
use netpod::{ScalarType, Shape};
@@ -140,6 +140,7 @@ where
type NumXAggToSingleBin: EventsNodeProcessor<Input = <Self as EventValueFromBytes<NTY, END>>::Batch>;
type NumXAggToNBins: EventsNodeProcessor<Input = <Self as EventValueFromBytes<NTY, END>>::Batch>;
type NumXAggPlain: EventsNodeProcessor<Input = <Self as EventValueFromBytes<NTY, END>>::Batch>;
type NumXAggToStats1: EventsNodeProcessor<Input = <Self as EventValueFromBytes<NTY, END>>::Batch>;
}
pub struct EventValuesDim0Case<NTY> {
@@ -160,6 +161,7 @@ where
// TODO is this sufficient?
type NumXAggToNBins = Identity<NTY>;
type NumXAggPlain = Identity<NTY>;
type NumXAggToStats1 = Identity<NTY>;
}
pub struct EventValuesDim1Case<NTY> {
@@ -180,6 +182,7 @@ where
type NumXAggToSingleBin = WaveXBinner<NTY>;
type NumXAggToNBins = WaveNBinner<NTY>;
type NumXAggPlain = WavePlainProc<NTY>;
type NumXAggToStats1 = crate::agg::enp::Stats1Wave<NTY>;
}
pub struct EventsDecodedStream<NTY, END, EVS>

View File

@@ -18,7 +18,8 @@ pub fn gen_test_data_test() {
}
pub async fn gen_test_data() -> Result<(), Error> {
let data_base_path = PathBuf::from("tmpdata");
let homedir = std::env::var("HOME").unwrap();
let data_base_path = PathBuf::from(homedir).join("daqbuffer-testdata").join("databuffer");
let ksprefix = String::from("ks");
let mut ensemble = Ensemble {
nodes: vec![],

View File

@@ -68,6 +68,11 @@ macro_rules! pipe4 {
<$evs<$nty> as EventValueShape<$nty, $end>>::NumXAggPlain::create($shape, $agg_kind),
$event_blobs,
),
AggKind::Stats1 => make_num_pipeline_stream_evs::<$nty, $end, $evs<$nty>, _>(
$evsv,
<$evs<$nty> as EventValueShape<$nty, $end>>::NumXAggToStats1::create($shape, $agg_kind),
$event_blobs,
),
}
};
}