This commit is contained in:
Dominik Werder
2023-03-16 10:10:16 +01:00
parent b0f71b2143
commit 6a86ac1063
46 changed files with 549 additions and 1871 deletions

View File

@@ -5,6 +5,7 @@ use crate::Error;
use futures_util::Future;
use futures_util::Stream;
use futures_util::StreamExt;
use items_0::collect_s::Collected;
use items_0::collect_s::Collector;
use items_0::collect_s::ToJsonResult;
use items_0::streamitem::RangeCompletableItem;
@@ -62,7 +63,7 @@ fn flush_binned(
pub struct BinnedCollectedResult {
pub range_final: bool,
pub did_timeout: bool,
pub result: Box<dyn ToJsonResult>,
pub result: Box<dyn Collected>,
}
fn _old_binned_collected(

View File

@@ -1,14 +1,11 @@
use crate::ts_offs_from_abs;
use crate::ts_offs_from_abs_with_anchor;
use crate::IsoDateTime;
use crate::RangeOverlapInfo;
use crate::TimeBinnableType;
use crate::TimeBinnableTypeAggregator;
use chrono::TimeZone;
use chrono::Utc;
use err::Error;
use items_0::collect_s::Collectable;
use items_0::collect_s::CollectableType;
use items_0::collect_s::Collected;
use items_0::collect_s::CollectorType;
use items_0::collect_s::ToJsonResult;
use items_0::scalar_ops::ScalarOps;
@@ -23,7 +20,6 @@ use items_0::TimeBins;
use items_0::WithLen;
use netpod::log::*;
use netpod::timeunits::SEC;
use netpod::BinnedRange;
use netpod::BinnedRangeEnum;
use netpod::Dim0Kind;
use netpod::SeriesRange;
@@ -33,7 +29,6 @@ use std::any;
use std::any::Any;
use std::collections::VecDeque;
use std::fmt;
use std::mem;
#[allow(unused)]
macro_rules! trace4 {
@@ -338,7 +333,7 @@ where
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collected for BinsDim0CollectedResult<NTY> {}
impl<NTY: ScalarOps> Collected for BinsDim0CollectedResult<NTY> {}
impl<NTY> BinsDim0CollectedResult<NTY> {
pub fn len(&self) -> usize {
@@ -513,77 +508,6 @@ impl<NTY: ScalarOps> CollectableType for BinsDim0<NTY> {
}
}
impl<NTY> items_0::collect_c::Collector for BinsDim0Collector<NTY>
where
NTY: ScalarOps,
{
fn len(&self) -> usize {
self.vals.as_ref().map_or(0, |x| x.len())
}
fn ingest(&mut self, item: &mut dyn items_0::collect_c::Collectable) {
trace4!("\n\n••••••••••••••••••••••••••••\nINGEST\n{:?}\n\n", item);
{
{
//let tyid = item.type_id();
//let tyid = item.as_any_mut().type_id();
//let tyid = format!("{:?}", item.type_id().to_owned());
trace4!("ty 0: {:40?}", (item.as_any_mut() as &dyn Any).type_id());
}
trace4!("ty 1: {:40?}", std::any::TypeId::of::<BinsDim0<NTY>>());
trace4!("ty 2: {:40?}", std::any::TypeId::of::<BinsDim0<i32>>());
trace4!("ty 3: {:40?}", std::any::TypeId::of::<Box<BinsDim0<i32>>>());
trace4!(
"ty 4: {:?}",
std::any::TypeId::of::<Box<dyn items_0::collect_c::Collectable>>()
);
trace4!(
"ty 5: {:?}",
std::any::TypeId::of::<&mut dyn items_0::collect_c::Collectable>()
);
trace4!("ty 6: {:?}", std::any::TypeId::of::<Box<dyn items_0::TimeBinned>>());
}
if let Some(item) = item.as_any_mut().downcast_mut::<BinsDim0<NTY>>() {
trace4!("ingest plain");
CollectorType::ingest(self, item)
} else if let Some(item) = item.as_any_mut().downcast_mut::<Box<BinsDim0<NTY>>>() {
trace4!("ingest boxed");
CollectorType::ingest(self, item)
} else if let Some(item) = item.as_any_mut().downcast_mut::<Box<dyn items_0::TimeBinned>>() {
trace4!("ingest boxed dyn TimeBinned");
if let Some(item) = item.as_any_mut().downcast_mut::<BinsDim0<NTY>>() {
trace4!("ingest boxed dyn TimeBinned match");
CollectorType::ingest(self, item)
} else {
warn!("BinsDim0Collector::ingest unexpected inner item");
trace!("BinsDim0Collector::ingest unexpected inner item {:?}", item);
}
} else {
warn!("BinsDim0Collector::ingest unexpected item");
trace!("BinsDim0Collector::ingest unexpected item {:?}", item);
}
}
fn set_range_complete(&mut self) {
CollectorType::set_range_complete(self)
}
fn set_timed_out(&mut self) {
CollectorType::set_timed_out(self)
}
fn result(
&mut self,
range: Option<SeriesRange>,
binrange: Option<BinnedRangeEnum>,
) -> Result<Box<dyn items_0::collect_c::Collected>, Error> {
match CollectorType::result(self, range, binrange) {
Ok(res) => Ok(Box::new(res)),
Err(e) => Err(e.into()),
}
}
}
pub struct BinsDim0Aggregator<NTY> {
range: SeriesRange,
count: u64,
@@ -909,12 +833,3 @@ impl<NTY: ScalarOps> TimeBinned for BinsDim0<NTY> {
self
}
}
impl<NTY> items_0::collect_c::Collectable for BinsDim0<NTY>
where
NTY: ScalarOps,
{
fn new_collector(&self) -> Box<dyn items_0::collect_c::Collector> {
Box::new(BinsDim0Collector::<NTY>::new())
}
}

View File

@@ -8,6 +8,7 @@ use chrono::{TimeZone, Utc};
use err::Error;
use items_0::collect_s::Collectable;
use items_0::collect_s::CollectableType;
use items_0::collect_s::Collected;
use items_0::collect_s::CollectorType;
use items_0::collect_s::ToJsonResult;
use items_0::scalar_ops::ScalarOps;
@@ -22,7 +23,6 @@ use items_0::TimeBins;
use items_0::WithLen;
use netpod::log::*;
use netpod::timeunits::SEC;
use netpod::BinnedRange;
use netpod::BinnedRangeEnum;
use netpod::Dim0Kind;
use netpod::NanoRange;
@@ -299,7 +299,7 @@ where
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collected for BinsXbinDim0CollectedResult<NTY> {}
impl<NTY: ScalarOps> Collected for BinsXbinDim0CollectedResult<NTY> {}
impl<NTY> BinsXbinDim0CollectedResult<NTY> {
pub fn len(&self) -> usize {
@@ -468,78 +468,6 @@ impl<NTY: ScalarOps> CollectableType for BinsXbinDim0<NTY> {
}
}
impl<NTY> items_0::collect_c::Collector for BinsXbinDim0Collector<NTY>
where
NTY: ScalarOps,
{
fn len(&self) -> usize {
self.vals.len()
}
fn ingest(&mut self, item: &mut dyn items_0::collect_c::Collectable) {
/*trace4!("\n\n••••••••••••••••••••••••••••\nINGEST\n{:?}\n\n", item);
{
{
//let tyid = item.type_id();
//let tyid = item.as_any_mut().type_id();
//let tyid = format!("{:?}", item.type_id().to_owned());
trace4!("ty 0: {:40?}", (item.as_any_mut() as &dyn Any).type_id());
}
trace4!("ty 1: {:40?}", std::any::TypeId::of::<BinsDim0<NTY>>());
trace4!("ty 2: {:40?}", std::any::TypeId::of::<BinsDim0<i32>>());
trace4!("ty 3: {:40?}", std::any::TypeId::of::<Box<BinsDim0<i32>>>());
trace4!(
"ty 4: {:?}",
std::any::TypeId::of::<Box<dyn items_0::collect_c::Collectable>>()
);
trace4!(
"ty 5: {:?}",
std::any::TypeId::of::<&mut dyn items_0::collect_c::Collectable>()
);
trace4!("ty 6: {:?}", std::any::TypeId::of::<Box<dyn items_0::TimeBinned>>());
}
if let Some(item) = item.as_any_mut().downcast_mut::<BinsXbinDim0<NTY>>() {
trace4!("ingest plain");
CollectorType::ingest(self, item)
} else if let Some(item) = item.as_any_mut().downcast_mut::<Box<BinsXbinDim0<NTY>>>() {
trace4!("ingest boxed");
CollectorType::ingest(self, item)
} else if let Some(item) = item.as_any_mut().downcast_mut::<Box<dyn items_0::TimeBinned>>() {
trace4!("ingest boxed dyn TimeBinned");
if let Some(item) = item.as_any_mut().downcast_mut::<BinsXbinDim0<NTY>>() {
trace4!("ingest boxed dyn TimeBinned match");
CollectorType::ingest(self, item)
} else {
warn!("BinsDim0Collector::ingest unexpected inner item");
trace!("BinsDim0Collector::ingest unexpected inner item {:?}", item);
}
} else {
warn!("BinsDim0Collector::ingest unexpected item");
trace!("BinsDim0Collector::ingest unexpected item {:?}", item);
}*/
todo!()
}
fn set_range_complete(&mut self) {
CollectorType::set_range_complete(self)
}
fn set_timed_out(&mut self) {
CollectorType::set_timed_out(self)
}
fn result(
&mut self,
range: Option<SeriesRange>,
binrange: Option<BinnedRangeEnum>,
) -> Result<Box<dyn items_0::collect_c::Collected>, Error> {
match CollectorType::result(self, range, binrange) {
Ok(res) => Ok(Box::new(res)),
Err(e) => Err(e.into()),
}
}
}
pub struct BinsXbinDim0Aggregator<NTY> {
range: SeriesRange,
count: u64,
@@ -798,7 +726,7 @@ impl<NTY: ScalarOps> TimeBinner for BinsXbinDim0TimeBinner<NTY> {
fn set_range_complete(&mut self) {}
fn empty(&self) -> Box<dyn items_0::TimeBinned> {
fn empty(&self) -> Box<dyn TimeBinned> {
let ret = <BinsXbinDim0Aggregator<NTY> as TimeBinnableTypeAggregator>::Output::empty();
Box::new(ret)
}
@@ -861,12 +789,3 @@ impl<NTY: ScalarOps> TimeBinned for BinsXbinDim0<NTY> {
self
}
}
impl<NTY> items_0::collect_c::Collectable for BinsXbinDim0<NTY>
where
NTY: ScalarOps,
{
fn new_collector(&self) -> Box<dyn items_0::collect_c::Collector> {
Box::new(BinsXbinDim0Collector::<NTY>::new())
}
}

View File

@@ -1,12 +1,16 @@
use crate::framable::FrameType;
use crate::merger;
use crate::merger::Mergeable;
use crate::Events;
use items_0::collect_s::Collectable;
use items_0::collect_s::Collected;
use items_0::collect_s::Collector;
use items_0::framable::FrameTypeInnerStatic;
use items_0::streamitem::ITEMS_2_CHANNEL_EVENTS_FRAME_TYPE_ID;
use items_0::AsAnyMut;
use items_0::AsAnyRef;
use items_0::MergeError;
use items_0::WithLen;
use netpod::log::*;
use netpod::BinnedRange;
use netpod::BinnedRangeEnum;
@@ -378,6 +382,7 @@ mod test_channel_events_serde {
use bincode::config::WithOtherTrailing;
use bincode::DefaultOptions;
use items_0::bincode;
use items_0::Appendable;
use items_0::Empty;
use serde::Deserialize;
use serde::Serialize;
@@ -473,17 +478,19 @@ impl PartialEq for ChannelEvents {
}
}
impl crate::merger::Mergeable for ChannelEvents {
impl WithLen for ChannelEvents {
fn len(&self) -> usize {
match self {
ChannelEvents::Events(k) => k.len(),
ChannelEvents::Events(k) => k.as_ref().len(),
ChannelEvents::Status(k) => match k {
Some(_) => 1,
None => 0,
},
}
}
}
impl Mergeable for ChannelEvents {
fn ts_min(&self) -> Option<u64> {
match self {
ChannelEvents::Events(k) => k.ts_min(),
@@ -511,18 +518,18 @@ impl crate::merger::Mergeable for ChannelEvents {
}
}
fn drain_into(&mut self, dst: &mut Self, range: (usize, usize)) -> Result<(), merger::MergeError> {
fn drain_into(&mut self, dst: &mut Self, range: (usize, usize)) -> Result<(), MergeError> {
match self {
ChannelEvents::Events(k) => match dst {
ChannelEvents::Events(j) => k.drain_into(j, range),
ChannelEvents::Status(_) => Err(merger::MergeError::NotCompatible),
ChannelEvents::Status(_) => Err(MergeError::NotCompatible),
},
ChannelEvents::Status(k) => match dst {
ChannelEvents::Events(_) => Err(merger::MergeError::NotCompatible),
ChannelEvents::Events(_) => Err(MergeError::NotCompatible),
ChannelEvents::Status(j) => match j {
Some(_) => {
trace!("drain_into merger::MergeError::Full");
Err(merger::MergeError::Full)
Err(MergeError::Full)
}
None => {
if range.0 > 0 {
@@ -596,10 +603,7 @@ impl crate::merger::Mergeable for ChannelEvents {
impl Collectable for ChannelEvents {
fn new_collector(&self) -> Box<dyn Collector> {
match self {
ChannelEvents::Events(_item) => todo!(),
ChannelEvents::Status(_) => todo!(),
}
Box::new(ChannelEventsCollector::new())
}
}
@@ -727,11 +731,11 @@ impl items_0::collect_s::ToJsonResult for ChannelEventsCollectorOutput {
}
}
impl items_0::collect_c::Collected for ChannelEventsCollectorOutput {}
impl Collected for ChannelEventsCollectorOutput {}
#[derive(Debug)]
pub struct ChannelEventsCollector {
coll: Option<Box<dyn items_0::collect_c::CollectorDyn>>,
coll: Option<Box<dyn Collector>>,
range_complete: bool,
timed_out: bool,
}
@@ -746,15 +750,14 @@ impl ChannelEventsCollector {
}
}
impl items_0::collect_c::Collector for ChannelEventsCollector {
impl WithLen for ChannelEventsCollector {
fn len(&self) -> usize {
match &self.coll {
Some(coll) => coll.len(),
None => 0,
}
self.coll.as_ref().map_or(0, |x| x.len())
}
}
fn ingest(&mut self, item: &mut dyn items_0::collect_c::Collectable) {
impl Collector for ChannelEventsCollector {
fn ingest(&mut self, item: &mut dyn Collectable) {
if let Some(item) = item.as_any_mut().downcast_mut::<ChannelEvents>() {
match item {
ChannelEvents::Events(item) => {
@@ -786,7 +789,7 @@ impl items_0::collect_c::Collector for ChannelEventsCollector {
&mut self,
range: Option<SeriesRange>,
binrange: Option<BinnedRangeEnum>,
) -> Result<Box<dyn items_0::collect_c::Collected>, err::Error> {
) -> Result<Box<dyn Collected>, err::Error> {
match self.coll.as_mut() {
Some(coll) => {
if self.range_complete {
@@ -805,17 +808,3 @@ impl items_0::collect_c::Collector for ChannelEventsCollector {
}
}
}
impl items_0::WithLen for ChannelEvents {
fn len(&self) -> usize {
match self {
ChannelEvents::Events(k) => k.len(),
ChannelEvents::Status(_) => 1,
}
}
}
impl items_0::collect_c::Collectable for ChannelEvents {
fn new_collector(&self) -> Box<dyn items_0::collect_c::Collector> {
Box::new(ChannelEventsCollector::new())
}
}

View File

@@ -1,11 +1,12 @@
use crate::framable::FrameType;
use crate::merger::Mergeable;
use bytes::BytesMut;
use items::ByteEstimate;
use items::Clearable;
use items::PushableIndex;
use items::WithTimestamps;
use items_0::framable::FrameTypeInnerStatic;
use items_0::streamitem::EVENT_FULL_FRAME_TYPE_ID;
use items_0::Empty;
use items_0::MergeError;
use items_0::WithLen;
use netpod::ScalarType;
use netpod::Shape;
@@ -69,19 +70,6 @@ mod decomps_serde {
}
impl EventFull {
pub fn empty() -> Self {
Self {
tss: VecDeque::new(),
pulses: VecDeque::new(),
blobs: VecDeque::new(),
decomps: VecDeque::new(),
scalar_types: VecDeque::new(),
be: VecDeque::new(),
shapes: VecDeque::new(),
comps: VecDeque::new(),
}
}
pub fn add_event(
&mut self,
ts: u64,
@@ -132,54 +120,27 @@ impl FrameType for EventFull {
}
}
impl Empty for EventFull {
fn empty() -> Self {
Self {
tss: VecDeque::new(),
pulses: VecDeque::new(),
blobs: VecDeque::new(),
decomps: VecDeque::new(),
scalar_types: VecDeque::new(),
be: VecDeque::new(),
shapes: VecDeque::new(),
comps: VecDeque::new(),
}
}
}
impl WithLen for EventFull {
fn len(&self) -> usize {
self.tss.len()
}
}
impl items::WithLen for EventFull {
fn len(&self) -> usize {
self.tss.len()
}
}
impl items::Appendable for EventFull {
fn empty_like_self(&self) -> Self {
Self::empty()
}
// TODO expensive, get rid of it.
fn append(&mut self, src: &Self) {
self.tss.extend(&src.tss);
self.pulses.extend(&src.pulses);
self.blobs.extend(src.blobs.iter().map(Clone::clone));
self.decomps.extend(src.decomps.iter().map(Clone::clone));
self.scalar_types.extend(src.scalar_types.iter().map(Clone::clone));
self.be.extend(&src.be);
self.shapes.extend(src.shapes.iter().map(Clone::clone));
self.comps.extend(src.comps.iter().map(Clone::clone));
}
fn append_zero(&mut self, _ts1: u64, _ts2: u64) {
// TODO do we still need this type?
todo!()
}
}
impl Clearable for EventFull {
fn clear(&mut self) {
self.tss.clear();
self.pulses.clear();
self.blobs.clear();
self.decomps.clear();
self.scalar_types.clear();
self.be.clear();
self.shapes.clear();
self.comps.clear();
}
}
impl WithTimestamps for EventFull {
fn ts(&self, ix: usize) -> u64 {
self.tss[ix]
@@ -199,16 +160,57 @@ impl ByteEstimate for EventFull {
}
}
impl PushableIndex for EventFull {
// TODO check all use cases, can't we move?
fn push_index(&mut self, src: &Self, ix: usize) {
self.tss.push_back(src.tss[ix]);
self.pulses.push_back(src.pulses[ix]);
self.blobs.push_back(src.blobs[ix].clone());
self.decomps.push_back(src.decomps[ix].clone());
self.scalar_types.push_back(src.scalar_types[ix].clone());
self.be.push_back(src.be[ix]);
self.shapes.push_back(src.shapes[ix].clone());
self.comps.push_back(src.comps[ix].clone());
impl Mergeable for EventFull {
fn ts_min(&self) -> Option<u64> {
self.tss.front().map(|&x| x)
}
fn ts_max(&self) -> Option<u64> {
self.tss.back().map(|&x| x)
}
fn new_empty(&self) -> Self {
Empty::empty()
}
fn drain_into(&mut self, dst: &mut Self, range: (usize, usize)) -> Result<(), MergeError> {
// TODO make it harder to forget new members when the struct may get modified in the future
let r = range.0..range.1;
dst.tss.extend(self.tss.drain(r.clone()));
dst.pulses.extend(self.pulses.drain(r.clone()));
dst.blobs.extend(self.blobs.drain(r.clone()));
dst.decomps.extend(self.decomps.drain(r.clone()));
dst.scalar_types.extend(self.scalar_types.drain(r.clone()));
dst.be.extend(self.be.drain(r.clone()));
dst.shapes.extend(self.shapes.drain(r.clone()));
dst.comps.extend(self.comps.drain(r.clone()));
Ok(())
}
fn find_lowest_index_gt(&self, ts: u64) -> Option<usize> {
for (i, &m) in self.tss.iter().enumerate() {
if m > ts {
return Some(i);
}
}
None
}
fn find_lowest_index_ge(&self, ts: u64) -> Option<usize> {
for (i, &m) in self.tss.iter().enumerate() {
if m >= ts {
return Some(i);
}
}
None
}
fn find_highest_index_lt(&self, ts: u64) -> Option<usize> {
for (i, &m) in self.tss.iter().enumerate().rev() {
if m < ts {
return Some(i);
}
}
None
}
}

View File

@@ -4,6 +4,12 @@ use crate::RangeOverlapInfo;
use crate::TimeBinnableType;
use crate::TimeBinnableTypeAggregator;
use err::Error;
use items_0::collect_s::Collectable;
use items_0::collect_s::Collected;
use items_0::collect_s::Collector;
use items_0::collect_s::CollectorType;
use items_0::collect_s::ToJsonBytes;
use items_0::collect_s::ToJsonResult;
use items_0::scalar_ops::ScalarOps;
use items_0::Appendable;
use items_0::AsAnyMut;
@@ -11,13 +17,14 @@ use items_0::AsAnyRef;
use items_0::Empty;
use items_0::Events;
use items_0::EventsNonObj;
use items_0::MergeError;
use items_0::TimeBinnable;
use items_0::TimeBinner;
use items_0::WithLen;
use netpod::log::*;
use netpod::timeunits::MS;
use netpod::timeunits::SEC;
use netpod::BinnedRangeEnum;
use netpod::Dim0Kind;
use netpod::NanoRange;
use netpod::SeriesRange;
use serde::Deserialize;
@@ -42,14 +49,6 @@ pub struct EventsDim0<NTY> {
}
impl<NTY> EventsDim0<NTY> {
#[inline(always)]
pub fn push(&mut self, ts: u64, pulse: u64, value: NTY) {
self.tss.push_back(ts);
self.pulses.push_back(pulse);
self.values.push_back(value);
}
#[inline(always)]
pub fn push_front(&mut self, ts: u64, pulse: u64, value: NTY) {
self.tss.push_front(ts);
self.pulses.push_front(pulse);
@@ -327,16 +326,16 @@ where
}
}
impl<NTY: ScalarOps> items_0::collect_s::ToJsonResult for EventsDim0CollectorOutput<NTY> {
fn to_json_result(&self) -> Result<Box<dyn items_0::collect_s::ToJsonBytes>, Error> {
impl<NTY: ScalarOps> ToJsonResult for EventsDim0CollectorOutput<NTY> {
fn to_json_result(&self) -> Result<Box<dyn ToJsonBytes>, Error> {
let k = serde_json::to_value(self)?;
Ok(Box::new(k))
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collected for EventsDim0CollectorOutput<NTY> {}
impl<NTY: ScalarOps> Collected for EventsDim0CollectorOutput<NTY> {}
impl<NTY: ScalarOps> items_0::collect_s::CollectorType for EventsDim0Collector<NTY> {
impl<NTY: ScalarOps> CollectorType for EventsDim0Collector<NTY> {
type Input = EventsDim0<NTY>;
type Output = EventsDim0CollectorOutput<NTY>;
@@ -376,11 +375,11 @@ impl<NTY: ScalarOps> items_0::collect_s::CollectorType for EventsDim0Collector<N
};
let continue_at = if self.timed_out {
if let Some(ts) = vals.tss.back() {
Some(IsoDateTime::from_u64(*ts + netpod::timeunits::MS))
Some(IsoDateTime::from_u64(*ts + MS))
} else {
if let Some(range) = &range {
match range {
SeriesRange::TimeRange(x) => Some(IsoDateTime::from_u64(x.beg + netpod::timeunits::SEC)),
SeriesRange::TimeRange(x) => Some(IsoDateTime::from_u64(x.beg + SEC)),
SeriesRange::PulseRange(x) => {
error!("TODO emit create continueAt for pulse range");
None
@@ -434,39 +433,6 @@ impl<NTY: ScalarOps> items_0::collect_s::CollectableType for EventsDim0<NTY> {
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collector for EventsDim0Collector<NTY> {
fn len(&self) -> usize {
self.vals.as_ref().map_or(0, |x| x.len())
}
fn ingest(&mut self, item: &mut dyn items_0::collect_c::Collectable) {
if let Some(item) = item.as_any_mut().downcast_mut::<EventsDim0<NTY>>() {
items_0::collect_s::CollectorType::ingest(self, item)
} else {
error!("EventsDim0Collector::ingest unexpected item {:?}", item);
}
}
fn set_range_complete(&mut self) {
items_0::collect_s::CollectorType::set_range_complete(self)
}
fn set_timed_out(&mut self) {
items_0::collect_s::CollectorType::set_timed_out(self)
}
fn result(
&mut self,
range: Option<SeriesRange>,
binrange: Option<BinnedRangeEnum>,
) -> Result<Box<dyn items_0::collect_c::Collected>, err::Error> {
match items_0::collect_s::CollectorType::result(self, range, binrange) {
Ok(x) => Ok(Box::new(x)),
Err(e) => Err(e.into()),
}
}
}
pub struct EventsDim0Aggregator<NTY> {
range: SeriesRange,
count: u64,
@@ -831,15 +797,15 @@ impl<STY: ScalarOps> Events for EventsDim0<STY> {
}
}
fn as_collectable_mut(&mut self) -> &mut dyn items_0::collect_s::Collectable {
fn as_collectable_mut(&mut self) -> &mut dyn Collectable {
self
}
fn as_collectable_with_default_ref(&self) -> &dyn items_0::collect_c::CollectableWithDefault {
fn as_collectable_with_default_ref(&self) -> &dyn Collectable {
self
}
fn as_collectable_with_default_mut(&mut self) -> &mut dyn items_0::collect_c::CollectableWithDefault {
fn as_collectable_with_default_mut(&mut self) -> &mut dyn Collectable {
self
}
@@ -857,7 +823,7 @@ impl<STY: ScalarOps> Events for EventsDim0<STY> {
Box::new(Self::empty())
}
fn drain_into(&mut self, dst: &mut Box<dyn Events>, range: (usize, usize)) -> Result<(), items_0::MergeError> {
fn drain_into(&mut self, dst: &mut Box<dyn Events>, range: (usize, usize)) -> Result<(), MergeError> {
// TODO as_any and as_any_mut are declared on unrelated traits. Simplify.
if let Some(dst) = dst.as_mut().as_any_mut().downcast_mut::<Self>() {
// TODO make it harder to forget new members when the struct may get modified in the future
@@ -868,7 +834,7 @@ impl<STY: ScalarOps> Events for EventsDim0<STY> {
Ok(())
} else {
error!("downcast to EventsDim0 FAILED");
Err(items_0::MergeError::NotCompatible)
Err(MergeError::NotCompatible)
}
}
@@ -1132,95 +1098,14 @@ impl<NTY: ScalarOps> TimeBinner for EventsDim0TimeBinner<NTY> {
}
}
// TODO remove this struct?
#[derive(Debug)]
pub struct EventsDim0CollectorDyn {}
impl EventsDim0CollectorDyn {
pub fn new() -> Self {
Self {}
}
}
impl items_0::collect_c::CollectorDyn for EventsDim0CollectorDyn {
fn len(&self) -> usize {
todo!()
}
fn ingest(&mut self, _item: &mut dyn items_0::collect_c::CollectableWithDefault) {
todo!()
}
fn set_range_complete(&mut self) {
todo!()
}
fn set_timed_out(&mut self) {
todo!()
}
fn result(
&mut self,
_range: Option<SeriesRange>,
_binrange: Option<BinnedRangeEnum>,
) -> Result<Box<dyn items_0::collect_c::Collected>, err::Error> {
todo!()
}
}
impl<NTY: ScalarOps> items_0::collect_c::CollectorDyn for EventsDim0Collector<NTY> {
fn len(&self) -> usize {
WithLen::len(self)
}
fn ingest(&mut self, item: &mut dyn items_0::collect_c::CollectableWithDefault) {
let x = item.as_any_mut();
if let Some(item) = x.downcast_mut::<EventsDim0<NTY>>() {
items_0::collect_s::CollectorType::ingest(self, item)
} else {
// TODO need possibility to return error
()
}
}
fn set_range_complete(&mut self) {
items_0::collect_s::CollectorType::set_range_complete(self);
}
fn set_timed_out(&mut self) {
items_0::collect_s::CollectorType::set_timed_out(self);
}
fn result(
&mut self,
range: Option<SeriesRange>,
binrange: Option<BinnedRangeEnum>,
) -> Result<Box<dyn items_0::collect_c::Collected>, err::Error> {
items_0::collect_s::CollectorType::result(self, range, binrange)
.map(|x| Box::new(x) as _)
.map_err(|e| e.into())
}
}
impl<NTY: ScalarOps> items_0::collect_c::CollectableWithDefault for EventsDim0<NTY> {
fn new_collector(&self) -> Box<dyn items_0::collect_c::CollectorDyn> {
let coll = EventsDim0Collector::<NTY>::new();
Box::new(coll)
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collectable for EventsDim0<NTY> {
fn new_collector(&self) -> Box<dyn items_0::collect_c::Collector> {
Box::new(EventsDim0Collector::<NTY>::new())
}
}
impl<STY> Appendable<STY> for EventsDim0<STY>
where
STY: ScalarOps,
{
fn push(&mut self, ts: u64, pulse: u64, value: STY) {
Self::push(self, ts, pulse, value)
self.tss.push_back(ts);
self.pulses.push_back(pulse);
self.values.push_back(value);
}
}

View File

@@ -4,6 +4,12 @@ use crate::RangeOverlapInfo;
use crate::TimeBinnableType;
use crate::TimeBinnableTypeAggregator;
use err::Error;
use items_0::collect_s::Collectable;
use items_0::collect_s::CollectableType;
use items_0::collect_s::Collected;
use items_0::collect_s::CollectorType;
use items_0::collect_s::ToJsonBytes;
use items_0::collect_s::ToJsonResult;
use items_0::scalar_ops::ScalarOps;
use items_0::Appendable;
use items_0::AsAnyMut;
@@ -11,20 +17,20 @@ use items_0::AsAnyRef;
use items_0::Empty;
use items_0::Events;
use items_0::EventsNonObj;
use items_0::MergeError;
use items_0::TimeBinnable;
use items_0::TimeBinner;
use items_0::WithLen;
use netpod::log::*;
use netpod::timeunits::SEC;
use netpod::BinnedRange;
use netpod::BinnedRangeEnum;
use netpod::SeriesRange;
use serde::Deserialize;
use serde::Serialize;
use std::any;
use std::any::Any;
use std::collections::VecDeque;
use std::fmt;
use std::mem;
#[allow(unused)]
macro_rules! trace2 {
@@ -257,16 +263,16 @@ where
}
}
impl<NTY: ScalarOps> items_0::collect_s::ToJsonResult for EventsDim1CollectorOutput<NTY> {
fn to_json_result(&self) -> Result<Box<dyn items_0::collect_s::ToJsonBytes>, Error> {
impl<NTY: ScalarOps> ToJsonResult for EventsDim1CollectorOutput<NTY> {
fn to_json_result(&self) -> Result<Box<dyn ToJsonBytes>, Error> {
let k = serde_json::to_value(self)?;
Ok(Box::new(k))
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collected for EventsDim1CollectorOutput<NTY> {}
impl<NTY: ScalarOps> Collected for EventsDim1CollectorOutput<NTY> {}
impl<NTY: ScalarOps> items_0::collect_s::CollectorType for EventsDim1Collector<NTY> {
impl<NTY: ScalarOps> CollectorType for EventsDim1Collector<NTY> {
type Input = EventsDim1<NTY>;
type Output = EventsDim1CollectorOutput<NTY>;
@@ -328,7 +334,7 @@ impl<NTY: ScalarOps> items_0::collect_s::CollectorType for EventsDim1Collector<N
}
}
impl<NTY: ScalarOps> items_0::collect_s::CollectableType for EventsDim1<NTY> {
impl<NTY: ScalarOps> CollectableType for EventsDim1<NTY> {
type Collector = EventsDim1Collector<NTY>;
fn new_collector() -> Self::Collector {
@@ -336,39 +342,6 @@ impl<NTY: ScalarOps> items_0::collect_s::CollectableType for EventsDim1<NTY> {
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collector for EventsDim1Collector<NTY> {
fn len(&self) -> usize {
self.vals.len()
}
fn ingest(&mut self, item: &mut dyn items_0::collect_c::Collectable) {
if let Some(item) = item.as_any_mut().downcast_mut::<EventsDim1<NTY>>() {
items_0::collect_s::CollectorType::ingest(self, item)
} else {
error!("EventsDim0Collector::ingest unexpected item {:?}", item);
}
}
fn set_range_complete(&mut self) {
items_0::collect_s::CollectorType::set_range_complete(self)
}
fn set_timed_out(&mut self) {
items_0::collect_s::CollectorType::set_timed_out(self)
}
fn result(
&mut self,
range: Option<SeriesRange>,
binrange: Option<BinnedRangeEnum>,
) -> Result<Box<dyn items_0::collect_c::Collected>, err::Error> {
match items_0::collect_s::CollectorType::result(self, range, binrange) {
Ok(x) => Ok(Box::new(x)),
Err(e) => Err(e.into()),
}
}
}
pub struct EventsDim1Aggregator<NTY> {
range: SeriesRange,
count: u64,
@@ -626,11 +599,11 @@ impl<NTY: ScalarOps> TimeBinnableTypeAggregator for EventsDim1Aggregator<NTY> {
fn ingest(&mut self, item: &Self::Input) {
if true {
trace!("{} ingest {} events", std::any::type_name::<Self>(), item.len());
trace!("{} ingest {} events", any::type_name::<Self>(), item.len());
}
if false {
for (i, &ts) in item.tss.iter().enumerate() {
trace!("{} ingest {:6} {:20}", std::any::type_name::<Self>(), i, ts);
trace!("{} ingest {:6} {:20}", any::type_name::<Self>(), i, ts);
}
}
if self.do_time_weight {
@@ -657,7 +630,7 @@ impl<NTY: ScalarOps> TimeBinnable for EventsDim1<NTY> {
Box::new(ret)
}
fn to_box_to_json_result(&self) -> Box<dyn items_0::collect_s::ToJsonResult> {
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult> {
let k = serde_json::to_value(self).unwrap();
Box::new(k) as _
}
@@ -722,15 +695,15 @@ impl<STY: ScalarOps> Events for EventsDim1<STY> {
}
}
fn as_collectable_mut(&mut self) -> &mut dyn items_0::collect_s::Collectable {
fn as_collectable_mut(&mut self) -> &mut dyn Collectable {
self
}
fn as_collectable_with_default_ref(&self) -> &dyn items_0::collect_c::CollectableWithDefault {
fn as_collectable_with_default_ref(&self) -> &dyn Collectable {
self
}
fn as_collectable_with_default_mut(&mut self) -> &mut dyn items_0::collect_c::CollectableWithDefault {
fn as_collectable_with_default_mut(&mut self) -> &mut dyn Collectable {
self
}
@@ -748,7 +721,7 @@ impl<STY: ScalarOps> Events for EventsDim1<STY> {
Box::new(Self::empty())
}
fn drain_into(&mut self, dst: &mut Box<dyn Events>, range: (usize, usize)) -> Result<(), items_0::MergeError> {
fn drain_into(&mut self, dst: &mut Box<dyn Events>, range: (usize, usize)) -> Result<(), MergeError> {
// TODO as_any and as_any_mut are declared on unrelated traits. Simplify.
if let Some(dst) = dst.as_mut().as_any_mut().downcast_mut::<Self>() {
// TODO make it harder to forget new members when the struct may get modified in the future
@@ -759,7 +732,7 @@ impl<STY: ScalarOps> Events for EventsDim1<STY> {
Ok(())
} else {
error!("downcast to EventsDim0 FAILED");
Err(items_0::MergeError::NotCompatible)
Err(MergeError::NotCompatible)
}
}
@@ -1031,89 +1004,6 @@ impl<NTY: ScalarOps> TimeBinner for EventsDim1TimeBinner<NTY> {
}
}
// TODO remove this struct?
#[derive(Debug)]
pub struct EventsDim1CollectorDyn {}
impl EventsDim1CollectorDyn {
pub fn new() -> Self {
Self {}
}
}
impl items_0::collect_c::CollectorDyn for EventsDim1CollectorDyn {
fn len(&self) -> usize {
todo!()
}
fn ingest(&mut self, _item: &mut dyn items_0::collect_c::CollectableWithDefault) {
todo!()
}
fn set_range_complete(&mut self) {
todo!()
}
fn set_timed_out(&mut self) {
todo!()
}
fn result(
&mut self,
_range: Option<SeriesRange>,
_binrange: Option<BinnedRangeEnum>,
) -> Result<Box<dyn items_0::collect_c::Collected>, err::Error> {
todo!()
}
}
impl<NTY: ScalarOps> items_0::collect_c::CollectorDyn for EventsDim1Collector<NTY> {
fn len(&self) -> usize {
WithLen::len(self)
}
fn ingest(&mut self, item: &mut dyn items_0::collect_c::CollectableWithDefault) {
let x = item.as_any_mut();
if let Some(item) = x.downcast_mut::<EventsDim1<NTY>>() {
items_0::collect_s::CollectorType::ingest(self, item)
} else {
// TODO need possibility to return error
()
}
}
fn set_range_complete(&mut self) {
items_0::collect_s::CollectorType::set_range_complete(self);
}
fn set_timed_out(&mut self) {
items_0::collect_s::CollectorType::set_timed_out(self);
}
fn result(
&mut self,
range: Option<SeriesRange>,
binrange: Option<BinnedRangeEnum>,
) -> Result<Box<dyn items_0::collect_c::Collected>, err::Error> {
items_0::collect_s::CollectorType::result(self, range, binrange)
.map(|x| Box::new(x) as _)
.map_err(|e| e.into())
}
}
impl<NTY: ScalarOps> items_0::collect_c::CollectableWithDefault for EventsDim1<NTY> {
fn new_collector(&self) -> Box<dyn items_0::collect_c::CollectorDyn> {
let coll = EventsDim1Collector::<NTY>::new();
Box::new(coll)
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collectable for EventsDim1<NTY> {
fn new_collector(&self) -> Box<dyn items_0::collect_c::Collector> {
Box::new(EventsDim1Collector::<NTY>::new())
}
}
impl<STY> Appendable<Vec<STY>> for EventsDim1<STY>
where
STY: ScalarOps,

View File

@@ -4,13 +4,17 @@ use crate::RangeOverlapInfo;
use crate::TimeBinnableType;
use crate::TimeBinnableTypeAggregator;
use err::Error;
use items_0::collect_s::CollectableType;
use items_0::collect_s::Collected;
use items_0::collect_s::CollectorType;
use items_0::collect_s::ToJsonBytes;
use items_0::collect_s::ToJsonResult;
use items_0::scalar_ops::ScalarOps;
use items_0::AsAnyMut;
use items_0::AsAnyRef;
use items_0::Empty;
use items_0::WithLen;
use netpod::log::*;
use netpod::BinnedRange;
use netpod::BinnedRangeEnum;
use netpod::SeriesRange;
use serde::Deserialize;
@@ -387,17 +391,17 @@ where
}
}
impl<NTY> items_0::collect_s::ToJsonResult for EventsXbinDim0CollectorOutput<NTY>
impl<NTY> ToJsonResult for EventsXbinDim0CollectorOutput<NTY>
where
NTY: ScalarOps,
{
fn to_json_result(&self) -> Result<Box<dyn items_0::collect_s::ToJsonBytes>, Error> {
fn to_json_result(&self) -> Result<Box<dyn ToJsonBytes>, Error> {
let k = serde_json::to_value(self)?;
Ok(Box::new(k))
}
}
impl<NTY> items_0::collect_c::Collected for EventsXbinDim0CollectorOutput<NTY> where NTY: ScalarOps {}
impl<NTY> Collected for EventsXbinDim0CollectorOutput<NTY> where NTY: ScalarOps {}
#[derive(Debug)]
pub struct EventsXbinDim0Collector<NTY> {
@@ -422,7 +426,7 @@ impl<NTY> WithLen for EventsXbinDim0Collector<NTY> {
}
}
impl<NTY> items_0::collect_s::CollectorType for EventsXbinDim0Collector<NTY>
impl<NTY> CollectorType for EventsXbinDim0Collector<NTY>
where
NTY: ScalarOps,
{
@@ -490,7 +494,7 @@ where
}
}
impl<NTY> items_0::collect_s::CollectableType for EventsXbinDim0<NTY>
impl<NTY> CollectableType for EventsXbinDim0<NTY>
where
NTY: ScalarOps,
{
@@ -500,41 +504,3 @@ where
Self::Collector::new()
}
}
// NOTE(review): dynamic-collector adapter for `EventsXbinDim0Collector`.
// Every method below is an unimplemented stub (`todo!()`) and will PANIC if
// called — presumably the dynamic (`collect_c`) path is not yet wired up for
// x-binned events; confirm nothing routes through this impl at runtime.
impl<NTY> items_0::collect_c::Collector for EventsXbinDim0Collector<NTY>
where
NTY: ScalarOps,
{
// Stub: number of collected items — not implemented.
fn len(&self) -> usize {
todo!()
}
// Stub: ingest one batch of collectable items — not implemented.
fn ingest(&mut self, _item: &mut dyn items_0::collect_c::Collectable) {
todo!()
}
// Stub: mark that the requested range was fully covered — not implemented.
fn set_range_complete(&mut self) {
todo!()
}
// Stub: mark that collection stopped due to a timeout — not implemented.
fn set_timed_out(&mut self) {
todo!()
}
// Stub: produce the final collected result — not implemented.
fn result(
&mut self,
_range: Option<SeriesRange>,
_binrange: Option<BinnedRangeEnum>,
) -> Result<Box<dyn items_0::collect_c::Collected>, Error> {
todo!()
}
}
/// Dynamic-API (`collect_c`) collector factory for `EventsXbinDim0`.
impl<NTY> items_0::collect_c::Collectable for EventsXbinDim0<NTY>
where
    NTY: ScalarOps,
{
    /// Reuse the statically-typed (`collect_s`) factory and box its collector
    /// behind the dynamic trait object.
    fn new_collector(&self) -> Box<dyn items_0::collect_c::Collector> {
        let collector = <Self as items_0::collect_s::CollectableType>::new_collector();
        Box::new(collector)
    }
}

View File

@@ -22,19 +22,15 @@ use channelevents::ChannelEvents;
use chrono::DateTime;
use chrono::TimeZone;
use chrono::Utc;
use futures_util::FutureExt;
use futures_util::Stream;
use futures_util::StreamExt;
use items_0::streamitem::Sitemty;
use items_0::Empty;
use items_0::Events;
use items_0::MergeError;
use items_0::RangeOverlapInfo;
use netpod::log::*;
use merger::Mergeable;
use netpod::timeunits::*;
use netpod::NanoRange;
use netpod::ScalarType;
use netpod::SeriesRange;
use netpod::Shape;
use serde::Deserialize;
use serde::Serialize;
use serde::Serializer;
@@ -78,9 +74,6 @@ pub fn pulse_offs_from_abs(pulse: &[u64]) -> (u64, VecDeque<u64>) {
(pulse_anchor, pulse_off)
}
#[allow(unused)]
struct Ts(u64);
#[derive(Debug, PartialEq)]
pub enum ErrorKind {
General,
@@ -169,11 +162,7 @@ pub fn make_iso_ts(tss: &[u64]) -> Vec<IsoDateTime> {
.collect()
}
impl crate::merger::Mergeable for Box<dyn Events> {
fn len(&self) -> usize {
self.as_ref().len()
}
impl Mergeable for Box<dyn Events> {
fn ts_min(&self) -> Option<u64> {
self.as_ref().ts_min()
}
@@ -186,7 +175,7 @@ impl crate::merger::Mergeable for Box<dyn Events> {
self.as_ref().new_empty()
}
fn drain_into(&mut self, dst: &mut Self, range: (usize, usize)) -> Result<(), merger::MergeError> {
fn drain_into(&mut self, dst: &mut Self, range: (usize, usize)) -> Result<(), MergeError> {
self.as_mut().drain_into(dst, range)
}

View File

@@ -1,10 +1,13 @@
use crate::Error;
pub use crate::Error;
use futures_util::Stream;
use futures_util::StreamExt;
use items_0::streamitem::sitem_data;
use items_0::streamitem::RangeCompletableItem;
use items_0::streamitem::Sitemty;
use items_0::streamitem::StreamItem;
use items_0::MergeError;
use items_0::WithLen;
use netpod::log::*;
use std::collections::VecDeque;
use std::fmt;
@@ -31,11 +34,7 @@ macro_rules! trace4 {
($($arg:tt)*) => (trace!($($arg)*));
}
// TODO
pub use items_0::MergeError;
pub trait Mergeable<Rhs = Self>: fmt::Debug + Unpin {
fn len(&self) -> usize;
pub trait Mergeable<Rhs = Self>: fmt::Debug + WithLen + Unpin {
fn ts_min(&self) -> Option<u64>;
fn ts_max(&self) -> Option<u64>;
fn new_empty(&self) -> Self;

View File

@@ -1,3 +1,6 @@
#[cfg(test)]
pub mod eventsdim0;
use crate::binnedcollected::BinnedCollected;
use crate::binsdim0::BinsDim0CollectedResult;
use crate::channelevents::ConnStatus;
@@ -21,7 +24,9 @@ use items_0::streamitem::sitem_data;
use items_0::streamitem::RangeCompletableItem;
use items_0::streamitem::Sitemty;
use items_0::streamitem::StreamItem;
use items_0::Appendable;
use items_0::Empty;
use items_0::WithLen;
use netpod::log::*;
use netpod::timeunits::*;
use netpod::AggKind;

View File

@@ -0,0 +1,24 @@
use crate::eventsdim0::EventsDim0;
use items_0::Appendable;
use items_0::Empty;
use items_0::Events;
#[test]
fn collect_s_00() {
    // Exercise the statically-typed (collect_s) collection path: two pushed
    // events must be visible through the collector's length.
    let mut events = EventsDim0::empty();
    events.push(123, 4, 1.00f32);
    events.push(124, 5, 1.01);
    let mut collector = events.as_collectable_mut().new_collector();
    collector.ingest(&mut events);
    assert_eq!(collector.len(), 2);
}
#[test]
fn collect_c_00() {
    // Exercise the dynamic (collect_c) collection path: two pushed events
    // must be visible through the collector's length.
    let mut events = EventsDim0::empty();
    events.push(123, 4, 1.00f32);
    events.push(124, 5, 1.01);
    let mut collector = events.as_collectable_with_default_ref().new_collector();
    collector.ingest(&mut events);
    assert_eq!(collector.len(), 2);
}

View File

@@ -1,5 +1,6 @@
use crate::eventsdim0::EventsDim0;
use crate::Events;
use items_0::Appendable;
use items_0::Empty;
#[allow(unused)]