Remove more unused
@@ -1,6 +1,5 @@
|
||||
use crate::container::ByteEstimate;
|
||||
use crate::timebin::BinningggContainerBinsDyn;
|
||||
use crate::timebin::TimeBinned;
|
||||
use crate::AsAnyMut;
|
||||
use crate::AsAnyRef;
|
||||
use crate::Events;
|
||||
@@ -253,21 +252,3 @@ impl CollectableDyn for Box<dyn CollectableDyn> {
|
||||
CollectableDyn::new_collector(self.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl WithLen for Box<dyn TimeBinned> {
|
||||
fn len(&self) -> usize {
|
||||
WithLen::len(self.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl TypeName for Box<dyn TimeBinned> {
|
||||
fn type_name(&self) -> String {
|
||||
self.as_ref().type_name()
|
||||
}
|
||||
}
|
||||
|
||||
impl CollectableDyn for Box<dyn TimeBinned> {
|
||||
fn new_collector(&self) -> Box<dyn CollectorDyn> {
|
||||
self.as_ref().new_collector()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,7 +23,6 @@ use std::any::Any;
|
||||
use std::collections::VecDeque;
|
||||
use std::fmt;
|
||||
use timebin::BinningggContainerEventsDyn;
|
||||
use timebin::TimeBinnable;
|
||||
|
||||
pub trait WithLen {
|
||||
fn len(&self) -> usize;
|
||||
@@ -115,57 +114,12 @@ impl fmt::Display for MergeError {
|
||||
|
||||
impl std::error::Error for MergeError {}
|
||||
|
||||
pub trait IntoTimeBinnable: BoxedIntoTimeBinnable {
|
||||
fn into_time_binnable(self) -> Box<dyn TimeBinnable>;
|
||||
}
|
||||
|
||||
pub trait BoxedIntoTimeBinnable {
|
||||
fn boxed_into_time_binnable(self: Box<Self>) -> Box<dyn TimeBinnable>;
|
||||
}
|
||||
|
||||
impl<T> BoxedIntoTimeBinnable for T
|
||||
where
|
||||
T: IntoTimeBinnable,
|
||||
{
|
||||
fn boxed_into_time_binnable(self: Box<Self>) -> Box<dyn TimeBinnable> {
|
||||
<Self as IntoTimeBinnable>::into_time_binnable(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoTimeBinnable for Box<dyn IntoTimeBinnable + '_> {
|
||||
fn into_time_binnable(self) -> Box<dyn TimeBinnable> {
|
||||
<dyn IntoTimeBinnable as BoxedIntoTimeBinnable>::boxed_into_time_binnable(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoTimeBinnable for Box<dyn Events + '_> {
|
||||
fn into_time_binnable(self) -> Box<dyn TimeBinnable> {
|
||||
// <dyn IntoTimeBinnable as BoxedIntoTimeBinnable>::boxed_into_time_binnable(self)
|
||||
// Box::new(*self)
|
||||
// let a: Box<dyn Events> = err::todoval();
|
||||
// let b: Box<dyn TimeBinnable> = Box::new(*a);
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
// TODO can I remove the Any bound?
|
||||
|
||||
/// Container of some form of events, for use as a trait object.
|
||||
pub trait Events:
-    fmt::Debug
-    + TypeName
-    + Any
-    + CollectableDyn
-    + TimeBinnable
-    + WithLen
-    + ByteEstimate
-    + Send
-    + erased_serde::Serialize
-    + EventsNonObj
-    + IntoTimeBinnable
+    fmt::Debug + TypeName + Any + CollectableDyn + WithLen + ByteEstimate + Send + erased_serde::Serialize + EventsNonObj
{
|
||||
fn as_time_binnable_ref(&self) -> &dyn TimeBinnable;
|
||||
fn as_time_binnable_mut(&mut self) -> &mut dyn TimeBinnable;
|
||||
fn verify(&self) -> bool;
|
||||
fn output_info(&self) -> String;
|
||||
fn as_collectable_mut(&mut self) -> &mut dyn CollectableDyn;
|
||||
@@ -222,14 +176,6 @@ impl EventsNonObj for Box<dyn Events> {
|
||||
}
|
||||
|
||||
impl Events for Box<dyn Events> {
|
||||
fn as_time_binnable_ref(&self) -> &dyn TimeBinnable {
|
||||
Events::as_time_binnable_ref(self.as_ref())
|
||||
}
|
||||
|
||||
fn as_time_binnable_mut(&mut self) -> &mut dyn TimeBinnable {
|
||||
Events::as_time_binnable_mut(self.as_mut())
|
||||
}
|
||||
|
||||
fn verify(&self) -> bool {
|
||||
Events::verify(self.as_ref())
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use crate::timebin::TimeBinned;
|
||||
use err::Error;
|
||||
use netpod::log::Level;
|
||||
use netpod::DiskStats;
|
||||
@@ -202,5 +201,3 @@ mod levelserde {
|
||||
de.deserialize_u32(VisitLevel)
|
||||
}
|
||||
}
|
||||
|
||||
erased_serde::serialize_trait_object!(TimeBinned);
|
||||
|
||||
@@ -135,437 +135,3 @@ pub trait BinnedEventsTimeweightTrait: fmt::Debug + Send {
|
||||
fn input_done_range_open(&mut self) -> Result<(), BinningggError>;
|
||||
fn output(&mut self) -> Result<Option<BinsBoxed>, BinningggError>;
|
||||
}
|
||||
|
||||
/// Data in time-binned form.
|
||||
pub trait TimeBinned: Any + TypeName + TimeBinnable + Resettable + CollectableDyn + erased_serde::Serialize {
|
||||
fn clone_box_time_binned(&self) -> Box<dyn TimeBinned>;
|
||||
fn as_time_binnable_ref(&self) -> &dyn TimeBinnable;
|
||||
fn as_time_binnable_mut(&mut self) -> &mut dyn TimeBinnable;
|
||||
fn as_collectable_mut(&mut self) -> &mut dyn CollectableDyn;
|
||||
fn edges_slice(&self) -> (&[u64], &[u64]);
|
||||
fn counts(&self) -> &[u64];
|
||||
fn mins(&self) -> Vec<f32>;
|
||||
fn maxs(&self) -> Vec<f32>;
|
||||
fn avgs(&self) -> Vec<f32>;
|
||||
fn validate(&self) -> Result<(), String>;
|
||||
fn empty_like_self_box_time_binned(&self) -> Box<dyn TimeBinned>;
|
||||
fn to_simple_bins_f32(&mut self) -> Box<dyn TimeBinned>;
|
||||
fn drain_into_tb(&mut self, dst: &mut dyn TimeBinned, range: Range<usize>) -> Result<(), Error>;
|
||||
}
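// Illustration (not part of this commit): the object-safe `TimeBinned` API above is
// enough to split off a prefix of bins into a fresh, empty container of the same
// concrete type. `split_front_bins` is a hypothetical helper; `Error` is the `err::Error`
// already used in the `drain_into_tb` signature.
fn split_front_bins(src: &mut dyn TimeBinned, n: usize) -> Result<Box<dyn TimeBinned>, Error> {
    // Same concrete type as `src`, but with no bins yet.
    let mut dst = src.empty_like_self_box_time_binned();
    // Move bins [0, n) out of `src` and into `dst`.
    src.drain_into_tb(dst.as_mut(), 0..n)?;
    Ok(dst)
}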
|
||||
|
||||
impl Clone for Box<dyn TimeBinned> {
|
||||
fn clone(&self) -> Self {
|
||||
self.clone_box_time_binned()
|
||||
}
|
||||
}
|
||||
|
||||
impl TimeBinnable for Box<dyn TimeBinned> {
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
) -> Box<dyn TimeBinner> {
|
||||
self.as_ref().time_binner_new(binrange, do_time_weight, emit_empty_bins)
|
||||
}
|
||||
|
||||
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult> {
|
||||
self.as_ref().to_box_to_json_result()
|
||||
}
|
||||
|
||||
fn to_container_bins(&self) -> Box<dyn BinningggContainerBinsDyn> {
|
||||
self.as_ref().to_container_bins()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait TimeBinner: fmt::Debug + Send {
|
||||
fn ingest(&mut self, item: &mut dyn TimeBinnable);
|
||||
|
||||
fn bins_ready_count(&self) -> usize;
|
||||
|
||||
fn bins_ready(&mut self) -> Option<Box<dyn TimeBinned>>;
|
||||
|
||||
/// If there is a bin in progress with non-zero count, push it to the result set.
|
||||
/// With push_empty == true, a bin in progress is pushed even if it contains no counts.
|
||||
fn push_in_progress(&mut self, push_empty: bool);
|
||||
|
||||
/// Implies `Self::push_in_progress`, but additionally pushes a zero-count bin if the call
/// to `push_in_progress` did not change the result count, as long as bin edges remain.
/// The next call to `Self::bins_ready_count` must then return a count one higher than before.
|
||||
fn cycle(&mut self);
|
||||
|
||||
fn set_range_complete(&mut self);
|
||||
|
||||
fn empty(&self) -> Box<dyn TimeBinned>;
|
||||
|
||||
fn append_empty_until_end(&mut self);
|
||||
}
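// Sketch (assumption, not part of the diff) of how a `TimeBinner` is intended to be
// driven, using only the trait methods declared above: ingest items, drain finished
// bins, and finally `cycle` so that at least one (possibly zero-count) bin is emitted.
fn drain_binner_sketch(
    binner: &mut dyn TimeBinner,
    items: &mut Vec<Box<dyn TimeBinnable>>,
) -> Vec<Box<dyn TimeBinned>> {
    let mut out = Vec::new();
    for item in items.iter_mut() {
        binner.ingest(item.as_mut());
        if let Some(bins) = binner.bins_ready() {
            out.push(bins);
        }
    }
    // Push the in-progress bin; emit a zero-count bin if nothing was in progress.
    binner.cycle();
    if let Some(bins) = binner.bins_ready() {
        out.push(bins);
    }
    out
}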
|
||||
|
||||
// TODO remove the Any bound. Factor out into custom AsAny trait.
|
||||
|
||||
/// Provides a time-binned representation of the implementing type.
|
||||
/// In contrast to `TimeBinnableType`, this is meant for trait objects.
|
||||
pub trait TimeBinnable: fmt::Debug + WithLen + CollectableDyn + Any + AsAnyRef + AsAnyMut + Send {
|
||||
// TODO implementors may fail if edges contain fewer than 2 entries.
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
) -> Box<dyn TimeBinner>;
|
||||
// TODO just a helper for the empty result.
|
||||
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult>;
|
||||
// TODO temporary converter
|
||||
fn to_container_bins(&self) -> Box<dyn BinningggContainerBinsDyn>;
|
||||
}
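// Minimal sketch, assuming only the trait declared above: any `TimeBinnable` value can
// hand out a matching `TimeBinner` for a given bin range. `binner_for` is a hypothetical
// convenience wrapper, not an API of this crate.
fn binner_for(item: &dyn TimeBinnable, binrange: BinnedRangeEnum) -> Box<dyn TimeBinner> {
    let do_time_weight = true;
    let emit_empty_bins = false;
    item.time_binner_new(binrange, do_time_weight, emit_empty_bins)
}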
|
||||
|
||||
impl WithLen for Box<dyn TimeBinnable> {
|
||||
fn len(&self) -> usize {
|
||||
WithLen::len(self.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl TimeBinnable for Box<dyn TimeBinnable> {
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
) -> Box<dyn TimeBinner> {
|
||||
self.as_ref().time_binner_new(binrange, do_time_weight, emit_empty_bins)
|
||||
}
|
||||
|
||||
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn to_container_bins(&self) -> Box<dyn BinningggContainerBinsDyn> {
|
||||
self.as_ref().to_container_bins()
|
||||
}
|
||||
}
|
||||
|
||||
impl TimeBinnable for Box<dyn Events> {
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
) -> Box<dyn TimeBinner> {
|
||||
TimeBinnable::time_binner_new(self.as_ref(), binrange, do_time_weight, emit_empty_bins)
|
||||
}
|
||||
|
||||
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult> {
|
||||
TimeBinnable::to_box_to_json_result(self.as_ref())
|
||||
}
|
||||
|
||||
fn to_container_bins(&self) -> Box<dyn BinningggContainerBinsDyn> {
|
||||
panic!("logic error this converter must not get used on events")
|
||||
}
|
||||
}
|
||||
|
||||
impl TypeName for Box<dyn TimeBinnable> {
|
||||
fn type_name(&self) -> String {
|
||||
format!("Box<dyn TimeBinnable> TODO TypeName for Box<dyn TimeBinnable>")
|
||||
}
|
||||
}
|
||||
|
||||
impl CollectableDyn for Box<dyn TimeBinnable> {
|
||||
fn new_collector(&self) -> Box<dyn CollectorDyn> {
|
||||
self.as_ref().new_collector()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TimeBinnerDynStruct {
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
binner: Option<Box<dyn TimeBinner>>,
|
||||
emit_empty_bins: bool,
|
||||
}
|
||||
|
||||
impl TimeBinnerDynStruct {
|
||||
pub fn type_name() -> &'static str {
|
||||
std::any::type_name::<Self>()
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
binner: Box<dyn TimeBinner>,
|
||||
) -> Self {
|
||||
Self {
|
||||
binrange,
|
||||
do_time_weight,
|
||||
binner: Some(binner),
|
||||
emit_empty_bins,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TimeBinnerTy for TimeBinnerDynStruct {
|
||||
type Input = Box<dyn TimeBinnable>;
|
||||
type Output = Box<dyn TimeBinned>;
|
||||
|
||||
fn ingest(&mut self, item: &mut Self::Input) {
|
||||
trace!("{} INGEST {:?}", Self::type_name(), item);
|
||||
if self.binner.is_none() {
|
||||
self.binner = Some(Box::new(TimeBinnableTy::time_binner_new(
|
||||
item,
|
||||
self.binrange.clone(),
|
||||
self.do_time_weight,
|
||||
self.emit_empty_bins,
|
||||
)));
|
||||
}
|
||||
self.binner.as_mut().unwrap().as_mut().ingest(item.as_mut())
|
||||
}
|
||||
|
||||
fn set_range_complete(&mut self) {
|
||||
if let Some(k) = self.binner.as_mut() {
|
||||
k.set_range_complete()
|
||||
}
|
||||
}
|
||||
|
||||
fn bins_ready_count(&self) -> usize {
|
||||
if let Some(k) = self.binner.as_ref() {
|
||||
k.bins_ready_count()
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
fn bins_ready(&mut self) -> Option<Self::Output> {
|
||||
if let Some(k) = self.binner.as_mut() {
|
||||
k.bins_ready()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn push_in_progress(&mut self, push_empty: bool) {
|
||||
if let Some(k) = self.binner.as_mut() {
|
||||
k.push_in_progress(push_empty)
|
||||
}
|
||||
}
|
||||
|
||||
fn cycle(&mut self) {
|
||||
if let Some(k) = self.binner.as_mut() {
|
||||
k.cycle()
|
||||
}
|
||||
}
|
||||
|
||||
fn empty(&self) -> Option<Self::Output> {
|
||||
if let Some(k) = self.binner.as_ref() {
|
||||
Some(k.empty())
|
||||
} else {
|
||||
warn!("TimeBinnerDynStruct::empty called with binner None");
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn append_empty_until_end(&mut self) {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
impl TimeBinner for TimeBinnerDynStruct {
|
||||
fn ingest(&mut self, _item: &mut dyn TimeBinnable) {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn bins_ready_count(&self) -> usize {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn bins_ready(&mut self) -> Option<Box<dyn TimeBinned>> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn push_in_progress(&mut self, _push_empty: bool) {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn cycle(&mut self) {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn set_range_complete(&mut self) {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn empty(&self) -> Box<dyn TimeBinned> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn append_empty_until_end(&mut self) {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TimeBinnerDynStruct2 {
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
binner: Option<Box<dyn TimeBinner>>,
|
||||
}
|
||||
|
||||
impl TimeBinnerDynStruct2 {
|
||||
pub fn type_name() -> &'static str {
|
||||
std::any::type_name::<Self>()
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
binner: Box<dyn TimeBinner>,
|
||||
) -> Self {
|
||||
Self {
|
||||
binrange,
|
||||
do_time_weight,
|
||||
emit_empty_bins,
|
||||
binner: Some(binner),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TimeBinnerTy for TimeBinnerDynStruct2 {
|
||||
type Input = Box<dyn TimeBinned>;
|
||||
type Output = Box<dyn TimeBinned>;
|
||||
|
||||
fn ingest(&mut self, item: &mut Self::Input) {
|
||||
trace!("{} INGEST {:?}", Self::type_name(), item);
|
||||
if self.binner.is_none() {
|
||||
self.binner = Some(Box::new(TimeBinnableTy::time_binner_new(
|
||||
item,
|
||||
self.binrange.clone(),
|
||||
self.do_time_weight,
|
||||
self.emit_empty_bins,
|
||||
)));
|
||||
}
|
||||
self.binner
|
||||
.as_mut()
|
||||
.unwrap()
|
||||
.as_mut()
|
||||
.ingest(item.as_time_binnable_mut())
|
||||
}
|
||||
|
||||
fn set_range_complete(&mut self) {
|
||||
if let Some(k) = self.binner.as_mut() {
|
||||
k.set_range_complete()
|
||||
}
|
||||
}
|
||||
|
||||
fn bins_ready_count(&self) -> usize {
|
||||
if let Some(k) = self.binner.as_ref() {
|
||||
k.bins_ready_count()
|
||||
} else {
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
fn bins_ready(&mut self) -> Option<Self::Output> {
|
||||
if let Some(k) = self.binner.as_mut() {
|
||||
k.bins_ready()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn push_in_progress(&mut self, push_empty: bool) {
|
||||
if let Some(k) = self.binner.as_mut() {
|
||||
k.push_in_progress(push_empty)
|
||||
}
|
||||
}
|
||||
|
||||
fn cycle(&mut self) {
|
||||
if let Some(k) = self.binner.as_mut() {
|
||||
k.cycle()
|
||||
}
|
||||
}
|
||||
|
||||
fn empty(&self) -> Option<Self::Output> {
|
||||
if let Some(k) = self.binner.as_ref() {
|
||||
Some(k.empty())
|
||||
} else {
|
||||
warn!("TimeBinnerDynStruct::empty called with binner None");
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn append_empty_until_end(&mut self) {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
impl TimeBinner for TimeBinnerDynStruct2 {
|
||||
fn ingest(&mut self, item: &mut dyn TimeBinnable) {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn bins_ready_count(&self) -> usize {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn bins_ready(&mut self) -> Option<Box<dyn TimeBinned>> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn push_in_progress(&mut self, push_empty: bool) {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn cycle(&mut self) {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn set_range_complete(&mut self) {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn empty(&self) -> Box<dyn TimeBinned> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn append_empty_until_end(&mut self) {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
impl TimeBinnableTy for Box<dyn TimeBinnable> {
|
||||
type TimeBinner = TimeBinnerDynStruct;
|
||||
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
) -> Self::TimeBinner {
|
||||
let binner = self
|
||||
.as_ref()
|
||||
.time_binner_new(binrange.clone(), do_time_weight, emit_empty_bins);
|
||||
TimeBinnerDynStruct::new(binrange, do_time_weight, emit_empty_bins, binner)
|
||||
}
|
||||
}
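// Sketch of what the impl above provides (hypothetical helper, not in the diff): through
// the typed `TimeBinnableTy` API, a `Box<dyn TimeBinnable>` yields a concrete
// `TimeBinnerDynStruct` rather than a boxed `dyn TimeBinner`.
fn typed_binner_sketch<T: TimeBinnableTy>(item: &T, binrange: BinnedRangeEnum) -> T::TimeBinner {
    // Time-weighted binning, without emitting empty bins.
    item.time_binner_new(binrange, true, false)
}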
|
||||
|
||||
impl TimeBinnableTy for Box<dyn TimeBinned> {
|
||||
type TimeBinner = TimeBinnerDynStruct2;
|
||||
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
) -> Self::TimeBinner {
|
||||
let binner = self
|
||||
.as_time_binnable_ref()
|
||||
.time_binner_new(binrange.clone(), do_time_weight, emit_empty_bins);
|
||||
TimeBinnerDynStruct2::new(binrange, do_time_weight, emit_empty_bins, binner)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait TimeBinnerIngest: fmt::Debug + TypeName + Send {
|
||||
fn ingest_inrange(&mut self, item: &mut dyn TimeBinnable) -> Result<(), Error>;
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ use crate::collect_s::CollectedDyn;
|
||||
use crate::streamitem::RangeCompletableItem;
|
||||
use crate::streamitem::Sitemty;
|
||||
use crate::streamitem::StreamItem;
|
||||
use crate::timebin::TimeBinnable;
|
||||
use crate::Events;
|
||||
use err::Error;
|
||||
use futures_util::stream;
|
||||
@@ -16,11 +15,6 @@ use std::task::Poll;
|
||||
|
||||
pub trait EventStreamTrait: Stream<Item = Sitemty<Box<dyn Events>>> + WithTransformProperties + Send {}
|
||||
|
||||
pub trait TimeBinnableStreamTrait:
|
||||
Stream<Item = Sitemty<Box<dyn TimeBinnable>>> + WithTransformProperties + Send
|
||||
{
|
||||
}
|
||||
|
||||
pub trait CollectableStreamTrait:
|
||||
Stream<Item = Sitemty<Box<dyn CollectableDyn>>> + WithTransformProperties + Send
|
||||
{
|
||||
@@ -137,24 +131,6 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TimeBinnableStreamBox(pub Pin<Box<dyn TimeBinnableStreamTrait>>);
|
||||
|
||||
impl WithTransformProperties for TimeBinnableStreamBox {
|
||||
fn query_transform_properties(&self) -> TransformProperties {
|
||||
self.0.query_transform_properties()
|
||||
}
|
||||
}
|
||||
|
||||
impl Stream for TimeBinnableStreamBox {
|
||||
type Item = <dyn TimeBinnableStreamTrait as Stream>::Item;
|
||||
|
||||
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
|
||||
self.0.poll_next_unpin(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl TimeBinnableStreamTrait for TimeBinnableStreamBox {}
|
||||
|
||||
pub struct CollectableStreamBox(pub Pin<Box<dyn CollectableStreamTrait>>);
|
||||
|
||||
impl Stream for CollectableStreamBox {
|
||||
|
||||
@@ -1,258 +1 @@
|
||||
use crate::channelevents::ChannelEvents;
|
||||
use crate::empty::empty_events_dyn_ev;
|
||||
use crate::ChannelEventsInput;
|
||||
use crate::Error;
|
||||
use futures_util::Future;
|
||||
use futures_util::Stream;
|
||||
use futures_util::StreamExt;
|
||||
use items_0::collect_s::CollectedDyn;
|
||||
use items_0::collect_s::CollectorDyn;
|
||||
use items_0::collect_s::ToJsonResult;
|
||||
use items_0::streamitem::RangeCompletableItem;
|
||||
use items_0::streamitem::Sitemty;
|
||||
use items_0::streamitem::StreamItem;
|
||||
use items_0::timebin::TimeBinnable;
|
||||
use items_0::timebin::TimeBinner;
|
||||
use items_0::transform::EventTransform;
|
||||
use netpod::log::*;
|
||||
use netpod::BinnedRange;
|
||||
use netpod::BinnedRangeEnum;
|
||||
use netpod::ScalarType;
|
||||
use netpod::Shape;
|
||||
use std::pin::Pin;
|
||||
use std::task::Context;
|
||||
use std::task::Poll;
|
||||
use std::time::Duration;
|
||||
use std::time::Instant;
|
||||
|
||||
fn flush_binned(
|
||||
binner: &mut Box<dyn TimeBinner>,
|
||||
coll: &mut Option<Box<dyn CollectorDyn>>,
|
||||
force: bool,
|
||||
) -> Result<(), Error> {
|
||||
trace!("flush_binned bins_ready_count: {}", binner.bins_ready_count());
|
||||
if force {
|
||||
if binner.bins_ready_count() == 0 {
|
||||
debug!("cycle the binner forced");
|
||||
binner.cycle();
|
||||
} else {
|
||||
debug!("bins ready, do not force");
|
||||
}
|
||||
}
|
||||
if binner.bins_ready_count() > 0 {
|
||||
let ready = binner.bins_ready();
|
||||
match ready {
|
||||
Some(mut ready) => {
|
||||
trace!("binned_collected ready {ready:?}");
|
||||
if coll.is_none() {
|
||||
*coll = Some(ready.as_collectable_mut().new_collector());
|
||||
}
|
||||
let cl = coll.as_mut().unwrap();
|
||||
cl.ingest(ready.as_collectable_mut());
|
||||
Ok(())
|
||||
}
|
||||
None => Err(format!("bins_ready_count but no result").into()),
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct BinnedCollectedResult {
|
||||
pub range_final: bool,
|
||||
pub did_timeout: bool,
|
||||
pub result: Box<dyn CollectedDyn>,
|
||||
}
|
||||
|
||||
fn _old_binned_collected(
|
||||
scalar_type: ScalarType,
|
||||
shape: Shape,
|
||||
binrange: BinnedRangeEnum,
|
||||
transformer: &dyn EventTransform,
|
||||
deadline: Instant,
|
||||
inp: Pin<Box<dyn Stream<Item = Sitemty<ChannelEvents>> + Send>>,
|
||||
) -> Result<BinnedCollectedResult, Error> {
|
||||
event!(Level::TRACE, "binned_collected");
|
||||
let transprops = transformer.query_transform_properties();
|
||||
// TODO use a trait to allow checking for unfinished data [hcn2956jxhwsf]
|
||||
// TODO implement continue-at [hcn2956jxhwsf]
|
||||
// TODO maybe TimeBinner should take all ChannelEvents and handle this?
|
||||
let empty_item = empty_events_dyn_ev(&scalar_type, &shape)?;
|
||||
let tmp_item = Ok(StreamItem::DataItem(RangeCompletableItem::Data(ChannelEvents::Events(
|
||||
empty_item,
|
||||
))));
|
||||
let empty_stream = futures_util::stream::once(futures_util::future::ready(tmp_item));
|
||||
let mut stream = empty_stream.chain(inp);
|
||||
todo!()
|
||||
}
|
||||
|
||||
enum BinnedCollectedState {
|
||||
Init,
|
||||
Run,
|
||||
Done,
|
||||
}
|
||||
|
||||
pub struct BinnedCollected {
|
||||
state: BinnedCollectedState,
|
||||
binrange: BinnedRangeEnum,
|
||||
scalar_type: ScalarType,
|
||||
shape: Shape,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
did_timeout: bool,
|
||||
range_final: bool,
|
||||
coll: Option<Box<dyn CollectorDyn>>,
|
||||
binner: Option<Box<dyn TimeBinner>>,
|
||||
inp: Pin<Box<dyn ChannelEventsInput>>,
|
||||
}
|
||||
|
||||
impl BinnedCollected {
|
||||
const fn self_name() -> &'static str {
|
||||
"BinnedCollected"
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
binrange: BinnedRangeEnum,
|
||||
scalar_type: ScalarType,
|
||||
shape: Shape,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
//transformer: &dyn Transformer,
|
||||
deadline: Instant,
|
||||
inp: Pin<Box<dyn ChannelEventsInput>>,
|
||||
) -> Self {
|
||||
Self {
|
||||
state: BinnedCollectedState::Init,
|
||||
binrange,
|
||||
scalar_type,
|
||||
shape,
|
||||
do_time_weight,
|
||||
emit_empty_bins,
|
||||
did_timeout: false,
|
||||
range_final: false,
|
||||
coll: None,
|
||||
binner: None,
|
||||
inp,
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_item(&mut self, item: StreamItem<RangeCompletableItem<ChannelEvents>>) -> Result<(), Error> {
|
||||
match item {
|
||||
StreamItem::DataItem(k) => match k {
|
||||
RangeCompletableItem::RangeComplete => {
|
||||
self.range_final = true;
|
||||
}
|
||||
RangeCompletableItem::Data(k) => match k {
|
||||
ChannelEvents::Events(mut events) => {
|
||||
if self.binner.is_none() {
|
||||
let bb = events.as_time_binnable_mut().time_binner_new(
|
||||
self.binrange.clone(),
|
||||
self.do_time_weight,
|
||||
self.emit_empty_bins,
|
||||
);
|
||||
self.binner = Some(bb);
|
||||
}
|
||||
let binner = self.binner.as_mut().unwrap();
|
||||
trace!("handle_item call binner.ingest");
|
||||
binner.ingest(events.as_time_binnable_mut());
|
||||
flush_binned(binner, &mut self.coll, false)?;
|
||||
}
|
||||
ChannelEvents::Status(item) => {
|
||||
trace!("{:?}", item);
|
||||
}
|
||||
},
|
||||
},
|
||||
StreamItem::Log(item) => {
|
||||
// TODO collect also errors here?
|
||||
trace!("{:?}", item);
|
||||
}
|
||||
StreamItem::Stats(item) => {
|
||||
// TODO do something with the stats
|
||||
trace!("{:?}", item);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn result(&mut self) -> Result<BinnedCollectedResult, Error> {
|
||||
if let Some(mut binner) = self.binner.take() {
|
||||
if self.range_final {
|
||||
trace!("range_final");
|
||||
binner.set_range_complete();
|
||||
} else {
|
||||
debug!("not range_final");
|
||||
}
|
||||
if self.did_timeout {
|
||||
warn!("did_timeout");
|
||||
} else {
|
||||
trace!("not did_timeout");
|
||||
binner.cycle();
|
||||
}
|
||||
flush_binned(&mut binner, &mut self.coll, false)?;
|
||||
if self.coll.is_none() {
|
||||
debug!("force a bin");
|
||||
flush_binned(&mut binner, &mut self.coll, true)?;
|
||||
} else {
|
||||
trace!("coll is already some");
|
||||
}
|
||||
} else {
|
||||
error!("no binner, should always have one");
|
||||
}
|
||||
let result = match self.coll.take() {
|
||||
Some(mut coll) => {
|
||||
let res = coll
|
||||
.result(None, Some(self.binrange.clone()))
|
||||
.map_err(|e| format!("{e}"))?;
|
||||
res
|
||||
}
|
||||
None => {
|
||||
error!("binned_collected nothing collected");
|
||||
return Err(Error::from(format!("binned_collected nothing collected")));
|
||||
}
|
||||
};
|
||||
let ret = BinnedCollectedResult {
|
||||
range_final: self.range_final,
|
||||
did_timeout: self.did_timeout,
|
||||
result,
|
||||
};
|
||||
Ok(ret)
|
||||
}
|
||||
}
|
||||
|
||||
impl Future for BinnedCollected {
|
||||
type Output = Result<BinnedCollectedResult, Error>;
|
||||
|
||||
fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
|
||||
let span = span!(Level::INFO, BinnedCollected::self_name());
|
||||
let _spg = span.enter();
|
||||
use Poll::*;
|
||||
loop {
|
||||
break match &self.state {
|
||||
BinnedCollectedState::Init => {
|
||||
self.state = BinnedCollectedState::Run;
|
||||
continue;
|
||||
}
|
||||
BinnedCollectedState::Run => match self.inp.poll_next_unpin(cx) {
|
||||
Ready(Some(Ok(item))) => match self.handle_item(item) {
|
||||
Ok(()) => continue,
|
||||
Err(e) => {
|
||||
self.state = BinnedCollectedState::Done;
|
||||
Ready(Err(e))
|
||||
}
|
||||
},
|
||||
Ready(Some(Err(e))) => {
|
||||
self.state = BinnedCollectedState::Done;
|
||||
Ready(Err(e.into()))
|
||||
}
|
||||
Ready(None) => {
|
||||
self.state = BinnedCollectedState::Done;
|
||||
Ready(self.result())
|
||||
}
|
||||
Pending => Pending,
|
||||
},
|
||||
BinnedCollectedState::Done => Ready(Err(Error::from(format!("already done")))),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
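// Usage sketch (assumed, not part of this commit): `BinnedCollected` is a `Future`, so a
// caller constructs it from a boxed channel-event stream plus the query parameters and
// awaits the collected result. The parameter values below are placeholders.
async fn collect_binned_sketch(
    binrange: BinnedRangeEnum,
    scalar_type: ScalarType,
    shape: Shape,
    inp: Pin<Box<dyn ChannelEventsInput>>,
) -> Result<BinnedCollectedResult, Error> {
    let do_time_weight = true;
    let emit_empty_bins = false;
    let deadline = Instant::now() + Duration::from_millis(2000);
    BinnedCollected::new(binrange, scalar_type, shape, do_time_weight, emit_empty_bins, deadline, inp).await
}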
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
use crate::timebin::TimeBinnerCommonV0Func;
|
||||
use crate::timebin::TimeBinnerCommonV0Trait;
|
||||
use crate::ts_offs_from_abs;
|
||||
use crate::ts_offs_from_abs_with_anchor;
|
||||
use crate::IsoDateTime;
|
||||
use crate::TimeBinnableType;
|
||||
use crate::TimeBinnableTypeAggregator;
|
||||
use err::Error;
|
||||
use items_0::collect_s::CollectableDyn;
|
||||
use items_0::collect_s::CollectableType;
|
||||
@@ -15,10 +11,7 @@ use items_0::container::ByteEstimate;
|
||||
use items_0::overlap::HasTimestampDeque;
|
||||
use items_0::scalar_ops::AsPrimF32;
|
||||
use items_0::scalar_ops::ScalarOps;
|
||||
use items_0::timebin::TimeBinnable;
|
||||
use items_0::timebin::TimeBinnableTy;
|
||||
use items_0::timebin::TimeBinned;
|
||||
use items_0::timebin::TimeBinner;
|
||||
use items_0::timebin::TimeBinnerTy;
|
||||
use items_0::timebin::TimeBins;
|
||||
use items_0::vecpreview::VecPreview;
|
||||
@@ -326,21 +319,6 @@ impl<NTY: ScalarOps> TimeBins for BinsDim0<NTY> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> TimeBinnableType for BinsDim0<NTY> {
|
||||
type Output = BinsDim0<NTY>;
|
||||
type Aggregator = BinsDim0Aggregator<NTY>;
|
||||
|
||||
fn aggregator(range: SeriesRange, x_bin_count: usize, do_time_weight: bool) -> Self::Aggregator {
|
||||
let self_name = any::type_name::<Self>();
|
||||
debug!(
|
||||
"TimeBinnableType for {self_name} aggregator() range {:?} x_bin_count {} do_time_weight {}",
|
||||
range, x_bin_count, do_time_weight
|
||||
);
|
||||
//Self::Aggregator::new(range, do_time_weight)
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct BinsDim0TimeBinnerTy<STY>
|
||||
where
|
||||
@@ -924,620 +902,3 @@ impl<NTY: ScalarOps> BinsDim0Aggregator<NTY> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> TimeBinnableTypeAggregator for BinsDim0Aggregator<NTY> {
|
||||
type Input = BinsDim0<NTY>;
|
||||
type Output = BinsDim0<NTY>;
|
||||
|
||||
fn range(&self) -> &SeriesRange {
|
||||
&self.range
|
||||
}
|
||||
|
||||
fn ingest(&mut self, item: &Self::Input) {
|
||||
let beg = self.range.beg_u64();
|
||||
let end = self.range.end_u64();
|
||||
for ((((((&ts1, &ts2), &count), min), max), &avg), lst) in item
|
||||
.ts1s
|
||||
.iter()
|
||||
.zip(item.ts2s.iter())
|
||||
.zip(item.cnts.iter())
|
||||
.zip(item.mins.iter())
|
||||
.zip(item.maxs.iter())
|
||||
.zip(item.avgs.iter())
|
||||
.zip(item.lsts.iter())
|
||||
{
|
||||
if ts2 <= beg {
|
||||
} else if ts1 >= end {
|
||||
} else {
|
||||
if let Some((cmin, cmax, clst)) = self.minmaxlst.as_mut() {
|
||||
if min < cmin {
|
||||
*cmin = min.clone();
|
||||
}
|
||||
if max > cmax {
|
||||
*cmax = max.clone();
|
||||
}
|
||||
*clst = lst.clone();
|
||||
} else {
|
||||
self.minmaxlst = Some((min.clone(), max.clone(), lst.clone()));
|
||||
}
|
||||
self.cnt += count;
|
||||
// TODO this works only for equidistant bin edges.
|
||||
self.sumc += 1;
|
||||
self.sum += avg;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn result_reset(&mut self, range: SeriesRange) -> Self::Output {
|
||||
let ret = if let Some((min, max, lst)) = self.minmaxlst.take() {
|
||||
self.minmaxlst = Some((lst.clone(), lst.clone(), lst.clone()));
|
||||
let avg = if self.sumc > 0 {
|
||||
self.sum / self.sumc as f32
|
||||
} else {
|
||||
NTY::zero_b().as_prim_f32_b()
|
||||
};
|
||||
Self::Output {
|
||||
ts1s: [self.range.beg_u64()].into(),
|
||||
ts2s: [self.range.end_u64()].into(),
|
||||
cnts: [self.cnt].into(),
|
||||
mins: [min].into(),
|
||||
maxs: [max].into(),
|
||||
avgs: [avg].into(),
|
||||
lsts: [lst].into(),
|
||||
// TODO
|
||||
dim0kind: None,
|
||||
}
|
||||
} else {
|
||||
if self.cnt != 0 {
|
||||
error!("result_reset non-zero cnt but no minmaxlst");
|
||||
}
|
||||
warn!("result_reset missing minmaxlst");
|
||||
Self::Output::empty()
|
||||
};
|
||||
self.range = range;
|
||||
self.cnt = 0;
|
||||
self.sumc = 0;
|
||||
self.sum = 0.;
|
||||
ret
|
||||
}
|
||||
}
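// Illustrative only (hypothetical helper): the aggregator contract used above is to feed
// the bins that overlap the aggregator's current range, then call `result_reset`, which
// emits the single aggregated output bin and re-arms the aggregator for `next_range`.
fn aggregate_one_bin_sketch<NTY: ScalarOps>(
    agg: &mut BinsDim0Aggregator<NTY>,
    input: &BinsDim0<NTY>,
    next_range: SeriesRange,
) -> BinsDim0<NTY> {
    agg.ingest(input);
    agg.result_reset(next_range)
}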
|
||||
|
||||
macro_rules! try_to_container_bins {
|
||||
($sty:ty, $avgty:ty, $this:expr) => {
|
||||
let this = $this;
|
||||
if let Some(bins) = this.as_any_ref().downcast_ref::<BinsDim0<$sty>>() {
|
||||
let ts1s = bins.ts1s.iter().map(|&x| TsNano::from_ns(x)).collect();
|
||||
let ts2s = bins.ts2s.iter().map(|&x| TsNano::from_ns(x)).collect();
|
||||
let cnts = bins.cnts.iter().map(|&x| x).collect();
|
||||
let mins = bins.mins.iter().map(|&x| x).collect();
|
||||
let maxs = bins.maxs.iter().map(|&x| x).collect();
|
||||
let avgs = bins.avgs.iter().map(|&x| x as $avgty).collect();
|
||||
let lsts = bins.lsts.iter().map(|&x| x).collect();
|
||||
let fnls = bins.ts1s.iter().map(|_| true).collect();
|
||||
let dst = crate::binning::container_bins::ContainerBins::<$sty>::from_constituents(
|
||||
ts1s, ts2s, cnts, mins, maxs, avgs, lsts, fnls,
|
||||
);
|
||||
return Box::new(dst);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> TimeBinnable for BinsDim0<NTY> {
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
) -> Box<dyn TimeBinner> {
|
||||
// TODO get rid of unwrap
|
||||
// TODO respect emit_empty_bins
|
||||
let ret = BinsDim0TimeBinner::<NTY>::new(binrange, do_time_weight).unwrap();
|
||||
Box::new(ret)
|
||||
}
|
||||
|
||||
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult> {
|
||||
let k = serde_json::to_value(self).unwrap();
|
||||
Box::new(k) as _
|
||||
}
|
||||
|
||||
fn to_container_bins(&self) -> Box<dyn items_0::timebin::BinningggContainerBinsDyn> {
|
||||
let this = self;
|
||||
|
||||
try_to_container_bins!(u8, f64, self);
|
||||
try_to_container_bins!(u16, f64, self);
|
||||
try_to_container_bins!(u32, f64, self);
|
||||
try_to_container_bins!(u64, f64, self);
|
||||
try_to_container_bins!(i8, f64, self);
|
||||
try_to_container_bins!(i16, f64, self);
|
||||
try_to_container_bins!(i32, f64, self);
|
||||
try_to_container_bins!(i64, f64, self);
|
||||
try_to_container_bins!(f32, f32, self);
|
||||
try_to_container_bins!(f64, f64, self);
|
||||
|
||||
if let Some(bins) = this.as_any_ref().downcast_ref::<BinsDim0<bool>>() {
|
||||
let ts1s = bins.ts1s.iter().map(|&x| TsNano::from_ns(x)).collect();
|
||||
let ts2s = bins.ts2s.iter().map(|&x| TsNano::from_ns(x)).collect();
|
||||
let cnts = bins.cnts.iter().map(|&x| x).collect();
|
||||
let mins = bins.mins.iter().map(|x| x.clone()).collect();
|
||||
let maxs = bins.maxs.iter().map(|x| x.clone()).collect();
|
||||
let avgs = bins.avgs.iter().map(|&x| x as f64).collect();
|
||||
let lsts = bins.lsts.iter().map(|&x| x).collect();
|
||||
let fnls = bins.ts1s.iter().map(|_| true).collect();
|
||||
let dst = crate::binning::container_bins::ContainerBins::<bool>::from_constituents(
|
||||
ts1s, ts2s, cnts, mins, maxs, avgs, lsts, fnls,
|
||||
);
|
||||
return Box::new(dst);
|
||||
}
|
||||
|
||||
if let Some(bins) = this.as_any_ref().downcast_ref::<BinsDim0<String>>() {
|
||||
let ts1s = bins.ts1s.iter().map(|&x| TsNano::from_ns(x)).collect();
|
||||
let ts2s = bins.ts2s.iter().map(|&x| TsNano::from_ns(x)).collect();
|
||||
let cnts = bins.cnts.iter().map(|&x| x).collect();
|
||||
let mins = bins.mins.iter().map(|x| x.clone()).collect();
|
||||
let maxs = bins.maxs.iter().map(|x| x.clone()).collect();
|
||||
let avgs = bins.avgs.iter().map(|&x| x as f64).collect();
|
||||
let lsts = bins.lsts.iter().map(|x| x.clone()).collect();
|
||||
let fnls = bins.ts1s.iter().map(|_| true).collect();
|
||||
let dst = crate::binning::container_bins::ContainerBins::<String>::from_constituents(
|
||||
ts1s, ts2s, cnts, mins, maxs, avgs, lsts, fnls,
|
||||
);
|
||||
return Box::new(dst);
|
||||
}
|
||||
|
||||
if let Some(bins) = this.as_any_ref().downcast_ref::<BinsDim0<netpod::EnumVariant>>() {
|
||||
let ts1s = bins.ts1s.iter().map(|&x| TsNano::from_ns(x)).collect();
|
||||
let ts2s = bins.ts2s.iter().map(|&x| TsNano::from_ns(x)).collect();
|
||||
let cnts = bins.cnts.iter().map(|&x| x).collect();
|
||||
let mins = bins.mins.iter().map(|x| x.clone()).collect();
|
||||
let maxs = bins.maxs.iter().map(|x| x.clone()).collect();
|
||||
let avgs = bins.avgs.iter().map(|&x| x).collect();
|
||||
let lsts = bins.lsts.iter().map(|x| x.clone()).collect();
|
||||
let fnls = bins.ts1s.iter().map(|_| true).collect();
|
||||
let dst = crate::binning::container_bins::ContainerBins::<netpod::EnumVariant>::from_constituents(
|
||||
ts1s, ts2s, cnts, mins, maxs, avgs, lsts, fnls,
|
||||
);
|
||||
return Box::new(dst);
|
||||
}
|
||||
|
||||
let styn = any::type_name::<NTY>();
|
||||
todo!("TODO impl for {styn}");
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct BinsDim0TimeBinner<NTY: ScalarOps> {
|
||||
binrange: BinnedRangeEnum,
|
||||
rix: usize,
|
||||
rng: Option<SeriesRange>,
|
||||
agg: BinsDim0Aggregator<NTY>,
|
||||
ready: Option<<BinsDim0Aggregator<NTY> as TimeBinnableTypeAggregator>::Output>,
|
||||
range_final: bool,
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> BinsDim0TimeBinner<NTY> {
|
||||
fn type_name() -> &'static str {
|
||||
any::type_name::<Self>()
|
||||
}
|
||||
|
||||
fn new(binrange: BinnedRangeEnum, do_time_weight: bool) -> Result<Self, Error> {
|
||||
let rng = binrange
|
||||
.range_at(0)
|
||||
.ok_or_else(|| Error::with_msg_no_trace("empty binrange"))?;
|
||||
let agg = BinsDim0Aggregator::new(rng, do_time_weight);
|
||||
let ret = Self {
|
||||
binrange,
|
||||
rix: 0,
|
||||
rng: Some(agg.range().clone()),
|
||||
agg,
|
||||
ready: None,
|
||||
range_final: false,
|
||||
};
|
||||
Ok(ret)
|
||||
}
|
||||
|
||||
fn next_bin_range(&mut self) -> Option<SeriesRange> {
|
||||
self.rix += 1;
|
||||
if let Some(rng) = self.binrange.range_at(self.rix) {
|
||||
trace!("{} next_bin_range {:?}", Self::type_name(), rng);
|
||||
Some(rng)
|
||||
} else {
|
||||
trace!("{} next_bin_range None", Self::type_name());
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> TimeBinnerCommonV0Trait for BinsDim0TimeBinner<STY> {
|
||||
type Input = <BinsDim0Aggregator<STY> as TimeBinnableTypeAggregator>::Input;
|
||||
type Output = <BinsDim0Aggregator<STY> as TimeBinnableTypeAggregator>::Output;
|
||||
|
||||
fn type_name() -> &'static str {
|
||||
Self::type_name()
|
||||
}
|
||||
|
||||
fn common_bins_ready_count(&self) -> usize {
|
||||
match &self.ready {
|
||||
Some(k) => k.len(),
|
||||
None => 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn common_range_current(&self) -> &SeriesRange {
|
||||
self.agg.range()
|
||||
}
|
||||
|
||||
fn common_has_more_range(&self) -> bool {
|
||||
self.rng.is_some()
|
||||
}
|
||||
|
||||
fn common_next_bin_range(&mut self) -> Option<SeriesRange> {
|
||||
self.next_bin_range()
|
||||
}
|
||||
|
||||
fn common_set_current_range(&mut self, range: Option<SeriesRange>) {
|
||||
self.rng = range;
|
||||
}
|
||||
fn common_take_or_append_all_from(&mut self, item: Self::Output) {
|
||||
let mut item = item;
|
||||
match self.ready.as_mut() {
|
||||
Some(ready) => {
|
||||
ready.append_all_from(&mut item);
|
||||
}
|
||||
None => {
|
||||
self.ready = Some(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn common_result_reset(&mut self, range: Option<SeriesRange>) -> Self::Output {
|
||||
// TODO maybe better to wrap the aggregator in Option and remove the whole thing when no more bins?
|
||||
self.agg.result_reset(range.unwrap_or_else(|| {
|
||||
SeriesRange::TimeRange(netpod::range::evrange::NanoRange {
|
||||
beg: u64::MAX,
|
||||
end: u64::MAX,
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
fn common_agg_ingest(&mut self, item: &mut Self::Input) {
|
||||
self.agg.ingest(item)
|
||||
}
|
||||
|
||||
fn common_has_lst(&self) -> bool {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn common_feed_lst(&mut self, item: &mut Self::Input) {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> TimeBinner for BinsDim0TimeBinner<NTY> {
|
||||
fn bins_ready_count(&self) -> usize {
|
||||
TimeBinnerCommonV0Trait::common_bins_ready_count(self)
|
||||
}
|
||||
|
||||
fn bins_ready(&mut self) -> Option<Box<dyn TimeBinned>> {
|
||||
match self.ready.take() {
|
||||
Some(k) => Some(Box::new(k)),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn ingest(&mut self, item: &mut dyn TimeBinnable) {
|
||||
panic!("TODO do not use")
|
||||
}
|
||||
|
||||
// TODO there is too much common code between implementors:
|
||||
fn push_in_progress(&mut self, push_empty: bool) {
|
||||
panic!("TODO do not use")
|
||||
}
|
||||
|
||||
// TODO there is too much common code between implementors:
|
||||
fn cycle(&mut self) {
|
||||
panic!("TODO do not use")
|
||||
}
|
||||
|
||||
fn set_range_complete(&mut self) {
|
||||
self.range_final = true;
|
||||
}
|
||||
|
||||
fn empty(&self) -> Box<dyn TimeBinned> {
|
||||
let ret = <BinsDim0Aggregator<NTY> as TimeBinnableTypeAggregator>::Output::empty();
|
||||
Box::new(ret)
|
||||
}
|
||||
|
||||
fn append_empty_until_end(&mut self) {
|
||||
while self.common_has_more_range() {
|
||||
TimeBinnerCommonV0Func::push_in_progress(self, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> TimeBinned for BinsDim0<NTY> {
|
||||
fn clone_box_time_binned(&self) -> Box<dyn TimeBinned> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
|
||||
fn as_time_binnable_ref(&self) -> &dyn TimeBinnable {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_time_binnable_mut(&mut self) -> &mut dyn TimeBinnable {
|
||||
self
|
||||
}
|
||||
|
||||
fn edges_slice(&self) -> (&[u64], &[u64]) {
|
||||
if self.ts1s.as_slices().1.len() != 0 {
|
||||
panic!();
|
||||
}
|
||||
if self.ts2s.as_slices().1.len() != 0 {
|
||||
panic!();
|
||||
}
|
||||
(&self.ts1s.as_slices().0, &self.ts2s.as_slices().0)
|
||||
}
|
||||
|
||||
fn counts(&self) -> &[u64] {
|
||||
// TODO check for contiguous
|
||||
self.cnts.as_slices().0
|
||||
}
|
||||
|
||||
// TODO is Vec needed?
|
||||
fn mins(&self) -> Vec<f32> {
|
||||
self.mins.iter().map(|x| x.clone().as_prim_f32_b()).collect()
|
||||
}
|
||||
|
||||
// TODO is Vec needed?
|
||||
fn maxs(&self) -> Vec<f32> {
|
||||
self.maxs.iter().map(|x| x.clone().as_prim_f32_b()).collect()
|
||||
}
|
||||
|
||||
// TODO is Vec needed?
|
||||
fn avgs(&self) -> Vec<f32> {
|
||||
self.avgs.iter().map(Clone::clone).collect()
|
||||
}
|
||||
|
||||
fn validate(&self) -> Result<(), String> {
|
||||
use fmt::Write;
|
||||
let mut msg = String::new();
|
||||
if self.ts1s.len() != self.ts2s.len() {
|
||||
write!(&mut msg, "ts1s ≠ ts2s\n").unwrap();
|
||||
}
|
||||
for (i, ((count, min), max)) in self.cnts.iter().zip(&self.mins).zip(&self.maxs).enumerate() {
|
||||
if min.as_prim_f32_b() < 1. && *count != 0 {
|
||||
write!(&mut msg, "i {} count {} min {:?} max {:?}\n", i, count, min, max).unwrap();
|
||||
}
|
||||
}
|
||||
if msg.is_empty() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(msg)
|
||||
}
|
||||
}
|
||||
|
||||
fn as_collectable_mut(&mut self) -> &mut dyn CollectableDyn {
|
||||
self
|
||||
}
|
||||
|
||||
fn empty_like_self_box_time_binned(&self) -> Box<dyn TimeBinned> {
|
||||
Box::new(Self::empty())
|
||||
}
|
||||
|
||||
fn to_simple_bins_f32(&mut self) -> Box<dyn TimeBinned> {
|
||||
use mem::replace;
|
||||
let ret = BinsDim0::<f32> {
|
||||
ts1s: replace(&mut self.ts1s, VecDeque::new()),
|
||||
ts2s: replace(&mut self.ts2s, VecDeque::new()),
|
||||
cnts: replace(&mut self.cnts, VecDeque::new()),
|
||||
mins: self.mins.iter().map(AsPrimF32::as_prim_f32_b).collect(),
|
||||
maxs: self.maxs.iter().map(AsPrimF32::as_prim_f32_b).collect(),
|
||||
avgs: replace(&mut self.avgs, VecDeque::new()),
|
||||
lsts: self.lsts.iter().map(AsPrimF32::as_prim_f32_b).collect(),
|
||||
dim0kind: None,
|
||||
};
|
||||
Box::new(ret)
|
||||
}
|
||||
|
||||
fn drain_into_tb(&mut self, dst: &mut dyn TimeBinned, range: Range<usize>) -> Result<(), Error> {
|
||||
// TODO as_any and as_any_mut are declared on unrelated traits. Simplify.
|
||||
if let Some(dst) = dst.as_any_mut().downcast_mut::<Self>() {
|
||||
// TODO make it harder to forget new members when the struct may get modified in the future
|
||||
dst.ts1s.extend(self.ts1s.drain(range.clone()));
|
||||
dst.ts2s.extend(self.ts2s.drain(range.clone()));
|
||||
dst.cnts.extend(self.cnts.drain(range.clone()));
|
||||
dst.mins.extend(self.mins.drain(range.clone()));
|
||||
dst.maxs.extend(self.maxs.drain(range.clone()));
|
||||
dst.avgs.extend(self.avgs.drain(range.clone()));
|
||||
dst.lsts.extend(self.lsts.drain(range.clone()));
|
||||
Ok(())
|
||||
} else {
|
||||
let type_name = any::type_name::<Self>();
|
||||
error!("downcast to {} FAILED", type_name);
|
||||
Err(Error::with_msg_no_trace(format!("downcast to {} FAILED", type_name)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bins_timebin_fill_empty_00() {
|
||||
let mut bins = BinsDim0::<u32>::empty();
|
||||
let binrange = BinnedRangeEnum::Time(BinnedRange {
|
||||
bin_len: TsNano::from_ns(SEC * 2),
|
||||
bin_off: 9,
|
||||
bin_cnt: 5,
|
||||
});
|
||||
let do_time_weight = true;
|
||||
let mut binner = bins
|
||||
.as_time_binnable_ref()
|
||||
.time_binner_new(binrange, do_time_weight, false);
|
||||
binner.ingest(&mut bins);
|
||||
binner.append_empty_until_end();
|
||||
let ready = binner.bins_ready();
|
||||
let got = ready.unwrap();
|
||||
let got: &BinsDim0<u32> = got.as_any_ref().downcast_ref().unwrap();
|
||||
let mut exp = BinsDim0::empty();
|
||||
// Currently bins without lst can not exist.
|
||||
// for i in 0..5 {
|
||||
// exp.push(SEC * 2 * (9 + i), SEC * 2 * (10 + i), 0, 0, 0, 0., None);
|
||||
// }
|
||||
assert_eq!(got, &exp);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bins_timebin_fill_empty_01() {
|
||||
let mut bins = BinsDim0::<u32>::empty();
|
||||
let binrange = BinnedRangeEnum::Time(BinnedRange {
|
||||
bin_len: TsNano::from_ns(SEC * 2),
|
||||
bin_off: 9,
|
||||
bin_cnt: 5,
|
||||
});
|
||||
let do_time_weight = true;
|
||||
let mut binner = bins
|
||||
.as_time_binnable_ref()
|
||||
.time_binner_new(binrange, do_time_weight, false);
|
||||
binner.ingest(&mut bins);
|
||||
binner.push_in_progress(true);
|
||||
binner.append_empty_until_end();
|
||||
let ready = binner.bins_ready();
|
||||
let got = ready.unwrap();
|
||||
let got: &BinsDim0<u32> = got.as_any_ref().downcast_ref().unwrap();
|
||||
let mut exp = BinsDim0::empty();
|
||||
// Currently bins without lst can not exist.
|
||||
// for i in 0..5 {
|
||||
// exp.push(SEC * 2 * (9 + i), SEC * 2 * (10 + i), 0, 0, 0, 0., None);
|
||||
// }
|
||||
assert_eq!(got, &exp);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bins_timebin_push_empty_00() {
|
||||
let mut bins = BinsDim0::<u32>::empty();
|
||||
let binrange = BinnedRangeEnum::Time(BinnedRange {
|
||||
bin_len: TsNano::from_ns(SEC * 2),
|
||||
bin_off: 9,
|
||||
bin_cnt: 5,
|
||||
});
|
||||
let do_time_weight = true;
|
||||
let mut binner = bins
|
||||
.as_time_binnable_ref()
|
||||
.time_binner_new(binrange, do_time_weight, false);
|
||||
binner.ingest(&mut bins);
|
||||
binner.push_in_progress(true);
|
||||
let ready = binner.bins_ready();
|
||||
let got = ready.unwrap();
|
||||
let got: &BinsDim0<u32> = got.as_any_ref().downcast_ref().unwrap();
|
||||
let mut exp = BinsDim0::empty();
|
||||
// Currently bins without lst can not exist.
|
||||
// for i in 0..1 {
|
||||
// exp.push(SEC * 2 * (9 + i), SEC * 2 * (10 + i), 0, 0, 0, 0., None);
|
||||
// }
|
||||
assert_eq!(got, &exp);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bins_timebin_push_empty_01() {
|
||||
let mut bins = BinsDim0::<u32>::empty();
|
||||
let binrange = BinnedRangeEnum::Time(BinnedRange {
|
||||
bin_len: TsNano::from_ns(SEC * 2),
|
||||
bin_off: 9,
|
||||
bin_cnt: 5,
|
||||
});
|
||||
let do_time_weight = true;
|
||||
let mut binner = bins
|
||||
.as_time_binnable_ref()
|
||||
.time_binner_new(binrange, do_time_weight, false);
|
||||
binner.ingest(&mut bins);
|
||||
binner.push_in_progress(true);
|
||||
binner.push_in_progress(true);
|
||||
binner.push_in_progress(true);
|
||||
let ready = binner.bins_ready();
|
||||
let got = ready.unwrap();
|
||||
let got: &BinsDim0<u32> = got.as_any_ref().downcast_ref().unwrap();
|
||||
let mut exp = BinsDim0::empty();
|
||||
// Currently bins without lst can not exist.
|
||||
// for i in 0..3 {
|
||||
// exp.push(SEC * 2 * (9 + i), SEC * 2 * (10 + i), 0, 0, 0, 0., None);
|
||||
// }
|
||||
assert_eq!(got, &exp);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bins_timebin_ingest_only_before() {
|
||||
let mut bins = BinsDim0::<u32>::empty();
|
||||
bins.push(SEC * 2, SEC * 4, 3, 7, 9, 8.1, 8);
|
||||
bins.push(SEC * 4, SEC * 6, 3, 6, 9, 8.2, 8);
|
||||
let binrange = BinnedRangeEnum::Time(BinnedRange {
|
||||
bin_len: TsNano::from_ns(SEC * 2),
|
||||
bin_off: 9,
|
||||
bin_cnt: 5,
|
||||
});
|
||||
let do_time_weight = true;
|
||||
let mut binner = bins
|
||||
.as_time_binnable_ref()
|
||||
.time_binner_new(binrange, do_time_weight, false);
|
||||
binner.ingest(&mut bins);
|
||||
binner.push_in_progress(true);
|
||||
let ready = binner.bins_ready();
|
||||
let got = ready.unwrap();
|
||||
let got: &BinsDim0<u32> = got.as_any_ref().downcast_ref().unwrap();
|
||||
let mut exp = BinsDim0::empty();
|
||||
// Currently bins without lst can not exist.
|
||||
// exp.push(SEC * 18, SEC * 20, 0, 0, 0, 0., None);
|
||||
assert_eq!(got, &exp);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bins_timebin_ingest_00() {
|
||||
let mut bins = BinsDim0::<u32>::empty();
|
||||
bins.push(SEC * 20, SEC * 21, 3, 70, 94, 82., 80);
|
||||
bins.push(SEC * 21, SEC * 22, 5, 71, 93, 86., 81);
|
||||
bins.push(SEC * 23, SEC * 24, 6, 72, 92, 81., 82);
|
||||
let binrange = BinnedRangeEnum::Time(BinnedRange {
|
||||
bin_len: TsNano::from_ns(SEC * 2),
|
||||
bin_off: 9,
|
||||
bin_cnt: 5,
|
||||
});
|
||||
let do_time_weight = true;
|
||||
let mut binner = bins
|
||||
.as_time_binnable_ref()
|
||||
.time_binner_new(binrange, do_time_weight, false);
|
||||
binner.ingest(&mut bins);
|
||||
binner.push_in_progress(true);
|
||||
let ready = binner.bins_ready();
|
||||
let got = ready.unwrap();
|
||||
let got: &BinsDim0<u32> = got.as_any_ref().downcast_ref().unwrap();
|
||||
let mut exp = BinsDim0::empty();
|
||||
// Currently bins without lst can not exist.
|
||||
// exp.push(SEC * 18, SEC * 20, 0, 0, 0, 0., None);
|
||||
exp.push(SEC * 20, SEC * 22, 8, 70, 94, 84., 82);
|
||||
exp.push(SEC * 22, SEC * 24, 6, 72, 92, 81., 91);
|
||||
assert_eq!(got, &exp);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bins_timebin_ingest_continuous_00() {
|
||||
let binrange = BinnedRangeEnum::Time(BinnedRange {
|
||||
bin_len: TsNano::from_ns(SEC * 2),
|
||||
bin_off: 9,
|
||||
bin_cnt: 20,
|
||||
});
|
||||
let do_time_weight = true;
|
||||
let mut bins = BinsDim0::<u32>::empty();
|
||||
bins.push(SEC * 20, SEC * 21, 3, 70, 94, 82., 80);
|
||||
//bins.push(SEC * 21, SEC * 22, 5, 71, 93, 86.);
|
||||
//bins.push(SEC * 23, SEC * 24, 6, 72, 92, 81.);
|
||||
let mut binner = bins
|
||||
.as_time_binnable_ref()
|
||||
.time_binner_new(binrange, do_time_weight, false);
|
||||
binner.ingest(&mut bins);
|
||||
//binner.push_in_progress(true);
|
||||
let ready = binner.bins_ready();
|
||||
let got = ready.unwrap();
|
||||
let got: &BinsDim0<u32> = got.as_any_ref().downcast_ref().unwrap();
|
||||
let mut exp = BinsDim0::empty();
|
||||
// Currently bins without lst can not exist.
|
||||
// exp.push(SEC * 18, SEC * 20, 0, 0, 0, 0., None);
|
||||
exp.push(SEC * 20, SEC * 22, 8, 70, 94, 84., 82);
|
||||
exp.push(SEC * 22, SEC * 24, 6, 72, 92, 81., 91);
|
||||
assert_eq!(got, &exp);
|
||||
}
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
use crate::ts_offs_from_abs;
|
||||
use crate::ts_offs_from_abs_with_anchor;
|
||||
use crate::IsoDateTime;
|
||||
use crate::TimeBinnableType;
|
||||
use crate::TimeBinnableTypeAggregator;
|
||||
use err::Error;
|
||||
use items_0::collect_s::CollectableDyn;
|
||||
use items_0::collect_s::CollectableType;
|
||||
@@ -12,9 +10,6 @@ use items_0::collect_s::ToJsonResult;
|
||||
use items_0::container::ByteEstimate;
|
||||
use items_0::scalar_ops::AsPrimF32;
|
||||
use items_0::scalar_ops::ScalarOps;
|
||||
use items_0::timebin::TimeBinnable;
|
||||
use items_0::timebin::TimeBinned;
|
||||
use items_0::timebin::TimeBinner;
|
||||
use items_0::timebin::TimeBins;
|
||||
use items_0::AppendEmptyBin;
|
||||
use items_0::AsAnyMut;
|
||||
@@ -262,21 +257,6 @@ impl<NTY: ScalarOps> TimeBins for BinsXbinDim0<NTY> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> TimeBinnableType for BinsXbinDim0<NTY> {
|
||||
type Output = BinsXbinDim0<NTY>;
|
||||
type Aggregator = BinsXbinDim0Aggregator<NTY>;
|
||||
|
||||
fn aggregator(range: SeriesRange, x_bin_count: usize, do_time_weight: bool) -> Self::Aggregator {
|
||||
/*let self_name = any::type_name::<Self>();
|
||||
debug!(
|
||||
"TimeBinnableType for {self_name} aggregator() range {:?} x_bin_count {} do_time_weight {}",
|
||||
range, x_bin_count, do_time_weight
|
||||
);
|
||||
Self::Aggregator::new(range, do_time_weight)*/
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
// TODO rename to BinsDim0CollectorOutput
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct BinsXbinDim0CollectedResult<NTY> {
|
||||
@@ -540,365 +520,3 @@ impl<NTY: ScalarOps> BinsXbinDim0Aggregator<NTY> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> TimeBinnableTypeAggregator for BinsXbinDim0Aggregator<NTY> {
|
||||
type Input = BinsXbinDim0<NTY>;
|
||||
type Output = BinsXbinDim0<NTY>;
|
||||
|
||||
fn range(&self) -> &SeriesRange {
|
||||
&self.range
|
||||
}
|
||||
|
||||
fn ingest(&mut self, item: &Self::Input) {
|
||||
/*for i1 in 0..item.ts1s.len() {
|
||||
if item.counts[i1] == 0 {
|
||||
} else if item.ts2s[i1] <= self.range.beg {
|
||||
} else if item.ts1s[i1] >= self.range.end {
|
||||
} else {
|
||||
if self.count == 0 {
|
||||
self.min = item.mins[i1].clone();
|
||||
self.max = item.maxs[i1].clone();
|
||||
} else {
|
||||
if self.min > item.mins[i1] {
|
||||
self.min = item.mins[i1].clone();
|
||||
}
|
||||
if self.max < item.maxs[i1] {
|
||||
self.max = item.maxs[i1].clone();
|
||||
}
|
||||
}
|
||||
self.count += item.counts[i1];
|
||||
self.sum += item.avgs[i1];
|
||||
self.sumc += 1;
|
||||
}
|
||||
}*/
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn result_reset(&mut self, range: SeriesRange) -> Self::Output {
|
||||
/*if self.sumc > 0 {
|
||||
self.avg = self.sum / self.sumc as f32;
|
||||
}
|
||||
let ret = Self::Output {
|
||||
ts1s: [self.range.beg].into(),
|
||||
ts2s: [self.range.end].into(),
|
||||
counts: [self.count].into(),
|
||||
mins: [self.min.clone()].into(),
|
||||
maxs: [self.max.clone()].into(),
|
||||
avgs: [self.avg].into(),
|
||||
};
|
||||
self.range = range;
|
||||
self.count = 0;
|
||||
self.sum = 0f32;
|
||||
self.sumc = 0;
|
||||
ret*/
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> TimeBinnable for BinsXbinDim0<NTY> {
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
) -> Box<dyn TimeBinner> {
|
||||
// TODO respect emit_empty_bins
|
||||
let ret = BinsXbinDim0TimeBinner::<NTY>::new(binrange, do_time_weight);
|
||||
Box::new(ret)
|
||||
}
|
||||
|
||||
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult> {
|
||||
let k = serde_json::to_value(self).unwrap();
|
||||
Box::new(k)
|
||||
}
|
||||
|
||||
fn to_container_bins(&self) -> Box<dyn items_0::timebin::BinningggContainerBinsDyn> {
|
||||
panic!("not supported, remove")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct BinsXbinDim0TimeBinner<NTY: ScalarOps> {
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
agg: Option<BinsXbinDim0Aggregator<NTY>>,
|
||||
ready: Option<<BinsXbinDim0Aggregator<NTY> as TimeBinnableTypeAggregator>::Output>,
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> BinsXbinDim0TimeBinner<NTY> {
|
||||
fn new(binrange: BinnedRangeEnum, do_time_weight: bool) -> Self {
|
||||
Self {
|
||||
binrange,
|
||||
do_time_weight,
|
||||
agg: None,
|
||||
ready: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn next_bin_range(&mut self) -> Option<NanoRange> {
|
||||
/*if self.edges.len() >= 2 {
|
||||
let ret = NanoRange {
|
||||
beg: self.edges[0],
|
||||
end: self.edges[1],
|
||||
};
|
||||
self.edges.pop_front();
|
||||
Some(ret)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
*/
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> TimeBinner for BinsXbinDim0TimeBinner<NTY> {
|
||||
fn ingest(&mut self, item: &mut dyn TimeBinnable) {
|
||||
/*let self_name = std::any::type_name::<Self>();
|
||||
if item.len() == 0 {
|
||||
// Return already here, RangeOverlapInfo would not give much sense.
|
||||
return;
|
||||
}
|
||||
if self.edges.len() < 2 {
|
||||
warn!("TimeBinnerDyn for {self_name} no more bin in edges A\n{:?}\n\n", item);
|
||||
return;
|
||||
}
|
||||
// TODO optimize by remembering at which event array index we have arrived.
|
||||
// That needs modified interfaces which can take and yield the start and latest index.
|
||||
loop {
|
||||
while item.starts_after(NanoRange {
|
||||
beg: 0,
|
||||
end: self.edges[1],
|
||||
}) {
|
||||
self.cycle();
|
||||
if self.edges.len() < 2 {
|
||||
warn!("TimeBinnerDyn for {self_name} no more bin in edges B\n{:?}\n\n", item);
|
||||
return;
|
||||
}
|
||||
}
|
||||
if item.ends_before(NanoRange {
|
||||
beg: self.edges[0],
|
||||
end: u64::MAX,
|
||||
}) {
|
||||
return;
|
||||
} else {
|
||||
if self.edges.len() < 2 {
|
||||
warn!("TimeBinnerDyn for {self_name} edge list exhausted");
|
||||
return;
|
||||
} else {
|
||||
let agg = if let Some(agg) = self.agg.as_mut() {
|
||||
agg
|
||||
} else {
|
||||
self.agg = Some(BinsXbinDim0Aggregator::new(
|
||||
// We know here that we have enough edges for another bin.
|
||||
// and `next_bin_range` will pop the first edge.
|
||||
self.next_bin_range().unwrap(),
|
||||
self.do_time_weight,
|
||||
));
|
||||
self.agg.as_mut().unwrap()
|
||||
};
|
||||
if let Some(item) = item
|
||||
.as_any_ref()
|
||||
// TODO make statically sure that we attempt to cast to the correct type here:
|
||||
.downcast_ref::<<BinsXbinDim0Aggregator<NTY> as TimeBinnableTypeAggregator>::Input>()
|
||||
{
|
||||
agg.ingest(item);
|
||||
} else {
|
||||
let tyid_item = std::any::Any::type_id(item.as_any_ref());
|
||||
error!("not correct item type {:?}", tyid_item);
|
||||
};
|
||||
if item.ends_after(agg.range().clone()) {
|
||||
self.cycle();
|
||||
if self.edges.len() < 2 {
|
||||
warn!("TimeBinnerDyn for {self_name} no more bin in edges C\n{:?}\n\n", item);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}*/
|
||||
todo!()
|
||||
}
|
||||
|
||||
    fn bins_ready_count(&self) -> usize {
        match &self.ready {
            Some(k) => k.len(),
            None => 0,
        }
    }

    fn bins_ready(&mut self) -> Option<Box<dyn TimeBinned>> {
        match self.ready.take() {
            Some(k) => Some(Box::new(k)),
            None => None,
        }
    }

    // TODO there is too much common code between implementors:
    fn push_in_progress(&mut self, push_empty: bool) {
        // TODO expand should be derived from AggKind. Is it still required after all?
        /*let expand = true;
        if let Some(agg) = self.agg.as_mut() {
            let dummy_range = NanoRange { beg: 4, end: 5 };
            let mut bins = agg.result_reset(dummy_range, expand);
            self.agg = None;
            assert_eq!(bins.len(), 1);
            if push_empty || bins.counts[0] != 0 {
                match self.ready.as_mut() {
                    Some(ready) => {
                        ready.append_all_from(&mut bins);
                    }
                    None => {
                        self.ready = Some(bins);
                    }
                }
            }
        }*/
        todo!()
    }

    // TODO there is too much common code between implementors:
    fn cycle(&mut self) {
        let n = self.bins_ready_count();
        self.push_in_progress(true);
        if self.bins_ready_count() == n {
            if let Some(range) = self.next_bin_range() {
                let mut bins = BinsXbinDim0::<NTY>::empty();
                bins.append_zero(range.beg, range.end);
                match self.ready.as_mut() {
                    Some(ready) => {
                        ready.append_all_from(&mut bins);
                    }
                    None => {
                        self.ready = Some(bins);
                    }
                }
                if self.bins_ready_count() <= n {
                    error!("failed to push a zero bin");
                }
            } else {
                warn!("cycle: no in-progress bin pushed, but also no more bin to add as zero-bin");
            }
        }
    }

    fn set_range_complete(&mut self) {}

    fn empty(&self) -> Box<dyn TimeBinned> {
        let ret = <BinsXbinDim0Aggregator<NTY> as TimeBinnableTypeAggregator>::Output::empty();
        Box::new(ret)
    }

    fn append_empty_until_end(&mut self) {
        // TODO
        todo!();
        /*while self.rng.is_some() {
            TimeBinnerCommonV0Func::push_in_progress(self, true);
        }*/
    }
}
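
// For orientation, a minimal sketch of how a caller is expected to drive the TimeBinner
// methods above (hypothetical helper, not part of this crate; only trait methods shown in
// this file are used):
/*
fn drain_ready_bins(binner: &mut dyn TimeBinner, item: &mut dyn TimeBinnable) -> Option<Box<dyn TimeBinned>> {
    binner.ingest(item);
    // Flush the bin that is still in progress; `false` keeps empty bins out of the result.
    binner.push_in_progress(false);
    if binner.bins_ready_count() > 0 {
        binner.bins_ready()
    } else {
        None
    }
}
*/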

impl<NTY: ScalarOps> TimeBinned for BinsXbinDim0<NTY> {
    fn clone_box_time_binned(&self) -> Box<dyn TimeBinned> {
        Box::new(self.clone())
    }

    fn as_time_binnable_ref(&self) -> &dyn TimeBinnable {
        self
    }

    fn as_time_binnable_mut(&mut self) -> &mut dyn TimeBinnable {
        self
    }

    fn edges_slice(&self) -> (&[u64], &[u64]) {
        if self.ts1s.as_slices().1.len() != 0 {
            panic!();
        }
        if self.ts2s.as_slices().1.len() != 0 {
            panic!();
        }
        (&self.ts1s.as_slices().0, &self.ts2s.as_slices().0)
    }

    fn counts(&self) -> &[u64] {
        // TODO check for contiguous
        self.counts.as_slices().0
    }

    // TODO is Vec needed?
    fn mins(&self) -> Vec<f32> {
        self.mins.iter().map(|x| x.clone().as_prim_f32_b()).collect()
    }

    // TODO is Vec needed?
    fn maxs(&self) -> Vec<f32> {
        self.maxs.iter().map(|x| x.clone().as_prim_f32_b()).collect()
    }

    // TODO is Vec needed?
    fn avgs(&self) -> Vec<f32> {
        self.avgs.iter().map(Clone::clone).collect()
    }

    fn validate(&self) -> Result<(), String> {
        use std::fmt::Write;
        let mut msg = String::new();
        if self.ts1s.len() != self.ts2s.len() {
            write!(&mut msg, "ts1s ≠ ts2s\n").unwrap();
        }
        for (i, ((count, min), max)) in self.counts.iter().zip(&self.mins).zip(&self.maxs).enumerate() {
            if min.as_prim_f32_b() < 1. && *count != 0 {
                write!(&mut msg, "i {} count {} min {:?} max {:?}\n", i, count, min, max).unwrap();
            }
        }
        if msg.is_empty() {
            Ok(())
        } else {
            Err(msg)
        }
    }

    fn as_collectable_mut(&mut self) -> &mut dyn CollectableDyn {
        self
    }

    fn empty_like_self_box_time_binned(&self) -> Box<dyn TimeBinned> {
        Box::new(Self::empty())
    }

    fn to_simple_bins_f32(&mut self) -> Box<dyn TimeBinned> {
        use mem::replace;
        let ret = super::binsdim0::BinsDim0::<f32> {
            ts1s: replace(&mut self.ts1s, VecDeque::new()),
            ts2s: replace(&mut self.ts2s, VecDeque::new()),
            cnts: replace(&mut self.counts, VecDeque::new()),
            mins: self.mins.iter().map(AsPrimF32::as_prim_f32_b).collect(),
            maxs: self.maxs.iter().map(AsPrimF32::as_prim_f32_b).collect(),
            avgs: replace(&mut self.avgs, VecDeque::new()),
            lsts: err::todoval(),
            dim0kind: None,
        };
        Box::new(ret)
    }

    fn drain_into_tb(&mut self, dst: &mut dyn TimeBinned, range: Range<usize>) -> Result<(), Error> {
        // TODO as_any and as_any_mut are declared on unrelated traits. Simplify.
        if let Some(dst) = dst.as_any_mut().downcast_mut::<Self>() {
            // TODO make it harder to forget new members when the struct may get modified in the future
            dst.ts1s.extend(self.ts1s.drain(range.clone()));
            dst.ts2s.extend(self.ts2s.drain(range.clone()));
            dst.counts.extend(self.counts.drain(range.clone()));
            dst.mins.extend(self.mins.drain(range.clone()));
            dst.maxs.extend(self.maxs.drain(range.clone()));
            dst.avgs.extend(self.avgs.drain(range.clone()));
            todo!("handle last_seen");
            Ok(())
        } else {
            let type_name = any::type_name::<Self>();
            error!("downcast to {} FAILED", type_name);
            Err(Error::with_msg_no_trace(format!("downcast to {} FAILED", type_name)))
        }
    }
}

@@ -8,10 +8,7 @@ use items_0::container::ByteEstimate;
use items_0::framable::FrameTypeInnerStatic;
use items_0::isodate::IsoDateTime;
use items_0::streamitem::ITEMS_2_CHANNEL_EVENTS_FRAME_TYPE_ID;
use items_0::timebin::TimeBinnable;
use items_0::timebin::TimeBinnableTy;
use items_0::timebin::TimeBinned;
use items_0::timebin::TimeBinner;
use items_0::timebin::TimeBinnerTy;
use items_0::AsAnyMut;
use items_0::AsAnyRef;
@@ -165,12 +162,6 @@ impl ChannelEvents {
    }
}

impl items_0::IntoTimeBinnable for ChannelEvents {
    fn into_time_binnable(self) -> Box<dyn TimeBinnable> {
        Box::new(self)
    }
}

impl TypeName for ChannelEvents {
    fn type_name(&self) -> String {
        any::type_name::<Self>().into()
@@ -826,26 +817,6 @@ impl Mergeable for ChannelEvents {
    }
}

impl TimeBinnable for ChannelEvents {
    fn time_binner_new(
        &self,
        binrange: BinnedRangeEnum,
        do_time_weight: bool,
        emit_empty_bins: bool,
    ) -> Box<dyn TimeBinner> {
        let ret = <ChannelEvents as TimeBinnableTy>::time_binner_new(&self, binrange, do_time_weight, emit_empty_bins);
        Box::new(ret)
    }

    fn to_box_to_json_result(&self) -> Box<dyn items_0::collect_s::ToJsonResult> {
        todo!()
    }

    fn to_container_bins(&self) -> Box<dyn items_0::timebin::BinningggContainerBinsDyn> {
        panic!("logic error must not get used on ChannelEvents")
    }
}

impl EventsNonObj for ChannelEvents {
    fn into_tss_pulses(self: Box<Self>) -> (VecDeque<u64>, VecDeque<u64>) {
        match *self {
@@ -856,14 +827,6 @@ impl EventsNonObj for ChannelEvents {
}

impl Events for ChannelEvents {
    fn as_time_binnable_ref(&self) -> &dyn TimeBinnable {
        todo!()
    }

    fn as_time_binnable_mut(&mut self) -> &mut dyn TimeBinnable {
        todo!()
    }

    fn verify(&self) -> bool {
        match self {
            ChannelEvents::Events(x) => Events::verify(x),
@@ -1030,174 +993,6 @@ impl CollectableDyn for ChannelEvents {
    }
}

pub struct ChannelEventsTimeBinner {
    // TODO `ConnStatus` contains all the changes that can happen to a connection, but
    // here we would rather require a simplified current state for binning purposes.
    binrange: BinnedRangeEnum,
    do_time_weight: bool,
    emit_empty_bins: bool,
    conn_state: ConnStatus,
    binner: Option<Box<dyn TimeBinner>>,
}

impl ChannelEventsTimeBinner {
    pub fn type_name() -> &'static str {
        std::any::type_name::<Self>()
    }
}

impl fmt::Debug for ChannelEventsTimeBinner {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct(Self::type_name())
            .field("binrange", &self.binrange)
            .field("do_time_weight", &self.do_time_weight)
            .field("emit_empty_bins", &self.emit_empty_bins)
            .field("conn_state", &self.conn_state)
            .finish()
    }
}

impl ChannelEventsTimeBinner {}

impl TimeBinnerTy for ChannelEventsTimeBinner {
    type Input = ChannelEvents;
    type Output = Box<dyn TimeBinned>;

    fn ingest(&mut self, item: &mut Self::Input) {
        trace_ingest!("{} INGEST {:?}", Self::type_name(), item);
        match item {
            ChannelEvents::Events(item) => {
                let binner = self.binner.get_or_insert_with(|| {
                    item.time_binner_new(self.binrange.clone(), self.do_time_weight, self.emit_empty_bins)
                });
                binner.ingest(item.as_time_binnable_mut())
            }
            ChannelEvents::Status(item) => {
                warn!("TODO consider channel status in time binning {item:?}");
            }
        }
        trace_ingest!("{} INGEST RETURN {:?}", Self::type_name(), item);
    }

    fn bins_ready_count(&self) -> usize {
        match &self.binner {
            Some(binner) => binner.bins_ready_count(),
            None => 0,
        }
    }

    fn bins_ready(&mut self) -> Option<Self::Output> {
        match self.binner.as_mut() {
            Some(binner) => binner.bins_ready(),
            None => None,
        }
    }

    fn push_in_progress(&mut self, push_empty: bool) {
        match self.binner.as_mut() {
            Some(binner) => binner.push_in_progress(push_empty),
            None => (),
        }
    }

    fn cycle(&mut self) {
        match self.binner.as_mut() {
            Some(binner) => binner.cycle(),
            None => (),
        }
    }

    fn set_range_complete(&mut self) {
        match self.binner.as_mut() {
            Some(binner) => binner.set_range_complete(),
            None => (),
        }
    }

    fn empty(&self) -> Option<Self::Output> {
        match self.binner.as_ref() {
            Some(binner) => Some(binner.empty()),
            None => None,
        }
    }

    fn append_empty_until_end(&mut self) {
        match self.binner.as_mut() {
            Some(binner) => binner.append_empty_until_end(),
            None => panic!(),
        }
    }
}

impl TimeBinner for ChannelEventsTimeBinner {
    fn ingest(&mut self, item: &mut dyn TimeBinnable) {
        if let Some(item) = item.as_any_mut().downcast_mut::<ChannelEvents>() {
            TimeBinnerTy::ingest(self, item)
        } else {
            panic!()
        }
    }

    fn bins_ready_count(&self) -> usize {
        TimeBinnerTy::bins_ready_count(self)
    }

    fn bins_ready(&mut self) -> Option<Box<dyn TimeBinned>> {
        TimeBinnerTy::bins_ready(self)
    }

    fn push_in_progress(&mut self, push_empty: bool) {
        TimeBinnerTy::push_in_progress(self, push_empty)
    }

    fn cycle(&mut self) {
        TimeBinnerTy::cycle(self)
    }

    fn set_range_complete(&mut self) {
        TimeBinnerTy::set_range_complete(self)
    }

    fn empty(&self) -> Box<dyn TimeBinned> {
        match TimeBinnerTy::empty(self) {
            Some(x) => x,
            None => panic!("TODO TimeBinner::empty for ChannelEventsTimeBinner"),
        }
    }

    fn append_empty_until_end(&mut self) {
        match self.binner.as_mut() {
            Some(binner) => binner.append_empty_until_end(),
            None => panic!(),
        }
    }
}

impl TimeBinnableTy for ChannelEvents {
    type TimeBinner = ChannelEventsTimeBinner;

    fn time_binner_new(
        &self,
        binrange: BinnedRangeEnum,
        do_time_weight: bool,
        emit_empty_bins: bool,
    ) -> Self::TimeBinner {
        trace!("TimeBinnableTy for ChannelEvents make ChannelEventsTimeBinner");
        // TODO probably wrong?
        let (binner, status) = match self {
            ChannelEvents::Events(_events) => (None, ConnStatus::Connect),
            ChannelEvents::Status(_status) => (None, ConnStatus::Connect),
        };
        ChannelEventsTimeBinner {
            binrange,
            do_time_weight,
            emit_empty_bins,
            conn_state: status,
            binner,
        }
    }
}
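
// A minimal usage sketch of the TimeBinnableTy path above (names and the event payload are
// placeholders, not taken from a test in this crate):
/*
let mut item = ChannelEvents::Events(events_boxed);
let mut binner = TimeBinnableTy::time_binner_new(&item, binrange, true, false);
TimeBinnerTy::ingest(&mut binner, &mut item);
if TimeBinnerTy::bins_ready_count(&binner) > 0 {
    let bins: Option<Box<dyn TimeBinned>> = TimeBinnerTy::bins_ready(&mut binner);
}
*/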

// TODO remove type
#[derive(Debug, Serialize, Deserialize)]
pub struct ChannelEventsCollectorOutput {}
|
||||
|
||||
@@ -3,11 +3,7 @@ use crate::timebin::ChooseIndicesForTimeBin;
|
||||
use crate::timebin::ChooseIndicesForTimeBinEvents;
|
||||
use crate::timebin::TimeAggregatorCommonV0Func;
|
||||
use crate::timebin::TimeAggregatorCommonV0Trait;
|
||||
use crate::timebin::TimeBinnerCommonV0Func;
|
||||
use crate::timebin::TimeBinnerCommonV0Trait;
|
||||
use crate::IsoDateTime;
|
||||
use crate::TimeBinnableType;
|
||||
use crate::TimeBinnableTypeAggregator;
|
||||
use err::Error;
|
||||
use items_0::collect_s::CollectableDyn;
|
||||
use items_0::collect_s::CollectedDyn;
|
||||
@@ -16,9 +12,6 @@ use items_0::collect_s::ToJsonResult;
|
||||
use items_0::container::ByteEstimate;
|
||||
use items_0::overlap::HasTimestampDeque;
|
||||
use items_0::scalar_ops::ScalarOps;
|
||||
use items_0::timebin::TimeBinnable;
|
||||
use items_0::timebin::TimeBinned;
|
||||
use items_0::timebin::TimeBinner;
|
||||
use items_0::AppendAllFrom;
|
||||
use items_0::Appendable;
|
||||
use items_0::AsAnyMut;
|
||||
@@ -178,12 +171,6 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> items_0::IntoTimeBinnable for EventsDim0<STY> {
|
||||
fn into_time_binnable(self) -> Box<dyn TimeBinnable> {
|
||||
Box::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY> WithLen for EventsDim0<STY> {
|
||||
fn len(&self) -> usize {
|
||||
self.tss.len()
|
||||
@@ -243,24 +230,6 @@ impl<STY> ChooseIndicesForTimeBin for EventsDim0<STY> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY> TimeBinnableType for EventsDim0<STY>
|
||||
where
|
||||
STY: ScalarOps,
|
||||
{
|
||||
type Output = BinsDim0<STY>;
|
||||
type Aggregator = EventsDim0Aggregator<STY>;
|
||||
|
||||
fn aggregator(range: SeriesRange, x_bin_count: usize, do_time_weight: bool) -> Self::Aggregator {
|
||||
panic!("TODO remove, should no longer be used");
|
||||
let self_name = any::type_name::<Self>();
|
||||
debug!(
|
||||
"TimeBinnableType for {self_name} aggregator() range {:?} x_bin_count {} do_time_weight {}",
|
||||
range, x_bin_count, do_time_weight
|
||||
);
|
||||
Self::Aggregator::new(range, do_time_weight)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct EventsDim0ChunkOutput<STY> {
|
||||
tss: VecDeque<u64>,
|
||||
@@ -545,320 +514,6 @@ impl<STY> Drop for EventsDim0Aggregator<STY> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> TimeAggregatorCommonV0Trait for EventsDim0Aggregator<STY> {
|
||||
type Input = <Self as TimeBinnableTypeAggregator>::Input;
|
||||
type Output = <Self as TimeBinnableTypeAggregator>::Output;
|
||||
|
||||
fn type_name() -> &'static str {
|
||||
Self::type_name()
|
||||
}
|
||||
|
||||
fn common_range_current(&self) -> &SeriesRange {
|
||||
&self.range
|
||||
}
|
||||
|
||||
fn common_ingest_unweight_range(&mut self, item: &Self::Input, r: core::ops::Range<usize>) {
|
||||
panic!("TODO common_ingest_unweight_range");
|
||||
for (&ts, val) in item.tss.range(r.clone()).zip(item.values.range(r)) {
|
||||
self.apply_event_unweight(val.clone());
|
||||
self.count += 1;
|
||||
self.last_ts = ts;
|
||||
}
|
||||
}
|
||||
|
||||
fn common_ingest_one_before(&mut self, item: &Self::Input, j: usize) {
|
||||
trace_ingest_item!("{} common_ingest_one_before {:?} {:?}", Self::type_name(), j, item);
|
||||
self.apply_min_max_lst(item.values[j].clone());
|
||||
self.last_ts = item.tss[j];
|
||||
}
|
||||
|
||||
fn common_ingest_range(&mut self, item: &Self::Input, r: core::ops::Range<usize>) {
|
||||
trace_ingest_item!(
|
||||
"{} common_ingest_range {:?} {:?} lst {:?}",
|
||||
Self::type_name(),
|
||||
r,
|
||||
item,
|
||||
self.minmaxlst
|
||||
);
|
||||
// panic!("common_ingest_range");
|
||||
let beg = self.range.beg_u64();
|
||||
for (&ts, val) in item.tss.range(r.clone()).zip(item.values.range(r)) {
|
||||
if ts > beg {
|
||||
self.apply_event_time_weight(ts);
|
||||
} else {
|
||||
trace_ingest_item!("{} common_ingest_range init minmaxlst {:?}", Self::type_name(), val);
|
||||
self.apply_min_max_lst(val.clone());
|
||||
}
|
||||
self.count += 1;
|
||||
self.last_ts = ts;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> EventsDim0Aggregator<STY> {
|
||||
fn type_name() -> &'static str {
|
||||
any::type_name::<Self>()
|
||||
}
|
||||
|
||||
pub fn new(range: SeriesRange, do_time_weight: bool) -> Self {
|
||||
trace_init!("{}::new", Self::type_name());
|
||||
let int_ts = range.beg_u64();
|
||||
Self {
|
||||
range,
|
||||
count: 0,
|
||||
minmaxlst: None,
|
||||
sumc: 0,
|
||||
sum: 0.,
|
||||
int_ts,
|
||||
last_ts: 0,
|
||||
do_time_weight,
|
||||
events_ignored_count: 0,
|
||||
items_seen: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// TODO reduce clone.. optimize via more traits to factor the trade-offs?
|
||||
fn apply_min_max_lst(&mut self, val: STY) {
|
||||
trace_ingest_event!(
|
||||
"apply_min_max_lst val {:?} count {} sumc {:?} minmaxlst {:?}",
|
||||
val,
|
||||
self.count,
|
||||
self.sumc,
|
||||
self.minmaxlst,
|
||||
);
|
||||
if let Some((min, max, lst)) = self.minmaxlst.as_mut() {
|
||||
if *min > val {
|
||||
*min = val.clone();
|
||||
}
|
||||
if *max < val {
|
||||
*max = val.clone();
|
||||
}
|
||||
*lst = val.clone();
|
||||
} else {
|
||||
self.minmaxlst = Some((val.clone(), val.clone(), val.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_event_unweight(&mut self, val: STY) {
|
||||
error!("TODO check again result_reset_unweight");
|
||||
err::todo();
|
||||
let vf = val.as_prim_f32_b();
|
||||
self.apply_min_max_lst(val);
|
||||
if vf.is_nan() {
|
||||
} else {
|
||||
self.sum += vf;
|
||||
self.sumc += 1;
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_event_time_weight(&mut self, px: u64) {
|
||||
if let Some((_, _, v)) = self.minmaxlst.as_ref() {
|
||||
trace_ingest_event!("apply_event_time_weight with v {v:?}");
|
||||
let vf = v.as_prim_f32_b();
|
||||
let v2 = v.clone();
|
||||
self.apply_min_max_lst(v2);
|
||||
self.sumc += 1;
|
||||
let w = (px - self.int_ts) as f32 * 1e-9;
|
||||
if false {
|
||||
trace!(
|
||||
"int_ts {:10} px {:8} w {:8.1} vf {:8.1} sum {:8.1}",
|
||||
self.int_ts / MS,
|
||||
px / MS,
|
||||
w,
|
||||
vf,
|
||||
self.sum
|
||||
);
|
||||
}
|
||||
if vf.is_nan() {
|
||||
} else {
|
||||
self.sum += vf * w;
|
||||
}
|
||||
self.int_ts = px;
|
||||
} else {
|
||||
debug_ingest!("apply_event_time_weight minmaxlst None");
|
||||
}
|
||||
}
|
||||
|
||||
fn ingest_unweight(&mut self, item: &<Self as TimeBinnableTypeAggregator>::Input) {
|
||||
TimeAggregatorCommonV0Func::ingest_unweight(self, item)
|
||||
}
|
||||
|
||||
fn ingest_time_weight(&mut self, item: &<Self as TimeBinnableTypeAggregator>::Input) {
|
||||
TimeAggregatorCommonV0Func::ingest_time_weight(self, item)
|
||||
}
|
||||
|
||||
fn reset_values(&mut self, lst: STY, range: SeriesRange) {
|
||||
self.int_ts = range.beg_u64();
|
||||
trace_init!("ON RESET SET int_ts {:10}", self.int_ts);
|
||||
self.range = range;
|
||||
self.count = 0;
|
||||
self.sum = 0.;
|
||||
self.sumc = 0;
|
||||
self.minmaxlst = Some((lst.clone(), lst.clone(), lst));
|
||||
self.items_seen = 0;
|
||||
}
|
||||
|
||||
fn result_reset_unweight(&mut self, range: SeriesRange) -> BinsDim0<STY> {
|
||||
error!("TODO result_reset_unweight");
|
||||
panic!("TODO result_reset_unweight");
|
||||
if let Some((min, max, lst)) = self.minmaxlst.take() {
|
||||
let avg = if self.sumc > 0 {
|
||||
self.sum / self.sumc as f32
|
||||
} else {
|
||||
STY::zero_b().as_prim_f32_b()
|
||||
};
|
||||
let ret = if self.range.is_time() {
|
||||
BinsDim0 {
|
||||
ts1s: [self.range.beg_u64()].into(),
|
||||
ts2s: [self.range.end_u64()].into(),
|
||||
cnts: [self.count].into(),
|
||||
mins: [min].into(),
|
||||
maxs: [max].into(),
|
||||
avgs: [avg].into(),
|
||||
lsts: [lst.clone()].into(),
|
||||
dim0kind: Some(self.range.dim0kind()),
|
||||
}
|
||||
} else {
|
||||
error!("TODO result_reset_unweight");
|
||||
err::todoval()
|
||||
};
|
||||
self.reset_values(lst, range);
|
||||
ret
|
||||
} else {
|
||||
// TODO add check that nothing is different from initial values, or reset without lst.
|
||||
BinsDim0::empty()
|
||||
}
|
||||
}
|
||||
|
||||
fn result_reset_time_weight(&mut self, range: SeriesRange) -> BinsDim0<STY> {
|
||||
// TODO check callsite for correct expand status.
|
||||
trace_binning!(
|
||||
"result_reset_time_weight calls apply_event_time_weight range {:?} items_seen {} count {}",
|
||||
self.range,
|
||||
self.items_seen,
|
||||
self.count
|
||||
);
|
||||
let range_beg = self.range.beg_u64();
|
||||
let range_end = self.range.end_u64();
|
||||
if self.range.is_time() {
|
||||
self.apply_event_time_weight(range_end);
|
||||
} else {
|
||||
error!("TODO result_reset_time_weight");
|
||||
err::todoval()
|
||||
}
|
||||
if let Some((min, max, lst)) = self.minmaxlst.take() {
|
||||
let avg = if self.sumc > 0 {
|
||||
self.sum / (self.range.delta_u64() as f32 * 1e-9)
|
||||
} else {
|
||||
lst.as_prim_f32_b()
|
||||
};
|
||||
let max = if min > max {
|
||||
// TODO count
|
||||
debug!("min > max");
|
||||
min.clone()
|
||||
} else {
|
||||
max
|
||||
};
|
||||
let avg = {
|
||||
let g = min.as_prim_f32_b();
|
||||
if avg < g {
|
||||
debug!("avg < min");
|
||||
g
|
||||
} else {
|
||||
avg
|
||||
}
|
||||
};
|
||||
let avg = {
|
||||
let g = max.as_prim_f32_b();
|
||||
if avg > g {
|
||||
debug!("avg > max");
|
||||
g
|
||||
} else {
|
||||
avg
|
||||
}
|
||||
};
|
||||
let ret = if self.range.is_time() {
|
||||
BinsDim0 {
|
||||
ts1s: [range_beg].into(),
|
||||
ts2s: [range_end].into(),
|
||||
cnts: [self.count].into(),
|
||||
mins: [min].into(),
|
||||
maxs: [max].into(),
|
||||
avgs: [avg].into(),
|
||||
lsts: [lst.clone()].into(),
|
||||
dim0kind: Some(self.range.dim0kind()),
|
||||
}
|
||||
} else {
|
||||
error!("TODO result_reset_time_weight");
|
||||
err::todoval()
|
||||
};
|
||||
self.reset_values(lst, range);
|
||||
ret
|
||||
} else {
|
||||
// TODO add check that nothing is different from initial values, or reset without lst.
|
||||
BinsDim0::empty()
|
||||
}
|
||||
}
|
||||
}
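
// In short, the time-weighted path above accumulates, per bin (see apply_event_time_weight
// and result_reset_time_weight):
//
//     sum += v_last * (ts - int_ts) * 1e-9          // on each event inside the bin
//     sum += v_last * (range_end - int_ts) * 1e-9   // final flush in result_reset_time_weight
//     avg  = sum / (range_delta_ns * 1e-9)          // when sumc > 0, otherwise the last value
//
// where v_last is the most recent value tracked in minmaxlst and int_ts advances to each
// weighted timestamp.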
|
||||
|
||||
impl<STY: ScalarOps> TimeBinnableTypeAggregator for EventsDim0Aggregator<STY> {
|
||||
type Input = EventsDim0<STY>;
|
||||
type Output = BinsDim0<STY>;
|
||||
|
||||
fn range(&self) -> &SeriesRange {
|
||||
&self.range
|
||||
}
|
||||
|
||||
fn ingest(&mut self, item: &Self::Input) {
|
||||
trace_ingest_item!("{} ingest {} events", Self::type_name(), item.len());
|
||||
if false {
|
||||
for (i, &ts) in item.tss.iter().enumerate() {
|
||||
trace_ingest_event!("{} ingest {:6} {:20}", Self::type_name(), i, ts);
|
||||
}
|
||||
}
|
||||
if self.do_time_weight {
|
||||
self.ingest_time_weight(item)
|
||||
} else {
|
||||
self.ingest_unweight(item)
|
||||
}
|
||||
}
|
||||
|
||||
fn result_reset(&mut self, range: SeriesRange) -> Self::Output {
|
||||
trace_binning!("result_reset {:?}", range);
|
||||
if self.do_time_weight {
|
||||
self.result_reset_time_weight(range)
|
||||
} else {
|
||||
self.result_reset_unweight(range)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> TimeBinnable for EventsDim0<STY> {
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
) -> Box<dyn TimeBinner> {
|
||||
trace_init!(
|
||||
"<{} as items_0::timebin::TimeBinnable>::time_binner_new",
|
||||
self.type_name()
|
||||
);
|
||||
// TODO get rid of unwrap
|
||||
let ret = EventsDim0TimeBinner::<STY>::new(binrange, do_time_weight, emit_empty_bins).unwrap();
|
||||
Box::new(ret)
|
||||
}
|
||||
|
||||
fn to_box_to_json_result(&self) -> Box<dyn items_0::collect_s::ToJsonResult> {
|
||||
let k = serde_json::to_value(self).unwrap();
|
||||
Box::new(k) as _
|
||||
}
|
||||
|
||||
fn to_container_bins(&self) -> Box<dyn items_0::timebin::BinningggContainerBinsDyn> {
|
||||
panic!("logic error must not get used on events")
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY> TypeName for EventsDim0<STY> {
|
||||
fn type_name(&self) -> String {
|
||||
let self_name = any::type_name::<Self>();
|
||||
@@ -892,14 +547,6 @@ macro_rules! try_to_container_events {
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> Events for EventsDim0<STY> {
|
||||
fn as_time_binnable_ref(&self) -> &dyn TimeBinnable {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_time_binnable_mut(&mut self) -> &mut dyn TimeBinnable {
|
||||
self
|
||||
}
|
||||
|
||||
fn verify(&self) -> bool {
|
||||
let mut good = true;
|
||||
let n = self.tss.len();
|
||||
@@ -1143,161 +790,6 @@ impl<STY: ScalarOps> Events for EventsDim0<STY> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct EventsDim0TimeBinner<STY: ScalarOps> {
|
||||
binrange: BinnedRangeEnum,
|
||||
rix: usize,
|
||||
rng: Option<SeriesRange>,
|
||||
agg: EventsDim0Aggregator<STY>,
|
||||
ready: Option<<EventsDim0Aggregator<STY> as TimeBinnableTypeAggregator>::Output>,
|
||||
range_final: bool,
|
||||
emit_empty_bins: bool,
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> EventsDim0TimeBinner<STY> {
|
||||
fn type_name() -> &'static str {
|
||||
any::type_name::<Self>()
|
||||
}
|
||||
|
||||
pub fn new(binrange: BinnedRangeEnum, do_time_weight: bool, emit_empty_bins: bool) -> Result<Self, Error> {
|
||||
trace_init!("{}::new binrange {:?}", Self::type_name(), binrange);
|
||||
let rng = binrange
|
||||
.range_at(0)
|
||||
.ok_or_else(|| Error::with_msg_no_trace("empty binrange"))?;
|
||||
trace_init!("{}::new rng {:?}", Self::type_name(), rng);
|
||||
let agg = EventsDim0Aggregator::new(rng, do_time_weight);
|
||||
let ret = Self {
|
||||
binrange,
|
||||
rix: 0,
|
||||
rng: Some(agg.range().clone()),
|
||||
agg,
|
||||
ready: None,
|
||||
range_final: false,
|
||||
emit_empty_bins,
|
||||
};
|
||||
Ok(ret)
|
||||
}
|
||||
|
||||
fn next_bin_range(&mut self) -> Option<SeriesRange> {
|
||||
self.rix += 1;
|
||||
if let Some(rng) = self.binrange.range_at(self.rix) {
|
||||
trace_binning!("{} next_bin_range {:?}", Self::type_name(), rng);
|
||||
Some(rng)
|
||||
} else {
|
||||
trace_binning!("{} next_bin_range None", Self::type_name());
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> TimeBinnerCommonV0Trait for EventsDim0TimeBinner<STY> {
|
||||
type Input = <EventsDim0Aggregator<STY> as TimeBinnableTypeAggregator>::Input;
|
||||
type Output = <EventsDim0Aggregator<STY> as TimeBinnableTypeAggregator>::Output;
|
||||
|
||||
fn type_name() -> &'static str {
|
||||
Self::type_name()
|
||||
}
|
||||
|
||||
fn common_bins_ready_count(&self) -> usize {
|
||||
match &self.ready {
|
||||
Some(k) => k.len(),
|
||||
None => 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn common_range_current(&self) -> &SeriesRange {
|
||||
self.agg.range()
|
||||
}
|
||||
|
||||
fn common_has_more_range(&self) -> bool {
|
||||
self.rng.is_some()
|
||||
}
|
||||
|
||||
fn common_next_bin_range(&mut self) -> Option<SeriesRange> {
|
||||
self.next_bin_range()
|
||||
}
|
||||
|
||||
fn common_set_current_range(&mut self, range: Option<SeriesRange>) {
|
||||
self.rng = range;
|
||||
}
|
||||
|
||||
fn common_take_or_append_all_from(&mut self, item: Self::Output) {
|
||||
let mut item = item;
|
||||
match self.ready.as_mut() {
|
||||
Some(ready) => {
|
||||
ready.append_all_from(&mut item);
|
||||
}
|
||||
None => {
|
||||
self.ready = Some(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn common_result_reset(&mut self, range: Option<SeriesRange>) -> Self::Output {
|
||||
self.agg.result_reset(range.unwrap_or_else(|| {
|
||||
SeriesRange::TimeRange(netpod::range::evrange::NanoRange {
|
||||
beg: u64::MAX,
|
||||
end: u64::MAX,
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
fn common_agg_ingest(&mut self, item: &mut Self::Input) {
|
||||
self.agg.ingest(item)
|
||||
}
|
||||
|
||||
fn common_has_lst(&self) -> bool {
|
||||
self.agg.minmaxlst.is_some()
|
||||
}
|
||||
|
||||
fn common_feed_lst(&mut self, item: &mut Self::Input) {
|
||||
if self.agg.minmaxlst.is_none() {
|
||||
if let Some(val) = item.values.front() {
|
||||
self.agg.apply_min_max_lst(val.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> TimeBinner for EventsDim0TimeBinner<STY> {
|
||||
fn ingest(&mut self, item: &mut dyn TimeBinnable) {
|
||||
trace_ingest_item!("<{} as TimeBinner>::ingest {:?}", Self::type_name(), item);
|
||||
TimeBinnerCommonV0Func::ingest(self, item)
|
||||
}
|
||||
|
||||
fn bins_ready_count(&self) -> usize {
|
||||
TimeBinnerCommonV0Trait::common_bins_ready_count(self)
|
||||
}
|
||||
|
||||
fn bins_ready(&mut self) -> Option<Box<dyn TimeBinned>> {
|
||||
match self.ready.take() {
|
||||
Some(k) => Some(Box::new(k)),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn push_in_progress(&mut self, push_empty: bool) {
|
||||
TimeBinnerCommonV0Func::push_in_progress(self, push_empty)
|
||||
}
|
||||
|
||||
fn cycle(&mut self) {
|
||||
TimeBinnerCommonV0Func::cycle(self)
|
||||
}
|
||||
|
||||
fn set_range_complete(&mut self) {
|
||||
self.range_final = true;
|
||||
}
|
||||
|
||||
fn empty(&self) -> Box<dyn TimeBinned> {
|
||||
let ret = <EventsDim0Aggregator<STY> as TimeBinnableTypeAggregator>::Output::empty();
|
||||
Box::new(ret)
|
||||
}
|
||||
|
||||
fn append_empty_until_end(&mut self) {
|
||||
// nothing to do for events
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY> Appendable<STY> for EventsDim0<STY>
|
||||
where
|
||||
STY: ScalarOps,
|
||||
@@ -1417,129 +909,3 @@ mod test_serde_opt {
|
||||
assert_eq!(s, r#"{"a":null,"b":null}"#);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
fn binner_00() {
    let mut ev1 = EventsDim0::empty();
    ev1.push(MS * 1200, 3, 1.2f32);
    ev1.push(MS * 3200, 3, 3.2f32);
    let binrange = BinnedRangeEnum::from_custom(TsNano::from_ns(SEC), 0, 10);
    let mut binner = ev1.time_binner_new(binrange, true, false);
    binner.ingest(ev1.as_time_binnable_mut());
    eprintln!("{:?}", binner);
    // TODO add actual asserts
}

#[test]
fn binner_01() {
    let mut ev1 = EventsDim0::empty();
    ev1.push(MS * 1200, 3, 1.2f32);
    ev1.push(MS * 1300, 3, 1.3);
    ev1.push(MS * 2100, 3, 2.1);
    ev1.push(MS * 2300, 3, 2.3);
    let binrange = BinnedRangeEnum::from_custom(TsNano::from_ns(SEC), 0, 10);
    let mut binner = ev1.time_binner_new(binrange, true, false);
    binner.ingest(ev1.as_time_binnable_mut());
    eprintln!("{:?}", binner);
    // TODO add actual asserts
}
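
// The "TODO add actual asserts" above could be filled in along the lines of the
// events_timebin_ingest_continuous_00 test further below: flush the in-progress bin, take
// the ready bins and downcast to the concrete bin container. Sketch only; the expected
// values would still need to be worked out:
/*
binner.push_in_progress(false);
let ready = binner.bins_ready().expect("bins should be ready");
let got: &BinsDim0<f32> = ready.as_any_ref().downcast_ref().unwrap();
assert_eq!(got.cnts.len(), got.avgs.len());
*/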
|
||||
|
||||
/*
|
||||
TODO adapt and enable
|
||||
#[test]
|
||||
fn bin_binned_01() {
|
||||
use binsdim0::MinMaxAvgDim0Bins;
|
||||
let edges = vec![SEC * 1000, SEC * 1010, SEC * 1020, SEC * 1030];
|
||||
let inp0 = <MinMaxAvgDim0Bins<u32> as NewEmpty>::empty(Shape::Scalar);
|
||||
let mut time_binner = inp0.time_binner_new(edges, true);
|
||||
let inp1 = MinMaxAvgDim0Bins::<u32> {
|
||||
ts1s: vec![SEC * 1000, SEC * 1010],
|
||||
ts2s: vec![SEC * 1010, SEC * 1020],
|
||||
counts: vec![1, 1],
|
||||
mins: vec![3, 4],
|
||||
maxs: vec![10, 9],
|
||||
avgs: vec![7., 6.],
|
||||
};
|
||||
assert_eq!(time_binner.bins_ready_count(), 0);
|
||||
time_binner.ingest(&inp1);
|
||||
assert_eq!(time_binner.bins_ready_count(), 1);
|
||||
time_binner.push_in_progress(false);
|
||||
assert_eq!(time_binner.bins_ready_count(), 2);
|
||||
// From here on, pushing any more should not change the bin count:
|
||||
time_binner.push_in_progress(false);
|
||||
assert_eq!(time_binner.bins_ready_count(), 2);
|
||||
// On the other hand, cycling should add one more zero-bin:
|
||||
time_binner.cycle();
|
||||
assert_eq!(time_binner.bins_ready_count(), 3);
|
||||
time_binner.cycle();
|
||||
assert_eq!(time_binner.bins_ready_count(), 3);
|
||||
let bins = time_binner.bins_ready().expect("bins should be ready");
|
||||
eprintln!("bins: {:?}", bins);
|
||||
assert_eq!(time_binner.bins_ready_count(), 0);
|
||||
assert_eq!(bins.counts(), &[1, 1, 0]);
|
||||
// TODO use proper float-compare logic:
|
||||
assert_eq!(bins.mins(), &[3., 4., 0.]);
|
||||
assert_eq!(bins.maxs(), &[10., 9., 0.]);
|
||||
assert_eq!(bins.avgs(), &[7., 6., 0.]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bin_binned_02() {
|
||||
use binsdim0::MinMaxAvgDim0Bins;
|
||||
let edges = vec![SEC * 1000, SEC * 1020];
|
||||
let inp0 = <MinMaxAvgDim0Bins<u32> as NewEmpty>::empty(Shape::Scalar);
|
||||
let mut time_binner = inp0.time_binner_new(edges, true);
|
||||
let inp1 = MinMaxAvgDim0Bins::<u32> {
|
||||
ts1s: vec![SEC * 1000, SEC * 1010],
|
||||
ts2s: vec![SEC * 1010, SEC * 1020],
|
||||
counts: vec![1, 1],
|
||||
mins: vec![3, 4],
|
||||
maxs: vec![10, 9],
|
||||
avgs: vec![7., 6.],
|
||||
};
|
||||
assert_eq!(time_binner.bins_ready_count(), 0);
|
||||
time_binner.ingest(&inp1);
|
||||
assert_eq!(time_binner.bins_ready_count(), 0);
|
||||
time_binner.cycle();
|
||||
assert_eq!(time_binner.bins_ready_count(), 1);
|
||||
time_binner.cycle();
|
||||
//assert_eq!(time_binner.bins_ready_count(), 2);
|
||||
let bins = time_binner.bins_ready().expect("bins should be ready");
|
||||
eprintln!("bins: {:?}", bins);
|
||||
assert_eq!(time_binner.bins_ready_count(), 0);
|
||||
assert_eq!(bins.counts(), &[2]);
|
||||
assert_eq!(bins.mins(), &[3.]);
|
||||
assert_eq!(bins.maxs(), &[10.]);
|
||||
assert_eq!(bins.avgs(), &[13. / 2.]);
|
||||
}
|
||||
*/
|
||||
|
||||
#[test]
|
||||
fn events_timebin_ingest_continuous_00() {
|
||||
let binrange = BinnedRangeEnum::Time(netpod::BinnedRange {
|
||||
bin_len: TsNano::from_ns(SEC * 2),
|
||||
bin_off: 9,
|
||||
bin_cnt: 20,
|
||||
});
|
||||
let do_time_weight = true;
|
||||
let mut bins = EventsDim0::<u32>::empty();
|
||||
bins.push(SEC * 20, 1, 20);
|
||||
bins.push(SEC * 23, 2, 23);
|
||||
let mut binner = bins
|
||||
.as_time_binnable_ref()
|
||||
.time_binner_new(binrange, do_time_weight, false);
|
||||
binner.ingest(&mut bins);
|
||||
//binner.push_in_progress(true);
|
||||
let ready = binner.bins_ready();
|
||||
let got = ready.unwrap();
|
||||
let got: &BinsDim0<u32> = got.as_any_ref().downcast_ref().unwrap();
|
||||
let mut exp = BinsDim0::empty();
|
||||
// exp.push(SEC * 18, SEC * 20, 0, 0, 0, 0., None);
|
||||
exp.push(SEC * 20, SEC * 22, 1, 20, 20, 20., 20);
|
||||
assert!(items_0::test::f32_iter_cmp_near(
|
||||
got.avgs.clone(),
|
||||
exp.avgs.clone(),
|
||||
0.0001,
|
||||
0.0001
|
||||
));
|
||||
}
|
||||
|
||||
@@ -8,7 +8,6 @@ use items_0::collect_s::ToJsonResult;
|
||||
use items_0::container::ByteEstimate;
|
||||
use items_0::isodate::IsoDateTime;
|
||||
use items_0::scalar_ops::ScalarOps;
|
||||
use items_0::timebin::TimeBinnable;
|
||||
use items_0::timebin::TimeBinnableTy;
|
||||
use items_0::timebin::TimeBinnerTy;
|
||||
use items_0::AsAnyMut;
|
||||
@@ -339,30 +338,6 @@ impl TimeBinnableTy for EventsDim0Enum {
|
||||
}
|
||||
|
||||
// NOTE just a dummy because currently we don't use this for time binning
|
||||
impl TimeBinnable for EventsDim0Enum {
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
) -> Box<dyn items_0::timebin::TimeBinner> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn to_container_bins(&self) -> Box<dyn items_0::timebin::BinningggContainerBinsDyn> {
|
||||
panic!("logic error must not get used on events")
|
||||
}
|
||||
}
|
||||
|
||||
impl items_0::IntoTimeBinnable for EventsDim0Enum {
|
||||
fn into_time_binnable(self) -> Box<dyn TimeBinnable> {
|
||||
Box::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct EventsDim0EnumChunkOutput {
|
||||
@@ -373,14 +348,6 @@ pub struct EventsDim0EnumChunkOutput {
|
||||
}
|
||||
|
||||
impl Events for EventsDim0Enum {
|
||||
fn as_time_binnable_ref(&self) -> &dyn items_0::timebin::TimeBinnable {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn as_time_binnable_mut(&mut self) -> &mut dyn items_0::timebin::TimeBinnable {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn verify(&self) -> bool {
|
||||
todo!()
|
||||
}
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
use crate::binsdim0::BinsDim0;
|
||||
use crate::eventsxbindim0::EventsXbinDim0;
|
||||
use crate::IsoDateTime;
|
||||
use crate::TimeBinnableType;
|
||||
use crate::TimeBinnableTypeAggregator;
|
||||
use err::Error;
|
||||
use items_0::collect_s::CollectableDyn;
|
||||
use items_0::collect_s::CollectableType;
|
||||
@@ -12,9 +10,6 @@ use items_0::collect_s::ToJsonResult;
|
||||
use items_0::container::ByteEstimate;
|
||||
use items_0::overlap::HasTimestampDeque;
|
||||
use items_0::scalar_ops::ScalarOps;
|
||||
use items_0::timebin::TimeBinnable;
|
||||
use items_0::timebin::TimeBinned;
|
||||
use items_0::timebin::TimeBinner;
|
||||
use items_0::Appendable;
|
||||
use items_0::AsAnyMut;
|
||||
use items_0::AsAnyRef;
|
||||
@@ -148,12 +143,6 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> items_0::IntoTimeBinnable for EventsDim1<STY> {
|
||||
fn into_time_binnable(self) -> Box<dyn TimeBinnable> {
|
||||
Box::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY> WithLen for EventsDim1<STY> {
|
||||
fn len(&self) -> usize {
|
||||
self.tss.len()
|
||||
@@ -186,18 +175,6 @@ impl<STY: ScalarOps> HasTimestampDeque for EventsDim1<STY> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY> TimeBinnableType for EventsDim1<STY>
|
||||
where
|
||||
STY: ScalarOps,
|
||||
{
|
||||
type Output = BinsDim0<STY>;
|
||||
type Aggregator = EventsDim1Aggregator<STY>;
|
||||
|
||||
fn aggregator(_range: SeriesRange, _x_bin_count: usize, _do_time_weight: bool) -> Self::Aggregator {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct EventsDim1ChunkOutput<STY> {
|
||||
tss: VecDeque<u64>,
|
||||
@@ -483,42 +460,6 @@ impl<STY: ScalarOps> EventsDim1Aggregator<STY> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> TimeBinnableTypeAggregator for EventsDim1Aggregator<STY> {
|
||||
type Input = EventsDim1<STY>;
|
||||
type Output = BinsDim0<STY>;
|
||||
|
||||
fn range(&self) -> &SeriesRange {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
|
||||
fn ingest(&mut self, _item: &Self::Input) {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
|
||||
fn result_reset(&mut self, _range: SeriesRange) -> Self::Output {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> TimeBinnable for EventsDim1<STY> {
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
_binrange: BinnedRangeEnum,
|
||||
_do_time_weight: bool,
|
||||
_emit_empty_bins: bool,
|
||||
) -> Box<dyn TimeBinner> {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
|
||||
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult> {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
|
||||
fn to_container_bins(&self) -> Box<dyn items_0::timebin::BinningggContainerBinsDyn> {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY> items_0::TypeName for EventsDim1<STY> {
|
||||
fn type_name(&self) -> String {
|
||||
let sty = std::any::type_name::<STY>();
|
||||
@@ -533,14 +474,6 @@ impl<STY: ScalarOps> EventsNonObj for EventsDim1<STY> {
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> Events for EventsDim1<STY> {
|
||||
fn as_time_binnable_ref(&self) -> &dyn TimeBinnable {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_time_binnable_mut(&mut self) -> &mut dyn TimeBinnable {
|
||||
self
|
||||
}
|
||||
|
||||
fn verify(&self) -> bool {
|
||||
let mut good = true;
|
||||
let mut ts_max = 0;
|
||||
@@ -747,47 +680,6 @@ impl<STY: ScalarOps> Events for EventsDim1<STY> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct EventsDim1TimeBinner<STY: ScalarOps> {
|
||||
_t1: PhantomData<STY>,
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> EventsDim1TimeBinner<STY> {}
|
||||
|
||||
impl<STY: ScalarOps> TimeBinner for EventsDim1TimeBinner<STY> {
|
||||
fn bins_ready_count(&self) -> usize {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
|
||||
fn bins_ready(&mut self) -> Option<Box<dyn TimeBinned>> {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
|
||||
fn ingest(&mut self, _item: &mut dyn TimeBinnable) {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
|
||||
fn push_in_progress(&mut self, _push_empty: bool) {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
|
||||
fn cycle(&mut self) {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
|
||||
fn set_range_complete(&mut self) {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
|
||||
fn empty(&self) -> Box<dyn TimeBinned> {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
|
||||
fn append_empty_until_end(&mut self) {
|
||||
panic!("TODO remove")
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY> Appendable<Vec<STY>> for EventsDim1<STY>
|
||||
where
|
||||
STY: ScalarOps,
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
use crate::binsxbindim0::BinsXbinDim0;
|
||||
use crate::IsoDateTime;
|
||||
use crate::TimeBinnableType;
|
||||
use crate::TimeBinnableTypeAggregator;
|
||||
use err::Error;
|
||||
use items_0::collect_s::CollectableDyn;
|
||||
use items_0::collect_s::CollectableType;
|
||||
@@ -11,9 +9,6 @@ use items_0::collect_s::ToJsonResult;
|
||||
use items_0::container::ByteEstimate;
|
||||
use items_0::overlap::HasTimestampDeque;
|
||||
use items_0::scalar_ops::ScalarOps;
|
||||
use items_0::timebin::TimeBinnable;
|
||||
use items_0::timebin::TimeBinned;
|
||||
use items_0::timebin::TimeBinner;
|
||||
use items_0::timebin::TimeBinnerTy;
|
||||
use items_0::AsAnyMut;
|
||||
use items_0::AsAnyRef;
|
||||
@@ -155,12 +150,6 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> items_0::IntoTimeBinnable for EventsXbinDim0<STY> {
|
||||
fn into_time_binnable(self) -> Box<dyn TimeBinnable> {
|
||||
Box::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY> WithLen for EventsXbinDim0<STY> {
|
||||
fn len(&self) -> usize {
|
||||
self.tss.len()
|
||||
@@ -197,14 +186,6 @@ impl<STY: ScalarOps> EventsNonObj for EventsXbinDim0<STY> {
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> Events for EventsXbinDim0<STY> {
|
||||
fn as_time_binnable_ref(&self) -> &dyn TimeBinnable {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_time_binnable_mut(&mut self) -> &mut dyn TimeBinnable {
|
||||
self
|
||||
}
|
||||
|
||||
fn verify(&self) -> bool {
|
||||
let mut good = true;
|
||||
let mut ts_max = 0;
|
||||
@@ -389,198 +370,6 @@ impl<STY: ScalarOps> Events for EventsXbinDim0<STY> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct EventsXbinDim0TimeBinner<STY: ScalarOps> {
|
||||
binrange: BinnedRangeEnum,
|
||||
rix: usize,
|
||||
rng: Option<SeriesRange>,
|
||||
agg: EventsXbinDim0Aggregator<STY>,
|
||||
ready: Option<<EventsXbinDim0Aggregator<STY> as TimeBinnableTypeAggregator>::Output>,
|
||||
range_final: bool,
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> EventsXbinDim0TimeBinner<STY> {
|
||||
fn type_name() -> &'static str {
|
||||
any::type_name::<Self>()
|
||||
}
|
||||
|
||||
fn new(binrange: BinnedRangeEnum, do_time_weight: bool) -> Result<Self, Error> {
|
||||
trace!("{}::new binrange {:?}", Self::type_name(), binrange);
|
||||
let rng = binrange
|
||||
.range_at(0)
|
||||
.ok_or_else(|| Error::with_msg_no_trace("empty binrange"))?;
|
||||
trace!("{}::new rng {rng:?}", Self::type_name());
|
||||
let agg = EventsXbinDim0Aggregator::new(rng, do_time_weight);
|
||||
trace!("{} agg range {:?}", Self::type_name(), agg.range());
|
||||
let ret = Self {
|
||||
binrange,
|
||||
rix: 0,
|
||||
rng: Some(agg.range.clone()),
|
||||
agg,
|
||||
ready: None,
|
||||
range_final: false,
|
||||
};
|
||||
Ok(ret)
|
||||
}
|
||||
|
||||
fn next_bin_range(&mut self) -> Option<SeriesRange> {
|
||||
self.rix += 1;
|
||||
if let Some(rng) = self.binrange.range_at(self.rix) {
|
||||
trace!("{} next_bin_range {:?}", Self::type_name(), rng);
|
||||
Some(rng)
|
||||
} else {
|
||||
trace!("{} next_bin_range None", Self::type_name());
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY: ScalarOps> TimeBinner for EventsXbinDim0TimeBinner<STY> {
|
||||
fn bins_ready_count(&self) -> usize {
|
||||
match &self.ready {
|
||||
Some(k) => k.len(),
|
||||
None => 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn bins_ready(&mut self) -> Option<Box<dyn TimeBinned>> {
|
||||
match self.ready.take() {
|
||||
Some(k) => Some(Box::new(k)),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn ingest(&mut self, item: &mut dyn TimeBinnable) {
|
||||
panic!("TODO remove TimeBinner for EventsXbinDim0TimeBinner ingest")
|
||||
}
|
||||
|
||||
fn push_in_progress(&mut self, push_empty: bool) {
|
||||
trace!("{}::push_in_progress push_empty {push_empty}", Self::type_name());
|
||||
// TODO expand should be derived from AggKind. Is it still required after all?
|
||||
// TODO here, the expand means that agg will assume that the current value is kept constant during
|
||||
// the rest of the time range.
|
||||
if self.rng.is_none() {
|
||||
} else {
|
||||
let expand = true;
|
||||
let range_next = self.next_bin_range();
|
||||
trace!("\n+++++\n+++++\n{} range_next {:?}", Self::type_name(), range_next);
|
||||
self.rng = range_next.clone();
|
||||
let mut bins = if let Some(range_next) = range_next {
|
||||
self.agg.result_reset(range_next)
|
||||
} else {
|
||||
// Acts as placeholder
|
||||
let range_next = NanoRange {
|
||||
beg: u64::MAX - 1,
|
||||
end: u64::MAX,
|
||||
};
|
||||
self.agg.result_reset(range_next.into())
|
||||
};
|
||||
if bins.len() != 1 {
|
||||
error!("{}::push_in_progress bins.len() {}", Self::type_name(), bins.len());
|
||||
return;
|
||||
} else {
|
||||
if push_empty || bins.counts()[0] != 0 {
|
||||
match self.ready.as_mut() {
|
||||
Some(ready) => {
|
||||
ready.append_all_from(&mut bins);
|
||||
}
|
||||
None => {
|
||||
self.ready = Some(bins);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn cycle(&mut self) {
|
||||
trace!("{}::cycle", Self::type_name());
|
||||
// TODO refactor this logic.
|
||||
let n = self.bins_ready_count();
|
||||
self.push_in_progress(true);
|
||||
if self.bins_ready_count() == n {
|
||||
let range_next = self.next_bin_range();
|
||||
self.rng = range_next.clone();
|
||||
if let Some(range) = range_next {
|
||||
let mut bins = BinsXbinDim0::empty();
|
||||
if range.is_time() {
|
||||
bins.append_zero(range.beg_u64(), range.end_u64());
|
||||
} else {
|
||||
error!("TODO {}::cycle is_pulse", Self::type_name());
|
||||
}
|
||||
match self.ready.as_mut() {
|
||||
Some(ready) => {
|
||||
ready.append_all_from(&mut bins);
|
||||
}
|
||||
None => {
|
||||
self.ready = Some(bins);
|
||||
}
|
||||
}
|
||||
if self.bins_ready_count() <= n {
|
||||
error!("failed to push a zero bin");
|
||||
}
|
||||
} else {
|
||||
warn!("cycle: no in-progress bin pushed, but also no more bin to add as zero-bin");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn set_range_complete(&mut self) {
|
||||
self.range_final = true;
|
||||
}
|
||||
|
||||
fn empty(&self) -> Box<dyn TimeBinned> {
|
||||
let ret = <EventsXbinDim0Aggregator<STY> as TimeBinnableTypeAggregator>::Output::empty();
|
||||
Box::new(ret)
|
||||
}
|
||||
|
||||
fn append_empty_until_end(&mut self) {
|
||||
// nothing to do for events
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY> TimeBinnableType for EventsXbinDim0<STY>
|
||||
where
|
||||
STY: ScalarOps,
|
||||
{
|
||||
type Output = BinsXbinDim0<STY>;
|
||||
type Aggregator = EventsXbinDim0Aggregator<STY>;
|
||||
|
||||
fn aggregator(range: SeriesRange, x_bin_count: usize, do_time_weight: bool) -> Self::Aggregator {
|
||||
let name = any::type_name::<Self>();
|
||||
debug!(
|
||||
"TimeBinnableType for {} aggregator() range {:?} x_bin_count {} do_time_weight {}",
|
||||
name, range, x_bin_count, do_time_weight
|
||||
);
|
||||
Self::Aggregator::new(range, do_time_weight)
|
||||
}
|
||||
}
|
||||
|
||||
impl<STY> TimeBinnable for EventsXbinDim0<STY>
|
||||
where
|
||||
STY: ScalarOps,
|
||||
{
|
||||
fn time_binner_new(
|
||||
&self,
|
||||
binrange: BinnedRangeEnum,
|
||||
do_time_weight: bool,
|
||||
emit_empty_bins: bool,
|
||||
) -> Box<dyn items_0::timebin::TimeBinner> {
|
||||
// TODO respect emit_empty_bins
|
||||
let ret = EventsXbinDim0TimeBinner::<STY>::new(binrange, do_time_weight).unwrap();
|
||||
Box::new(ret)
|
||||
}
|
||||
|
||||
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult> {
|
||||
let k = serde_json::to_value(self).unwrap();
|
||||
Box::new(k) as _
|
||||
}
|
||||
|
||||
fn to_container_bins(&self) -> Box<dyn items_0::timebin::BinningggContainerBinsDyn> {
|
||||
panic!("logic error must not get used on events")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct EventsXbinDim0Aggregator<STY>
|
||||
where
|
||||
@@ -803,35 +592,6 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY> TimeBinnableTypeAggregator for EventsXbinDim0Aggregator<NTY>
|
||||
where
|
||||
NTY: ScalarOps,
|
||||
{
|
||||
type Input = EventsXbinDim0<NTY>;
|
||||
type Output = BinsXbinDim0<NTY>;
|
||||
|
||||
fn range(&self) -> &SeriesRange {
|
||||
&self.range
|
||||
}
|
||||
|
||||
fn ingest(&mut self, item: &Self::Input) {
|
||||
trace!("{} ingest", Self::type_name());
|
||||
if self.do_time_weight {
|
||||
self.ingest_time_weight(item)
|
||||
} else {
|
||||
self.ingest_unweight(item)
|
||||
}
|
||||
}
|
||||
|
||||
fn result_reset(&mut self, range: SeriesRange) -> Self::Output {
|
||||
if self.do_time_weight {
|
||||
self.result_reset_time_weight(range)
|
||||
} else {
|
||||
self.result_reset_unweight(range)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct EventsXbinDim0CollectorOutput<NTY> {
|
||||
#[serde(rename = "tsAnchor")]
|
||||
|
||||
@@ -173,21 +173,6 @@ impl Mergeable for Box<dyn Events> {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO rename to `Typed`
|
||||
pub trait TimeBinnableType: Send + Unpin + Empty {
|
||||
type Output: TimeBinnableType;
|
||||
type Aggregator: TimeBinnableTypeAggregator<Input = Self, Output = Self::Output> + Send + Unpin;
|
||||
fn aggregator(range: SeriesRange, bin_count: usize, do_time_weight: bool) -> Self::Aggregator;
|
||||
}
|
||||
|
||||
pub trait TimeBinnableTypeAggregator: Send {
|
||||
type Input: TimeBinnableType;
|
||||
type Output: TimeBinnableType;
|
||||
fn range(&self) -> &SeriesRange;
|
||||
fn ingest(&mut self, item: &Self::Input);
|
||||
fn result_reset(&mut self, range: SeriesRange) -> Self::Output;
|
||||
}
|
||||
|
||||
pub trait ChannelEventsInput: Stream<Item = Sitemty<ChannelEvents>> + EventTransform + Send {}
|
||||
|
||||
impl<T> ChannelEventsInput for T where T: Stream<Item = Sitemty<ChannelEvents>> + EventTransform + Send {}
|
||||
|
||||
@@ -2,15 +2,11 @@ use futures_util::Future;
|
||||
use futures_util::FutureExt;
|
||||
use futures_util::Stream;
|
||||
use futures_util::StreamExt;
|
||||
use items_0::collect_s::CollectableDyn;
|
||||
use items_0::streamitem::RangeCompletableItem;
|
||||
use items_0::streamitem::Sitemty;
|
||||
use items_0::streamitem::StreamItem;
|
||||
use items_0::timebin::TimeBinnable;
|
||||
use items_0::transform::CollectableStreamTrait;
|
||||
use items_0::transform::EventStreamTrait;
|
||||
use items_0::transform::EventTransform;
|
||||
use items_0::transform::TimeBinnableStreamTrait;
|
||||
use items_0::transform::TransformProperties;
|
||||
use items_0::transform::WithTransformProperties;
|
||||
use items_0::Events;
|
||||
@@ -292,66 +288,3 @@ where
|
||||
INP: Stream<Item = Sitemty<T>> + Send,
|
||||
{
|
||||
}
|
||||
|
||||
/// Wrap any event stream and provide transformation properties.
|
||||
pub struct PlainTimeBinnableStream<INP, T>
|
||||
where
|
||||
T: TimeBinnable,
|
||||
INP: Stream<Item = Sitemty<T>> + Send,
|
||||
{
|
||||
inp: Pin<Box<INP>>,
|
||||
}
|
||||
|
||||
impl<INP, T> PlainTimeBinnableStream<INP, T>
|
||||
where
|
||||
T: TimeBinnable,
|
||||
INP: Stream<Item = Sitemty<T>> + Send,
|
||||
{
|
||||
pub fn new(inp: INP) -> Self {
|
||||
Self { inp: Box::pin(inp) }
|
||||
}
|
||||
}
|
||||
|
||||
impl<INP, T> Stream for PlainTimeBinnableStream<INP, T>
|
||||
where
|
||||
T: TimeBinnable,
|
||||
INP: Stream<Item = Sitemty<T>> + Send,
|
||||
{
|
||||
type Item = Sitemty<Box<dyn TimeBinnable>>;
|
||||
|
||||
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
|
||||
use Poll::*;
|
||||
match self.inp.poll_next_unpin(cx) {
|
||||
Ready(Some(item)) => Ready(Some(match item {
|
||||
Ok(item) => Ok(match item {
|
||||
StreamItem::DataItem(item) => StreamItem::DataItem(match item {
|
||||
RangeCompletableItem::RangeComplete => RangeCompletableItem::RangeComplete,
|
||||
RangeCompletableItem::Data(item) => RangeCompletableItem::Data(Box::new(item)),
|
||||
}),
|
||||
StreamItem::Log(item) => StreamItem::Log(item),
|
||||
StreamItem::Stats(item) => StreamItem::Stats(item),
|
||||
}),
|
||||
Err(e) => Err(e),
|
||||
})),
|
||||
Ready(None) => Ready(None),
|
||||
Pending => Pending,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<INP, T> WithTransformProperties for PlainTimeBinnableStream<INP, T>
|
||||
where
|
||||
T: TimeBinnable,
|
||||
INP: Stream<Item = Sitemty<T>> + Send,
|
||||
{
|
||||
fn query_transform_properties(&self) -> TransformProperties {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
impl<INP, T> TimeBinnableStreamTrait for PlainTimeBinnableStream<INP, T>
|
||||
where
|
||||
T: TimeBinnable,
|
||||
INP: Stream<Item = Sitemty<T>> + Send,
|
||||
{
|
||||
}
|
||||
|
||||
@@ -3,8 +3,6 @@ pub mod eventsdim0;
|
||||
#[cfg(test)]
|
||||
pub mod eventsdim1;
|
||||
|
||||
use crate::binnedcollected::BinnedCollected;
|
||||
use crate::binsdim0::BinsDim0CollectedResult;
|
||||
use crate::channelevents::ConnStatus;
|
||||
use crate::channelevents::ConnStatusEvent;
|
||||
use crate::eventsdim0::EventsDim0;
|
||||
@@ -17,9 +15,6 @@ use crate::testgen::make_some_boxed_d0_f32;
|
||||
use crate::ChannelEvents;
|
||||
use crate::Error;
|
||||
use crate::Events;
|
||||
use crate::IsoDateTime;
|
||||
use chrono::TimeZone;
|
||||
use chrono::Utc;
|
||||
use futures_util::stream;
|
||||
use futures_util::StreamExt;
|
||||
use items_0::streamitem::sitem_data;
|
||||
@@ -31,13 +26,8 @@ use items_0::Empty;
|
||||
use items_0::WithLen;
|
||||
use netpod::log::*;
|
||||
use netpod::range::evrange::NanoRange;
|
||||
use netpod::range::evrange::SeriesRange;
|
||||
use netpod::timeunits::*;
|
||||
use netpod::AggKind;
|
||||
use netpod::BinnedRange;
|
||||
use netpod::BinnedRangeEnum;
|
||||
use netpod::ScalarType;
|
||||
use netpod::Shape;
|
||||
use std::time::Duration;
|
||||
use std::time::Instant;
|
||||
|
||||
@@ -348,48 +338,6 @@ fn merge_02() {
|
||||
runfut(fut).unwrap();
|
||||
}
|
||||
|
||||
#[test]
fn bin_00() {
    let fut = async {
        let inp1 = {
            let mut vec = Vec::new();
            for j in 0..2 {
                let mut events = EventsDim0::empty();
                for i in 10 * j..10 * (1 + j) {
                    events.push(SEC * i, i, 17f32);
                }
                push_evd0(&mut vec, Box::new(events));
            }
            vec
        };
        let inp1 = futures_util::stream::iter(inp1);
        let inp1 = Box::pin(inp1);
        let inp2 = Box::pin(futures_util::stream::empty()) as _;
        let stream = crate::merger::Merger::new(vec![inp1, inp2], Some(32));
        let range = NanoRange {
            beg: SEC * 0,
            end: SEC * 100,
        };
        let binrange = BinnedRangeEnum::covering_range(range.into(), 10).unwrap();
        let deadline = Instant::now() + Duration::from_millis(4000);
        let do_time_weight = true;
        let emit_empty_bins = false;
        let res = BinnedCollected::new(
            binrange,
            ScalarType::F32,
            Shape::Scalar,
            do_time_weight,
            emit_empty_bins,
            deadline,
            Box::pin(stream),
        )
        .await?;
        // TODO assert
        Ok::<_, Error>(())
    };
    runfut(fut).unwrap();
}

#[test]
fn bin_01() {
    const TSBASE: u64 = SEC * 1600000000;
@@ -419,22 +367,22 @@ fn bin_01() {
            beg: TSBASE + SEC * 1,
            end: TSBASE + SEC * 10,
        };
        let binrange = BinnedRangeEnum::covering_range(range.into(), 9).map_err(|e| format!("{e}"))?;
        let stream = Box::pin(stream);
        let deadline = Instant::now() + Duration::from_millis(4000);
        let do_time_weight = true;
        let emit_empty_bins = false;
        let res = BinnedCollected::new(
            binrange,
            ScalarType::F32,
            Shape::Scalar,
            do_time_weight,
            emit_empty_bins,
            deadline,
            Box::pin(stream),
        )
        .await?;
        eprintln!("res {:?}", res);
        // let binrange = BinnedRangeEnum::covering_range(range.into(), 9).map_err(|e| format!("{e}"))?;
        // let stream = Box::pin(stream);
        // let deadline = Instant::now() + Duration::from_millis(4000);
        // let do_time_weight = true;
        // let emit_empty_bins = false;
        // let res = BinnedCollected::new(
        //     binrange,
        //     ScalarType::F32,
        //     Shape::Scalar,
        //     do_time_weight,
        //     emit_empty_bins,
        //     deadline,
        //     Box::pin(stream),
        // )
        // .await?;
        // eprintln!("res {:?}", res);
        Ok::<_, Error>(())
    };
    runfut(fut).unwrap();
@@ -454,7 +402,7 @@ fn binned_timeout_00() {
    eprintln!("binned_timeout_01 ENTER");
    let fut = async {
        eprintln!("binned_timeout_01 IN FUT");
        let mut events_vec1 = Vec::new();
        let mut events_vec1: Vec<Sitemty<ChannelEvents>> = Vec::new();
        let mut t = TSBASE;
        for _ in 0..20 {
            let mut events = EventsDim0::empty();
@@ -481,29 +429,31 @@ fn binned_timeout_00() {
        let binrange = BinnedRangeEnum::covering_range(range.into(), 9)?;
        eprintln!("edges1: {:?}", edges);
        //eprintln!("edges2: {:?}", binrange.edges());
        let inp1 = Box::pin(inp1);
        let timeout = Duration::from_millis(400);
        let deadline = Instant::now() + timeout;
        let do_time_weight = true;
        let emit_empty_bins = false;
        let res = BinnedCollected::new(
            binrange,
            ScalarType::F32,
            Shape::Scalar,
            do_time_weight,
            emit_empty_bins,
            deadline,
            inp1,
        )
        .await?;
        let r2: &BinsDim0CollectedResult<f32> = res.result.as_any_ref().downcast_ref().expect("res seems wrong type");
        eprintln!("rs: {r2:?}");
        assert_eq!(SEC * r2.ts_anchor_sec(), TSBASE + SEC);
        assert_eq!(r2.counts(), &[10, 10, 10]);
        assert_eq!(r2.mins(), &[3.0, 2.0, 3.0]);
        assert_eq!(r2.maxs(), &[3.2, 2.2, 3.2]);
        assert_eq!(r2.missing_bins(), 6);
        assert_eq!(r2.continue_at(), Some(IsoDateTime::from_ns_u64(TSBASE + SEC * 4)));
        // let inp1 = Box::pin(inp1);
        // let deadline = Instant::now() + timeout;
        // let do_time_weight = true;
        // let emit_empty_bins = false;
        // TODO with new binning

        // let res = BinnedCollected::new(
        //     binrange,
        //     ScalarType::F32,
        //     Shape::Scalar,
        //     do_time_weight,
        //     emit_empty_bins,
        //     deadline,
        //     inp1,
        // )
        // .await?;
        // let r2: &BinsDim0CollectedResult<f32> = res.result.as_any_ref().downcast_ref().expect("res seems wrong type");
        // eprintln!("rs: {r2:?}");
        // assert_eq!(SEC * r2.ts_anchor_sec(), TSBASE + SEC);
        // assert_eq!(r2.counts(), &[10, 10, 10]);
        // assert_eq!(r2.mins(), &[3.0, 2.0, 3.0]);
        // assert_eq!(r2.maxs(), &[3.2, 2.2, 3.2]);
        // assert_eq!(r2.missing_bins(), 6);
        // assert_eq!(r2.continue_at(), Some(IsoDateTime::from_ns_u64(TSBASE + SEC * 4)));
        Ok::<_, Error>(())
    };
    runfut(fut).unwrap();

@@ -1,4 +1,3 @@
use items_0::timebin::TimeBinnable;
use items_0::AppendEmptyBin;
use items_0::Empty;
use items_0::HasNonemptyFirstBin;
@@ -21,94 +20,6 @@ macro_rules! trace_ingest_event { ($($arg:tt)*) => ( if false { trace!($($arg)*)
#[allow(unused)]
macro_rules! trace_ingest_detail { ($($arg:tt)*) => ( if true { trace!($($arg)*); }) }

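// Editor's note (assumption, added for clarity): this trait factors out the
// state handling that the concrete time binners share, so the free functions
// in TimeBinnerCommonV0Func below can drive any binner that implements it.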
pub trait TimeBinnerCommonV0Trait {
    type Input: 'static;
    type Output: WithLen + Empty + AppendEmptyBin + HasNonemptyFirstBin + 'static;
    fn type_name() -> &'static str;
    fn common_bins_ready_count(&self) -> usize;
    fn common_range_current(&self) -> &SeriesRange;
    fn common_has_more_range(&self) -> bool;
    fn common_next_bin_range(&mut self) -> Option<SeriesRange>;
    fn common_set_current_range(&mut self, range: Option<SeriesRange>);
    fn common_take_or_append_all_from(&mut self, item: Self::Output);
    fn common_result_reset(&mut self, range: Option<SeriesRange>) -> Self::Output;
    fn common_agg_ingest(&mut self, item: &mut Self::Input);
    fn common_has_lst(&self) -> bool;
    fn common_feed_lst(&mut self, item: &mut Self::Input);
}

pub struct TimeBinnerCommonV0Func {}

impl TimeBinnerCommonV0Func {
    pub fn ingest<B>(binner: &mut B, item: &mut dyn TimeBinnable)
    where
        B: TimeBinnerCommonV0Trait,
    {
        panic!("TimeBinnerCommonV0Func::ingest")
    }

    fn agg_ingest<B>(binner: &mut B, item: &mut <B as TimeBinnerCommonV0Trait>::Input)
    where
        B: TimeBinnerCommonV0Trait,
    {
        //self.agg.ingest(item);
        <B as TimeBinnerCommonV0Trait>::common_agg_ingest(binner, item)
    }

    pub fn push_in_progress<B>(binner: &mut B, push_empty: bool)
    where
        B: TimeBinnerCommonV0Trait,
    {
        let self_name = B::type_name();
        trace_ingest_item!("{self_name}::push_in_progress push_empty {push_empty}");
        // TODO expand should be derived from AggKind. Is it still required after all?
        // TODO here, the expand means that agg will assume that the current value is kept constant during
        // the rest of the time range.
        if B::common_has_more_range(binner) {
            let range_next = TimeBinnerCommonV0Trait::common_next_bin_range(binner);
            B::common_set_current_range(binner, range_next.clone());
            let bins = TimeBinnerCommonV0Trait::common_result_reset(binner, range_next);
            if bins.len() != 1 {
                error!("{self_name}::push_in_progress bins.len() {}", bins.len());
                return;
            } else {
                if push_empty || HasNonemptyFirstBin::has_nonempty_first_bin(&bins) {
                    TimeBinnerCommonV0Trait::common_take_or_append_all_from(binner, bins);
                }
            }
        }
    }

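    // Editor's note (added, not in the original): cycle() closes the bin in
    // progress and, if that produced nothing, appends one explicit empty bin for
    // the next range so downstream consumers still see one bin per grid slot.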
    pub fn cycle<B>(binner: &mut B)
    where
        B: TimeBinnerCommonV0Trait,
    {
        let self_name = any::type_name::<Self>();
        trace_ingest_item!("{self_name}::cycle");
        // TODO refactor this logic.
        let n = TimeBinnerCommonV0Trait::common_bins_ready_count(binner);
        TimeBinnerCommonV0Func::push_in_progress(binner, true);
        if TimeBinnerCommonV0Trait::common_bins_ready_count(binner) == n {
            let range_next = TimeBinnerCommonV0Trait::common_next_bin_range(binner);
            B::common_set_current_range(binner, range_next.clone());
            if let Some(range) = range_next {
                let mut bins = <B as TimeBinnerCommonV0Trait>::Output::empty();
                if range.is_time() {
                    bins.append_empty_bin(range.beg_u64(), range.end_u64());
                } else {
                    error!("TODO {self_name}::cycle is_pulse");
                }
                TimeBinnerCommonV0Trait::common_take_or_append_all_from(binner, bins);
                if TimeBinnerCommonV0Trait::common_bins_ready_count(binner) <= n {
                    error!("failed to push a zero bin");
                }
            } else {
                warn!("cycle: no in-progress bin pushed, but also no more bin to add as zero-bin");
            }
        }
    }
}

pub trait ChooseIndicesForTimeBin {
    fn choose_indices_unweight(&self, beg: u64, end: u64) -> (Option<usize>, usize, usize);
    fn choose_indices_timeweight(&self, beg: u64, end: u64) -> (Option<usize>, usize, usize);

@@ -1,50 +1,12 @@
use crate::events2::prepare::StmtsCache;
use crate::worker::ScyllaQueue;
use err::Error;
use futures_util::Future;
use futures_util::StreamExt;
use items_0::timebin::BinsBoxed;
use items_0::timebin::TimeBinned;
use items_2::binning::container_bins::ContainerBins;
use netpod::log::*;
use netpod::ChannelTyped;
use netpod::DtMs;
use netpod::PreBinnedPatchCoordEnum;
use netpod::TsNano;
use scylla::Session as ScySession;
use std::ops::Range;
use std::pin::Pin;
use std::task::Context;
use std::task::Poll;

#[allow(unused)]
struct WriteFut<'a> {
    chn: &'a ChannelTyped,
    coord: &'a PreBinnedPatchCoordEnum,
    data: &'a dyn TimeBinned,
    scy: &'a ScySession,
}

impl<'a> WriteFut<'a> {
    #[allow(unused)]
    fn new(
        chn: &'a ChannelTyped,
        coord: &'a PreBinnedPatchCoordEnum,
        data: &'a dyn TimeBinned,
        scy: &'a ScySession,
    ) -> Self {
        Self { chn, coord, data, scy }
    }
}

impl<'a> Future for WriteFut<'a> {
    type Output = Result<(), Error>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
        let _ = cx;
        Poll::Ready(Ok(()))
    }
}

pub struct ScyllaCacheReadProvider {
    scyqueue: ScyllaQueue,

@@ -2,9 +2,7 @@ use crate::collect::Collect;
use crate::collect::CollectResult;
use crate::test::runfut;
use crate::transform::build_event_transform;
use crate::transform::build_time_binning_transform;
use crate::transform::EventsToTimeBinnable;
use crate::transform::TimeBinnableToCollectable;
use err::Error;
use futures_util::stream;
use futures_util::StreamExt;
@@ -49,82 +47,82 @@ fn collect_channel_events_00() -> Result<(), Error> {
    runfut(fut)
}

#[test]
fn collect_channel_events_01() -> Result<(), Error> {
    let fut = async {
        let evs0 = make_some_boxed_d0_f32(20, SEC * 10, SEC * 1, 0, 28736487);
        let evs1 = make_some_boxed_d0_f32(20, SEC * 30, SEC * 1, 0, 882716583);
        let stream = stream::iter(vec![
            sitem_data(evs0),
            sitem_data(evs1),
            Ok(StreamItem::DataItem(RangeCompletableItem::RangeComplete)),
        ]);
        // TODO build like in request code
        let deadline = Instant::now() + Duration::from_millis(4000);
        let events_max = 10000;
        let bytes_max = 80 * 10000;
        let stream = PlainEventStream::new(stream);
        let stream = EventsToTimeBinnable::new(stream);
        let stream = TimeBinnableToCollectable::new(stream);
        let stream = Box::pin(stream);
        let res = Collect::new(stream, deadline, events_max, bytes_max, None, None).await?;
        if let CollectResult::Some(res) = res {
            if let Some(res) = res.as_any_ref().downcast_ref::<EventsDim0CollectorOutput<f32>>() {
                eprintln!("Great, a match");
                eprintln!("{res:?}");
                assert_eq!(res.len(), 40);
            } else {
                return Err(Error::with_msg(format!("bad type of collected result")));
            }
            Ok(())
        } else {
            return Err(Error::with_msg(format!("bad type of collected result")));
        }
    };
    runfut(fut)
}

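// Editor's note (not in the original): the next test drives the same collect
// pipeline through an event transform selected via the query URL
// `?binningScheme=pulseIdDiff`, so the collected output is i64 pulse-id
// differences instead of f32 values.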
#[test]
fn collect_channel_events_pulse_id_diff() -> Result<(), Error> {
    let fut = async {
        let trqu = TransformQuery::from_url(&"https://data-api.psi.ch/?binningScheme=pulseIdDiff".parse()?)?;
        info!("{trqu:?}");
        let evs0 = make_some_boxed_d0_f32(20, SEC * 10, SEC * 1, 0, 28736487);
        let evs1 = make_some_boxed_d0_f32(20, SEC * 30, SEC * 1, 0, 882716583);
        let stream = stream::iter(vec![
            sitem_data(evs0),
            sitem_data(evs1),
            Ok(StreamItem::DataItem(RangeCompletableItem::RangeComplete)),
        ]);
        let mut tr = build_event_transform(&trqu)?;
        let stream = stream.map(move |x| {
            on_sitemty_data!(x, |x| {
                let x = tr.0.transform(x);
                Ok(StreamItem::DataItem(RangeCompletableItem::Data(x)))
            })
        });
        let stream = PlainEventStream::new(stream);
        let stream = EventsToTimeBinnable::new(stream);
        let deadline = Instant::now() + Duration::from_millis(4000);
        let events_max = 10000;
        let bytes_max = 80 * 10000;
        let stream = Box::pin(stream);
        let stream = build_time_binning_transform(&trqu, stream)?;
        let stream = TimeBinnableToCollectable::new(stream);
        let stream = Box::pin(stream);
        let res = Collect::new(stream, deadline, events_max, bytes_max, None, None).await?;
        if let CollectResult::Some(res) = res {
            if let Some(res) = res.as_any_ref().downcast_ref::<EventsDim0CollectorOutput<i64>>() {
                eprintln!("Great, a match");
                eprintln!("{res:?}");
                assert_eq!(res.len(), 40);
            } else {
                return Err(Error::with_msg(format!("bad type of collected result")));
            }
            Ok(())
        } else {
            return Err(Error::with_msg(format!("bad type of collected result")));
        }
    };
    runfut(fut)
}

@@ -1,459 +1,445 @@
use crate::collect::collect;
use crate::generators::GenerateI32V00;
use crate::generators::GenerateI32V01;
use crate::itemclone::Itemclone;
use crate::test::runfut;
use crate::timebin::TimeBinnedStream;
use crate::transform::build_event_transform;
use err::Error;
use futures_util::stream;
use futures_util::StreamExt;
use items_0::on_sitemty_data;
use items_0::streamitem::sitem_data;
use items_0::streamitem::RangeCompletableItem;
use items_0::streamitem::StreamItem;
use items_0::timebin::TimeBinnable;
use items_0::timebin::TimeBinned;
use items_0::AppendAllFrom;
use items_0::Empty;
use items_2::binsdim0::BinsDim0;
use items_2::channelevents::ChannelEvents;
use items_2::channelevents::ConnStatus;
use items_2::channelevents::ConnStatusEvent;
use items_2::eventsdim0::EventsDim0;
use items_2::testgen::make_some_boxed_d0_f32;
use netpod::range::evrange::NanoRange;
use netpod::range::evrange::SeriesRange;
use netpod::timeunits::MS;
use netpod::timeunits::SEC;
use netpod::BinnedRangeEnum;
use query::transform::TransformQuery;
use serde_json::Value as JsValue;
use std::collections::VecDeque;
use std::time::Duration;
use std::time::Instant;

fn nano_range_from_str(beg_date: &str, end_date: &str) -> Result<NanoRange, Error> {
    let beg_date = beg_date.parse()?;
    let end_date = end_date.parse()?;
    let range = NanoRange::from_date_time(beg_date, end_date);
    Ok(range)
}

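// Editor's note (added for clarity): time_bin_00 compares each emitted
// BinsDim0<f32> against a queue of expected containers using equal_slack,
// which tolerates small floating point differences.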
#[test]
fn time_bin_00() -> Result<(), Error> {
    let fut = async {
        let range = nano_range_from_str("1970-01-01T00:00:00Z", "1970-01-01T00:00:08Z")?;
        let range = SeriesRange::TimeRange(range);
        let min_bin_count = 8;
        let binned_range = BinnedRangeEnum::covering_range(range, min_bin_count)?;
        let evs0 = make_some_boxed_d0_f32(10, SEC * 1, MS * 500, 0, 1846713782);
        let v00 = ChannelEvents::Events(Box::new(EventsDim0::<f32>::empty()));
        let v01 = ChannelEvents::Events(evs0);
        let v02 = ChannelEvents::Status(Some(ConnStatusEvent::new(MS * 100, ConnStatus::Connect)));
        let v03 = ChannelEvents::Status(Some(ConnStatusEvent::new(MS * 6000, ConnStatus::Disconnect)));
        let stream0 = Box::pin(stream::iter(vec![
            //
            sitem_data(v00),
            sitem_data(v02),
            sitem_data(v01),
            sitem_data(v03),
        ]));
        let mut exps = {
            let mut d = VecDeque::new();
            let bins = BinsDim0::empty();
            d.push_back(bins);
            let mut bins = BinsDim0::empty();
            // Currently can not construct bins without minmaxlst
            // bins.push(SEC * 0, SEC * 1, 0, 0.0, 0.0, 0.0);
            bins.push(SEC * 1, SEC * 2, 2, 0.0535830, 100.0589, 50.05624, 100.0589);
            bins.push(SEC * 2, SEC * 3, 2, 200.06143, 300.07645, 250.06894, 300.07645);
            bins.push(SEC * 3, SEC * 4, 2, 400.08554, 500.05222, 450.06888, 500.05222);
            bins.push(SEC * 4, SEC * 5, 2, 600.0025, 700.09094, 650.04675, 700.09094);
            d.push_back(bins);
            let mut bins = BinsDim0::empty();
            bins.push(SEC * 5, SEC * 6, 2, 800.0619, 900.02844, 850.04517, 900.02844);
            d.push_back(bins);
            d
        };
        let mut binned_stream = TimeBinnedStream::new(stream0, binned_range, true);
        while let Some(item) = binned_stream.next().await {
            eprintln!("{item:?}");
            match item {
                Ok(item) => match item {
                    StreamItem::DataItem(item) => match item {
                        RangeCompletableItem::Data(item) => {
                            if let Some(item) = item.as_any_ref().downcast_ref::<BinsDim0<f32>>() {
                                let exp = exps.pop_front().unwrap();
                                if !item.equal_slack(&exp) {
                                    eprintln!("-----------------------");
                                    eprintln!("item {:?}", item);
                                    eprintln!("-----------------------");
                                    eprintln!("exp {:?}", exp);
                                    eprintln!("-----------------------");
                                    return Err(Error::with_msg_no_trace(format!("bad, content not equal")));
                                }
                            } else {
                                return Err(Error::with_msg_no_trace(format!("bad, got item with unexpected type")));
                            }
                        }
                        RangeCompletableItem::RangeComplete => {}
                    },
                    StreamItem::Log(_) => {}
                    StreamItem::Stats(_) => {}
                },
                Err(e) => Err(e).unwrap(),
            }
        }
        Ok(())
    };
    runfut(fut)
}

#[test]
fn time_bin_01() -> Result<(), Error> {
    let fut = async {
        let range = nano_range_from_str("1970-01-01T00:00:00Z", "1970-01-01T00:00:08Z")?;
        let range = SeriesRange::TimeRange(range);
        let min_bin_count = 8;
        let binned_range = BinnedRangeEnum::covering_range(range, min_bin_count)?;
        let v00 = ChannelEvents::Events(Box::new(EventsDim0::<f32>::empty()));
        let evs0 = make_some_boxed_d0_f32(10, SEC * 1, MS * 500, 0, 1846713782);
        let evs1 = make_some_boxed_d0_f32(10, SEC * 6, MS * 500, 0, 1846713781);
        let v01 = ChannelEvents::Events(evs0);
        let v02 = ChannelEvents::Events(evs1);
        let stream0 = stream::iter(vec![
            //
            sitem_data(v00),
            sitem_data(v01),
            sitem_data(v02),
        ]);
        let stream0 = stream0.then({
            let mut i = 0;
            move |x| {
                let delay = if i == 1 { 2000 } else { 0 };
                i += 1;
                let dur = Duration::from_millis(delay);
                async move {
                    tokio::time::sleep(dur).await;
                    x
                }
            }
        });
        let stream0 = Box::pin(stream0);
        let mut binned_stream = TimeBinnedStream::new(stream0, binned_range, true);
        while let Some(item) = binned_stream.next().await {
            if true {
                eprintln!("{item:?}");
            }
            match item {
                Ok(item) => match item {
                    StreamItem::DataItem(item) => match item {
                        RangeCompletableItem::Data(item) => {
                            if let Some(_) = item.as_any_ref().downcast_ref::<BinsDim0<f32>>() {
                            } else {
                                return Err(Error::with_msg_no_trace(format!("bad, got item with unexpected type")));
                            }
                        }
                        RangeCompletableItem::RangeComplete => {}
                    },
                    StreamItem::Log(_) => {}
                    StreamItem::Stats(_) => {}
                },
                Err(e) => Err(e).unwrap(),
            }
        }
        // TODO assert that we get the bins which are sure to be ready.
        // TODO assert correct numbers.
        // TODO assert that we don't get bins which may be still changing.
        // TODO add similar test case with a RangeComplete event at different places before the timeout.
        Ok(())
    };
    runfut(fut)
}

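// Editor's note (not part of the original): time_bin_02 exercises the collect()
// path and checks the JSON output (tsAnchor, counts, mins, maxs, rangeFinal)
// produced from generated i32 events; the expected values follow from the
// value pattern assumed for GenerateI32V00 in this test.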
#[test]
fn time_bin_02() -> Result<(), Error> {
    let fut = async {
        let do_time_weight = true;
        let deadline = Instant::now() + Duration::from_millis(4000);
        let range = nano_range_from_str("1970-01-01T00:20:04Z", "1970-01-01T00:22:10Z")?;
        let range = SeriesRange::TimeRange(range);
        // TODO add test: 26 bins should result in next higher resolution.
        let min_bin_count = 25;
        let expected_bin_count = 26;
        let binned_range = BinnedRangeEnum::covering_range(range.clone(), min_bin_count)?;
        eprintln!("binned_range: {:?}", binned_range);
        for i in 0.. {
            if let Some(r) = binned_range.range_at(i) {
                eprintln!("Series Range to cover: {r:?}");
            } else {
                break;
            }
        }
        let event_range = binned_range.binned_range_time().full_range();
        let series_range = SeriesRange::TimeRange(event_range);
        // TODO the test stream must be able to generate also one-before (on demand) and RangeComplete (by default).
        let stream = GenerateI32V00::new(0, 1, series_range, true);
        // TODO apply first some box dyn EventTransform which later is provided by TransformQuery.
        // Then the Merge will happen always by default for backends where this is needed.
        // TODO then apply the transform chain for the after-merged-stream.
        let stream = stream.map(|x| {
            let x = on_sitemty_data!(x, |x| Ok(StreamItem::DataItem(RangeCompletableItem::Data(
                Box::new(x) as Box<dyn TimeBinnable>
            ))));
            x
        });
        let stream = Box::pin(stream);
        let mut binned_stream = TimeBinnedStream::new(stream, binned_range.clone(), do_time_weight);
        // From there on it should no longer be necessary to distinguish whether it's still events or time bins.
        // Then, optionally collect for output type like json, or stream as batches.
        // TODO the timebinner should already provide batches to make this efficient.
        if false {
            while let Some(e) = binned_stream.next().await {
                eprintln!("see item {e:?}");
                let _x = on_sitemty_data!(e, |e| {
                    //
                    Ok(StreamItem::DataItem(RangeCompletableItem::Data(e)))
                });
            }
        } else {
            let res = collect(binned_stream, deadline, 200, None, Some(binned_range)).await?;
            assert_eq!(res.len(), expected_bin_count);
            // use crate::json_stream::JsonBytes;
            let v = res.to_json_value()?;
            let d = serde_json::to_vec(&v)?;
            let s = String::from_utf8_lossy(&d);
            eprintln!("{s}");
            let jsval: JsValue = serde_json::from_slice(&d)?;
            {
                let ts_anchor = jsval.get("tsAnchor").unwrap().as_u64().unwrap();
                assert_eq!(ts_anchor, 1200);
            }
            {
                let counts = jsval.get("counts").unwrap().as_array().unwrap();
                assert_eq!(counts.len(), expected_bin_count);
                for v in counts {
                    assert_eq!(v.as_u64().unwrap(), 5);
                }
            }
            {
                let ts1ms = jsval.get("ts1Ms").unwrap().as_array().unwrap();
                let mins = jsval.get("mins").unwrap().as_array().unwrap();
                assert_eq!(mins.len(), expected_bin_count);
                for (ts1ms, min) in ts1ms.iter().zip(mins) {
                    assert_eq!((ts1ms.as_u64().unwrap() / 100) % 1000, min.as_u64().unwrap());
                }
            }
            {
                let ts1ms = jsval.get("ts1Ms").unwrap().as_array().unwrap();
                let maxs = jsval.get("maxs").unwrap().as_array().unwrap();
                assert_eq!(maxs.len(), expected_bin_count);
                for (ts1ms, max) in ts1ms.iter().zip(maxs) {
                    assert_eq!((40 + ts1ms.as_u64().unwrap() / 100) % 1000, max.as_u64().unwrap());
                }
            }
            {
                let range_final = jsval.get("rangeFinal").unwrap().as_bool().unwrap();
                assert_eq!(range_final, true);
            }
        }
        Ok(())
    };
    runfut(fut)
}

// Should fail because of missing empty item.
// But should have some option to suppress the error log for this test case.
#[test]
fn time_bin_03() -> Result<(), Error> {
    // TODO re-enable with error log suppressed.
    if true {
        return Ok(());
    }
    let fut = async {
        let range = nano_range_from_str("1970-01-01T00:00:00Z", "1970-01-01T00:00:08Z")?;
        let range = SeriesRange::TimeRange(range);
        let min_bin_count = 8;
        let binned_range = BinnedRangeEnum::covering_range(range, min_bin_count)?;
        let evs0 = make_some_boxed_d0_f32(10, SEC * 1, MS * 500, 0, 1846713782);
        //let v00 = ChannelEvents::Events(Box::new(EventsDim0::<f32>::empty()));
        let v01 = ChannelEvents::Events(evs0);
        let v02 = ChannelEvents::Status(Some(ConnStatusEvent::new(MS * 100, ConnStatus::Connect)));
        let v03 = ChannelEvents::Status(Some(ConnStatusEvent::new(MS * 6000, ConnStatus::Disconnect)));
        let stream0 = Box::pin(stream::iter(vec![
            //
            //sitem_data(v00),
            sitem_data(v02),
            sitem_data(v01),
            sitem_data(v03),
        ]));
        let mut binned_stream = TimeBinnedStream::new(stream0, binned_range, true);
        while let Some(item) = binned_stream.next().await {
            eprintln!("{item:?}");
            match item {
                Err(e) => {
                    if e.to_string().contains("must emit but can not even create empty A") {
                        return Ok(());
                    } else {
                        return Err(Error::with_msg_no_trace("should not succeed"));
                    }
                }
                _ => {
                    return Err(Error::with_msg_no_trace("should not succeed"));
                }
            }
        }
        return Err(Error::with_msg_no_trace("should not succeed"));
    };
    runfut(fut)
}

// TODO add test case to observe RangeComplete after binning.

#[test]
fn transform_chain_correctness_00() -> Result<(), Error> {
    // TODO
    //type STY = f32;
    //let empty = EventsDim0::<STY>::empty();
    let tq = TransformQuery::default_time_binned();
    build_event_transform(&tq)?;
    Ok(())
}

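// Editor's note (added, not in the original): this test chains two
// TimeBinnedStream stages with different grids (the 48-bin stage feeds the
// 22-bin stage) and uses Itemclone to tee each stage into a channel so both
// intermediate results can be checked against exp1 and exp2.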
#[test]
fn timebin_multi_stage_00() -> Result<(), Error> {
    // TODO chain two timebin stages with different binning grid.
    let fut = async {
        let do_time_weight = true;
        let one_before_range = do_time_weight;
        let range = nano_range_from_str("1970-01-01T00:00:10Z", "1970-01-01T00:01:03Z")?;
        let range = SeriesRange::TimeRange(range);
        let binned_range_0 = BinnedRangeEnum::covering_range(range.clone(), 22)?;
        dbg!(&binned_range_0);
        let range: SeriesRange = binned_range_0.binned_range_time().to_nano_range().into();
        let binned_range_1 = BinnedRangeEnum::covering_range(range.clone(), 48)?;
        dbg!(&binned_range_1);
        let stream_evs = GenerateI32V01::new(0, 1, range.clone(), one_before_range);
        let exp1 = {
            let mut bins = BinsDim0::<i32>::empty();
            for i in 0..54 {
                bins.push(
                    SEC * (10 + i),
                    SEC * (11 + i),
                    2,
                    20 + 2 * i as i32,
                    21 + 2 * i as i32,
                    20.5 + 2. * i as f32,
                    21 + 2 * i as i32,
                );
            }
            bins
        };
        let exp2 = {
            let mut bins = BinsDim0::<i32>::empty();
            for i in 0..27 {
                bins.push(
                    SEC * (10 + 2 * i),
                    SEC * (12 + 2 * i),
                    4,
                    20 + 4 * i as i32,
                    23 + 4 * i as i32,
                    21.5 + 4. * i as f32,
                    23 + 4 * i as i32,
                );
            }
            bins
        };
        // NOTE:
        // can store all bins in cache for which there is some non-empty bin following, or if the container has range-final.
        let (q1tx, q1rx) = async_channel::bounded(128);
        let (q2tx, q2rx) = async_channel::bounded(128);
        let stream_evs = Box::pin(stream_evs);
        let binned_stream = {
            TimeBinnedStream::new(stream_evs, binned_range_1, do_time_weight).map(|x| {
                //eprintln!("STAGE 1 -- {:?}", x);
                x
            })
        };
        let binned_stream = Itemclone::new(binned_stream, q1tx).map(|x| match x {
            Ok(x) => x,
            Err(e) => Err(e),
        });
        let binned_stream = {
            TimeBinnedStream::new(Box::pin(binned_stream), binned_range_0, do_time_weight).map(|x| {
                eprintln!("STAGE -- 2 {:?}", x);
                x
            })
        };
        let binned_stream = Itemclone::new(binned_stream, q2tx).map(|x| match x {
            Ok(x) => x,
            Err(e) => Err(e),
        });
        let mut have_range_final = false;
        let mut binned_stream = binned_stream;
        while let Some(item) = binned_stream.next().await {
            //eprintln!("{item:?}");
            match item {
                Ok(item) => match item {
                    StreamItem::DataItem(item) => match item {
                        RangeCompletableItem::Data(item) => {
                            if let Some(item) = item.as_any_ref().downcast_ref::<BinsDim0<i32>>() {
                                if false {
                                    eprintln!("-----------------------");
                                    eprintln!("item {:?}", item);
                                    eprintln!("-----------------------");
                                }
                            } else {
                                return Err(Error::with_msg_no_trace(format!("bad, got item with unexpected type")));
                            }
                        }
                        RangeCompletableItem::RangeComplete => {
                            have_range_final = true;
                        }
                    },
                    StreamItem::Log(_) => {}
                    StreamItem::Stats(_) => {}
                },
                Err(e) => Err(e).unwrap(),
            }
        }
        assert!(have_range_final);
        {
            eprintln!("---------------------------------------------------------------------");
            let mut coll = BinsDim0::empty();
            let stream = q1rx;
            while let Ok(item) = stream.recv().await {
                //eprintln!("RECV [q1rx] {:?}", item);
                // TODO use the transformed item
                let _item = on_sitemty_data!(item, |mut item: Box<dyn TimeBinned>| {
                    if let Some(k) = item.as_any_mut().downcast_mut::<BinsDim0<i32>>() {
                        coll.append_all_from(k);
                    }
                    sitem_data(item)
                });
            }
            eprintln!("collected 1: {:?}", coll);
            assert_eq!(coll, exp1);
        }
        {
            eprintln!("---------------------------------------------------------------------");
            let mut coll = BinsDim0::empty();
            let stream = q2rx;
            while let Ok(item) = stream.recv().await {
                //eprintln!("RECV [q2rx] {:?}", item);
                // TODO use the transformed item
                let _item = on_sitemty_data!(item, |mut item: Box<dyn TimeBinned>| {
                    if let Some(k) = item.as_any_mut().downcast_mut::<BinsDim0<i32>>() {
                        coll.append_all_from(k);
                    }
                    sitem_data(item)
                });
            }
            eprintln!("collected 1: {:?}", coll);
            assert_eq!(coll, exp2);
        }
        Ok(())
    };
    runfut(fut)
}

@@ -5,13 +5,9 @@ use items_0::collect_s::CollectableDyn;
use items_0::streamitem::RangeCompletableItem;
use items_0::streamitem::Sitemty;
use items_0::streamitem::StreamItem;
use items_0::timebin::TimeBinnable;
use items_0::transform::CollectableStreamBox;
use items_0::transform::CollectableStreamTrait;
use items_0::transform::EventStreamBox;
use items_0::transform::EventStreamTrait;
use items_0::transform::TimeBinnableStreamBox;
use items_0::transform::TimeBinnableStreamTrait;
use items_0::transform::TransformEvent;
use items_0::transform::TransformProperties;
use items_0::transform::WithTransformProperties;
@@ -22,8 +18,6 @@ use query::transform::EventTransformQuery;
use query::transform::TimeBinningTransformQuery;
use query::transform::TransformQuery;
use std::pin::Pin;
use std::task::Context;
use std::task::Poll;

pub fn build_event_transform(tr: &TransformQuery) -> Result<TransformEvent, Error> {
    let trev = tr.get_tr_event();
@@ -65,98 +59,12 @@ impl EventsToTimeBinnable {
    }
}

impl Stream for EventsToTimeBinnable {
    type Item = Sitemty<Box<dyn TimeBinnable>>;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
        use Poll::*;
        match self.inp.poll_next_unpin(cx) {
            Ready(Some(item)) => Ready(Some(match item {
                Ok(item) => Ok(match item {
                    StreamItem::DataItem(item) => StreamItem::DataItem(match item {
                        RangeCompletableItem::RangeComplete => RangeCompletableItem::RangeComplete,
                        RangeCompletableItem::Data(item) => RangeCompletableItem::Data(Box::new(item)),
                    }),
                    StreamItem::Log(item) => StreamItem::Log(item),
                    StreamItem::Stats(item) => StreamItem::Stats(item),
                }),
                Err(e) => Err(e),
            })),
            Ready(None) => Ready(None),
            Pending => Pending,
        }
    }
}

impl WithTransformProperties for EventsToTimeBinnable {
    fn query_transform_properties(&self) -> TransformProperties {
        self.inp.query_transform_properties()
    }
}

impl TimeBinnableStreamTrait for EventsToTimeBinnable {}

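// Editor's note (assumption, added for clarity): TimeBinnableToCollectable
// mirrors the adapter above; it only re-wraps each stream item so the payload
// is boxed as `Box<dyn CollectableDyn>` instead of `Box<dyn TimeBinnable>`.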
pub struct TimeBinnableToCollectable {
    inp: Pin<Box<dyn TimeBinnableStreamTrait>>,
}

impl TimeBinnableToCollectable {
    pub fn new<INP>(inp: INP) -> Self
    where
        INP: TimeBinnableStreamTrait + 'static,
    {
        Self { inp: Box::pin(inp) }
    }
}

impl Stream for TimeBinnableToCollectable {
    type Item = Sitemty<Box<dyn CollectableDyn>>;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
        use Poll::*;
        match self.inp.poll_next_unpin(cx) {
            Ready(Some(item)) => Ready(Some(match item {
                Ok(item) => Ok(match item {
                    StreamItem::DataItem(item) => StreamItem::DataItem(match item {
                        RangeCompletableItem::RangeComplete => RangeCompletableItem::RangeComplete,
                        RangeCompletableItem::Data(item) => RangeCompletableItem::Data(Box::new(item)),
                    }),
                    StreamItem::Log(item) => StreamItem::Log(item),
                    StreamItem::Stats(item) => StreamItem::Stats(item),
                }),
                Err(e) => Err(e),
            })),
            Ready(None) => Ready(None),
            Pending => Pending,
        }
    }
}

impl WithTransformProperties for TimeBinnableToCollectable {
    fn query_transform_properties(&self) -> TransformProperties {
        self.inp.query_transform_properties()
    }
}

impl CollectableStreamTrait for TimeBinnableToCollectable {}

//impl CollectableStreamTrait for Pin<Box<TimeBinnableToCollectable>> {}

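// Editor's note (added, not in the original): with TimeBinningTransformQuery::None
// the input stream is passed through unchanged as a TimeBinnableStreamBox; any
// other variant is still todo!(). A caller in this diff uses it as:
//
//     let stream = build_time_binning_transform(&trqu, stream)?;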
pub fn build_time_binning_transform(
    tr: &TransformQuery,
    inp: Pin<Box<dyn TimeBinnableStreamTrait>>,
) -> Result<TimeBinnableStreamBox, Error> {
    let trev = tr.get_tr_time_binning();
    let res = match trev {
        TimeBinningTransformQuery::None => TimeBinnableStreamBox(inp),
        _ => {
            // TODO apply the desired transformations.
            todo!()
        }
    };
    Ok(res)
}

pub fn build_full_transform_collectable(
    tr: &TransformQuery,
    inp: EventStreamBox,