Move binned type, add tests
@@ -1,7 +1,7 @@
 use err::Error;
 use futures_util::{Stream, StreamExt};
 use items::{RangeCompletableItem, Sitemty, StreamItem};
-use items_0::collect_c::{Collectable, Collector};
+use items_0::collect_c::Collectable;
 use netpod::log::*;
 use std::fmt;
 use std::time::{Duration, Instant};
@@ -29,14 +29,14 @@ pub async fn collect<T, S>(
     stream: S,
     deadline: Instant,
     events_max: u64,
-) -> Result<<<T as Collectable>::Collector as Collector>::Output, Error>
+) -> Result<Box<dyn items_0::collect_c::Collected>, Error>
 where
     S: Stream<Item = Sitemty<T>> + Unpin,
     T: Collectable + fmt::Debug,
 {
     let span = tracing::span!(tracing::Level::TRACE, "collect");
     let fut = async {
-        let mut collector: Option<<T as Collectable>::Collector> = None;
+        let mut collector: Option<Box<dyn items_0::collect_c::Collector>> = None;
         let mut stream = stream;
         let deadline = deadline.into();
         let mut range_complete = false;
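The hunk above switches `collect` from an associated-type output (`<T::Collector as Collector>::Output`) to a type-erased `Box<dyn Collected>`. Below is a minimal, self-contained sketch of that pattern; the trait and type names are hypothetical stand-ins, not the crate's actual `items_0::collect_c` API, and only illustrate why the boxed form lets one signature serve every collectable item type via dynamic dispatch.

```rust
use std::fmt::Debug;

// Hypothetical stand-ins for Collected / Collector / Collectable.
trait Collected: Debug {}

trait Collector {
    fn ingest(&mut self, value: f32);
    fn result(self: Box<Self>) -> Box<dyn Collected>;
}

trait Collectable {
    fn new_collector(&self) -> Box<dyn Collector>;
    fn value(&self) -> f32;
}

#[derive(Debug)]
struct Stats {
    count: usize,
    sum: f32,
}

impl Collected for Stats {}

struct StatsCollector {
    count: usize,
    sum: f32,
}

impl Collector for StatsCollector {
    fn ingest(&mut self, value: f32) {
        self.count += 1;
        self.sum += value;
    }
    fn result(self: Box<Self>) -> Box<dyn Collected> {
        Box::new(Stats { count: self.count, sum: self.sum })
    }
}

struct Sample(f32);

impl Collectable for Sample {
    fn new_collector(&self) -> Box<dyn Collector> {
        Box::new(StatsCollector { count: 0, sum: 0.0 })
    }
    fn value(&self) -> f32 {
        self.0
    }
}

// Type-erased collect: mirrors the `Option<Box<dyn Collector>>` state in the diff.
fn collect_erased<T: Collectable>(items: &[T]) -> Option<Box<dyn Collected>> {
    let mut collector: Option<Box<dyn Collector>> = None;
    for item in items {
        let c = collector.get_or_insert_with(|| item.new_collector());
        c.ingest(item.value());
    }
    collector.map(|c| c.result())
}

fn main() {
    let items = [Sample(1.0), Sample(2.0), Sample(3.0)];
    // Prints: Some(Stats { count: 3, sum: 6.0 })
    println!("{:?}", collect_erased(&items));
}
```

The trade-off is the usual one: the caller no longer sees the concrete output type and must downcast or stay behind the trait, in exchange for a single non-generic return type that heterogeneous collectors can share.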
@@ -2,6 +2,7 @@ use crate::test::runfut;
 use err::Error;
 use futures_util::{stream, StreamExt};
 use items::{sitem_data, RangeCompletableItem, StreamItem};
 use items_0::Empty;
+use items_2::binsdim0::BinsDim0;
 use items_2::channelevents::{ChannelEvents, ConnStatus, ConnStatusEvent};
 use items_2::testgen::make_some_boxed_d0_f32;
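The added import brings the moved binned container `BinsDim0` into the test module alongside the existing stream and test-data helpers. As a rough illustration of the shape such a test can take (build a finite in-memory stream, drive it to completion, assert on the aggregate), here is a hedged sketch that uses only the `futures` facade crate; `block_on` stands in for the repo's own `runfut` helper and none of the repo's item types appear.

```rust
use futures::executor::block_on;
use futures::stream::{self, StreamExt};

fn main() {
    // Synthetic "event batches": three batches of f32 samples.
    let batches: Vec<Vec<f32>> = vec![vec![1.0, 2.0], vec![3.0], vec![4.0, 5.0]];
    // Sum each batch, then fold the per-batch sums over the whole stream.
    let summed: f32 = block_on(
        stream::iter(batches)
            .map(|b| b.into_iter().sum::<f32>())
            .fold(0.0f32, |acc, x| async move { acc + x }),
    );
    assert_eq!(summed, 15.0);
    println!("sum over all batches: {summed}");
}
```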
@@ -3,24 +3,22 @@ use err::Error;
 use futures_util::StreamExt;
 #[allow(unused)]
 use netpod::log::*;
-use netpod::Cluster;
-use serde::Serialize;
+use netpod::query::{BinnedQuery, RawEventsQuery};
+use netpod::{BinnedRange, Cluster};
 use serde_json::Value as JsonValue;
 use std::time::{Duration, Instant};
 
-pub async fn timebinned_json<SER>(query: SER, cluster: &Cluster) -> Result<JsonValue, Error>
-where
-    SER: Serialize,
-{
-    // TODO should be able to ask for data-events only, instead of mixed data and status events.
-    let inps = open_tcp_streams::<_, items_2::channelevents::ChannelEvents>(&query, cluster).await?;
+pub async fn timebinned_json(query: &BinnedQuery, cluster: &Cluster) -> Result<JsonValue, Error> {
+    let binned_range = BinnedRange::covering_range(query.range().clone(), query.bin_count())?;
+    let events_max = 10000;
+    let do_time_weight = query.agg_kind().do_time_weighted();
+    let deadline = Instant::now() + Duration::from_millis(7500);
+    let rawquery = RawEventsQuery::new(query.channel().clone(), query.range().clone(), query.agg_kind().clone());
+    let inps = open_tcp_streams::<_, items_2::channelevents::ChannelEvents>(&rawquery, cluster).await?;
     // TODO propagate also the max-buf-len for the first stage event reader:
     let stream = { items_2::merger::Merger::new(inps, 1) };
-    let events_max = 10000;
-    let do_time_weight = true;
-    let deadline = Instant::now() + Duration::from_millis(7500);
     let stream = Box::pin(stream);
-    let stream = crate::timebin::TimeBinnedStream::new(stream, Vec::new(), do_time_weight, deadline);
+    let stream = crate::timebin::TimeBinnedStream::new(stream, binned_range.edges(), do_time_weight, deadline);
     if false {
         let mut stream = stream;
         let _: Option<items::Sitemty<Box<dyn items_0::TimeBinned>>> = stream.next().await;
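The rewritten `timebinned_json` now takes a typed `BinnedQuery`, derives the bin edges up front via `BinnedRange::covering_range`, and hands those edges to `TimeBinnedStream` instead of an empty `Vec`. The following self-contained sketch only illustrates the underlying binning idea (plain per-bin averaging over half-open intervals `[edges[i], edges[i+1])`, ignoring the time-weighted variant selected by `do_time_weight`); it is not the repo's `TimeBinnedStream` implementation.

```rust
// Average timestamped samples into bins defined by ascending edges.
fn bin_samples(edges: &[u64], samples: &[(u64, f32)]) -> Vec<(usize, f32)> {
    // counts[i] and sums[i] accumulate the samples that fall into bin i.
    let nbins = edges.len().saturating_sub(1);
    let mut counts = vec![0usize; nbins];
    let mut sums = vec![0.0f32; nbins];
    for &(ts, val) in samples {
        // partition_point counts edges <= ts; the bin index is one less.
        let pos = edges.partition_point(|&e| e <= ts);
        if pos >= 1 && pos <= nbins {
            counts[pos - 1] += 1;
            sums[pos - 1] += val;
        }
    }
    counts
        .iter()
        .zip(&sums)
        .map(|(&c, &s)| (c, if c > 0 { s / c as f32 } else { f32::NAN }))
        .collect()
}

fn main() {
    // Three bins: [0,10), [10,20), [20,30).
    let edges = [0u64, 10, 20, 30];
    let samples = [(1u64, 2.0f32), (5, 4.0), (12, 10.0), (25, 1.0)];
    // Expect counts 2, 1, 1 with averages 3.0, 10.0, 1.0.
    println!("{:?}", bin_samples(&edges, &samples));
}
```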