Begin refactor frame handling, update clap
@@ -2,7 +2,7 @@
name = "bitshuffle"
version = "0.0.1-a.0"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2018"
edition = "2021"

[lib]
path = "src/bitshuffle.rs"

@@ -22,7 +22,7 @@ serde_derive = "1.0"
serde_json = "1.0"
chrono = "0.4"
url = "2.2.2"
clap = { version = "3.0.6", features = ["derive", "cargo"] }
clap = { version = "4.0.22", features = ["derive", "cargo"] }
lazy_static = "1.4.0"
err = { path = "../err" }
taskrun = { path = "../taskrun" }
@@ -1,11 +1,12 @@
use clap::ArgAction;
use clap::Parser;

#[derive(Debug, Parser)]
#[clap(name = "daqbuffer", author, version)]
#[command(author, version)]
pub struct Opts {
#[clap(short, long, parse(from_occurrences))]
pub verbose: i32,
#[clap(subcommand)]
#[arg(long, action(ArgAction::Count))]
pub verbose: u8,
#[command(subcommand)]
pub subcmd: SubCmd,
}

@@ -21,19 +22,19 @@ pub enum SubCmd {

#[derive(Debug, Parser)]
pub struct Retrieval {
#[clap(long)]
#[arg(long)]
pub config: String,
}

#[derive(Debug, Parser)]
pub struct Proxy {
#[clap(long)]
#[arg(long)]
pub config: String,
}

#[derive(Debug, Parser)]
pub struct Client {
#[clap(subcommand)]
#[command(subcommand)]
pub client_type: ClientType,
}

@@ -45,36 +46,36 @@ pub enum ClientType {

#[derive(Debug, Parser)]
pub struct StatusClient {
#[clap(long)]
#[arg(long)]
pub host: String,
#[clap(long)]
#[arg(long)]
pub port: u16,
}

#[derive(Debug, Parser)]
pub struct BinnedClient {
#[clap(long)]
#[arg(long)]
pub host: String,
#[clap(long)]
#[arg(long)]
pub port: u16,
#[clap(long)]
#[arg(long)]
pub backend: String,
#[clap(long)]
#[arg(long)]
pub channel: String,
#[clap(long)]
#[arg(long)]
pub beg: String,
#[clap(long)]
#[arg(long)]
pub end: String,
#[clap(long)]
#[arg(long)]
pub bins: u32,
#[clap(long, default_value = "use")]
#[arg(long, default_value = "use")]
pub cache: String,
#[clap(long, default_value = "1048576")]
#[arg(long, default_value = "1048576")]
pub disk_stats_every_kb: u32,
}

#[derive(Debug, Parser)]
pub struct Logappend {
#[clap(long)]
#[arg(long)]
pub dir: String,
}
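For reference, a minimal sketch of the clap 4 derive pattern this migration targets (not part of the commit; the `-v` short flag and the `parse_from` call are illustrative): clap 3's `parse(from_occurrences)` with an `i32` counter becomes `ArgAction::Count` over a `u8`, container-level attributes move from `#[clap(...)]` to `#[command(...)]`, and field-level ones become `#[arg(...)]`.

```rust
use clap::{ArgAction, Parser, Subcommand};

#[derive(Debug, Parser)]
#[command(name = "daqbuffer", author, version)]
struct DemoOpts {
    // Repeated occurrences are counted into a u8 (replaces clap 3's `parse(from_occurrences)`).
    #[arg(short, long, action = ArgAction::Count)]
    verbose: u8,
    #[command(subcommand)]
    subcmd: DemoSubCmd,
}

#[derive(Debug, Subcommand)]
enum DemoSubCmd {
    Retrieval {
        #[arg(long)]
        config: String,
    },
}

fn main() {
    let opts = DemoOpts::parse_from(["daqbuffer", "-vv", "retrieval", "--config", "cfg.json"]);
    assert_eq!(opts.verbose, 2);
}
```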
@@ -2,7 +2,7 @@
name = "daqbufp2"
version = "0.0.1-a.dev.12"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2018"
edition = "2021"

[dependencies]
tokio = { version = "1.7.1", features = ["rt-multi-thread", "io-util", "net", "time", "sync", "fs"] }
@@ -476,7 +476,9 @@ read_next_scalar_values!(read_next_values_scalar_f64, f64, f64, "events_scalar_f

read_next_array_values!(read_next_values_array_u16, u16, i16, "events_wave_u16");

pub async fn make_scylla_stream(
// TODO remove
#[allow(unused)]
async fn make_scylla_stream(
_evq: &RawEventsQuery,
_scyco: &ScyllaConfig,
_dbconf: Database,
@@ -17,16 +17,13 @@ use futures_util::StreamExt;
|
||||
use items::numops::NumOps;
|
||||
use items::streams::{Collectable, Collector};
|
||||
use items::{
|
||||
Clearable, EventsNodeProcessor, FilterFittingInside, Framable, FrameType, PushableIndex, RangeCompletableItem,
|
||||
Sitemty, StreamItem, TimeBinnableType, WithLen,
|
||||
Clearable, EventsNodeProcessor, FilterFittingInside, Framable, FrameDecodable, FrameType, PushableIndex,
|
||||
RangeCompletableItem, Sitemty, StreamItem, TimeBinnableType, WithLen,
|
||||
};
|
||||
use netpod::log::*;
|
||||
use netpod::query::{BinnedQuery, RawEventsQuery};
|
||||
use netpod::{
|
||||
x_bin_count, BinnedRange, NodeConfigCached, PerfOpts, PreBinnedPatchIterator, PreBinnedPatchRange, ScalarType,
|
||||
Shape,
|
||||
};
|
||||
use serde::de::DeserializeOwned;
|
||||
use netpod::x_bin_count;
|
||||
use netpod::{BinnedRange, NodeConfigCached, PerfOpts, PreBinnedPatchIterator, PreBinnedPatchRange, ScalarType, Shape};
|
||||
use std::fmt::Debug;
|
||||
use std::pin::Pin;
|
||||
use std::task::{Context, Poll};
|
||||
@@ -72,7 +69,7 @@ impl ChannelExecFunction for BinnedBinaryChannelExec {
|
||||
+ Unpin,
|
||||
Sitemty<<ENP as EventsNodeProcessor>::Output>: FrameType + Framable + 'static,
|
||||
Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>:
|
||||
FrameType + Framable + DeserializeOwned,
|
||||
FrameType + Framable + FrameDecodable,
|
||||
{
|
||||
let _ = event_value_shape;
|
||||
let range = BinnedRange::covering_range(self.query.range().clone(), self.query.bin_count())?;
|
||||
@@ -322,7 +319,7 @@ impl ChannelExecFunction for BinnedJsonChannelExec {
|
||||
+ Unpin,
|
||||
Sitemty<<ENP as EventsNodeProcessor>::Output>: FrameType + Framable + 'static,
|
||||
Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>:
|
||||
FrameType + Framable + DeserializeOwned,
|
||||
FrameType + Framable + FrameDecodable,
|
||||
{
|
||||
let _ = event_value_shape;
|
||||
let range = BinnedRange::covering_range(self.query.range().clone(), self.query.bin_count())?;
|
||||
|
||||
@@ -7,14 +7,13 @@ use futures_core::Stream;
|
||||
use futures_util::{FutureExt, StreamExt};
|
||||
use http::{StatusCode, Uri};
|
||||
use items::frame::decode_frame;
|
||||
use items::{FrameType, FrameTypeStatic, RangeCompletableItem, Sitemty, StreamItem, TimeBinnableType};
|
||||
use items::{FrameDecodable, FrameType, FrameTypeStaticSYC, TimeBinnableType};
|
||||
use items::{RangeCompletableItem, Sitemty, StreamItem};
|
||||
use netpod::log::*;
|
||||
use netpod::query::CacheUsage;
|
||||
use netpod::{
|
||||
x_bin_count, AggKind, AppendToUrl, BinnedRange, ByteSize, Channel, NodeConfigCached, PerfOpts,
|
||||
PreBinnedPatchIterator, ScalarType, Shape,
|
||||
};
|
||||
use serde::de::DeserializeOwned;
|
||||
use netpod::x_bin_count;
|
||||
use netpod::PreBinnedPatchIterator;
|
||||
use netpod::{AggKind, AppendToUrl, BinnedRange, ByteSize, Channel, NodeConfigCached, PerfOpts, ScalarType, Shape};
|
||||
use std::future::ready;
|
||||
use std::marker::PhantomData;
|
||||
use std::pin::Pin;
|
||||
@@ -34,7 +33,8 @@ pub struct FetchedPreBinned<TBT> {
|
||||
impl<TBT> FetchedPreBinned<TBT> {
|
||||
pub fn new(query: &PreBinnedQuery, host: String, port: u16) -> Result<Self, Error>
|
||||
where
|
||||
TBT: TimeBinnableType,
|
||||
TBT: FrameTypeStaticSYC + TimeBinnableType,
|
||||
Sitemty<TBT>: FrameDecodable,
|
||||
{
|
||||
// TODO should not assume http:
|
||||
let mut url = Url::parse(&format!("http://{host}:{port}/api/4/prebinned"))?;
|
||||
@@ -53,8 +53,8 @@ impl<TBT> FetchedPreBinned<TBT> {
|
||||
|
||||
impl<TBT> Stream for FetchedPreBinned<TBT>
|
||||
where
|
||||
TBT: FrameTypeStatic + TimeBinnableType,
|
||||
Sitemty<TBT>: FrameType + DeserializeOwned,
|
||||
TBT: FrameTypeStaticSYC + TimeBinnableType,
|
||||
Sitemty<TBT>: FrameDecodable,
|
||||
{
|
||||
type Item = Sitemty<TBT>;
|
||||
|
||||
@@ -156,7 +156,7 @@ where
|
||||
impl<TBT> BinnedFromPreBinned<TBT>
|
||||
where
|
||||
TBT: TimeBinnableType<Output = TBT> + Unpin + 'static,
|
||||
Sitemty<TBT>: FrameType + DeserializeOwned,
|
||||
Sitemty<TBT>: FrameType + FrameDecodable,
|
||||
{
|
||||
pub fn new(
|
||||
patch_it: PreBinnedPatchIterator,
|
||||
@@ -252,7 +252,8 @@ where
|
||||
|
||||
impl<TBT> Stream for BinnedFromPreBinned<TBT>
|
||||
where
|
||||
TBT: TimeBinnableType,
|
||||
TBT: TimeBinnableType<Output = TBT> + Unpin + 'static,
|
||||
Sitemty<TBT>: FrameType + FrameDecodable,
|
||||
{
|
||||
type Item = Sitemty<TBT>;
|
||||
|
||||
|
||||
@@ -11,15 +11,14 @@ use futures_core::Stream;
|
||||
use futures_util::{FutureExt, StreamExt};
|
||||
use items::numops::NumOps;
|
||||
use items::{
|
||||
Appendable, Clearable, EventsNodeProcessor, EventsTypeAliases, FrameType, PushableIndex, RangeCompletableItem,
|
||||
ReadableFromFile, Sitemty, StreamItem, TimeBinnableType,
|
||||
Appendable, Clearable, EventsNodeProcessor, EventsTypeAliases, FrameDecodable, FrameType, PushableIndex,
|
||||
RangeCompletableItem, ReadableFromFile, Sitemty, StreamItem, TimeBinnableType,
|
||||
};
|
||||
use netpod::log::*;
|
||||
use netpod::query::{CacheUsage, RawEventsQuery};
|
||||
use netpod::{
|
||||
x_bin_count, AggKind, BinnedRange, NodeConfigCached, PerfOpts, PreBinnedPatchIterator, PreBinnedPatchRange,
|
||||
};
|
||||
use serde::de::DeserializeOwned;
|
||||
use netpod::x_bin_count;
|
||||
use netpod::{AggKind, BinnedRange, PreBinnedPatchIterator, PreBinnedPatchRange};
|
||||
use netpod::{NodeConfigCached, PerfOpts};
|
||||
use serde::Serialize;
|
||||
use std::future::Future;
|
||||
use std::io;
|
||||
@@ -70,7 +69,7 @@ where
|
||||
// TODO is this needed:
|
||||
Sitemty<<ENP as EventsNodeProcessor>::Output>: FrameType,
|
||||
// TODO who exactly needs this DeserializeOwned?
|
||||
Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>: FrameType + DeserializeOwned,
|
||||
Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>: FrameType + FrameDecodable,
|
||||
{
|
||||
pub fn new(query: PreBinnedQuery, agg_kind: AggKind, node_config: &NodeConfigCached) -> Self {
|
||||
Self {
|
||||
@@ -484,7 +483,7 @@ where
|
||||
<ENP as EventsNodeProcessor>::Output: PushableIndex + Appendable + Clearable,
|
||||
// TODO needed?
|
||||
Sitemty<<ENP as EventsNodeProcessor>::Output>: FrameType,
|
||||
Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>: FrameType + DeserializeOwned,
|
||||
Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>: FrameType + FrameDecodable,
|
||||
{
|
||||
type Item = Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>;
|
||||
|
||||
|
||||
@@ -12,12 +12,11 @@ use futures_core::Stream;
|
||||
use futures_util::StreamExt;
|
||||
use items::numops::{BoolNum, NumOps, StringNum};
|
||||
use items::{
|
||||
Appendable, Clearable, EventsNodeProcessor, Framable, FrameType, PushableIndex, RangeCompletableItem, Sitemty,
|
||||
SitemtyFrameType, StreamItem, TimeBinnableType, TimeBinned,
|
||||
Appendable, Clearable, EventsNodeProcessor, Framable, FrameDecodable, FrameType, PushableIndex,
|
||||
RangeCompletableItem, Sitemty, SitemtyFrameType, StreamItem, TimeBinnableType, TimeBinned,
|
||||
};
|
||||
use netpod::log::*;
|
||||
use netpod::{AggKind, ByteOrder, ChannelTyped, NodeConfigCached, ScalarType, Shape};
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::Serialize;
|
||||
use std::pin::Pin;
|
||||
|
||||
@@ -36,10 +35,9 @@ where
|
||||
EVS: EventValueShape<NTY, END> + EventValueFromBytes<NTY, END> + 'static,
|
||||
ENP: EventsNodeProcessor<Input = <EVS as EventValueFromBytes<NTY, END>>::Batch> + 'static,
|
||||
<ENP as EventsNodeProcessor>::Output: PushableIndex + Appendable + Clearable + 'static,
|
||||
Sitemty<<ENP as EventsNodeProcessor>::Output>: FrameType + Framable + 'static,
|
||||
Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>:
|
||||
Framable + FrameType + DeserializeOwned,
|
||||
<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output: SitemtyFrameType + TimeBinned,
|
||||
Sitemty<<ENP as EventsNodeProcessor>::Output>: FrameType + Framable + 'static,
|
||||
Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>: Framable + FrameType + FrameDecodable,
|
||||
{
|
||||
if let Some(scyconf) = &node_config.node_config.cluster.cache_scylla {
|
||||
trace!("~~~~~~~~~~~~~~~ make_num_pipeline_nty_end_evs_enp using scylla as cache");
|
||||
@@ -58,11 +56,11 @@ where
|
||||
)
|
||||
.await?;
|
||||
let stream = stream.map(|x| {
|
||||
//
|
||||
match x {
|
||||
let ret = match x {
|
||||
Ok(k) => Ok(StreamItem::DataItem(RangeCompletableItem::Data(k))),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
};
|
||||
ret
|
||||
});
|
||||
let stream = Box::pin(stream) as Pin<Box<dyn Stream<Item = Sitemty<Box<dyn TimeBinned>>> + Send>>;
|
||||
Ok(stream)
|
||||
|
||||
@@ -13,8 +13,8 @@ use items::numops::{BoolNum, NumOps, StringNum};
|
||||
use items::scalarevents::ScalarEvents;
|
||||
use items::streams::{Collectable, Collector};
|
||||
use items::{
|
||||
Clearable, EventsNodeProcessor, Framable, FrameType, PushableIndex, RangeCompletableItem, Sitemty, StreamItem,
|
||||
TimeBinnableType,
|
||||
Clearable, EventsNodeProcessor, Framable, FrameType, FrameTypeStatic, PushableIndex, RangeCompletableItem, Sitemty,
|
||||
StreamItem, TimeBinnableType,
|
||||
};
|
||||
use netpod::log::*;
|
||||
use netpod::query::{PlainEventsQuery, RawEventsQuery};
|
||||
@@ -75,9 +75,10 @@ where
|
||||
+ TimeBinnableType<Output = <<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>
|
||||
+ Collectable
|
||||
+ Unpin,
|
||||
Sitemty<<ENP as EventsNodeProcessor>::Output>: FrameType + Framable + 'static,
|
||||
// TODO shouldn't one of FrameType or FrameTypeStatic be enough?
|
||||
Sitemty<<ENP as EventsNodeProcessor>::Output>: FrameType + FrameTypeStatic + Framable + 'static,
|
||||
Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>:
|
||||
FrameType + Framable + DeserializeOwned,
|
||||
FrameType + FrameTypeStatic + Framable + DeserializeOwned,
|
||||
{
|
||||
Ok(f.exec(byte_order, scalar_type, shape, event_value_shape, events_node_proc)?)
|
||||
}
|
||||
|
||||
@@ -76,8 +76,9 @@ impl Stream for EventChunkerMultifile {
|
||||
type Item = Result<StreamItem<RangeCompletableItem<EventFull>>, Error>;
|
||||
|
||||
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
|
||||
let span1 = span!(Level::INFO, "EventChunkerMultifile", desc = tracing::field::Empty);
|
||||
span1.record("desc", &"");
|
||||
//tracing::field::DebugValue;
|
||||
let span1 = span!(Level::INFO, "EventChunkerMultifile", node_ix = self.node_ix);
|
||||
//span1.record("node_ix", &self.node_ix);
|
||||
span1.in_scope(|| {
|
||||
use Poll::*;
|
||||
'outer: loop {
|
||||
|
||||
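The hunk above swaps a span with an empty `desc` field (recorded afterwards) for a span that captures `node_ix` at creation. The two tracing patterns in isolation (standalone sketch, not the crate's code):

```rust
use tracing::{span, Level};

fn span_patterns(node_ix: usize) {
    // Old pattern: declare an empty field up front, fill it in later via `record`.
    let s1 = span!(Level::INFO, "EventChunkerMultifile", desc = tracing::field::Empty);
    s1.record("desc", &"some description");
    // New pattern: capture the value directly when the span is created.
    let _s2 = span!(Level::INFO, "EventChunkerMultifile", node_ix = node_ix);
}
```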
@@ -4,7 +4,7 @@ use bytes::{Buf, BytesMut};
use err::Error;
use futures_util::{Stream, StreamExt};
use items::{
Appendable, ByteEstimate, Clearable, FrameTypeStatic, PushableIndex, RangeCompletableItem, SitemtyFrameType,
Appendable, ByteEstimate, Clearable, FrameTypeStaticSYC, PushableIndex, RangeCompletableItem, SitemtyFrameType,
StatsItem, StreamItem, WithLen, WithTimestamps,
};
use netpod::histo::HistoLog2;
@@ -528,18 +528,13 @@ impl EventFull {
}
}

impl FrameTypeStatic for EventFull {
impl FrameTypeStaticSYC for EventFull {
const FRAME_TYPE_ID: u32 = items::EVENT_FULL_FRAME_TYPE_ID;

fn from_error(_: err::Error) -> Self {
// TODO remove usage of this
panic!()
}
}

impl SitemtyFrameType for EventFull {
fn frame_type_id(&self) -> u32 {
<Self as FrameTypeStatic>::FRAME_TYPE_ID
<Self as FrameTypeStaticSYC>::FRAME_TYPE_ID
}
}
@@ -11,10 +11,10 @@ use crate::raw::eventsfromframes::EventsFromFrames;
|
||||
use err::Error;
|
||||
use futures_core::Stream;
|
||||
use items::frame::{make_frame, make_term_frame};
|
||||
use items::{EventsNodeProcessor, FrameType, RangeCompletableItem, Sitemty, StreamItem};
|
||||
use items::{EventQueryJsonStringFrame, EventsNodeProcessor, RangeCompletableItem, Sitemty, StreamItem};
|
||||
use netpod::log::*;
|
||||
use netpod::query::RawEventsQuery;
|
||||
use netpod::{EventQueryJsonStringFrame, Node, PerfOpts};
|
||||
use netpod::{Node, PerfOpts};
|
||||
use std::pin::Pin;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::net::TcpStream;
|
||||
@@ -27,15 +27,17 @@ pub async fn x_processed_stream_from_node<ENP>(
|
||||
where
|
||||
ENP: EventsNodeProcessor,
|
||||
<ENP as EventsNodeProcessor>::Output: Unpin + 'static,
|
||||
Result<StreamItem<RangeCompletableItem<<ENP as EventsNodeProcessor>::Output>>, err::Error>: FrameType,
|
||||
{
|
||||
debug!("x_processed_stream_from_node to: {}:{}", node.host, node.port_raw);
|
||||
let net = TcpStream::connect(format!("{}:{}", node.host, node.port_raw)).await?;
|
||||
let qjs = serde_json::to_string(&query)?;
|
||||
let (netin, mut netout) = net.into_split();
|
||||
let buf = make_frame(&EventQueryJsonStringFrame(qjs))?;
|
||||
let item = Ok(StreamItem::DataItem(RangeCompletableItem::Data(
|
||||
EventQueryJsonStringFrame(qjs),
|
||||
)));
|
||||
let buf = make_frame(&item)?;
|
||||
netout.write_all(&buf).await?;
|
||||
let buf = make_term_frame();
|
||||
let buf = make_term_frame()?;
|
||||
netout.write_all(&buf).await?;
|
||||
netout.flush().await?;
|
||||
netout.forget();
|
||||
@@ -56,9 +58,12 @@ pub async fn x_processed_event_blobs_stream_from_node(
|
||||
let net = TcpStream::connect(format!("{}:{}", node.host, node.port_raw)).await?;
|
||||
let qjs = serde_json::to_string(&query)?;
|
||||
let (netin, mut netout) = net.into_split();
|
||||
let buf = make_frame(&EventQueryJsonStringFrame(qjs))?;
|
||||
let item = Ok(StreamItem::DataItem(RangeCompletableItem::Data(
|
||||
EventQueryJsonStringFrame(qjs),
|
||||
)));
|
||||
let buf = make_frame(&item)?;
|
||||
netout.write_all(&buf).await?;
|
||||
let buf = make_term_frame();
|
||||
let buf = make_term_frame()?;
|
||||
netout.write_all(&buf).await?;
|
||||
netout.flush().await?;
|
||||
netout.forget();
|
||||
|
||||
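Both call sites above now follow the same handshake: the JSON query is wrapped as an ordinary `Sitemty` data item before framing (so it goes through the common frame dispatch), and the terminator frame is now fallible. A condensed sketch of that sequence, assuming the imports already present in this file:

```rust
async fn send_query(netout: &mut tokio::net::tcp::OwnedWriteHalf, qjs: String) -> Result<(), Error> {
    use tokio::io::AsyncWriteExt;
    // Wrap the query string as a regular stream data item so it shares the common frame format.
    let item: Sitemty<EventQueryJsonStringFrame> =
        Ok(StreamItem::DataItem(RangeCompletableItem::Data(EventQueryJsonStringFrame(qjs))));
    netout.write_all(&make_frame(&item)?).await?;
    // The terminator frame constructor can now fail, hence the `?`.
    netout.write_all(&make_term_frame()?).await?;
    netout.flush().await?;
    Ok(())
}
```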
@@ -1,12 +1,10 @@
|
||||
use crate::decode::{
|
||||
BigEndian, Endianness, EventValueFromBytes, EventValueShape, EventValuesDim0Case, EventValuesDim1Case,
|
||||
EventsDecodedStream, LittleEndian, NumFromBytes,
|
||||
};
|
||||
use crate::decode::{BigEndian, Endianness, LittleEndian};
|
||||
use crate::decode::{EventValueFromBytes, EventValueShape, EventsDecodedStream, NumFromBytes};
|
||||
use crate::decode::{EventValuesDim0Case, EventValuesDim1Case};
|
||||
use crate::eventblobs::EventChunkerMultifile;
|
||||
use crate::eventchunker::{EventChunkerConf, EventFull};
|
||||
use err::Error;
|
||||
use futures_core::Stream;
|
||||
use futures_util::StreamExt;
|
||||
use futures_util::{Stream, StreamExt};
|
||||
use items::numops::{BoolNum, NumOps, StringNum};
|
||||
use items::{EventsNodeProcessor, Framable, RangeCompletableItem, Sitemty, StreamItem};
|
||||
use netpod::log::*;
|
||||
@@ -319,6 +317,8 @@ pub async fn make_event_blobs_pipe(
|
||||
let range = &evq.range;
|
||||
let entry = get_applicable_entry(&evq.range, evq.channel.clone(), node_config).await?;
|
||||
let event_chunker_conf = EventChunkerConf::new(ByteSize::kb(1024));
|
||||
type ItemType = Sitemty<EventFull>;
|
||||
// TODO should depend on host config
|
||||
let pipe = if true {
|
||||
let event_blobs = make_remote_event_blobs_stream(
|
||||
range.clone(),
|
||||
@@ -330,7 +330,7 @@ pub async fn make_event_blobs_pipe(
|
||||
evq.disk_io_tune.clone(),
|
||||
node_config,
|
||||
)?;
|
||||
let s = event_blobs.map(|item| Box::new(item) as Box<dyn Framable + Send>);
|
||||
let s = event_blobs.map(|item: ItemType| Box::new(item) as Box<dyn Framable + Send>);
|
||||
//let s = tracing_futures::Instrumented::instrument(s, tracing::info_span!("make_event_blobs_pipe"));
|
||||
let pipe: Pin<Box<dyn Stream<Item = Box<dyn Framable + Send>> + Send>>;
|
||||
pipe = Box::pin(s);
|
||||
@@ -346,7 +346,7 @@ pub async fn make_event_blobs_pipe(
|
||||
evq.disk_io_tune.clone(),
|
||||
node_config,
|
||||
)?;
|
||||
let s = event_blobs.map(|item| Box::new(item) as Box<dyn Framable + Send>);
|
||||
let s = event_blobs.map(|item: ItemType| Box::new(item) as Box<dyn Framable + Send>);
|
||||
//let s = tracing_futures::Instrumented::instrument(s, tracing::info_span!("make_event_blobs_pipe"));
|
||||
let pipe: Pin<Box<dyn Stream<Item = Box<dyn Framable + Send>> + Send>>;
|
||||
pipe = Box::pin(s);
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::frame::inmem::InMemoryFrameAsyncReadStream;
|
||||
use futures_core::Stream;
|
||||
use futures_util::StreamExt;
|
||||
use items::frame::decode_frame;
|
||||
use items::{FrameTypeStatic, Sitemty, StreamItem};
|
||||
use items::{FrameTypeStaticSYC, Sitemty, StreamItem};
|
||||
use netpod::log::*;
|
||||
use serde::de::DeserializeOwned;
|
||||
use std::marker::PhantomData;
|
||||
@@ -37,7 +37,7 @@ where
|
||||
impl<T, I> Stream for EventsFromFrames<T, I>
|
||||
where
|
||||
T: AsyncRead + Unpin,
|
||||
I: FrameTypeStatic + DeserializeOwned + Unpin,
|
||||
I: FrameTypeStaticSYC + DeserializeOwned + Unpin,
|
||||
{
|
||||
type Item = Sitemty<I>;
|
||||
|
||||
|
||||
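Downstream, `EventsFromFrames` relies on `decode_frame` (see the frame module changes further below) to turn each received frame into a typed `Sitemty`. A standalone sketch of that decode path; the concrete payload type and module paths are illustrative, not mandated by the commit:

```rust
use items::frame::decode_frame;
use items::inmem::InMemoryFrame;
use items::scalarevents::ScalarEvents;
use items::{RangeCompletableItem, Sitemty, StreamItem};

fn handle(frame: &InMemoryFrame) -> Result<(), err::Error> {
    // FrameDecodable for Sitemty<ScalarEvents<f64>> comes from the blanket impl added in this commit.
    let item: Sitemty<ScalarEvents<f64>> = decode_frame(frame)?;
    match item {
        Ok(StreamItem::DataItem(RangeCompletableItem::Data(_evs))) => { /* typed payload */ }
        Ok(StreamItem::DataItem(RangeCompletableItem::RangeComplete)) => { /* from a RANGE_COMPLETE frame */ }
        Ok(StreamItem::Log(_)) | Ok(StreamItem::Stats(_)) => { /* from LOG / STATS frames */ }
        Err(e) => return Err(e),
    }
    Ok(())
}
```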
@@ -2,7 +2,7 @@
name = "h5out"
version = "0.0.1-a.0"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2018"
edition = "2021"

[dependencies]
#serde = { version = "1.0", features = ["derive"] }

@@ -2,7 +2,7 @@
name = "httpclient"
version = "0.0.1-a.0"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2018"
edition = "2021"

[dependencies]
serde = { version = "1.0", features = ["derive"] }
@@ -1,7 +1,7 @@
|
||||
use crate::numops::NumOps;
|
||||
use crate::streams::{Collectable, Collector, ToJsonBytes, ToJsonResult};
|
||||
use crate::{
|
||||
ts_offs_from_abs, Appendable, FilterFittingInside, Fits, FitsInside, FrameTypeStatic, IsoDateTime, NewEmpty,
|
||||
ts_offs_from_abs, Appendable, FilterFittingInside, Fits, FitsInside, FrameTypeStaticSYC, IsoDateTime, NewEmpty,
|
||||
RangeOverlapInfo, ReadPbv, ReadableFromFile, Sitemty, SitemtyFrameType, SubFrId, TimeBinnableDyn, TimeBinnableType,
|
||||
TimeBinnableTypeAggregator, TimeBinned, TimeBinnerDyn, TimeBins, WithLen,
|
||||
};
|
||||
@@ -28,16 +28,11 @@ pub struct MinMaxAvgDim0Bins<NTY> {
|
||||
pub avgs: Vec<f32>,
|
||||
}
|
||||
|
||||
impl<NTY> FrameTypeStatic for MinMaxAvgDim0Bins<NTY>
|
||||
impl<NTY> FrameTypeStaticSYC for MinMaxAvgDim0Bins<NTY>
|
||||
where
|
||||
NTY: SubFrId,
|
||||
{
|
||||
const FRAME_TYPE_ID: u32 = crate::MIN_MAX_AVG_DIM_0_BINS_FRAME_TYPE_ID + NTY::SUB;
|
||||
|
||||
fn from_error(_: err::Error) -> Self {
|
||||
// TODO remove usage of this
|
||||
panic!()
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY> SitemtyFrameType for MinMaxAvgDim0Bins<NTY>
|
||||
@@ -45,7 +40,7 @@ where
|
||||
NTY: SubFrId,
|
||||
{
|
||||
fn frame_type_id(&self) -> u32 {
|
||||
<Self as FrameTypeStatic>::FRAME_TYPE_ID
|
||||
<Self as FrameTypeStaticSYC>::FRAME_TYPE_ID
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::numops::NumOps;
|
||||
use crate::streams::{Collectable, Collector, ToJsonBytes, ToJsonResult};
|
||||
use crate::waveevents::WaveEvents;
|
||||
use crate::{
|
||||
pulse_offs_from_abs, ts_offs_from_abs, Appendable, FilterFittingInside, Fits, FitsInside, FrameTypeStatic,
|
||||
pulse_offs_from_abs, ts_offs_from_abs, Appendable, FilterFittingInside, Fits, FitsInside, FrameTypeStaticSYC,
|
||||
IsoDateTime, NewEmpty, RangeOverlapInfo, ReadPbv, ReadableFromFile, Sitemty, SitemtyFrameType, SubFrId,
|
||||
TimeBinnableDyn, TimeBinnableType, TimeBinnableTypeAggregator, TimeBinned, TimeBins, WithLen,
|
||||
};
|
||||
@@ -27,16 +27,11 @@ pub struct MinMaxAvgDim1Bins<NTY> {
|
||||
pub avgs: Vec<Option<Vec<f32>>>,
|
||||
}
|
||||
|
||||
impl<NTY> FrameTypeStatic for MinMaxAvgDim1Bins<NTY>
|
||||
impl<NTY> FrameTypeStaticSYC for MinMaxAvgDim1Bins<NTY>
|
||||
where
|
||||
NTY: SubFrId,
|
||||
{
|
||||
const FRAME_TYPE_ID: u32 = crate::MIN_MAX_AVG_DIM_1_BINS_FRAME_TYPE_ID + NTY::SUB;
|
||||
|
||||
fn from_error(_: err::Error) -> Self {
|
||||
// TODO remove usage of this
|
||||
panic!()
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY> SitemtyFrameType for MinMaxAvgDim1Bins<NTY>
|
||||
@@ -44,7 +39,7 @@ where
|
||||
NTY: SubFrId,
|
||||
{
|
||||
fn frame_type_id(&self) -> u32 {
|
||||
<Self as FrameTypeStatic>::FRAME_TYPE_ID
|
||||
<Self as FrameTypeStaticSYC>::FRAME_TYPE_ID
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -11,8 +11,6 @@ pub enum EventsItem {
|
||||
}
|
||||
|
||||
impl SitemtyFrameType for EventsItem {
|
||||
//const FRAME_TYPE_ID: u32 = crate::EVENTS_ITEM_FRAME_TYPE_ID;
|
||||
|
||||
fn frame_type_id(&self) -> u32 {
|
||||
crate::EVENTS_ITEM_FRAME_TYPE_ID
|
||||
}
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
use crate::inmem::InMemoryFrame;
|
||||
use crate::{FrameDecodable, FrameType, LogItem, StatsItem};
|
||||
use crate::{
|
||||
FrameType, FrameTypeStatic, Sitemty, StreamItem, ERROR_FRAME_TYPE_ID, INMEM_FRAME_ENCID, INMEM_FRAME_HEAD,
|
||||
INMEM_FRAME_MAGIC, NON_DATA_FRAME_TYPE_ID, TERM_FRAME_TYPE_ID,
|
||||
ERROR_FRAME_TYPE_ID, INMEM_FRAME_ENCID, INMEM_FRAME_HEAD, INMEM_FRAME_MAGIC, LOG_FRAME_TYPE_ID,
|
||||
RANGE_COMPLETE_FRAME_TYPE_ID, STATS_FRAME_TYPE_ID, TERM_FRAME_TYPE_ID,
|
||||
};
|
||||
use bytes::{BufMut, BytesMut};
|
||||
use err::Error;
|
||||
#[allow(unused)]
|
||||
use netpod::log::*;
|
||||
use serde::{de::DeserializeOwned, Serialize};
|
||||
use serde::Serialize;
|
||||
|
||||
pub fn make_frame<FT>(item: &FT) -> Result<BytesMut, Error>
|
||||
where
|
||||
@@ -65,8 +66,9 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
// TODO remove duplication for these similar `make_*_frame` functions:
|
||||
|
||||
pub fn make_error_frame(error: &::err::Error) -> Result<BytesMut, Error> {
|
||||
//trace!("make_error_frame");
|
||||
match bincode::serialize(error) {
|
||||
Ok(enc) => {
|
||||
let mut h = crc32fast::Hasher::new();
|
||||
@@ -93,8 +95,75 @@ pub fn make_error_frame(error: &::err::Error) -> Result<BytesMut, Error> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn make_term_frame() -> BytesMut {
|
||||
//trace!("make_term_frame");
|
||||
pub fn make_log_frame(item: &LogItem) -> Result<BytesMut, Error> {
|
||||
match bincode::serialize(item) {
|
||||
Ok(enc) => {
|
||||
let mut h = crc32fast::Hasher::new();
|
||||
h.update(&enc);
|
||||
let payload_crc = h.finalize();
|
||||
let mut buf = BytesMut::with_capacity(INMEM_FRAME_HEAD);
|
||||
buf.put_u32_le(INMEM_FRAME_MAGIC);
|
||||
buf.put_u32_le(INMEM_FRAME_ENCID);
|
||||
buf.put_u32_le(LOG_FRAME_TYPE_ID);
|
||||
buf.put_u32_le(enc.len() as u32);
|
||||
buf.put_u32_le(payload_crc);
|
||||
// TODO add padding to align to 8 bytes.
|
||||
buf.put(enc.as_ref());
|
||||
let mut h = crc32fast::Hasher::new();
|
||||
h.update(&buf);
|
||||
let frame_crc = h.finalize();
|
||||
buf.put_u32_le(frame_crc);
|
||||
Ok(buf)
|
||||
}
|
||||
Err(e) => Err(e)?,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn make_stats_frame(item: &StatsItem) -> Result<BytesMut, Error> {
|
||||
match bincode::serialize(item) {
|
||||
Ok(enc) => {
|
||||
let mut h = crc32fast::Hasher::new();
|
||||
h.update(&enc);
|
||||
let payload_crc = h.finalize();
|
||||
let mut buf = BytesMut::with_capacity(INMEM_FRAME_HEAD);
|
||||
buf.put_u32_le(INMEM_FRAME_MAGIC);
|
||||
buf.put_u32_le(INMEM_FRAME_ENCID);
|
||||
buf.put_u32_le(STATS_FRAME_TYPE_ID);
|
||||
buf.put_u32_le(enc.len() as u32);
|
||||
buf.put_u32_le(payload_crc);
|
||||
// TODO add padding to align to 8 bytes.
|
||||
buf.put(enc.as_ref());
|
||||
let mut h = crc32fast::Hasher::new();
|
||||
h.update(&buf);
|
||||
let frame_crc = h.finalize();
|
||||
buf.put_u32_le(frame_crc);
|
||||
Ok(buf)
|
||||
}
|
||||
Err(e) => Err(e)?,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn make_range_complete_frame() -> Result<BytesMut, Error> {
|
||||
let enc = [];
|
||||
let mut h = crc32fast::Hasher::new();
|
||||
h.update(&enc);
|
||||
let payload_crc = h.finalize();
|
||||
let mut buf = BytesMut::with_capacity(INMEM_FRAME_HEAD);
|
||||
buf.put_u32_le(INMEM_FRAME_MAGIC);
|
||||
buf.put_u32_le(INMEM_FRAME_ENCID);
|
||||
buf.put_u32_le(RANGE_COMPLETE_FRAME_TYPE_ID);
|
||||
buf.put_u32_le(enc.len() as u32);
|
||||
buf.put_u32_le(payload_crc);
|
||||
// TODO add padding to align to 8 bytes.
|
||||
buf.put(enc.as_ref());
|
||||
let mut h = crc32fast::Hasher::new();
|
||||
h.update(&buf);
|
||||
let frame_crc = h.finalize();
|
||||
buf.put_u32_le(frame_crc);
|
||||
Ok(buf)
|
||||
}
|
||||
|
||||
pub fn make_term_frame() -> Result<BytesMut, Error> {
|
||||
let enc = [];
|
||||
let mut h = crc32fast::Hasher::new();
|
||||
h.update(&enc);
|
||||
@@ -111,12 +180,12 @@ pub fn make_term_frame() -> BytesMut {
|
||||
h.update(&buf);
|
||||
let frame_crc = h.finalize();
|
||||
buf.put_u32_le(frame_crc);
|
||||
buf
|
||||
Ok(buf)
|
||||
}
|
||||
|
||||
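The `make_error_frame`, `make_log_frame`, `make_stats_frame`, `make_range_complete_frame` and `make_term_frame` functions above share the same header and checksum layout, which the TODO note flags as duplication. A hypothetical helper they could delegate to (not part of this commit; the function name is invented, the constants are the crate's own and this is written as if inside the frame module):

```rust
use bytes::{BufMut, BytesMut};

// Frame layout, all u32 little-endian: MAGIC, ENCID, frame type id, payload length,
// payload CRC32, then the payload itself, then a CRC32 over everything written so far.
fn make_raw_frame(frame_type_id: u32, payload: &[u8]) -> BytesMut {
    let mut h = crc32fast::Hasher::new();
    h.update(payload);
    let payload_crc = h.finalize();
    let mut buf = BytesMut::with_capacity(INMEM_FRAME_HEAD + payload.len());
    buf.put_u32_le(INMEM_FRAME_MAGIC);
    buf.put_u32_le(INMEM_FRAME_ENCID);
    buf.put_u32_le(frame_type_id);
    buf.put_u32_le(payload.len() as u32);
    buf.put_u32_le(payload_crc);
    buf.put(payload);
    let mut h = crc32fast::Hasher::new();
    h.update(&buf);
    let frame_crc = h.finalize();
    buf.put_u32_le(frame_crc);
    buf
}

// make_log_frame could then shrink to:
//     Ok(make_raw_frame(LOG_FRAME_TYPE_ID, &bincode::serialize(item)?))
// and make_term_frame to:
//     Ok(make_raw_frame(TERM_FRAME_TYPE_ID, &[]))
```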
pub fn decode_frame<T>(frame: &InMemoryFrame) -> Result<T, Error>
|
||||
where
|
||||
T: FrameTypeStatic + DeserializeOwned,
|
||||
T: FrameDecodable,
|
||||
{
|
||||
if frame.encid() != INMEM_FRAME_ENCID {
|
||||
return Err(Error::with_msg(format!("unknown encoder id {:?}", frame)));
|
||||
@@ -137,69 +206,69 @@ where
|
||||
"ERROR bincode::deserialize len {} ERROR_FRAME_TYPE_ID",
|
||||
frame.buf().len()
|
||||
);
|
||||
let n = frame.buf().len().min(64);
|
||||
let n = frame.buf().len().min(128);
|
||||
let s = String::from_utf8_lossy(&frame.buf()[..n]);
|
||||
error!("frame.buf as string: {:?}", s);
|
||||
Err(e)?
|
||||
}
|
||||
};
|
||||
Ok(T::from_error(k))
|
||||
} else if frame.tyid() == NON_DATA_FRAME_TYPE_ID {
|
||||
error!("TODO NON_DATA_FRAME_TYPE_ID");
|
||||
type TT = Sitemty<crate::scalarevents::ScalarEvents<u32>>;
|
||||
let _k: TT = match bincode::deserialize::<TT>(frame.buf()) {
|
||||
Ok(item) => match item {
|
||||
Ok(StreamItem::DataItem(_)) => {
|
||||
error!(
|
||||
"ERROR bincode::deserialize len {} NON_DATA_FRAME_TYPE_ID but found Ok(StreamItem::DataItem)",
|
||||
frame.buf().len()
|
||||
);
|
||||
let n = frame.buf().len().min(64);
|
||||
let s = String::from_utf8_lossy(&frame.buf()[..n]);
|
||||
error!("frame.buf as string: {:?}", s);
|
||||
Err(Error::with_msg_no_trace("NON_DATA_FRAME_TYPE_ID decode error"))?
|
||||
}
|
||||
Ok(StreamItem::Log(k)) => Ok(StreamItem::Log(k)),
|
||||
Ok(StreamItem::Stats(k)) => Ok(StreamItem::Stats(k)),
|
||||
Err(e) => {
|
||||
error!("decode_frame sees error: {e:?}");
|
||||
Err(e)
|
||||
}
|
||||
},
|
||||
} else if frame.tyid() == LOG_FRAME_TYPE_ID {
|
||||
let k: LogItem = match bincode::deserialize(frame.buf()) {
|
||||
Ok(item) => item,
|
||||
Err(e) => {
|
||||
error!(
|
||||
"ERROR bincode::deserialize len {} ERROR_FRAME_TYPE_ID",
|
||||
"ERROR bincode::deserialize len {} LOG_FRAME_TYPE_ID",
|
||||
frame.buf().len()
|
||||
);
|
||||
let n = frame.buf().len().min(64);
|
||||
let n = frame.buf().len().min(128);
|
||||
let s = String::from_utf8_lossy(&frame.buf()[..n]);
|
||||
error!("frame.buf as string: {:?}", s);
|
||||
Err(e)?
|
||||
}
|
||||
};
|
||||
Err(Error::with_msg_no_trace("TODO NON_DATA_FRAME_TYPE_ID"))
|
||||
Ok(T::from_log(k))
|
||||
} else if frame.tyid() == STATS_FRAME_TYPE_ID {
|
||||
let k: StatsItem = match bincode::deserialize(frame.buf()) {
|
||||
Ok(item) => item,
|
||||
Err(e) => {
|
||||
error!(
|
||||
"ERROR bincode::deserialize len {} STATS_FRAME_TYPE_ID",
|
||||
frame.buf().len()
|
||||
);
|
||||
let n = frame.buf().len().min(128);
|
||||
let s = String::from_utf8_lossy(&frame.buf()[..n]);
|
||||
error!("frame.buf as string: {:?}", s);
|
||||
Err(e)?
|
||||
}
|
||||
};
|
||||
Ok(T::from_stats(k))
|
||||
} else if frame.tyid() == RANGE_COMPLETE_FRAME_TYPE_ID {
|
||||
// There is currently no content in this variant.
|
||||
Ok(T::from_range_complete())
|
||||
} else {
|
||||
let tyid = T::FRAME_TYPE_ID;
|
||||
if frame.tyid() != tyid {
|
||||
return Err(Error::with_msg(format!(
|
||||
Err(Error::with_msg(format!(
|
||||
"type id mismatch expect {:x} found {:x} {:?}",
|
||||
tyid,
|
||||
frame.tyid(),
|
||||
frame
|
||||
)));
|
||||
}
|
||||
match bincode::deserialize(frame.buf()) {
|
||||
Ok(item) => Ok(item),
|
||||
Err(e) => {
|
||||
error!(
|
||||
"ERROR bincode::deserialize len {} tyid {:x}",
|
||||
frame.buf().len(),
|
||||
frame.tyid()
|
||||
);
|
||||
let n = frame.buf().len().min(64);
|
||||
let s = String::from_utf8_lossy(&frame.buf()[..n]);
|
||||
error!("frame.buf as string: {:?}", s);
|
||||
Err(e)?
|
||||
)))
|
||||
} else {
|
||||
match bincode::deserialize(frame.buf()) {
|
||||
Ok(item) => Ok(item),
|
||||
Err(e) => {
|
||||
error!(
|
||||
"ERROR bincode::deserialize len {} tyid {:x}",
|
||||
frame.buf().len(),
|
||||
frame.tyid()
|
||||
);
|
||||
let n = frame.buf().len().min(64);
|
||||
let s = String::from_utf8_lossy(&frame.buf()[..n]);
|
||||
error!("frame.buf as string: {:?}", s);
|
||||
Err(e)?
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,10 +18,11 @@ use crate::numops::BoolNum;
|
||||
use bytes::BytesMut;
|
||||
use chrono::{TimeZone, Utc};
|
||||
use err::Error;
|
||||
use frame::{make_error_frame, make_log_frame, make_range_complete_frame, make_stats_frame};
|
||||
#[allow(unused)]
|
||||
use netpod::log::*;
|
||||
use netpod::timeunits::{MS, SEC};
|
||||
use netpod::{log::Level, AggKind, EventDataReadStats, EventQueryJsonStringFrame, NanoRange, Shape};
|
||||
use netpod::{log::Level, AggKind, EventDataReadStats, NanoRange, Shape};
|
||||
use netpod::{DiskStats, RangeFilterStats, ScalarType};
|
||||
use numops::StringNum;
|
||||
use serde::de::{self, DeserializeOwned, Visitor};
|
||||
@@ -38,12 +39,14 @@ use tokio::io::{AsyncRead, ReadBuf};
|
||||
pub const TERM_FRAME_TYPE_ID: u32 = 0x01;
|
||||
pub const ERROR_FRAME_TYPE_ID: u32 = 0x02;
|
||||
pub const EVENT_QUERY_JSON_STRING_FRAME: u32 = 0x100;
|
||||
pub const EVENT_VALUES_FRAME_TYPE_ID: u32 = 0x500;
|
||||
pub const EVENTS_0D_FRAME_TYPE_ID: u32 = 0x500;
|
||||
pub const MIN_MAX_AVG_DIM_0_BINS_FRAME_TYPE_ID: u32 = 0x700;
|
||||
pub const MIN_MAX_AVG_DIM_1_BINS_FRAME_TYPE_ID: u32 = 0x800;
|
||||
pub const MIN_MAX_AVG_WAVE_BINS: u32 = 0xa00;
|
||||
pub const WAVE_EVENTS_FRAME_TYPE_ID: u32 = 0xb00;
|
||||
pub const NON_DATA_FRAME_TYPE_ID: u32 = 0xc00;
|
||||
pub const LOG_FRAME_TYPE_ID: u32 = 0xc00;
|
||||
pub const STATS_FRAME_TYPE_ID: u32 = 0xd00;
|
||||
pub const RANGE_COMPLETE_FRAME_TYPE_ID: u32 = 0xe00;
|
||||
pub const EVENT_FULL_FRAME_TYPE_ID: u32 = 0x2200;
|
||||
pub const EVENTS_ITEM_FRAME_TYPE_ID: u32 = 0x2300;
|
||||
pub const STATS_EVENTS_FRAME_TYPE_ID: u32 = 0x2400;
|
||||
@@ -166,65 +169,77 @@ pub trait SubFrId {
|
||||
}
|
||||
|
||||
impl SubFrId for u8 {
|
||||
const SUB: u32 = 3;
|
||||
const SUB: u32 = 0x03;
|
||||
}
|
||||
|
||||
impl SubFrId for u16 {
|
||||
const SUB: u32 = 5;
|
||||
const SUB: u32 = 0x05;
|
||||
}
|
||||
|
||||
impl SubFrId for u32 {
|
||||
const SUB: u32 = 8;
|
||||
const SUB: u32 = 0x08;
|
||||
}
|
||||
|
||||
impl SubFrId for u64 {
|
||||
const SUB: u32 = 0xa;
|
||||
const SUB: u32 = 0x0a;
|
||||
}
|
||||
|
||||
impl SubFrId for i8 {
|
||||
const SUB: u32 = 2;
|
||||
const SUB: u32 = 0x02;
|
||||
}
|
||||
|
||||
impl SubFrId for i16 {
|
||||
const SUB: u32 = 4;
|
||||
const SUB: u32 = 0x04;
|
||||
}
|
||||
|
||||
impl SubFrId for i32 {
|
||||
const SUB: u32 = 7;
|
||||
const SUB: u32 = 0x07;
|
||||
}
|
||||
|
||||
impl SubFrId for i64 {
|
||||
const SUB: u32 = 9;
|
||||
const SUB: u32 = 0x09;
|
||||
}
|
||||
|
||||
impl SubFrId for f32 {
|
||||
const SUB: u32 = 0xb;
|
||||
const SUB: u32 = 0x0b;
|
||||
}
|
||||
|
||||
impl SubFrId for f64 {
|
||||
const SUB: u32 = 0xc;
|
||||
const SUB: u32 = 0x0c;
|
||||
}
|
||||
|
||||
impl SubFrId for StringNum {
|
||||
const SUB: u32 = 0xd;
|
||||
const SUB: u32 = 0x0d;
|
||||
}
|
||||
|
||||
impl SubFrId for BoolNum {
|
||||
const SUB: u32 = 0xe;
|
||||
const SUB: u32 = 0x0e;
|
||||
}
|
||||
|
||||
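How these pieces compose: the concrete frame type id of a typed container is its base id plus the scalar sub-id from `SubFrId`. An illustrative test, assuming the constants and `SubFrId` above are visible from the crate root (they are declared `pub` there):

```rust
#[test]
fn frame_type_id_composition() {
    use items::{SubFrId, EVENTS_0D_FRAME_TYPE_ID, MIN_MAX_AVG_DIM_0_BINS_FRAME_TYPE_ID};
    // ScalarEvents<f64> frames: 0x500 + 0x0c = 0x50c
    assert_eq!(EVENTS_0D_FRAME_TYPE_ID + <f64 as SubFrId>::SUB, 0x50c);
    // MinMaxAvgDim0Bins<u32> frames: 0x700 + 0x08 = 0x708
    assert_eq!(MIN_MAX_AVG_DIM_0_BINS_FRAME_TYPE_ID + <u32 as SubFrId>::SUB, 0x708);
}
```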
// To be implemented by the data containers, i.e. the T's in Sitemty<T>, e.g. ScalarEvents.
|
||||
// TODO rename this; the name is misleading because it is not meant to be implemented by Sitemty itself.
|
||||
pub trait SitemtyFrameType {
|
||||
//const FRAME_TYPE_ID: u32;
|
||||
// TODO check actual usage of this
|
||||
fn frame_type_id(&self) -> u32;
|
||||
}
|
||||
|
||||
pub trait FrameTypeStatic {
|
||||
const FRAME_TYPE_ID: u32;
|
||||
fn from_error(x: ::err::Error) -> Self;
|
||||
}
|
||||
|
||||
// Required for any inner type of Sitemty.
|
||||
pub trait FrameTypeStaticSYC {
|
||||
const FRAME_TYPE_ID: u32;
|
||||
}
|
||||
|
||||
impl<T> FrameTypeStatic for Sitemty<T>
|
||||
where
|
||||
T: FrameTypeStaticSYC,
|
||||
{
|
||||
const FRAME_TYPE_ID: u32 = <T as FrameTypeStaticSYC>::FRAME_TYPE_ID;
|
||||
}
|
||||
|
||||
// Framable trait objects need some inspection to handle the supposed-to-be common Err ser format:
|
||||
// Meant to be implemented by Sitemty.
|
||||
pub trait FrameType {
|
||||
fn frame_type_id(&self) -> u32;
|
||||
@@ -232,23 +247,6 @@ pub trait FrameType {
|
||||
fn err(&self) -> Option<&::err::Error>;
|
||||
}
|
||||
|
||||
impl FrameTypeStatic for EventQueryJsonStringFrame {
|
||||
const FRAME_TYPE_ID: u32 = EVENT_QUERY_JSON_STRING_FRAME;
|
||||
|
||||
fn from_error(_x: err::Error) -> Self {
|
||||
error!("FrameTypeStatic::from_error todo");
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: FrameTypeStatic> FrameTypeStatic for Sitemty<T> {
|
||||
const FRAME_TYPE_ID: u32 = <T as FrameTypeStatic>::FRAME_TYPE_ID;
|
||||
|
||||
fn from_error(e: err::Error) -> Self {
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> FrameType for Box<T>
|
||||
where
|
||||
T: FrameType,
|
||||
@@ -268,11 +266,10 @@ where
|
||||
|
||||
impl<T> FrameType for Sitemty<T>
|
||||
where
|
||||
// SitemtyFrameType
|
||||
T: FrameTypeStatic,
|
||||
T: FrameTypeStaticSYC,
|
||||
{
|
||||
fn frame_type_id(&self) -> u32 {
|
||||
<T as FrameTypeStatic>::FRAME_TYPE_ID
|
||||
<T as FrameTypeStaticSYC>::FRAME_TYPE_ID
|
||||
}
|
||||
|
||||
fn is_err(&self) -> bool {
|
||||
@@ -290,20 +287,6 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl FrameType for EventQueryJsonStringFrame {
|
||||
fn frame_type_id(&self) -> u32 {
|
||||
<Self as FrameTypeStatic>::FRAME_TYPE_ID
|
||||
}
|
||||
|
||||
fn is_err(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn err(&self) -> Option<&::err::Error> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl SitemtyFrameType for Box<dyn TimeBinned> {
|
||||
fn frame_type_id(&self) -> u32 {
|
||||
self.as_time_binnable_dyn().frame_type_id()
|
||||
@@ -316,7 +299,6 @@ impl SitemtyFrameType for Box<dyn EventsDyn> {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO do we need Send here?
|
||||
pub trait Framable {
|
||||
fn make_frame(&self) -> Result<BytesMut, Error>;
|
||||
}
|
||||
@@ -343,10 +325,10 @@ where
|
||||
let frame_type_id = k.frame_type_id();
|
||||
make_frame_2(self, frame_type_id)
|
||||
}
|
||||
_ => {
|
||||
let frame_type_id = NON_DATA_FRAME_TYPE_ID;
|
||||
make_frame_2(self, frame_type_id)
|
||||
}
|
||||
Ok(StreamItem::DataItem(RangeCompletableItem::RangeComplete)) => make_range_complete_frame(),
|
||||
Ok(StreamItem::Log(item)) => make_log_frame(item),
|
||||
Ok(StreamItem::Stats(item)) => make_stats_frame(item),
|
||||
Err(e) => make_error_frame(e),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -360,6 +342,41 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
pub trait FrameDecodable: FrameTypeStatic + DeserializeOwned {
|
||||
fn from_error(e: ::err::Error) -> Self;
|
||||
fn from_log(item: LogItem) -> Self;
|
||||
fn from_stats(item: StatsItem) -> Self;
|
||||
fn from_range_complete() -> Self;
|
||||
}
|
||||
|
||||
impl<T> FrameDecodable for Sitemty<T>
|
||||
where
|
||||
T: FrameTypeStaticSYC + DeserializeOwned,
|
||||
{
|
||||
fn from_error(e: err::Error) -> Self {
|
||||
Err(e)
|
||||
}
|
||||
|
||||
fn from_log(item: LogItem) -> Self {
|
||||
Ok(StreamItem::Log(item))
|
||||
}
|
||||
|
||||
fn from_stats(item: StatsItem) -> Self {
|
||||
Ok(StreamItem::Stats(item))
|
||||
}
|
||||
|
||||
fn from_range_complete() -> Self {
|
||||
Ok(StreamItem::DataItem(RangeCompletableItem::RangeComplete))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct EventQueryJsonStringFrame(pub String);
|
||||
|
||||
impl FrameTypeStaticSYC for EventQueryJsonStringFrame {
|
||||
const FRAME_TYPE_ID: u32 = EVENT_QUERY_JSON_STRING_FRAME;
|
||||
}
|
||||
|
||||
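To summarize the new split: a payload type only has to provide its static frame id (plus serde derives); the blanket impls above then give `Sitemty<T>` its `FrameType`, `FrameTypeStatic` and `FrameDecodable` behavior. A hypothetical new container, for illustration only, written as if inside this crate:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
pub struct DemoEvents {
    pub tss: Vec<u64>,
    pub values: Vec<f64>,
}

impl FrameTypeStaticSYC for DemoEvents {
    // Invented id for the sketch; real ids live in the constants block above.
    const FRAME_TYPE_ID: u32 = 0x2500;
}

impl SitemtyFrameType for DemoEvents {
    fn frame_type_id(&self) -> u32 {
        <Self as FrameTypeStaticSYC>::FRAME_TYPE_ID
    }
}
```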
pub trait EventsNodeProcessor: Send + Unpin {
|
||||
type Input;
|
||||
type Output: Send + Unpin + DeserializeOwned + WithTimestamps + TimeBinnableType + ByteEstimate;
|
||||
@@ -476,8 +493,9 @@ pub trait TimeBinnableType:
|
||||
+ NewEmpty
|
||||
+ Appendable
|
||||
+ Serialize
|
||||
+ DeserializeOwned
|
||||
+ ReadableFromFile
|
||||
+ FrameTypeStatic
|
||||
+ FrameTypeStaticSYC
|
||||
{
|
||||
type Output: TimeBinnableType;
|
||||
type Aggregator: TimeBinnableTypeAggregator<Input = Self, Output = Self::Output> + Send + Unpin;
|
||||
|
||||
@@ -3,7 +3,7 @@ use crate::numops::NumOps;
|
||||
use crate::streams::{Collectable, Collector};
|
||||
use crate::{
|
||||
pulse_offs_from_abs, ts_offs_from_abs, Appendable, ByteEstimate, Clearable, EventAppendable, EventsDyn,
|
||||
FilterFittingInside, Fits, FitsInside, FrameTypeStatic, NewEmpty, PushableIndex, RangeOverlapInfo, ReadPbv,
|
||||
FilterFittingInside, Fits, FitsInside, FrameTypeStaticSYC, NewEmpty, PushableIndex, RangeOverlapInfo, ReadPbv,
|
||||
ReadableFromFile, SitemtyFrameType, TimeBinnableDyn, TimeBinnableType, TimeBinnableTypeAggregator, TimeBinnerDyn,
|
||||
WithLen, WithTimestamps,
|
||||
};
|
||||
@@ -52,17 +52,11 @@ impl<NTY> ScalarEvents<NTY> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY> FrameTypeStatic for ScalarEvents<NTY>
|
||||
impl<NTY> FrameTypeStaticSYC for ScalarEvents<NTY>
|
||||
where
|
||||
NTY: NumOps,
|
||||
{
|
||||
const FRAME_TYPE_ID: u32 = crate::EVENT_VALUES_FRAME_TYPE_ID + NTY::SUB;
|
||||
|
||||
fn from_error(_: err::Error) -> Self {
|
||||
// TODO this method should not be used, remove.
|
||||
error!("impl<NTY> FrameTypeStatic for ScalarEvents<NTY>");
|
||||
panic!()
|
||||
}
|
||||
const FRAME_TYPE_ID: u32 = crate::EVENTS_0D_FRAME_TYPE_ID + NTY::SUB;
|
||||
}
|
||||
|
||||
impl<NTY> SitemtyFrameType for ScalarEvents<NTY>
|
||||
@@ -70,7 +64,7 @@ where
|
||||
NTY: NumOps,
|
||||
{
|
||||
fn frame_type_id(&self) -> u32 {
|
||||
<Self as FrameTypeStatic>::FRAME_TYPE_ID
|
||||
<Self as FrameTypeStaticSYC>::FRAME_TYPE_ID
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::streams::{Collectable, Collector};
|
||||
use crate::{
|
||||
ts_offs_from_abs, Appendable, ByteEstimate, Clearable, EventAppendable, FilterFittingInside, Fits, FitsInside,
|
||||
FrameTypeStatic, NewEmpty, PushableIndex, RangeOverlapInfo, ReadPbv, ReadableFromFile, SitemtyFrameType,
|
||||
FrameTypeStaticSYC, NewEmpty, PushableIndex, RangeOverlapInfo, ReadPbv, ReadableFromFile, SitemtyFrameType,
|
||||
TimeBinnableType, TimeBinnableTypeAggregator, WithLen, WithTimestamps,
|
||||
};
|
||||
use err::Error;
|
||||
@@ -17,18 +17,13 @@ pub struct StatsEvents {
|
||||
pub pulses: Vec<u64>,
|
||||
}
|
||||
|
||||
impl FrameTypeStatic for StatsEvents {
|
||||
impl FrameTypeStaticSYC for StatsEvents {
|
||||
const FRAME_TYPE_ID: u32 = crate::STATS_EVENTS_FRAME_TYPE_ID;
|
||||
|
||||
fn from_error(_: err::Error) -> Self {
|
||||
// TODO remove usage of this
|
||||
panic!()
|
||||
}
|
||||
}
|
||||
|
||||
impl SitemtyFrameType for StatsEvents {
|
||||
fn frame_type_id(&self) -> u32 {
|
||||
<Self as FrameTypeStatic>::FRAME_TYPE_ID
|
||||
<Self as FrameTypeStaticSYC>::FRAME_TYPE_ID
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ use crate::xbinnedscalarevents::XBinnedScalarEvents;
|
||||
use crate::xbinnedwaveevents::XBinnedWaveEvents;
|
||||
use crate::{
|
||||
Appendable, ByteEstimate, Clearable, EventAppendable, EventsDyn, EventsNodeProcessor, FilterFittingInside, Fits,
|
||||
FitsInside, FrameTypeStatic, NewEmpty, PushableIndex, RangeOverlapInfo, ReadPbv, ReadableFromFile,
|
||||
FitsInside, FrameTypeStaticSYC, NewEmpty, PushableIndex, RangeOverlapInfo, ReadPbv, ReadableFromFile,
|
||||
SitemtyFrameType, SubFrId, TimeBinnableDyn, TimeBinnableType, TimeBinnableTypeAggregator, WithLen, WithTimestamps,
|
||||
};
|
||||
use err::Error;
|
||||
@@ -40,16 +40,11 @@ impl<NTY> WaveEvents<NTY> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY> FrameTypeStatic for WaveEvents<NTY>
|
||||
impl<NTY> FrameTypeStaticSYC for WaveEvents<NTY>
|
||||
where
|
||||
NTY: SubFrId,
|
||||
{
|
||||
const FRAME_TYPE_ID: u32 = crate::WAVE_EVENTS_FRAME_TYPE_ID + NTY::SUB;
|
||||
|
||||
fn from_error(_: err::Error) -> Self {
|
||||
// TODO remove this method.
|
||||
panic!()
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY> SitemtyFrameType for WaveEvents<NTY>
|
||||
@@ -57,7 +52,7 @@ where
|
||||
NTY: SubFrId,
|
||||
{
|
||||
fn frame_type_id(&self) -> u32 {
|
||||
<Self as FrameTypeStatic>::FRAME_TYPE_ID
|
||||
<Self as FrameTypeStaticSYC>::FRAME_TYPE_ID
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::binsdim0::MinMaxAvgDim0Bins;
|
||||
use crate::numops::NumOps;
|
||||
use crate::streams::{Collectable, Collector};
|
||||
use crate::{
|
||||
ts_offs_from_abs, Appendable, ByteEstimate, Clearable, FilterFittingInside, Fits, FitsInside, FrameTypeStatic,
|
||||
ts_offs_from_abs, Appendable, ByteEstimate, Clearable, FilterFittingInside, Fits, FitsInside, FrameTypeStaticSYC,
|
||||
NewEmpty, PushableIndex, RangeOverlapInfo, ReadPbv, ReadableFromFile, SitemtyFrameType, SubFrId, TimeBinnableType,
|
||||
TimeBinnableTypeAggregator, WithLen, WithTimestamps,
|
||||
};
|
||||
@@ -23,15 +23,11 @@ pub struct XBinnedScalarEvents<NTY> {
|
||||
pub avgs: Vec<f32>,
|
||||
}
|
||||
|
||||
impl<NTY> FrameTypeStatic for XBinnedScalarEvents<NTY>
|
||||
impl<NTY> FrameTypeStaticSYC for XBinnedScalarEvents<NTY>
|
||||
where
|
||||
NTY: SubFrId,
|
||||
{
|
||||
const FRAME_TYPE_ID: u32 = crate::X_BINNED_SCALAR_EVENTS_FRAME_TYPE_ID + NTY::SUB;
|
||||
|
||||
fn from_error(_: err::Error) -> Self {
|
||||
panic!()
|
||||
}
|
||||
}
|
||||
|
||||
impl<NTY> SitemtyFrameType for XBinnedScalarEvents<NTY>
|
||||
@@ -39,7 +35,7 @@ where
|
||||
NTY: SubFrId,
|
||||
{
|
||||
fn frame_type_id(&self) -> u32 {
|
||||
<Self as FrameTypeStatic>::FRAME_TYPE_ID
|
||||
<Self as FrameTypeStaticSYC>::FRAME_TYPE_ID
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::binsdim1::MinMaxAvgDim1Bins;
|
||||
use crate::numops::NumOps;
|
||||
use crate::streams::{Collectable, Collector};
|
||||
use crate::{
|
||||
Appendable, ByteEstimate, Clearable, FilterFittingInside, Fits, FitsInside, FrameTypeStatic, NewEmpty,
|
||||
Appendable, ByteEstimate, Clearable, FilterFittingInside, Fits, FitsInside, FrameTypeStaticSYC, NewEmpty,
|
||||
PushableIndex, RangeOverlapInfo, ReadPbv, ReadableFromFile, SitemtyFrameType, SubFrId, TimeBinnableType,
|
||||
TimeBinnableTypeAggregator, WithLen, WithTimestamps,
|
||||
};
|
||||
@@ -23,15 +23,11 @@ pub struct XBinnedWaveEvents<NTY> {
|
||||
pub avgs: Vec<Vec<f32>>,
|
||||
}
|
||||
|
||||
impl<NTY> FrameTypeStatic for XBinnedWaveEvents<NTY>
|
||||
impl<NTY> FrameTypeStaticSYC for XBinnedWaveEvents<NTY>
|
||||
where
|
||||
NTY: SubFrId,
|
||||
{
|
||||
const FRAME_TYPE_ID: u32 = crate::X_BINNED_WAVE_EVENTS_FRAME_TYPE_ID + NTY::SUB;
|
||||
|
||||
fn from_error(_: err::Error) -> Self {
|
||||
panic!()
|
||||
}
|
||||
}
|
||||
|
||||
// TODO use a generic impl for this:
|
||||
@@ -40,7 +36,7 @@ where
|
||||
NTY: SubFrId,
|
||||
{
|
||||
fn frame_type_id(&self) -> u32 {
|
||||
<Self as FrameTypeStatic>::FRAME_TYPE_ID
|
||||
<Self as FrameTypeStaticSYC>::FRAME_TYPE_ID
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -19,5 +19,6 @@ crc32fast = "1.3.2"
futures-util = "0.3.24"
tokio = { version = "1.20", features = ["rt-multi-thread", "sync", "time"] }
err = { path = "../err" }
items = { path = "../items" }
items_proc = { path = "../items_proc" }
netpod = { path = "../netpod" }
@@ -11,13 +11,6 @@ use std::any::Any;
|
||||
use std::collections::VecDeque;
|
||||
use std::{fmt, mem};
|
||||
|
||||
#[allow(unused)]
|
||||
macro_rules! trace {
|
||||
($($x:expr),*) => {
|
||||
{let _ = format!($($x),*);}
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Serialize, Deserialize)]
|
||||
pub struct EventsDim0<NTY> {
|
||||
pub tss: VecDeque<u64>,
|
||||
@@ -574,6 +567,7 @@ pub struct EventsDim0TimeBinner<NTY: ScalarOps> {
|
||||
edges: VecDeque<u64>,
|
||||
agg: EventsDim0Aggregator<NTY>,
|
||||
ready: Option<<EventsDim0Aggregator<NTY> as TimeBinnableTypeAggregator>::Output>,
|
||||
range_complete: bool,
|
||||
}
|
||||
|
||||
impl<NTY: ScalarOps> EventsDim0TimeBinner<NTY> {
|
||||
@@ -594,6 +588,7 @@ impl<NTY: ScalarOps> EventsDim0TimeBinner<NTY> {
|
||||
edges,
|
||||
agg,
|
||||
ready: None,
|
||||
range_complete: false,
|
||||
};
|
||||
Ok(ret)
|
||||
}
|
||||
@@ -759,5 +754,7 @@ impl<NTY: ScalarOps> TimeBinner for EventsDim0TimeBinner<NTY> {
|
||||
}
|
||||
}
|
||||
|
||||
fn set_range_complete(&mut self) {}
|
||||
fn set_range_complete(&mut self) {
|
||||
self.range_complete = true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -253,7 +253,7 @@ pub trait TimeBinnable: fmt::Debug + WithLen + RangeOverlapInfo + Any + Send {
|
||||
}
|
||||
|
||||
/// Container of some form of events, for use as trait object.
|
||||
pub trait Events: fmt::Debug + Any + Collectable + TimeBinnable + Send {
|
||||
pub trait Events: fmt::Debug + Any + Collectable + TimeBinnable + Send + erased_serde::Serialize {
|
||||
fn as_time_binnable(&self) -> &dyn TimeBinnable;
|
||||
fn verify(&self) -> bool;
|
||||
fn output_info(&self);
|
||||
@@ -264,6 +264,8 @@ pub trait Events: fmt::Debug + Any + Collectable + TimeBinnable + Send {
|
||||
fn partial_eq_dyn(&self, other: &dyn Events) -> bool;
|
||||
}
|
||||
|
||||
erased_serde::serialize_trait_object!(Events);
|
||||
|
||||
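`Events` now requires `erased_serde::Serialize` and registers the trait object with `serialize_trait_object!`, which is what makes a boxed `dyn Events` serializable. The pattern in isolation (trait and type names here are invented, not the crate's own):

```rust
use serde::Serialize;

pub trait Payload: std::fmt::Debug + erased_serde::Serialize {}

// Generates `impl serde::Serialize for dyn Payload` (plus the Send/Sync variants).
erased_serde::serialize_trait_object!(Payload);

#[derive(Debug, Serialize)]
struct Demo {
    x: u32,
}

// Works because erased_serde::Serialize has a blanket impl for all serde::Serialize types.
impl Payload for Demo {}

fn to_json(p: &dyn Payload) -> serde_json::Result<String> {
    serde_json::to_string(p)
}
```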
impl PartialEq for Box<dyn Events> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
Events::partial_eq_dyn(self.as_ref(), other.as_ref())
|
||||
@@ -429,13 +431,13 @@ pub fn empty_binned_dyn(scalar_type: &ScalarType, shape: &Shape, agg_kind: &AggK
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub enum ConnStatus {
|
||||
Connect,
|
||||
Disconnect,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct ConnStatusEvent {
|
||||
pub ts: u64,
|
||||
pub status: ConnStatus,
|
||||
@@ -458,7 +460,7 @@ impl<T: MergableEvents> MergableEvents for Box<T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Serialize)]
|
||||
pub enum ChannelEvents {
|
||||
Events(Box<dyn Events>),
|
||||
Status(ConnStatusEvent),
|
||||
|
||||
@@ -2,7 +2,7 @@
name = "netpod"
version = "0.0.1-a.0"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2018"
edition = "2021"

[lib]
path = "src/netpod.rs"

@@ -2014,9 +2014,6 @@ pub struct ChannelConfigResponse {
pub shape: Shape,
}

#[derive(Serialize, Deserialize)]
pub struct EventQueryJsonStringFrame(pub String);

/**
Provide basic information about a channel, especially its shape.
Also, byte-order is important for clients that process the raw databuffer event data (python data_api3).
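`EventQueryJsonStringFrame` moves out of netpod and into the items crate (where it now carries a `FrameTypeStaticSYC` impl), so downstream crates swap the import accordingly, for example:

```rust
// before
use netpod::EventQueryJsonStringFrame;
// after
use items::EventQueryJsonStringFrame;
```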
@@ -2,7 +2,7 @@
name = "nodenet"
version = "0.0.1-a.1"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2018"
edition = "2021"

[lib]
path = "src/nodenet.rs"
@@ -30,5 +30,7 @@ netpod = { path = "../netpod" }
disk = { path = "../disk" }
#parse = { path = "../parse" }
items = { path = "../items" }
items_2 = { path = "../items_2" }
dbconn = { path = "../dbconn" }
scyllaconn = { path = "../scyllaconn" }
taskrun = { path = "../taskrun" }
@@ -1,18 +1,17 @@
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
||||
use dbconn::events_scylla::make_scylla_stream;
|
||||
use disk::frame::inmem::InMemoryFrameAsyncReadStream;
|
||||
use err::Error;
|
||||
use futures_core::Stream;
|
||||
use futures_util::StreamExt;
|
||||
use items::frame::{decode_frame, make_term_frame};
|
||||
use items::{Framable, StreamItem};
|
||||
use items::{EventQueryJsonStringFrame, Framable, RangeCompletableItem, Sitemty, StreamItem};
|
||||
use netpod::histo::HistoLog2;
|
||||
use netpod::log::*;
|
||||
use netpod::query::RawEventsQuery;
|
||||
use netpod::query::{PlainEventsQuery, RawEventsQuery};
|
||||
use netpod::AggKind;
|
||||
use netpod::{EventQueryJsonStringFrame, NodeConfigCached, PerfOpts};
|
||||
use netpod::{NodeConfigCached, PerfOpts};
|
||||
use std::net::SocketAddr;
|
||||
use std::pin::Pin;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
@@ -33,20 +32,6 @@ pub async fn events_service(node_config: NodeConfigCached) -> Result<(), Error>
|
||||
}
|
||||
}
|
||||
|
||||
async fn events_conn_handler(stream: TcpStream, addr: SocketAddr, node_config: NodeConfigCached) -> Result<(), Error> {
|
||||
let span1 = span!(Level::INFO, "events_conn_handler");
|
||||
let r = events_conn_handler_inner(stream, addr, &node_config)
|
||||
.instrument(span1)
|
||||
.await;
|
||||
match r {
|
||||
Ok(k) => Ok(k),
|
||||
Err(e) => {
|
||||
error!("events_conn_handler sees error: {:?}", e);
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ConnErr {
|
||||
err: Error,
|
||||
#[allow(dead_code)]
|
||||
@@ -91,8 +76,18 @@ async fn events_conn_handler_inner_try(
|
||||
error!("missing command frame");
|
||||
return Err((Error::with_msg("missing command frame"), netout))?;
|
||||
}
|
||||
let qitem: EventQueryJsonStringFrame = match decode_frame(&frames[0]) {
|
||||
Ok(k) => k,
|
||||
// TODO this does not need all variants of Sitemty.
|
||||
let qitem = match decode_frame::<Sitemty<EventQueryJsonStringFrame>>(&frames[0]) {
|
||||
Ok(k) => match k {
|
||||
Ok(k) => match k {
|
||||
StreamItem::DataItem(k) => match k {
|
||||
RangeCompletableItem::Data(k) => k,
|
||||
RangeCompletableItem::RangeComplete => panic!(),
|
||||
},
|
||||
_ => panic!(),
|
||||
},
|
||||
Err(e) => return Err((e, netout).into()),
|
||||
},
|
||||
Err(e) => return Err((e, netout).into()),
|
||||
};
|
||||
let res: Result<RawEventsQuery, _> = serde_json::from_str(&qitem.0);
|
||||
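The nested match above extracts the inner `EventQueryJsonStringFrame` from the decoded `Sitemty`. A flatter extraction using nested patterns (sketch only, not part of the commit; it also turns the `panic!()` arms into a returned error):

```rust
let qitem: EventQueryJsonStringFrame =
    match decode_frame::<Sitemty<EventQueryJsonStringFrame>>(&frames[0]) {
        Ok(Ok(StreamItem::DataItem(RangeCompletableItem::Data(k)))) => k,
        Ok(Ok(_)) => return Err((Error::with_msg("unexpected command frame"), netout))?,
        Ok(Err(e)) | Err(e) => return Err((e, netout))?,
    };
```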
@@ -113,31 +108,53 @@ async fn events_conn_handler_inner_try(
|
||||
|
||||
let mut p1: Pin<Box<dyn Stream<Item = Box<dyn Framable + Send>> + Send>> =
|
||||
if let Some(conf) = &node_config.node_config.cluster.scylla {
|
||||
// TODO depends in general on the query
|
||||
// TODO why both in PlainEventsQuery and as separate parameter? Check other usages.
|
||||
let do_one_before_range = false;
|
||||
// TODO use better builder pattern with shortcuts for production and dev defaults
|
||||
let qu = PlainEventsQuery::new(evq.channel, evq.range, 1024 * 8, None, true);
|
||||
let scyco = conf;
|
||||
let dbconf = node_config.node_config.cluster.database.clone();
|
||||
match make_scylla_stream(&evq, scyco, dbconf, evq.do_test_stream_error).await {
|
||||
Ok(s) => {
|
||||
//
|
||||
let s = s.map(|item| {
|
||||
//
|
||||
/*match item {
|
||||
Ok(StreamItem::Data(RangeCompletableItem::Data(k))) => {
|
||||
let b = Box::new(b);
|
||||
Ok(StreamItem::Data(RangeCompletableItem::Data(b)))
|
||||
}
|
||||
Ok(StreamItem::Data(RangeCompletableItem::Complete)) => {
|
||||
Ok(StreamItem::Data(RangeCompletableItem::Complete))
|
||||
}
|
||||
Ok(StreamItem::Log(k)) => Ok(StreamItem::Log(k)),
|
||||
Ok(StreamItem::Stats(k)) => Ok(StreamItem::Stats(k)),
|
||||
Err(e) => Err(e),
|
||||
}*/
|
||||
Box::new(item) as Box<dyn Framable + Send>
|
||||
});
|
||||
Box::pin(s)
|
||||
}
|
||||
let _dbconf = node_config.node_config.cluster.database.clone();
|
||||
let scy = match scyllaconn::create_scy_session(scyco).await {
|
||||
Ok(k) => k,
|
||||
Err(e) => return Err((e, netout))?,
|
||||
}
|
||||
};
|
||||
let series = err::todoval();
|
||||
let scalar_type = err::todoval();
|
||||
let shape = err::todoval();
|
||||
let do_test_stream_error = false;
|
||||
let stream = match scyllaconn::events::make_scylla_stream(
|
||||
&qu,
|
||||
do_one_before_range,
|
||||
series,
|
||||
scalar_type,
|
||||
shape,
|
||||
scy,
|
||||
do_test_stream_error,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(k) => k,
|
||||
Err(e) => return Err((e, netout))?,
|
||||
};
|
||||
let s = stream.map(|item| {
|
||||
let item = match item {
|
||||
Ok(item) => match item {
|
||||
items_2::ChannelEvents::Events(_item) => {
|
||||
// TODO
|
||||
let item = items::scalarevents::ScalarEvents::<f64>::empty();
|
||||
Ok(StreamItem::DataItem(RangeCompletableItem::Data(item)))
|
||||
}
|
||||
items_2::ChannelEvents::RangeComplete => {
|
||||
Ok(StreamItem::DataItem(RangeCompletableItem::RangeComplete))
|
||||
}
|
||||
items_2::ChannelEvents::Status(_item) => todo!(),
|
||||
},
|
||||
Err(e) => Err(e),
|
||||
};
|
||||
Box::new(item) as Box<dyn Framable + Send>
|
||||
});
|
||||
Box::pin(s)
|
||||
} else if let Some(_) = &node_config.node.channel_archiver {
|
||||
let e = Error::with_msg_no_trace("archapp not built");
|
||||
return Err((e, netout))?;
|
||||
@@ -172,7 +189,10 @@ async fn events_conn_handler_inner_try(
|
||||
}
|
||||
}
|
||||
}
|
||||
let buf = make_term_frame();
|
||||
let buf = match make_term_frame() {
|
||||
Ok(k) => k,
|
||||
Err(e) => return Err((e, netout))?,
|
||||
};
|
||||
match netout.write_all(&buf).await {
|
||||
Ok(_) => (),
|
||||
Err(e) => return Err((e, netout))?,
|
||||
@@ -205,3 +225,17 @@ async fn events_conn_handler_inner(
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn events_conn_handler(stream: TcpStream, addr: SocketAddr, node_config: NodeConfigCached) -> Result<(), Error> {
|
||||
let span1 = span!(Level::INFO, "events_conn_handler");
|
||||
let r = events_conn_handler_inner(stream, addr, &node_config)
|
||||
.instrument(span1)
|
||||
.await;
|
||||
match r {
|
||||
Ok(k) => Ok(k),
|
||||
Err(e) => {
|
||||
error!("events_conn_handler sees error: {:?}", e);
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,20 +1,23 @@
|
||||
use netpod::{Cluster, Database, FileIoBufferSize, Node, NodeConfig, SfDatabuffer};
|
||||
use tokio::net::TcpListener;
|
||||
|
||||
use super::*;
|
||||
use disk::eventchunker::EventFull;
|
||||
use items::frame::make_frame;
|
||||
use items::Sitemty;
|
||||
use netpod::timeunits::SEC;
|
||||
use netpod::{Channel, Cluster, Database, DiskIoTune, FileIoBufferSize, NanoRange, Node, NodeConfig, SfDatabuffer};
|
||||
use tokio::net::TcpListener;
|
||||
|
||||
#[test]
|
||||
fn raw_data_00() {
|
||||
//taskrun::run(disk::gen::gen_test_data()).unwrap();
|
||||
let fut = async {
|
||||
let lis = TcpListener::bind("127.0.0.1:0").await.unwrap();
|
||||
let con = TcpStream::connect(lis.local_addr().unwrap()).await.unwrap();
|
||||
let mut con = TcpStream::connect(lis.local_addr().unwrap()).await.unwrap();
|
||||
let (client, addr) = lis.accept().await.unwrap();
|
||||
let cfg = NodeConfigCached {
|
||||
node_config: NodeConfig {
|
||||
name: "node_name_dummy".into(),
|
||||
cluster: Cluster {
|
||||
backend: "backend_dummy".into(),
|
||||
backend: "testbackend".into(),
|
||||
nodes: vec![],
|
||||
database: Database {
|
||||
name: "".into(),
|
||||
@@ -47,7 +50,60 @@ fn raw_data_00() {
|
||||
},
|
||||
ix: 0,
|
||||
};
|
||||
events_conn_handler(client, addr, cfg).await.unwrap();
|
||||
let qu = RawEventsQuery {
|
||||
channel: Channel {
|
||||
series: None,
|
||||
backend: "testbackend".into(),
|
||||
name: "scalar-i32-be".into(),
|
||||
},
|
||||
range: NanoRange {
|
||||
beg: SEC,
|
||||
end: SEC * 10,
|
||||
},
|
||||
agg_kind: AggKind::Plain,
|
||||
disk_io_tune: DiskIoTune {
|
||||
read_sys: netpod::ReadSys::TokioAsyncRead,
|
||||
read_buffer_len: 1024 * 4,
|
||||
read_queue_len: 1,
|
||||
},
|
||||
do_decompress: true,
|
||||
do_test_main_error: false,
|
||||
do_test_stream_error: false,
|
||||
};
|
||||
let query = EventQueryJsonStringFrame(serde_json::to_string(&qu).unwrap());
|
||||
let item = Ok(StreamItem::DataItem(RangeCompletableItem::Data(query)));
|
||||
let frame = make_frame(&item).unwrap();
|
||||
let jh = taskrun::spawn(events_conn_handler(client, addr, cfg));
|
||||
con.write_all(&frame).await.unwrap();
|
||||
eprintln!("written");
|
||||
con.shutdown().await.unwrap();
|
||||
eprintln!("shut down");
|
||||
|
||||
let mut frames = InMemoryFrameAsyncReadStream::new(con, 1024 * 128);
|
||||
while let Some(frame) = frames.next().await {
|
||||
match frame {
|
||||
Ok(frame) => match frame {
|
||||
StreamItem::DataItem(k) => {
|
||||
eprintln!("{k:?}");
|
||||
if k.tyid() == items::EVENT_FULL_FRAME_TYPE_ID {
|
||||
} else if k.tyid() == items::ERROR_FRAME_TYPE_ID {
|
||||
} else if k.tyid() == items::LOG_FRAME_TYPE_ID {
|
||||
} else if k.tyid() == items::STATS_FRAME_TYPE_ID {
|
||||
} else {
|
||||
panic!("unexpected frame type id {:x}", k.tyid());
|
||||
}
|
||||
let item: Result<Sitemty<EventFull>, Error> = decode_frame(&k);
|
||||
eprintln!("decoded: {:?}", item);
|
||||
}
|
||||
StreamItem::Log(_) => todo!(),
|
||||
StreamItem::Stats(_) => todo!(),
|
||||
},
|
||||
Err(e) => {
|
||||
panic!("{e:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
jh.await.unwrap().unwrap();
|
||||
Ok(())
|
||||
};
|
||||
taskrun::run(fut).unwrap();
|
||||
|
||||
@@ -2,7 +2,7 @@
name = "parse"
version = "0.0.1-a.0"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2018"
edition = "2021"

[dependencies]
serde = { version = "1.0", features = ["derive"] }

@@ -2,7 +2,7 @@
name = "streams"
version = "0.0.1-a.dev.4"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2018"
edition = "2021"

[dependencies]
tracing = "0.1.26"