Remove crate items

This commit is contained in:
Dominik Werder
2023-03-22 09:38:19 +01:00
parent c0bdc854ff
commit d1c10e1712
46 changed files with 598 additions and 557 deletions

View File

@@ -22,7 +22,6 @@ tokio = { version = "1.20", features = ["rt-multi-thread", "sync", "time"] }
humantime-serde = "1.1.1"
rmp-serde = "1.1.1"
err = { path = "../err" }
items = { path = "../items" }
items_0 = { path = "../items_0" }
items_proc = { path = "../items_proc" }
netpod = { path = "../netpod" }

View File

@@ -11,9 +11,9 @@ use items_0::collect_s::ToJsonResult;
use items_0::streamitem::RangeCompletableItem;
use items_0::streamitem::Sitemty;
use items_0::streamitem::StreamItem;
use items_0::EventTransform;
use items_0::TimeBinnable;
use items_0::TimeBinner;
use items_0::Transformer;
use netpod::log::*;
use netpod::BinnedRange;
use netpod::BinnedRangeEnum;
@@ -69,7 +69,7 @@ fn _old_binned_collected(
scalar_type: ScalarType,
shape: Shape,
binrange: BinnedRangeEnum,
transformer: &dyn Transformer,
transformer: &dyn EventTransform,
deadline: Instant,
inp: Pin<Box<dyn Stream<Item = Sitemty<ChannelEvents>> + Send>>,
) -> Result<BinnedCollectedResult, Error> {

View File

@@ -18,10 +18,12 @@ use items_0::TimeBinned;
use items_0::TimeBinner;
use items_0::TimeBins;
use items_0::WithLen;
use netpod::is_false;
use netpod::log::*;
use netpod::range::evrange::SeriesRange;
use netpod::timeunits::SEC;
use netpod::BinnedRangeEnum;
use netpod::CmpZero;
use netpod::Dim0Kind;
use serde::Deserialize;
use serde::Serialize;
@@ -303,11 +305,11 @@ pub struct BinsDim0CollectedResult<NTY> {
maxs: VecDeque<NTY>,
#[serde(rename = "avgs")]
avgs: VecDeque<f32>,
#[serde(rename = "rangeFinal", default, skip_serializing_if = "crate::bool_is_false")]
#[serde(rename = "rangeFinal", default, skip_serializing_if = "is_false")]
range_final: bool,
#[serde(rename = "timedOut", default, skip_serializing_if = "crate::bool_is_false")]
#[serde(rename = "timedOut", default, skip_serializing_if = "is_false")]
timed_out: bool,
#[serde(rename = "missingBins", default, skip_serializing_if = "crate::is_zero_u32")]
#[serde(rename = "missingBins", default, skip_serializing_if = "CmpZero::is_zero")]
missing_bins: u32,
#[serde(rename = "continueAt", default, skip_serializing_if = "Option::is_none")]
continue_at: Option<IsoDateTime>,

View File

@@ -21,13 +21,14 @@ use items_0::TimeBinned;
use items_0::TimeBinner;
use items_0::TimeBins;
use items_0::WithLen;
use netpod::is_false;
use netpod::log::*;
use netpod::range::evrange::NanoRange;
use netpod::range::evrange::SeriesRange;
use netpod::timeunits::SEC;
use netpod::BinnedRangeEnum;
use netpod::Dim0Kind;
use num_traits::Zero;
use netpod::CmpZero;
use serde::Deserialize;
use serde::Serialize;
use std::any;
@@ -269,11 +270,11 @@ pub struct BinsXbinDim0CollectedResult<NTY> {
maxs: VecDeque<NTY>,
#[serde(rename = "avgs")]
avgs: VecDeque<f32>,
#[serde(rename = "rangeFinal", default, skip_serializing_if = "crate::bool_is_false")]
#[serde(rename = "rangeFinal", default, skip_serializing_if = "is_false")]
range_final: bool,
#[serde(rename = "timedOut", default, skip_serializing_if = "crate::bool_is_false")]
#[serde(rename = "timedOut", default, skip_serializing_if = "is_false")]
timed_out: bool,
#[serde(rename = "missingBins", default, skip_serializing_if = "Zero::is_zero")]
#[serde(rename = "missingBins", default, skip_serializing_if = "CmpZero::is_zero")]
missing_bins: u32,
#[serde(rename = "continueAt", default, skip_serializing_if = "Option::is_none")]
continue_at: Option<IsoDateTime>,

View File

@@ -1,10 +1,10 @@
use crate::framable::FrameType;
use crate::merger;
use crate::merger::Mergeable;
use crate::Events;
use items_0::collect_s::Collectable;
use items_0::collect_s::Collected;
use items_0::collect_s::Collector;
use items_0::container::ByteEstimate;
use items_0::framable::FrameTypeInnerStatic;
use items_0::streamitem::ITEMS_2_CHANNEL_EVENTS_FRAME_TYPE_ID;
use items_0::AsAnyMut;
@@ -12,9 +12,7 @@ use items_0::AsAnyRef;
use items_0::MergeError;
use items_0::WithLen;
use netpod::log::*;
use netpod::range::evrange::NanoRange;
use netpod::range::evrange::SeriesRange;
use netpod::BinnedRange;
use netpod::BinnedRangeEnum;
use serde::Deserialize;
use serde::Serialize;
@@ -55,6 +53,13 @@ impl ConnStatusEvent {
}
}
impl ByteEstimate for ConnStatusEvent {
    /// Rough size of one event in bytes, used for buffer accounting.
    fn byte_estimate(&self) -> u64 {
        // Flat guess: a connection status event carries essentially no payload.
        // TODO magic number, but maybe good enough
        const ESTIMATE: u64 = 32;
        ESTIMATE
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ChannelStatus {
Connect,
@@ -86,6 +91,13 @@ impl ChannelStatusEvent {
}
}
impl ByteEstimate for ChannelStatusEvent {
    /// Rough size of one event in bytes, used for buffer accounting.
    fn byte_estimate(&self) -> u64 {
        // Flat guess: a channel status event carries essentially no payload.
        // TODO magic number, but maybe good enough
        const ESTIMATE: u64 = 32;
        ESTIMATE
    }
}
/// Events on a channel consist not only of e.g. timestamped values, but can be also
/// connection status changes.
#[derive(Debug)]
@@ -490,6 +502,18 @@ impl WithLen for ChannelEvents {
}
}
impl ByteEstimate for ChannelEvents {
fn byte_estimate(&self) -> u64 {
match self {
ChannelEvents::Events(k) => k.byte_estimate(),
ChannelEvents::Status(k) => match k {
Some(k) => k.byte_estimate(),
None => 0,
},
}
}
}
impl Mergeable for ChannelEvents {
fn ts_min(&self) -> Option<u64> {
match self {

View File

@@ -1,8 +1,7 @@
use crate::framable::FrameType;
use crate::merger::Mergeable;
use bytes::BytesMut;
use items::ByteEstimate;
use items::WithTimestamps;
use items_0::container::ByteEstimate;
use items_0::framable::FrameTypeInnerStatic;
use items_0::streamitem::EVENT_FULL_FRAME_TYPE_ID;
use items_0::Empty;
@@ -29,6 +28,7 @@ pub struct EventFull {
pub be: VecDeque<bool>,
pub shapes: VecDeque<Shape>,
pub comps: VecDeque<Option<CompressionMethod>>,
pub entry_payload_max: u64,
}
#[allow(unused)]
@@ -81,6 +81,9 @@ impl EventFull {
shape: Shape,
comp: Option<CompressionMethod>,
) {
let m1 = blob.as_ref().map_or(0, |x| x.len());
let m2 = decomp.as_ref().map_or(0, |x| x.len());
self.entry_payload_max = self.entry_payload_max.max(m1 as u64 + m2 as u64);
self.tss.push_back(ts);
self.pulses.push_back(pulse);
self.blobs.push_back(blob);
@@ -91,6 +94,7 @@ impl EventFull {
self.comps.push_back(comp);
}
// TODO possible to get rid of this?
pub fn truncate_ts(&mut self, end: u64) {
let mut nkeep = usize::MAX;
for (i, &ts) in self.tss.iter().enumerate() {
@@ -131,6 +135,7 @@ impl Empty for EventFull {
be: VecDeque::new(),
shapes: VecDeque::new(),
comps: VecDeque::new(),
entry_payload_max: 0,
}
}
}
@@ -141,22 +146,9 @@ impl WithLen for EventFull {
}
}
impl WithTimestamps for EventFull {
/// Timestamp of the event at index `ix`.
/// Panics if `ix` is out of bounds of the stored timestamps.
fn ts(&self, ix: usize) -> u64 {
self.tss[ix]
}
}
impl ByteEstimate for EventFull {
fn byte_estimate(&self) -> u64 {
if self.len() == 0 {
0
} else {
// TODO that is clumsy... it assumes homogenous types.
// TODO improve via a const fn on NTY
let decomp_len = self.decomps[0].as_ref().map_or(0, |h| h.len());
self.tss.len() as u64 * (40 + self.blobs[0].as_ref().map_or(0, |x| x.len()) as u64 + decomp_len as u64)
}
self.len() as u64 * (64 + self.entry_payload_max)
}
}
@@ -176,6 +168,13 @@ impl Mergeable for EventFull {
fn drain_into(&mut self, dst: &mut Self, range: (usize, usize)) -> Result<(), MergeError> {
// TODO make it harder to forget new members when the struct may get modified in the future
let r = range.0..range.1;
let mut max = dst.entry_payload_max;
for i in r.clone() {
let m1 = self.blobs[i].as_ref().map_or(0, |x| x.len());
let m2 = self.decomps[i].as_ref().map_or(0, |x| x.len());
max = max.max(m1 as u64 + m2 as u64);
}
dst.entry_payload_max = max;
dst.tss.extend(self.tss.drain(r.clone()));
dst.pulses.extend(self.pulses.drain(r.clone()));
dst.blobs.extend(self.blobs.drain(r.clone()));

View File

@@ -10,6 +10,7 @@ use items_0::collect_s::Collector;
use items_0::collect_s::CollectorType;
use items_0::collect_s::ToJsonBytes;
use items_0::collect_s::ToJsonResult;
use items_0::container::ByteEstimate;
use items_0::scalar_ops::ScalarOps;
use items_0::Appendable;
use items_0::AsAnyMut;
@@ -21,6 +22,7 @@ use items_0::MergeError;
use items_0::TimeBinnable;
use items_0::TimeBinner;
use items_0::WithLen;
use netpod::is_false;
use netpod::log::*;
use netpod::range::evrange::NanoRange;
use netpod::range::evrange::SeriesRange;
@@ -125,6 +127,13 @@ impl<NTY> WithLen for EventsDim0<NTY> {
}
}
impl<STY> ByteEstimate for EventsDim0<STY> {
    /// Estimate: per event, one timestamp (8 B) + one pulse id (8 B) + one scalar value.
    fn byte_estimate(&self) -> u64 {
        let per_event = 8 + 8 + mem::size_of::<STY>();
        (per_event * self.len()) as u64
    }
}
impl<NTY: ScalarOps> RangeOverlapInfo for EventsDim0<NTY> {
fn ends_before(&self, range: &SeriesRange) -> bool {
if range.is_time() {
@@ -238,9 +247,9 @@ pub struct EventsDim0CollectorOutput<NTY> {
pulse_off: VecDeque<u64>,
#[serde(rename = "values")]
values: VecDeque<NTY>,
#[serde(rename = "rangeFinal", default, skip_serializing_if = "crate::bool_is_false")]
#[serde(rename = "rangeFinal", default, skip_serializing_if = "is_false")]
range_final: bool,
#[serde(rename = "timedOut", default, skip_serializing_if = "crate::bool_is_false")]
#[serde(rename = "timedOut", default, skip_serializing_if = "is_false")]
timed_out: bool,
#[serde(rename = "continueAt", default, skip_serializing_if = "Option::is_none")]
continue_at: Option<IsoDateTime>,
@@ -1122,7 +1131,7 @@ mod test_frame {
use items_0::streamitem::StreamItem;
#[test]
fn events_bincode() {
fn events_serialize() {
taskrun::tracing_init().unwrap();
let mut events = EventsDim0::empty();
events.push(123, 234, 55f32);

View File

@@ -10,6 +10,7 @@ use items_0::collect_s::Collected;
use items_0::collect_s::CollectorType;
use items_0::collect_s::ToJsonBytes;
use items_0::collect_s::ToJsonResult;
use items_0::container::ByteEstimate;
use items_0::scalar_ops::ScalarOps;
use items_0::Appendable;
use items_0::AsAnyMut;
@@ -21,6 +22,7 @@ use items_0::MergeError;
use items_0::TimeBinnable;
use items_0::TimeBinner;
use items_0::WithLen;
use netpod::is_false;
use netpod::log::*;
use netpod::range::evrange::SeriesRange;
use netpod::timeunits::SEC;
@@ -31,6 +33,7 @@ use std::any;
use std::any::Any;
use std::collections::VecDeque;
use std::fmt;
use std::mem;
#[allow(unused)]
macro_rules! trace2 {
@@ -130,6 +133,14 @@ impl<NTY> WithLen for EventsDim1<NTY> {
}
}
impl<STY> ByteEstimate for EventsDim1<STY> {
    /// Estimate: per event, timestamp (8 B) + pulse id (8 B) + one wave of scalars.
    /// Uses the first wave's length for all events — assumes homogeneous waves,
    /// TODO confirm against producers.
    fn byte_estimate(&self) -> u64 {
        let wave_len = match self.values.front() {
            Some(v) => v.len(),
            None => 0,
        };
        let per_event = 8 + 8 + wave_len * mem::size_of::<STY>();
        (self.len() * per_event) as u64
    }
}
impl<NTY: ScalarOps> RangeOverlapInfo for EventsDim1<NTY> {
fn ends_before(&self, range: &SeriesRange) -> bool {
todo!()
@@ -199,9 +210,9 @@ pub struct EventsDim1CollectorOutput<NTY> {
pulse_off: VecDeque<u64>,
#[serde(rename = "values")]
values: VecDeque<Vec<NTY>>,
#[serde(rename = "rangeFinal", default, skip_serializing_if = "crate::bool_is_false")]
#[serde(rename = "rangeFinal", default, skip_serializing_if = "is_false")]
range_final: bool,
#[serde(rename = "timedOut", default, skip_serializing_if = "crate::bool_is_false")]
#[serde(rename = "timedOut", default, skip_serializing_if = "is_false")]
timed_out: bool,
#[serde(rename = "continueAt", default, skip_serializing_if = "Option::is_none")]
continue_at: Option<IsoDateTime>,

View File

@@ -1,4 +1,5 @@
use crate::binsxbindim0::BinsXbinDim0;
use items_0::container::ByteEstimate;
use crate::IsoDateTime;
use crate::RangeOverlapInfo;
use crate::TimeBinnableType;
@@ -14,6 +15,7 @@ use items_0::AsAnyMut;
use items_0::AsAnyRef;
use items_0::Empty;
use items_0::WithLen;
use netpod::is_false;
use netpod::log::*;
use netpod::range::evrange::SeriesRange;
use netpod::BinnedRangeEnum;
@@ -69,7 +71,7 @@ where
}
}
impl<NTY> items::ByteEstimate for EventsXbinDim0<NTY> {
impl<NTY> ByteEstimate for EventsXbinDim0<NTY> {
fn byte_estimate(&self) -> u64 {
todo!("byte_estimate")
}
@@ -365,9 +367,9 @@ pub struct EventsXbinDim0CollectorOutput<NTY> {
maxs: VecDeque<NTY>,
#[serde(rename = "avgs")]
avgs: VecDeque<f32>,
#[serde(rename = "rangeFinal", default, skip_serializing_if = "crate::bool_is_false")]
#[serde(rename = "rangeFinal", default, skip_serializing_if = "is_false")]
range_final: bool,
#[serde(rename = "timedOut", default, skip_serializing_if = "crate::bool_is_false")]
#[serde(rename = "timedOut", default, skip_serializing_if = "is_false")]
timed_out: bool,
#[serde(rename = "continueAt", default, skip_serializing_if = "Option::is_none")]
continue_at: Option<IsoDateTime>,

View File

@@ -90,7 +90,7 @@ where
}
pub trait FrameDecodable: FrameTypeStatic + DeserializeOwned {
fn from_error(e: ::err::Error) -> Self;
fn from_error(e: err::Error) -> Self;
fn from_log(item: LogItem) -> Self;
fn from_stats(item: StatsItem) -> Self;
fn from_range_complete() -> Self;
@@ -148,3 +148,34 @@ where
}
}
}
/// Frame a log item and check that its payload decodes back from the raw bytes.
#[test]
fn test_frame_log() {
use crate::channelevents::ChannelEvents;
use crate::frame::decode_from_slice;
use netpod::log::Level;
let item = LogItem {
node_ix: 123,
level: Level::TRACE,
msg: format!("test-log-message"),
};
// Wrap as a Sitemty stream item before framing.
let item: Sitemty<ChannelEvents> = Ok(StreamItem::Log(item));
let buf = Framable::make_frame(&item).unwrap();
// Payload length sits at bytes 12..16 of the frame head
// (five little-endian u32 words: magic, encid, tyid, len, payload-crc).
let len = u32::from_le_bytes(buf[12..16].try_into().unwrap());
// Payload starts right after the 20-byte head; the unwrap is the assertion.
let item2: LogItem = decode_from_slice(&buf[20..20 + len as usize]).unwrap();
}
/// Frame an Err stream item and check frame type id plus payload decoding.
#[test]
fn test_frame_error() {
use crate::channelevents::ChannelEvents;
use crate::frame::decode_from_slice;
let item: Sitemty<ChannelEvents> = Err(Error::with_msg_no_trace(format!("dummy-error-message")));
let buf = Framable::make_frame(&item).unwrap();
// Frame head is five little-endian u32 words: magic, encid, tyid, len, payload-crc.
let len = u32::from_le_bytes(buf[12..16].try_into().unwrap());
let tyid = u32::from_le_bytes(buf[8..12].try_into().unwrap());
// An Err item must be emitted as an error frame.
if tyid != ERROR_FRAME_TYPE_ID {
panic!("bad tyid");
}
eprintln!("buf len {} len {}", buf.len(), len);
// Payload starts right after the 20-byte head; decode must succeed.
let item2: Error = decode_from_slice(&buf[20..20 + len as usize]).unwrap();
}

View File

@@ -1,5 +1,4 @@
use crate::framable::FrameDecodable;
use crate::framable::FrameType;
use crate::framable::INMEM_FRAME_ENCID;
use crate::framable::INMEM_FRAME_HEAD;
use crate::framable::INMEM_FRAME_MAGIC;
@@ -15,7 +14,6 @@ use bytes::BufMut;
use bytes::BytesMut;
use err::Error;
use items_0::bincode;
use items_0::streamitem::ContainsError;
use items_0::streamitem::LogItem;
use items_0::streamitem::StatsItem;
use items_0::streamitem::ERROR_FRAME_TYPE_ID;
@@ -23,9 +21,10 @@ use items_0::streamitem::LOG_FRAME_TYPE_ID;
use items_0::streamitem::RANGE_COMPLETE_FRAME_TYPE_ID;
use items_0::streamitem::STATS_FRAME_TYPE_ID;
use items_0::streamitem::TERM_FRAME_TYPE_ID;
#[allow(unused)]
use netpod::log::*;
use serde::Serialize;
use std::any;
use std::io;
trait EC {
fn ec(self) -> err::Error;
@@ -43,17 +42,6 @@ impl EC for rmp_serde::decode::Error {
}
}
pub fn make_frame<FT>(item: &FT) -> Result<BytesMut, Error>
where
FT: FrameType + ContainsError + Serialize,
{
if item.is_err() {
make_error_frame(item.err().unwrap())
} else {
make_frame_2(item, item.frame_type_id())
}
}
pub fn bincode_ser<W>(
w: W,
) -> bincode::Serializer<
@@ -64,7 +52,7 @@ pub fn bincode_ser<W>(
>,
>
where
W: std::io::Write,
W: io::Write,
{
use bincode::Options;
let opts = DefaultOptions::new()
@@ -98,73 +86,83 @@ where
<T as serde::Deserialize>::deserialize(&mut de).map_err(|e| format!("{e}").into())
}
pub fn encode_to_vec<S>(item: S) -> Result<Vec<u8>, Error>
pub fn msgpack_to_vec<T>(item: T) -> Result<Vec<u8>, Error>
where
S: Serialize,
T: Serialize,
{
if false {
serde_json::to_vec(&item).map_err(|e| e.into())
} else {
bincode_to_vec(&item)
}
rmp_serde::to_vec_named(&item).map_err(|e| format!("{e}").into())
}
pub fn msgpack_erased_to_vec<T>(item: T) -> Result<Vec<u8>, Error>
where
T: erased_serde::Serialize,
{
let mut out = Vec::new();
let mut ser1 = rmp_serde::Serializer::new(&mut out).with_struct_map();
let mut ser2 = <dyn erased_serde::Serializer>::erase(&mut ser1);
item.erased_serialize(&mut ser2)
.map_err(|e| Error::from(format!("{e}")))?;
Ok(out)
}
/// Deserialize a value from a MessagePack-encoded byte slice.
pub fn msgpack_from_slice<T>(buf: &[u8]) -> Result<T, Error>
where
    T: for<'de> serde::Deserialize<'de>,
{
    match rmp_serde::from_slice(buf) {
        Ok(v) => Ok(v),
        Err(e) => Err(format!("{e}").into()),
    }
}
/// Encode a serializable value with the codec's current wire format.
/// Delegates to [`msgpack_to_vec`]; presumably kept as a stable name for
/// existing call sites — TODO confirm before removing.
pub fn encode_to_vec<T>(item: T) -> Result<Vec<u8>, Error>
where
T: Serialize,
{
msgpack_to_vec(item)
}
/// Encode a type-erased serializable value with the codec's current wire format.
/// Delegates to [`msgpack_erased_to_vec`]; presumably kept as a stable name for
/// existing call sites — TODO confirm before removing.
pub fn encode_erased_to_vec<T>(item: T) -> Result<Vec<u8>, Error>
where
T: erased_serde::Serialize,
{
msgpack_erased_to_vec(item)
}
pub fn decode_from_slice<T>(buf: &[u8]) -> Result<T, Error>
where
T: for<'de> serde::Deserialize<'de>,
{
if false {
serde_json::from_slice(buf).map_err(|e| e.into())
} else {
bincode_from_slice(buf)
}
msgpack_from_slice(buf)
}
pub fn make_frame_2<T>(item: &T, fty: u32) -> Result<BytesMut, Error>
pub fn make_frame_2<T>(item: T, fty: u32) -> Result<BytesMut, Error>
where
T: erased_serde::Serialize,
{
let mut out = Vec::new();
//let mut ser = rmp_serde::Serializer::new(&mut out).with_struct_map();
//let writer = ciborium::ser::into_writer(&item, &mut out).unwrap();
let mut ser = bincode_ser(&mut out);
let mut ser2 = <dyn erased_serde::Serializer>::erase(&mut ser);
//let mut ser = serde_json::Serializer::new(&mut out);
//let mut ser2 = <dyn erased_serde::Serializer>::erase(&mut ser);
match item.erased_serialize(&mut ser2) {
Ok(_) => {
let enc = out;
if enc.len() > u32::MAX as usize {
return Err(Error::with_msg(format!("too long payload {}", enc.len())));
}
let mut h = crc32fast::Hasher::new();
h.update(&enc);
let payload_crc = h.finalize();
// TODO reserve also for footer via constant
let mut buf = BytesMut::with_capacity(enc.len() + INMEM_FRAME_HEAD);
buf.put_u32_le(INMEM_FRAME_MAGIC);
buf.put_u32_le(INMEM_FRAME_ENCID);
buf.put_u32_le(fty);
buf.put_u32_le(enc.len() as u32);
buf.put_u32_le(payload_crc);
// TODO add padding to align to 8 bytes.
//trace!("enc len {}", enc.len());
//trace!("payload_crc {}", payload_crc);
buf.put(enc.as_ref());
let mut h = crc32fast::Hasher::new();
h.update(&buf);
let frame_crc = h.finalize();
buf.put_u32_le(frame_crc);
//trace!("frame_crc {}", frame_crc);
Ok(buf)
}
Err(e) => Err(e)?,
let enc = encode_erased_to_vec(item)?;
if enc.len() > u32::MAX as usize {
return Err(Error::with_msg(format!("too long payload {}", enc.len())));
}
let mut h = crc32fast::Hasher::new();
h.update(&enc);
let payload_crc = h.finalize();
// TODO reserve also for footer via constant
let mut buf = BytesMut::with_capacity(enc.len() + INMEM_FRAME_HEAD);
buf.put_u32_le(INMEM_FRAME_MAGIC);
buf.put_u32_le(INMEM_FRAME_ENCID);
buf.put_u32_le(fty);
buf.put_u32_le(enc.len() as u32);
buf.put_u32_le(payload_crc);
// TODO add padding to align to 8 bytes.
buf.put(enc.as_ref());
let mut h = crc32fast::Hasher::new();
h.update(&buf);
let frame_crc = h.finalize();
buf.put_u32_le(frame_crc);
return Ok(buf);
}
// TODO remove duplication for these similar `make_*_frame` functions:
pub fn make_error_frame(error: &::err::Error) -> Result<BytesMut, Error> {
pub fn make_error_frame(error: &err::Error) -> Result<BytesMut, Error> {
match encode_to_vec(error) {
Ok(enc) => {
let mut h = crc32fast::Hasher::new();
@@ -176,24 +174,18 @@ pub fn make_error_frame(error: &::err::Error) -> Result<BytesMut, Error> {
buf.put_u32_le(ERROR_FRAME_TYPE_ID);
buf.put_u32_le(enc.len() as u32);
buf.put_u32_le(payload_crc);
// TODO add padding to align to 8 bytes.
//trace!("enc len {}", enc.len());
//trace!("payload_crc {}", payload_crc);
buf.put(enc.as_ref());
let mut h = crc32fast::Hasher::new();
h.update(&buf);
let frame_crc = h.finalize();
buf.put_u32_le(frame_crc);
//trace!("frame_crc {}", frame_crc);
Ok(buf)
}
Err(e) => Err(e)?,
}
}
// TODO can I remove this usage?
pub fn make_log_frame(item: &LogItem) -> Result<BytesMut, Error> {
warn!("make_log_frame {item:?}");
match encode_to_vec(item) {
Ok(enc) => {
let mut h = crc32fast::Hasher::new();
@@ -203,10 +195,8 @@ pub fn make_log_frame(item: &LogItem) -> Result<BytesMut, Error> {
buf.put_u32_le(INMEM_FRAME_MAGIC);
buf.put_u32_le(INMEM_FRAME_ENCID);
buf.put_u32_le(LOG_FRAME_TYPE_ID);
warn!("make_log_frame payload len {}", enc.len());
buf.put_u32_le(enc.len() as u32);
buf.put_u32_le(payload_crc);
// TODO add padding to align to 8 bytes.
buf.put(enc.as_ref());
let mut h = crc32fast::Hasher::new();
h.update(&buf);
@@ -230,7 +220,6 @@ pub fn make_stats_frame(item: &StatsItem) -> Result<BytesMut, Error> {
buf.put_u32_le(STATS_FRAME_TYPE_ID);
buf.put_u32_le(enc.len() as u32);
buf.put_u32_le(payload_crc);
// TODO add padding to align to 8 bytes.
buf.put(enc.as_ref());
let mut h = crc32fast::Hasher::new();
h.update(&buf);
@@ -253,7 +242,6 @@ pub fn make_range_complete_frame() -> Result<BytesMut, Error> {
buf.put_u32_le(RANGE_COMPLETE_FRAME_TYPE_ID);
buf.put_u32_le(enc.len() as u32);
buf.put_u32_le(payload_crc);
// TODO add padding to align to 8 bytes.
buf.put(enc.as_ref());
let mut h = crc32fast::Hasher::new();
h.update(&buf);
@@ -273,7 +261,6 @@ pub fn make_term_frame() -> Result<BytesMut, Error> {
buf.put_u32_le(TERM_FRAME_TYPE_ID);
buf.put_u32_le(enc.len() as u32);
buf.put_u32_le(payload_crc);
// TODO add padding to align to 8 bytes.
buf.put(enc.as_ref());
let mut h = crc32fast::Hasher::new();
h.update(&buf);
@@ -298,11 +285,15 @@ where
)));
}
if frame.tyid() == ERROR_FRAME_TYPE_ID {
let k: ::err::Error = match decode_from_slice(frame.buf()) {
let k: err::Error = match decode_from_slice(frame.buf()) {
Ok(item) => item,
Err(e) => {
error!("ERROR deserialize len {} ERROR_FRAME_TYPE_ID", frame.buf().len());
let n = frame.buf().len().min(128);
error!(
"ERROR deserialize len {} ERROR_FRAME_TYPE_ID {}",
frame.buf().len(),
e
);
let n = frame.buf().len().min(256);
let s = String::from_utf8_lossy(&frame.buf()[..n]);
error!("frame.buf as string: {:?}", s);
Err(e)?
@@ -313,7 +304,7 @@ where
let k: LogItem = match decode_from_slice(frame.buf()) {
Ok(item) => item,
Err(e) => {
error!("ERROR deserialize len {} LOG_FRAME_TYPE_ID", frame.buf().len());
error!("ERROR deserialize len {} LOG_FRAME_TYPE_ID {}", frame.buf().len(), e);
let n = frame.buf().len().min(128);
let s = String::from_utf8_lossy(&frame.buf()[..n]);
error!("frame.buf as string: {:?}", s);
@@ -325,7 +316,11 @@ where
let k: StatsItem = match decode_from_slice(frame.buf()) {
Ok(item) => item,
Err(e) => {
error!("ERROR deserialize len {} STATS_FRAME_TYPE_ID", frame.buf().len());
error!(
"ERROR deserialize len {} STATS_FRAME_TYPE_ID {}",
frame.buf().len(),
e
);
let n = frame.buf().len().min(128);
let s = String::from_utf8_lossy(&frame.buf()[..n]);
error!("frame.buf as string: {:?}", s);
@@ -349,7 +344,7 @@ where
match decode_from_slice(frame.buf()) {
Ok(item) => Ok(item),
Err(e) => {
error!("decode_frame T = {}", std::any::type_name::<T>());
error!("decode_frame T = {}", any::type_name::<T>());
error!("ERROR deserialize len {} tyid {:x}", frame.buf().len(), frame.tyid());
let n = frame.buf().len().min(64);
let s = String::from_utf8_lossy(&frame.buf()[..n]);

View File

@@ -29,7 +29,6 @@ use items_0::Events;
use items_0::MergeError;
use items_0::RangeOverlapInfo;
use merger::Mergeable;
use netpod::range::evrange::NanoRange;
use netpod::range::evrange::SeriesRange;
use netpod::timeunits::*;
use serde::Deserialize;
@@ -38,14 +37,6 @@ use serde::Serializer;
use std::collections::VecDeque;
use std::fmt;
/// Serde `skip_serializing_if` helper: returns `true` when the field is `false`,
/// i.e. the field is omitted from the serialized output in that case.
pub fn bool_is_false(x: &bool) -> bool {
    // Plain negation instead of `*x == false` (clippy::bool_comparison).
    !*x
}
/// Serde `skip_serializing_if` helper: returns `true` when the field is zero,
/// i.e. the field is omitted from the serialized output in that case.
pub fn is_zero_u32(x: &u32) -> bool {
    matches!(*x, 0)
}
pub fn ts_offs_from_abs(tss: &[u64]) -> (u64, VecDeque<u64>, VecDeque<u64>) {
let ts_anchor_sec = tss.first().map_or(0, |&k| k) / SEC;
let ts_anchor_ns = ts_anchor_sec * SEC;
@@ -208,9 +199,9 @@ pub trait TimeBinnableTypeAggregator: Send {
fn result_reset(&mut self, range: SeriesRange, expand: bool) -> Self::Output;
}
pub trait ChannelEventsInput: Stream<Item = Sitemty<ChannelEvents>> + items_0::Transformer + Send {}
pub trait ChannelEventsInput: Stream<Item = Sitemty<ChannelEvents>> + items_0::EventTransform + Send {}
impl<T> ChannelEventsInput for T where T: Stream<Item = Sitemty<ChannelEvents>> + items_0::Transformer + Send {}
impl<T> ChannelEventsInput for T where T: Stream<Item = Sitemty<ChannelEvents>> + items_0::EventTransform + Send {}
pub fn runfut<T, F>(fut: F) -> Result<T, err::Error>
where

View File

@@ -2,6 +2,7 @@ pub use crate::Error;
use futures_util::Stream;
use futures_util::StreamExt;
use items_0::container::ByteEstimate;
use items_0::streamitem::sitem_data;
use items_0::streamitem::RangeCompletableItem;
use items_0::streamitem::Sitemty;
@@ -16,6 +17,8 @@ use std::pin::Pin;
use std::task::Context;
use std::task::Poll;
const OUT_MAX_BYTES: u64 = 1024 * 200;
#[allow(unused)]
macro_rules! trace2 {
(__$($arg:tt)*) => ();
@@ -34,7 +37,7 @@ macro_rules! trace4 {
($($arg:tt)*) => (trace!($($arg)*));
}
pub trait Mergeable<Rhs = Self>: fmt::Debug + WithLen + Unpin {
pub trait Mergeable<Rhs = Self>: fmt::Debug + WithLen + ByteEstimate + Unpin {
fn ts_min(&self) -> Option<u64>;
fn ts_max(&self) -> Option<u64>;
fn new_empty(&self) -> Self;
@@ -316,7 +319,7 @@ where
if let Some(o) = self.out.as_ref() {
// A good threshold varies according to scalar type and shape.
// TODO replace this magic number by a bound on the bytes estimate.
if o.len() >= self.out_max_len || self.do_clear_out {
if o.len() >= self.out_max_len || o.byte_estimate() >= OUT_MAX_BYTES || self.do_clear_out {
trace3!("decide to output");
self.do_clear_out = false;
Break(Ready(Some(Ok(self.out.take().unwrap()))))
@@ -409,7 +412,7 @@ where
}
}
impl<T> items_0::Transformer for Merger<T> {
impl<T> items_0::EventTransform for Merger<T> {
fn query_transform_properties(&self) -> items_0::TransformProperties {
todo!()
}

View File

@@ -2,8 +2,8 @@ use futures_util::Future;
use futures_util::FutureExt;
use futures_util::Stream;
use futures_util::StreamExt;
use items_0::EventTransform;
use items_0::TransformProperties;
use items_0::Transformer;
use std::collections::VecDeque;
use std::pin::Pin;
use std::task::Context;
@@ -17,7 +17,7 @@ pub struct Enumerate2<T> {
impl<T> Enumerate2<T> {
pub fn new(inp: T) -> Self
where
T: Transformer,
T: EventTransform,
{
Self { inp, cnt: 0 }
}
@@ -43,7 +43,7 @@ where
}
}
impl<T> Transformer for Enumerate2<T> {
impl<T> EventTransform for Enumerate2<T> {
fn query_transform_properties(&self) -> TransformProperties {
todo!()
}
@@ -114,7 +114,7 @@ where
}
}
impl<T, F, Fut> Transformer for Then2<T, F, Fut> {
impl<T, F, Fut> EventTransform for Then2<T, F, Fut> {
fn query_transform_properties(&self) -> TransformProperties {
todo!()
}
@@ -123,11 +123,11 @@ impl<T, F, Fut> Transformer for Then2<T, F, Fut> {
pub trait TransformerExt {
fn enumerate2(self) -> Enumerate2<Self>
where
Self: Transformer + Sized;
Self: EventTransform + Sized;
fn then2<F, Fut>(self, f: F) -> Then2<Self, F, Fut>
where
Self: Transformer + Stream + Sized,
Self: EventTransform + Stream + Sized,
F: Fn(<Self as Stream>::Item) -> Fut,
Fut: Future;
}
@@ -135,14 +135,14 @@ pub trait TransformerExt {
impl<T> TransformerExt for T {
fn enumerate2(self) -> Enumerate2<Self>
where
Self: Transformer + Sized,
Self: EventTransform + Sized,
{
Enumerate2::new(self)
}
fn then2<F, Fut>(self, f: F) -> Then2<Self, F, Fut>
where
Self: Transformer + Stream + Sized,
Self: EventTransform + Stream + Sized,
F: Fn(<Self as Stream>::Item) -> Fut,
Fut: Future,
{
@@ -178,7 +178,7 @@ where
}
}
impl<T> Transformer for VecStream<T> {
impl<T> EventTransform for VecStream<T> {
fn query_transform_properties(&self) -> TransformProperties {
todo!()
}