Rename EventValues → ScalarEvents

Dominik Werder
2022-01-19 13:30:47 +01:00
parent 353db96a76
commit 9dbebb24cf
13 changed files with 107 additions and 94 deletions
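
For orientation, here is what the rename amounts to for callers, reconstructed from the hunks below; only the module path and the type name change, the shape of the container stays the same. The helper function is hypothetical and for illustration only.

// Before this commit:
//     use items::eventvalues::EventValues;
//     let evs = EventValues::<f64>::empty();
// After this commit:
use items::scalarevents::ScalarEvents;

// Hypothetical helper; `empty()` and the public `tss`/`values` fields are
// untouched by the rename (see the scalarevents hunks further down).
fn empty_scalar_batch() -> ScalarEvents<f64> {
    ScalarEvents { tss: vec![], values: vec![] }
}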

View File

@@ -2,7 +2,7 @@
name = "archapp"
version = "0.0.1-a.dev.4"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2018"
edition = "2021"
[dependencies]
tokio = { version = "1.7.1", features = ["io-util", "net", "time", "sync", "fs", "parking_lot"] }

View File

@@ -4,8 +4,8 @@ use commonio::ringbuf::RingBuf;
use commonio::{read_exact, seek};
use err::Error;
use items::eventsitem::EventsItem;
use items::eventvalues::EventValues;
use items::plainevents::{PlainEvents, ScalarPlainEvents, WavePlainEvents};
use items::scalarevents::ScalarEvents;
use items::waveevents::WaveEvents;
use netpod::log::*;
use netpod::timeunits::SEC;
@@ -353,7 +353,7 @@ ex_v!(f64, ex_v_f64);
macro_rules! read_msg {
($sty:ident, $exfs:ident, $exfv:ident, $evvar:ident, $rb:expr, $msglen:expr, $numsamples:expr, $dbrt:expr, $dbrcount:ident) => {
if $dbrcount == 1 {
let mut evs = EventValues::empty();
let mut evs = ScalarEvents::empty();
for _ in 0..$numsamples {
$rb.fill_min($msglen).await?;
let buf = $rb.data();
@@ -459,7 +459,7 @@ pub async fn read_data2(
DbrType::DbrTimeString => {
if dbrcount == 1 {
// TODO
let evs = ScalarPlainEvents::I8(EventValues::empty());
let evs = ScalarPlainEvents::I8(ScalarEvents::empty());
let plain = PlainEvents::Scalar(evs);
let item = EventsItem::Plain(plain);
item
@@ -494,7 +494,7 @@ pub async fn read_data_1(
DbrType::DbrTimeDouble => {
if datafile_header.dbr_count == 1 {
trace!("~~~~~~~~~~~~~~~~~~~~~ read scalar DbrTimeDouble");
let mut evs = EventValues {
let mut evs = ScalarEvents {
tss: vec![],
values: vec![],
};

View File

@@ -9,8 +9,8 @@ use futures_core::Stream;
use futures_util::StreamExt;
use items::binnedevents::{MultiBinWaveEvents, SingleBinWaveEvents, XBinnedEvents};
use items::eventsitem::EventsItem;
use items::eventvalues::EventValues;
use items::plainevents::{PlainEvents, ScalarPlainEvents, WavePlainEvents};
use items::scalarevents::ScalarEvents;
use items::waveevents::WaveEvents;
use items::xbinnedscalarevents::XBinnedScalarEvents;
use items::xbinnedwaveevents::XBinnedWaveEvents;
@@ -190,7 +190,7 @@ macro_rules! arm1 {
}
AggKind::Plain => arm2!(
$item,
EventValues,
ScalarEvents,
Plain,
PlainEvents,
Scalar,
@@ -200,7 +200,7 @@ macro_rules! arm1 {
),
AggKind::TimeWeightedScalar => arm2!(
$item,
EventValues,
ScalarEvents,
XBinnedEvents,
XBinnedEvents,
Scalar,
@@ -210,7 +210,7 @@ macro_rules! arm1 {
),
AggKind::DimXBins1 => arm2!(
$item,
EventValues,
ScalarEvents,
XBinnedEvents,
XBinnedEvents,
Scalar,
@@ -220,7 +220,7 @@ macro_rules! arm1 {
),
AggKind::DimXBinsN(_) => arm2!(
$item,
EventValues,
ScalarEvents,
XBinnedEvents,
XBinnedEvents,
Scalar,
@@ -643,7 +643,7 @@ fn events_item_to_framable(ei: EventsItem) -> Result<Box<dyn Framable + Send>, E
b.push(k);
(a, b)
});
let b = EventValues { tss: x, values: y };
let b = ScalarEvents { tss: x, values: y };
let b = Ok(StreamItem::DataItem(RangeCompletableItem::Data(b)));
let ret = Box::new(b);
Ok(ret)

View File

@@ -8,8 +8,8 @@ use async_channel::{bounded, Receiver};
use chrono::{TimeZone, Utc};
use err::{ErrStr, Error};
use items::eventsitem::EventsItem;
use items::eventvalues::EventValues;
use items::plainevents::{PlainEvents, ScalarPlainEvents, WavePlainEvents};
use items::scalarevents::ScalarEvents;
use items::waveevents::WaveEvents;
use netpod::log::*;
use netpod::{ArchiverAppliance, ChannelConfigQuery, ChannelConfigResponse};
@@ -41,7 +41,7 @@ pub struct PbFileReader {
fn parse_scalar_byte(m: &[u8], year: u32) -> Result<EventsItem, Error> {
let msg = crate::generated::EPICSEvent::ScalarByte::parse_from_bytes(m)
.map_err(|_| Error::with_msg(format!("can not parse pb-type {}", "ScalarByte")))?;
let mut t = EventValues::<i8> {
let mut t = ScalarEvents::<i8> {
tss: vec![],
values: vec![],
};
@@ -57,7 +57,7 @@ macro_rules! scalar_parse {
($m:expr, $year:expr, $pbt:ident, $eit:ident, $evty:ident) => {{
let msg = crate::generated::EPICSEvent::$pbt::parse_from_bytes($m)
.map_err(|e| Error::with_msg(format!("can not parse pb-type {} {:?}", stringify!($pbt), e)))?;
let mut t = EventValues::<$evty> {
let mut t = ScalarEvents::<$evty> {
tss: vec![],
values: vec![],
};

View File

@@ -8,7 +8,7 @@ use err::Error;
use futures_util::{StreamExt, TryStreamExt};
use http::StatusCode;
use hyper::Body;
use items::eventvalues::EventValues;
use items::scalarevents::ScalarEvents;
use items::numops::NumOps;
use items::{FrameType, RangeCompletableItem, Sitemty, StatsItem, StreamItem, WithLen};
use netpod::log::*;
@@ -147,11 +147,11 @@ where
None
}
StreamItem::DataItem(frame) => {
if frame.tyid() != <Sitemty<EventValues<NTY>> as FrameType>::FRAME_TYPE_ID {
if frame.tyid() != <Sitemty<ScalarEvents<NTY>> as FrameType>::FRAME_TYPE_ID {
error!("test receives unexpected tyid {:x}", frame.tyid());
None
} else {
match bincode::deserialize::<Sitemty<EventValues<NTY>>>(frame.buf()) {
match bincode::deserialize::<Sitemty<ScalarEvents<NTY>>>(frame.buf()) {
Ok(item) => match item {
Ok(item) => match item {
StreamItem::Log(item) => {

View File

@@ -1,5 +1,5 @@
use items::eventvalues::EventValues;
use items::numops::NumOps;
use items::scalarevents::ScalarEvents;
use items::EventsNodeProcessor;
use netpod::{AggKind, Shape};
use std::marker::PhantomData;
@@ -12,8 +12,8 @@ impl<NTY> EventsNodeProcessor for Identity<NTY>
where
NTY: NumOps,
{
type Input = EventValues<NTY>;
type Output = EventValues<NTY>;
type Input = ScalarEvents<NTY>;
type Output = ScalarEvents<NTY>;
fn create(_shape: Shape, _agg_kind: AggKind) -> Self {
Self { _m1: PhantomData }

View File

@@ -21,8 +21,8 @@ use items::{
Clearable, EventsNodeProcessor, FilterFittingInside, Framable, FrameType, PushableIndex, RangeCompletableItem,
Sitemty, StreamItem, TimeBinnableType, WithLen,
};
use netpod::log::*;
use netpod::query::{BinnedQuery, RawEventsQuery};
use netpod::{log::*, ScalarType};
use netpod::{
x_bin_count, BinnedRange, NodeConfigCached, PerfOpts, PreBinnedPatchIterator, PreBinnedPatchRange, Shape,
};
@@ -54,6 +54,7 @@ impl ChannelExecFunction for BinnedBinaryChannelExec {
fn exec<NTY, END, EVS, ENP>(
self,
_byte_order: END,
_scalar_type: ScalarType,
shape: Shape,
event_value_shape: EVS,
_events_node_proc: ENP,
@@ -297,6 +298,7 @@ impl ChannelExecFunction for BinnedJsonChannelExec {
fn exec<NTY, END, EVS, ENP>(
self,
_byte_order: END,
_scalar_type: ScalarType,
shape: Shape,
event_value_shape: EVS,
_events_node_proc: ENP,

View File

@@ -9,7 +9,7 @@ use err::Error;
use futures_core::Stream;
use futures_util::future::FutureExt;
use futures_util::StreamExt;
use items::eventvalues::EventValues;
use items::scalarevents::ScalarEvents;
use items::numops::{BoolNum, NumOps};
use items::streams::{Collectable, Collector};
use items::{
@@ -34,6 +34,7 @@ pub trait ChannelExecFunction {
fn exec<NTY, END, EVS, ENP>(
self,
byte_order: END,
scalar_type: ScalarType,
shape: Shape,
event_value_shape: EVS,
events_node_proc: ENP,
@@ -59,6 +60,7 @@ pub trait ChannelExecFunction {
fn channel_exec_nty_end_evs_enp<F, NTY, END, EVS, ENP>(
f: F,
byte_order: END,
scalar_type: ScalarType,
shape: Shape,
event_value_shape: EVS,
events_node_proc: ENP,
@@ -79,15 +81,21 @@ where
Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>:
FrameType + Framable + DeserializeOwned,
{
Ok(f.exec(byte_order, shape, event_value_shape, events_node_proc)?)
Ok(f.exec(byte_order, scalar_type, shape, event_value_shape, events_node_proc)?)
}
fn channel_exec_nty_end<F, NTY, END>(f: F, byte_order: END, shape: Shape, agg_kind: AggKind) -> Result<F::Output, Error>
fn channel_exec_nty_end<F, NTY, END>(
f: F,
byte_order: END,
scalar_type: ScalarType,
shape: Shape,
agg_kind: AggKind,
) -> Result<F::Output, Error>
where
F: ChannelExecFunction,
NTY: NumOps + NumFromBytes<NTY, END> + 'static,
END: Endianness + 'static,
EventValues<NTY>: Collectable,
ScalarEvents<NTY>: Collectable,
{
match shape {
Shape::Scalar => {
@@ -97,22 +105,22 @@ where
AggKind::Plain => {
let evs = EventValuesDim0Case::new();
let events_node_proc = <<EventValuesDim0Case<NTY> as EventValueShape<NTY, END>>::NumXAggPlain as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, shape, evs, events_node_proc)
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::TimeWeightedScalar => {
let evs = EventValuesDim0Case::new();
let events_node_proc = <<EventValuesDim0Case<NTY> as EventValueShape<NTY, END>>::NumXAggToSingleBin as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, shape, evs, events_node_proc)
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::DimXBins1 => {
let evs = EventValuesDim0Case::new();
let events_node_proc = <<EventValuesDim0Case<NTY> as EventValueShape<NTY, END>>::NumXAggToSingleBin as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, shape, evs, events_node_proc)
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::DimXBinsN(_) => {
let evs = EventValuesDim0Case::new();
let events_node_proc = <<EventValuesDim0Case<NTY> as EventValueShape<NTY, END>>::NumXAggToNBins as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, shape, evs, events_node_proc)
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
}
}
@@ -123,22 +131,22 @@ where
AggKind::Plain => {
let evs = EventValuesDim1Case::new(n);
let events_node_proc = <<EventValuesDim1Case<NTY> as EventValueShape<NTY, END>>::NumXAggPlain as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, shape, evs, events_node_proc)
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::TimeWeightedScalar => {
let evs = EventValuesDim1Case::new(n);
let events_node_proc = <<EventValuesDim1Case<NTY> as EventValueShape<NTY, END>>::NumXAggToSingleBin as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, shape, evs, events_node_proc)
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::DimXBins1 => {
let evs = EventValuesDim1Case::new(n);
let events_node_proc = <<EventValuesDim1Case<NTY> as EventValueShape<NTY, END>>::NumXAggToSingleBin as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, shape, evs, events_node_proc)
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
AggKind::DimXBinsN(_) => {
let evs = EventValuesDim1Case::new(n);
let events_node_proc = <<EventValuesDim1Case<NTY> as EventValueShape<NTY, END>>::NumXAggToNBins as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
channel_exec_nty_end_evs_enp(f, byte_order, shape, evs, events_node_proc)
channel_exec_nty_end_evs_enp(f, byte_order, scalar_type, shape, evs, events_node_proc)
}
}
}
@@ -150,10 +158,10 @@ where
}
macro_rules! match_end {
($f:expr, $nty:ident, $end:expr, $shape:expr, $agg_kind:expr, $node_config:expr) => {
($f:expr, $nty:ident, $end:expr, $scalar_type:expr, $shape:expr, $agg_kind:expr, $node_config:expr) => {
match $end {
ByteOrder::LE => channel_exec_nty_end::<_, $nty, _>($f, LittleEndian {}, $shape, $agg_kind),
ByteOrder::BE => channel_exec_nty_end::<_, $nty, _>($f, BigEndian {}, $shape, $agg_kind),
ByteOrder::LE => channel_exec_nty_end::<_, $nty, _>($f, LittleEndian {}, $scalar_type, $shape, $agg_kind),
ByteOrder::BE => channel_exec_nty_end::<_, $nty, _>($f, BigEndian {}, $scalar_type, $shape, $agg_kind),
}
};
}
@@ -170,17 +178,17 @@ where
F: ChannelExecFunction,
{
match scalar_type {
ScalarType::U8 => match_end!(f, u8, byte_order, shape, agg_kind, node_config),
ScalarType::U16 => match_end!(f, u16, byte_order, shape, agg_kind, node_config),
ScalarType::U32 => match_end!(f, u32, byte_order, shape, agg_kind, node_config),
ScalarType::U64 => match_end!(f, u64, byte_order, shape, agg_kind, node_config),
ScalarType::I8 => match_end!(f, i8, byte_order, shape, agg_kind, node_config),
ScalarType::I16 => match_end!(f, i16, byte_order, shape, agg_kind, node_config),
ScalarType::I32 => match_end!(f, i32, byte_order, shape, agg_kind, node_config),
ScalarType::I64 => match_end!(f, i64, byte_order, shape, agg_kind, node_config),
ScalarType::F32 => match_end!(f, f32, byte_order, shape, agg_kind, node_config),
ScalarType::F64 => match_end!(f, f64, byte_order, shape, agg_kind, node_config),
ScalarType::BOOL => match_end!(f, BoolNum, byte_order, shape, agg_kind, node_config),
ScalarType::U8 => match_end!(f, u8, byte_order, scalar_type, shape, agg_kind, node_config),
ScalarType::U16 => match_end!(f, u16, byte_order, scalar_type, shape, agg_kind, node_config),
ScalarType::U32 => match_end!(f, u32, byte_order, scalar_type, shape, agg_kind, node_config),
ScalarType::U64 => match_end!(f, u64, byte_order, scalar_type, shape, agg_kind, node_config),
ScalarType::I8 => match_end!(f, i8, byte_order, scalar_type, shape, agg_kind, node_config),
ScalarType::I16 => match_end!(f, i16, byte_order, scalar_type, shape, agg_kind, node_config),
ScalarType::I32 => match_end!(f, i32, byte_order, scalar_type, shape, agg_kind, node_config),
ScalarType::I64 => match_end!(f, i64, byte_order, scalar_type, shape, agg_kind, node_config),
ScalarType::F32 => match_end!(f, f32, byte_order, scalar_type, shape, agg_kind, node_config),
ScalarType::F64 => match_end!(f, f64, byte_order, scalar_type, shape, agg_kind, node_config),
ScalarType::BOOL => match_end!(f, BoolNum, byte_order, scalar_type, shape, agg_kind, node_config),
}
}
@@ -246,6 +254,7 @@ impl ChannelExecFunction for PlainEvents {
fn exec<NTY, END, EVS, ENP>(
self,
byte_order: END,
_scalar_type: ScalarType,
_shape: Shape,
event_value_shape: EVS,
_events_node_proc: ENP,
@@ -407,6 +416,7 @@ impl ChannelExecFunction for PlainEventsJson {
fn exec<NTY, END, EVS, ENP>(
self,
byte_order: END,
_scalar_type: ScalarType,
_shape: Shape,
event_value_shape: EVS,
_events_node_proc: ENP,
@@ -456,6 +466,7 @@ impl ChannelExecFunction for PlainEventsJson {
}
}
// TODO remove when done.
pub fn dummy_impl() {
let channel: Channel = err::todoval();
let range: NanoRange = err::todoval();
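
Besides the rename, the hunks above thread the channel's ScalarType through the whole exec path (the trait method, the helper functions, and the match_end! macro). Below is a sketch of the resulting trait method, assembled from the signature fragments in the diff; the trait bounds and the rest of the trait are abbreviated, not verbatim.

use err::Error;
use netpod::{ScalarType, Shape};

// Sketch only: the parameter list matches the hunks above; everything else
// (where-bounds, other trait items) is abbreviated.
pub trait ChannelExecFunction {
    type Output;

    fn exec<NTY, END, EVS, ENP>(
        self,
        byte_order: END,
        scalar_type: ScalarType, // added by this commit
        shape: Shape,
        event_value_shape: EVS,
        events_node_proc: ENP,
    ) -> Result<Self::Output, Error>;
}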

View File

@@ -5,7 +5,7 @@ use err::Error;
use futures_core::Stream;
use futures_util::StreamExt;
use items::eventsitem::EventsItem;
use items::eventvalues::EventValues;
use items::scalarevents::ScalarEvents;
use items::numops::{BoolNum, NumOps};
use items::plainevents::{PlainEvents, ScalarPlainEvents};
use items::waveevents::{WaveEvents, WaveNBinner, WavePlainProc, WaveXBinner};
@@ -100,7 +100,7 @@ where
NTY: NumOps + NumFromBytes<NTY, END>,
{
type Output = NTY;
type Batch = EventValues<NTY>;
type Batch = ScalarEvents<NTY>;
fn convert(&self, buf: &[u8], big_endian: bool) -> Result<Self::Output, Error> {
Ok(NTY::convert(buf, big_endian))
@@ -351,52 +351,52 @@ impl EventsItemStream {
Shape::Scalar => match &tyi.as_ref().unwrap().0 {
ScalarType::U8 => {
// TODO
let cont = EventValues::<i8>::empty();
let cont = ScalarEvents::<i8>::empty();
ret = Some(EventsItem::Plain(PlainEvents::Scalar(ScalarPlainEvents::I8(cont))));
}
ScalarType::U16 => {
// TODO
let cont = EventValues::<i16>::empty();
let cont = ScalarEvents::<i16>::empty();
ret = Some(EventsItem::Plain(PlainEvents::Scalar(ScalarPlainEvents::I16(cont))));
}
ScalarType::U32 => {
// TODO
let cont = EventValues::<i32>::empty();
let cont = ScalarEvents::<i32>::empty();
ret = Some(EventsItem::Plain(PlainEvents::Scalar(ScalarPlainEvents::I32(cont))));
}
ScalarType::U64 => {
// TODO
let cont = EventValues::<i32>::empty();
let cont = ScalarEvents::<i32>::empty();
ret = Some(EventsItem::Plain(PlainEvents::Scalar(ScalarPlainEvents::I32(cont))));
}
ScalarType::I8 => {
let cont = EventValues::<i8>::empty();
let cont = ScalarEvents::<i8>::empty();
ret = Some(EventsItem::Plain(PlainEvents::Scalar(ScalarPlainEvents::I8(cont))));
}
ScalarType::I16 => {
let cont = EventValues::<i16>::empty();
let cont = ScalarEvents::<i16>::empty();
ret = Some(EventsItem::Plain(PlainEvents::Scalar(ScalarPlainEvents::I16(cont))));
}
ScalarType::I32 => {
let cont = EventValues::<i32>::empty();
let cont = ScalarEvents::<i32>::empty();
ret = Some(EventsItem::Plain(PlainEvents::Scalar(ScalarPlainEvents::I32(cont))));
}
ScalarType::I64 => {
// TODO
let cont = EventValues::<i32>::empty();
let cont = ScalarEvents::<i32>::empty();
ret = Some(EventsItem::Plain(PlainEvents::Scalar(ScalarPlainEvents::I32(cont))));
}
ScalarType::F32 => {
let cont = EventValues::<f32>::empty();
let cont = ScalarEvents::<f32>::empty();
ret = Some(EventsItem::Plain(PlainEvents::Scalar(ScalarPlainEvents::F32(cont))));
}
ScalarType::F64 => {
let cont = EventValues::<f64>::empty();
let cont = ScalarEvents::<f64>::empty();
ret = Some(EventsItem::Plain(PlainEvents::Scalar(ScalarPlainEvents::F64(cont))));
}
ScalarType::BOOL => {
// TODO
let cont = EventValues::<i8>::empty();
let cont = ScalarEvents::<i8>::empty();
ret = Some(EventsItem::Plain(PlainEvents::Scalar(ScalarPlainEvents::I8(cont))));
}
},

View File

@@ -2,7 +2,7 @@
name = "items"
version = "0.0.1-a.dev.4"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2018"
edition = "2021"
[dependencies]
serde = { version = "1.0", features = ["derive"] }

View File

@@ -1,6 +1,5 @@
pub mod binnedevents;
pub mod eventsitem;
pub mod eventvalues;
pub mod frame;
pub mod inmem;
pub mod minmaxavgbins;
@@ -8,6 +7,7 @@ pub mod minmaxavgdim1bins;
pub mod minmaxavgwavebins;
pub mod numops;
pub mod plainevents;
pub mod scalarevents;
pub mod streams;
pub mod waveevents;
pub mod xbinnedscalarevents;

View File

@@ -1,6 +1,6 @@
use crate::binnedevents::{SingleBinWaveEvents, XBinnedEvents};
use crate::eventsitem::EventsItem;
use crate::eventvalues::EventValues;
use crate::scalarevents::ScalarEvents;
use crate::waveevents::{WaveEvents, WaveXBinner};
use crate::xbinnedscalarevents::XBinnedScalarEvents;
use crate::{Appendable, Clearable, EventsNodeProcessor, PushableIndex, WithLen, WithTimestamps};
@@ -12,16 +12,16 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize)]
pub enum ScalarPlainEvents {
U8(EventValues<u8>),
U16(EventValues<u16>),
U32(EventValues<u32>),
U64(EventValues<u64>),
I8(EventValues<i8>),
I16(EventValues<i16>),
I32(EventValues<i32>),
I64(EventValues<i64>),
F32(EventValues<f32>),
F64(EventValues<f64>),
U8(ScalarEvents<u8>),
U16(ScalarEvents<u16>),
U32(ScalarEvents<u32>),
U64(ScalarEvents<u64>),
I8(ScalarEvents<i8>),
I16(ScalarEvents<i16>),
I32(ScalarEvents<i32>),
I64(ScalarEvents<i64>),
F32(ScalarEvents<f32>),
F64(ScalarEvents<f64>),
}
impl ScalarPlainEvents {

View File

@@ -16,19 +16,19 @@ use tokio::fs::File;
// TODO add pulse. Is this even used??
// TODO change name, it's not only about values, but more like batch of whole events.
#[derive(Serialize, Deserialize)]
pub struct EventValues<NTY> {
pub struct ScalarEvents<NTY> {
pub tss: Vec<u64>,
pub values: Vec<NTY>,
}
impl<NTY> SitemtyFrameType for EventValues<NTY>
impl<NTY> SitemtyFrameType for ScalarEvents<NTY>
where
NTY: NumOps,
{
const FRAME_TYPE_ID: u32 = crate::EVENT_VALUES_FRAME_TYPE_ID + NTY::SUB;
}
impl<NTY> EventValues<NTY> {
impl<NTY> ScalarEvents<NTY> {
pub fn empty() -> Self {
Self {
tss: vec![],
@@ -37,7 +37,7 @@ impl<NTY> EventValues<NTY> {
}
}
impl<NTY> fmt::Debug for EventValues<NTY>
impl<NTY> fmt::Debug for ScalarEvents<NTY>
where
NTY: fmt::Debug,
{
@@ -54,7 +54,7 @@ where
}
}
impl<NTY> WithLen for EventValues<NTY>
impl<NTY> WithLen for ScalarEvents<NTY>
where
NTY: NumOps,
{
@@ -63,7 +63,7 @@ where
}
}
impl<NTY> WithTimestamps for EventValues<NTY>
impl<NTY> WithTimestamps for ScalarEvents<NTY>
where
NTY: NumOps,
{
@@ -72,7 +72,7 @@ where
}
}
impl<NTY> ByteEstimate for EventValues<NTY>
impl<NTY> ByteEstimate for ScalarEvents<NTY>
where
NTY: NumOps,
{
@@ -86,7 +86,7 @@ where
}
}
impl<NTY> RangeOverlapInfo for EventValues<NTY> {
impl<NTY> RangeOverlapInfo for ScalarEvents<NTY> {
fn ends_before(&self, range: NanoRange) -> bool {
match self.tss.last() {
Some(&ts) => ts < range.beg,
@@ -109,7 +109,7 @@ impl<NTY> RangeOverlapInfo for EventValues<NTY> {
}
}
impl<NTY> FitsInside for EventValues<NTY> {
impl<NTY> FitsInside for ScalarEvents<NTY> {
fn fits_inside(&self, range: NanoRange) -> Fits {
if self.tss.is_empty() {
Fits::Empty
@@ -133,7 +133,7 @@ impl<NTY> FitsInside for EventValues<NTY> {
}
}
impl<NTY> FilterFittingInside for EventValues<NTY> {
impl<NTY> FilterFittingInside for ScalarEvents<NTY> {
fn filter_fitting_inside(self, fit_range: NanoRange) -> Option<Self> {
match self.fits_inside(fit_range) {
Fits::Inside | Fits::PartlyGreater | Fits::PartlyLower | Fits::PartlyLowerAndGreater => Some(self),
@@ -142,7 +142,7 @@ impl<NTY> FilterFittingInside for EventValues<NTY> {
}
}
impl<NTY> PushableIndex for EventValues<NTY>
impl<NTY> PushableIndex for ScalarEvents<NTY>
where
NTY: NumOps,
{
@@ -152,7 +152,7 @@ where
}
}
impl<NTY> Appendable for EventValues<NTY>
impl<NTY> Appendable for ScalarEvents<NTY>
where
NTY: NumOps,
{
@@ -166,14 +166,14 @@ where
}
}
impl<NTY> Clearable for EventValues<NTY> {
impl<NTY> Clearable for ScalarEvents<NTY> {
fn clear(&mut self) {
self.tss.clear();
self.values.clear();
}
}
impl<NTY> ReadableFromFile for EventValues<NTY>
impl<NTY> ReadableFromFile for ScalarEvents<NTY>
where
NTY: NumOps,
{
@@ -187,7 +187,7 @@ where
}
}
impl<NTY> TimeBinnableType for EventValues<NTY>
impl<NTY> TimeBinnableType for ScalarEvents<NTY>
where
NTY: NumOps,
{
@@ -204,7 +204,7 @@ where
}
pub struct EventValuesCollector<NTY> {
vals: EventValues<NTY>,
vals: ScalarEvents<NTY>,
range_complete: bool,
timed_out: bool,
}
@@ -212,7 +212,7 @@ pub struct EventValuesCollector<NTY> {
impl<NTY> EventValuesCollector<NTY> {
pub fn new() -> Self {
Self {
vals: EventValues::empty(),
vals: ScalarEvents::empty(),
range_complete: false,
timed_out: false,
}
@@ -244,7 +244,7 @@ impl<NTY> Collector for EventValuesCollector<NTY>
where
NTY: NumOps,
{
type Input = EventValues<NTY>;
type Input = ScalarEvents<NTY>;
type Output = EventValuesCollectorOutput<NTY>;
fn ingest(&mut self, src: &Self::Input) {
@@ -273,7 +273,7 @@ where
}
}
impl<NTY> Collectable for EventValues<NTY>
impl<NTY> Collectable for ScalarEvents<NTY>
where
NTY: NumOps,
{
@@ -467,7 +467,7 @@ impl<NTY> TimeBinnableTypeAggregator for EventValuesAggregator<NTY>
where
NTY: NumOps,
{
type Input = EventValues<NTY>;
type Input = ScalarEvents<NTY>;
type Output = MinMaxAvgBins<NTY>;
fn range(&self) -> &NanoRange {
@@ -493,7 +493,7 @@ where
}
}
impl<NTY> EventAppendable for EventValues<NTY>
impl<NTY> EventAppendable for ScalarEvents<NTY>
where
NTY: NumOps,
{