Move types into separate module

This commit is contained in:
Dominik Werder
2021-07-07 17:12:22 +02:00
parent a8f15da101
commit c92e266662
41 changed files with 3516 additions and 3546 deletions

View File

@@ -2,7 +2,6 @@
use bytes::BytesMut;
use err::Error;
use netpod::NanoRange;
use netpod::ScalarType;
use serde::{Deserialize, Serialize};
use std::time::Duration;
@@ -57,20 +56,6 @@ impl std::fmt::Debug for ValuesDim1 {
}
}
/// How a time-ordered container's span relates to a query range.
pub enum Fits {
    /// Container holds no data at all.
    Empty,
    /// Entirely before the range.
    Lower,
    /// Entirely after the range.
    Greater,
    /// Fully contained within the range.
    Inside,
    /// Overlaps the range and extends below it.
    PartlyLower,
    /// Overlaps the range and extends above it.
    PartlyGreater,
    /// Overlaps the range and extends both below and above it.
    PartlyLowerAndGreater,
}

/// Classify how `self`'s timestamp span sits relative to `range`.
pub trait FitsInside {
    fn fits_inside(&self, range: NanoRange) -> Fits;
}

/// Associates a numeric type with a constant.
/// NOTE(review): `BY` is presumably the element size in bytes — confirm at impl sites.
trait NumEx {
    const BY: usize;
}

View File

@@ -1,11 +1,8 @@
use crate::agg::streams::Appendable;
use crate::binned::{FilterFittingInside, RangeOverlapInfo, ReadableFromFile};
use futures_core::Stream;
use futures_util::StreamExt;
use items::{RangeCompletableItem, Sitemty, StreamItem};
use items::{RangeCompletableItem, Sitemty, StreamItem, TimeBinnableType, TimeBinnableTypeAggregator};
use netpod::log::*;
use netpod::{BinnedRange, NanoRange};
use serde::Serialize;
use netpod::BinnedRange;
use std::collections::VecDeque;
use std::marker::PhantomData;
use std::pin::Pin;
@@ -15,22 +12,6 @@ pub struct DefaultBinsTimeBinner<NTY> {
_m1: PhantomData<NTY>,
}
/// Folds a stream of `Input` items over one fixed time range into a single `Output`.
pub trait TimeBinnableTypeAggregator: Send {
    type Input: TimeBinnableType;
    type Output: TimeBinnableType;
    /// The time range this aggregator covers.
    fn range(&self) -> &NanoRange;
    /// Fold one incoming item into the running aggregate.
    fn ingest(&mut self, item: &Self::Input);
    /// Consume the aggregator and emit its result.
    fn result(self) -> Self::Output;
}

/// A container that can be aggregated into time bins via an associated aggregator.
pub trait TimeBinnableType:
    Send + Unpin + RangeOverlapInfo + FilterFittingInside + Appendable + Serialize + ReadableFromFile
{
    type Output: TimeBinnableType;
    type Aggregator: TimeBinnableTypeAggregator<Input = Self, Output = Self::Output> + Send + Unpin;
    /// Create an aggregator for `range`.
    /// `bin_count` is interpreted per implementation (some ignore it — see impls).
    fn aggregator(range: NanoRange, bin_count: usize) -> Self::Aggregator;
}
pub struct TBinnerStream<S, TBT>
where
S: Stream<Item = Sitemty<TBT>>,

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,5 @@
use crate::agg::streams::Appendable;
use crate::binned::{MakeBytesFrame, RangeOverlapInfo};
use crate::frame::makeframe::make_frame;
use bytes::{BufMut, Bytes, BytesMut};
use err::Error;
use items::{RangeCompletableItem, SitemtyFrameType, StreamItem};
use items::{Appendable, RangeOverlapInfo, SitemtyFrameType};
use netpod::log::*;
use netpod::NanoRange;
use serde::{Deserialize, Serialize};
@@ -128,11 +124,13 @@ impl MinMaxAvgScalarEventBatch {
}
}
/*
TODO remove?
impl MakeBytesFrame for Result<StreamItem<RangeCompletableItem<MinMaxAvgScalarEventBatch>>, Error> {
fn make_bytes_frame(&self) -> Result<Bytes, Error> {
Ok(make_frame(self)?.freeze())
}
}
}*/
impl RangeOverlapInfo for MinMaxAvgScalarEventBatch {
fn ends_before(&self, range: NanoRange) -> bool {

View File

@@ -1,217 +1 @@
use crate::agg::streams::{Appendable, ToJsonBytes};
use crate::agg::{Fits, FitsInside};
use crate::binned::MakeBytesFrame;
use crate::frame::makeframe::make_frame;
use bytes::{BufMut, Bytes, BytesMut};
use err::Error;
use items::{RangeCompletableItem, SitemtyFrameType, StreamItem};
use netpod::log::*;
use netpod::timeunits::SEC;
use netpod::NanoRange;
use serde::{Deserialize, Serialize};
use std::mem::size_of;
/// Batch of time-binned scalar statistics. All fields are parallel vectors
/// indexed by bin.
#[allow(dead_code)]
#[derive(Serialize, Deserialize)]
pub struct MinMaxAvgScalarBinBatch {
    /// Bin start timestamps (nanoseconds; Debug divides by SEC for display).
    pub ts1s: Vec<u64>,
    /// Bin end timestamps (nanoseconds).
    pub ts2s: Vec<u64>,
    /// Number of events contributing to each bin.
    pub counts: Vec<u64>,
    /// Per-bin minimum.
    pub mins: Vec<f32>,
    /// Per-bin maximum.
    pub maxs: Vec<f32>,
    /// Per-bin average.
    pub avgs: Vec<f32>,
}

/// Wire-protocol frame type id for this batch type.
impl SitemtyFrameType for MinMaxAvgScalarBinBatch {
    const FRAME_TYPE_ID: u32 = 0x200;
}
impl MinMaxAvgScalarBinBatch {
    /// A batch with zero bins.
    pub fn empty() -> Self {
        Self {
            ts1s: vec![],
            ts2s: vec![],
            counts: vec![],
            mins: vec![],
            maxs: vec![],
            avgs: vec![],
        }
    }

    /// Number of bins in this batch.
    pub fn len(&self) -> usize {
        self.ts1s.len()
    }

    /// Decode a batch from the legacy full-frame layout: a little-endian u32
    /// element count, then the six columns stored back-to-back
    /// (ts1s, ts2s, counts as u64; mins, maxs, avgs as f32).
    ///
    /// NOTE(review): only the 4-byte count is length-checked; the column reads
    /// assume `buf` holds `4 + n * (3*8 + 3*4)` bytes — confirm at call sites.
    #[allow(dead_code)]
    fn old_from_full_frame(buf: &Bytes) -> Self {
        info!("MinMaxAvgScalarBinBatch construct from full frame len {}", buf.len());
        assert!(buf.len() >= 4);
        let mut g = MinMaxAvgScalarBinBatch::empty();
        let n1;
        // SAFETY: the assert above guarantees at least 4 readable bytes.
        unsafe {
            let ptr = (&buf[0] as *const u8) as *const [u8; 4];
            n1 = u32::from_le_bytes(*ptr);
            trace!(
                "MinMaxAvgScalarBinBatch construct --- +++ --- +++ --- +++ n1: {}",
                n1
            );
        }
        if n1 == 0 {
            g
        } else {
            let n2 = n1 as usize;
            g.ts1s.reserve(n2);
            g.ts2s.reserve(n2);
            g.counts.reserve(n2);
            g.mins.reserve(n2);
            g.maxs.reserve(n2);
            g.avgs.reserve(n2);
            // SAFETY: set_len is valid because each vector reserved n2 elements
            // above and every slot is written in the copy loops below.
            unsafe {
                // TODO Can I unsafely create ptrs and just assign them?
                // TODO What are cases where I really need transmute?
                g.ts1s.set_len(n2);
                g.ts2s.set_len(n2);
                g.counts.set_len(n2);
                g.mins.set_len(n2);
                g.maxs.set_len(n2);
                g.avgs.set_len(n2);
                let ptr0 = &buf[4] as *const u8;
                {
                    let ptr1 = ptr0.add(0) as *const u64;
                    for i1 in 0..n2 {
                        g.ts1s[i1] = *ptr1.add(i1);
                    }
                }
                {
                    let ptr1 = ptr0.add((8) * n2) as *const u64;
                    for i1 in 0..n2 {
                        g.ts2s[i1] = *ptr1.add(i1);
                    }
                }
                {
                    let ptr1 = ptr0.add((8 + 8) * n2) as *const u64;
                    for i1 in 0..n2 {
                        g.counts[i1] = *ptr1.add(i1);
                    }
                }
                {
                    let ptr1 = ptr0.add((8 + 8 + 8) * n2) as *const f32;
                    for i1 in 0..n2 {
                        g.mins[i1] = *ptr1.add(i1);
                    }
                }
                {
                    let ptr1 = ptr0.add((8 + 8 + 8 + 4) * n2) as *const f32;
                    for i1 in 0..n2 {
                        // BUG FIX: previously read `*ptr1` (element 0) for every bin.
                        g.maxs[i1] = *ptr1.add(i1);
                    }
                }
                {
                    let ptr1 = ptr0.add((8 + 8 + 8 + 4 + 4) * n2) as *const f32;
                    for i1 in 0..n2 {
                        // BUG FIX: previously read `*ptr1` (element 0) for every bin.
                        g.avgs[i1] = *ptr1.add(i1);
                    }
                }
            }
            info!("CONTENT {:?}", g);
            g
        }
    }
}
impl std::fmt::Debug for MinMaxAvgScalarBinBatch {
    // Human-oriented dump: timestamps are printed in whole seconds (ns / SEC).
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            fmt,
            "MinMaxAvgScalarBinBatch count {} ts1s {:?} ts2s {:?} counts {:?} mins {:?} maxs {:?} avgs {:?}",
            self.ts1s.len(),
            self.ts1s.iter().map(|k| k / SEC).collect::<Vec<_>>(),
            self.ts2s.iter().map(|k| k / SEC).collect::<Vec<_>>(),
            self.counts,
            self.mins,
            self.maxs,
            self.avgs,
        )
    }
}
impl FitsInside for MinMaxAvgScalarBinBatch {
    /// Classify where this batch's bin span [first ts1, last ts2] lies
    /// relative to `range`.
    fn fits_inside(&self, range: NanoRange) -> Fits {
        if self.ts1s.is_empty() {
            return Fits::Empty;
        }
        let t1 = self.ts1s[0];
        let t2 = *self.ts2s.last().unwrap();
        if t2 <= range.beg {
            return Fits::Lower;
        }
        if t1 >= range.end {
            return Fits::Greater;
        }
        // Overlapping: distinguish by which sides stick out.
        let below = t1 < range.beg;
        let above = t2 > range.end;
        match (below, above) {
            (true, true) => Fits::PartlyLowerAndGreater,
            (true, false) => Fits::PartlyLower,
            (false, true) => Fits::PartlyGreater,
            (false, false) => Fits::Inside,
        }
    }
}
impl MinMaxAvgScalarBinBatch {
    /// Encode in the legacy wire layout: little-endian u32 element count,
    /// then the six columns back-to-back (counterpart of `old_from_full_frame`).
    #[allow(dead_code)]
    fn old_serialized(&self) -> Bytes {
        let n1 = self.ts1s.len();
        // Capacity: header + n1 * (three u64 columns + three f32 columns).
        let mut g = BytesMut::with_capacity(4 + n1 * (3 * 8 + 3 * 4));
        g.put_u32_le(n1 as u32);
        if n1 > 0 {
            // SAFETY (each block below): the raw slice views exactly the n1
            // elements owned by the corresponding vector, reinterpreted as bytes.
            let ptr = &self.ts1s[0] as *const u64 as *const u8;
            let a = unsafe { std::slice::from_raw_parts(ptr, size_of::<u64>() * n1) };
            g.put(a);
            let ptr = &self.ts2s[0] as *const u64 as *const u8;
            let a = unsafe { std::slice::from_raw_parts(ptr, size_of::<u64>() * n1) };
            g.put(a);
            let ptr = &self.counts[0] as *const u64 as *const u8;
            let a = unsafe { std::slice::from_raw_parts(ptr, size_of::<u64>() * n1) };
            g.put(a);
            let ptr = &self.mins[0] as *const f32 as *const u8;
            let a = unsafe { std::slice::from_raw_parts(ptr, size_of::<f32>() * n1) };
            g.put(a);
            let ptr = &self.maxs[0] as *const f32 as *const u8;
            let a = unsafe { std::slice::from_raw_parts(ptr, size_of::<f32>() * n1) };
            g.put(a);
            let ptr = &self.avgs[0] as *const f32 as *const u8;
            let a = unsafe { std::slice::from_raw_parts(ptr, size_of::<f32>() * n1) };
            g.put(a);
        }
        g.freeze()
    }
}
/// Frame a streamed batch result for the in-memory wire protocol.
impl MakeBytesFrame for Result<StreamItem<RangeCompletableItem<MinMaxAvgScalarBinBatch>>, Error> {
    fn make_bytes_frame(&self) -> Result<Bytes, Error> {
        Ok(make_frame(self)?.freeze())
    }
}

impl Appendable for MinMaxAvgScalarBinBatch {
    fn empty() -> Self {
        Self::empty()
    }
    /// Concatenate all parallel columns of `src` onto `self`.
    fn append(&mut self, src: &Self) {
        self.ts1s.extend_from_slice(&src.ts1s);
        self.ts2s.extend_from_slice(&src.ts2s);
        self.counts.extend_from_slice(&src.counts);
        self.mins.extend_from_slice(&src.mins);
        self.maxs.extend_from_slice(&src.maxs);
        self.avgs.extend_from_slice(&src.avgs);
    }
}

/// JSON-encode the whole batch via serde.
impl ToJsonBytes for MinMaxAvgScalarBinBatch {
    fn to_json_bytes(&self) -> Result<Vec<u8>, Error> {
        Ok(serde_json::to_vec(self)?)
    }
}

View File

@@ -1,30 +1 @@
use crate::binned::WithLen;
use err::Error;
use serde::Serialize;
/// Accumulates streamed items into a final serializable result.
pub trait Collector: Send + Unpin + WithLen {
    type Input: Collectable;
    type Output: Serialize;
    /// Fold one item into the collection.
    fn ingest(&mut self, src: &Self::Input);
    /// Record that the full requested range was delivered.
    fn set_range_complete(&mut self);
    /// Record that collection stopped early due to a timeout.
    fn set_timed_out(&mut self);
    /// Consume the collector and produce the final output.
    fn result(self) -> Result<Self::Output, Error>;
}

/// Types that know how to create their own `Collector`.
pub trait Collectable {
    type Collector: Collector<Input = Self>;
    /// `bin_count_exp`: expected number of bins (impl-dependent; some ignore it).
    fn new_collector(bin_count_exp: u32) -> Self::Collector;
}

/// Serialize `self` to JSON bytes.
pub trait ToJsonBytes {
    fn to_json_bytes(&self) -> Result<Vec<u8>, Error>;
}

/// Convert `self` into a JSON-encodable view.
pub trait ToJsonResult {
    fn to_json_result(&self) -> Result<Box<dyn ToJsonBytes>, Error>;
}

/// Containers that can be created empty and concatenated in place.
pub trait Appendable: WithLen {
    fn empty() -> Self;
    fn append(&mut self, src: &Self);
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,13 +1,13 @@
use crate::agg::binnedt::{TBinnerStream, TimeBinnableType};
use crate::agg::binnedt::TBinnerStream;
use crate::binned::query::{CacheUsage, PreBinnedQuery};
use crate::cache::{node_ix_for_patch, HttpBodyAsAsyncRead};
use crate::frame::inmem::InMemoryFrameAsyncReadStream;
use crate::frame::makeframe::decode_frame;
use err::Error;
use futures_core::Stream;
use futures_util::{FutureExt, StreamExt};
use http::{StatusCode, Uri};
use items::{FrameType, RangeCompletableItem, Sitemty, StreamItem};
use items::frame::decode_frame;
use items::{FrameType, RangeCompletableItem, Sitemty, StreamItem, TimeBinnableType};
use netpod::log::*;
use netpod::{
x_bin_count, AggKind, AppendToUrl, BinnedRange, ByteSize, Channel, NodeConfigCached, PerfOpts,

View File

@@ -1,511 +1 @@
use crate::agg::binnedt::{TimeBinnableType, TimeBinnableTypeAggregator};
use crate::agg::enp::{ts_offs_from_abs, WaveEvents};
use crate::agg::streams::{Appendable, Collectable, Collector, ToJsonBytes, ToJsonResult};
use crate::agg::{Fits, FitsInside};
use crate::binned::{
Bool, FilterFittingInside, IsoDateTime, NumOps, RangeOverlapInfo, ReadPbv, ReadableFromFile, TimeBins, WithLen,
};
use chrono::{TimeZone, Utc};
use err::Error;
use items::{Sitemty, SitemtyFrameType, SubFrId};
use netpod::log::*;
use netpod::timeunits::SEC;
use netpod::NanoRange;
use num_traits::Zero;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::marker::PhantomData;
use tokio::fs::File;
/// Time-binned statistics for dim-1 (waveform) channels. All fields are
/// parallel vectors indexed by bin; the inner `Option<Vec<_>>` holds one
/// value per x-bin, or `None` when a bin received no data.
#[derive(Serialize, Deserialize)]
pub struct MinMaxAvgDim1Bins<NTY> {
    /// Bin start timestamps (nanoseconds).
    pub ts1s: Vec<u64>,
    /// Bin end timestamps (nanoseconds).
    pub ts2s: Vec<u64>,
    /// Events contributing to each bin.
    pub counts: Vec<u64>,
    pub mins: Vec<Option<Vec<NTY>>>,
    pub maxs: Vec<Option<Vec<NTY>>>,
    pub avgs: Vec<Option<Vec<f32>>>,
}

/// Frame type id: 0xb00 block, offset by the scalar sub-type id.
impl<NTY> SitemtyFrameType for MinMaxAvgDim1Bins<NTY>
where
    NTY: SubFrId,
{
    const FRAME_TYPE_ID: u32 = 0xb00 + NTY::SUB;
}

impl<NTY> fmt::Debug for MinMaxAvgDim1Bins<NTY>
where
    NTY: fmt::Debug,
{
    // Timestamps shown in whole seconds; only the first x-bin vectors are
    // printed to keep output bounded.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(
            fmt,
            "MinMaxAvgDim1Bins count {} ts1s {:?} ts2s {:?} counts {:?} mins {:?} maxs {:?} avgs {:?}",
            self.ts1s.len(),
            self.ts1s.iter().map(|k| k / SEC).collect::<Vec<_>>(),
            self.ts2s.iter().map(|k| k / SEC).collect::<Vec<_>>(),
            self.counts,
            self.mins.first(),
            self.maxs.first(),
            self.avgs.first(),
        )
    }
}
impl<NTY> MinMaxAvgDim1Bins<NTY> {
    /// Construct a batch with zero bins.
    pub fn empty() -> Self {
        Self {
            ts1s: Vec::new(),
            ts2s: Vec::new(),
            counts: Vec::new(),
            mins: Vec::new(),
            maxs: Vec::new(),
            avgs: Vec::new(),
        }
    }
}
impl<NTY> FitsInside for MinMaxAvgDim1Bins<NTY> {
    /// Classify the span [first ts1, last ts2] of this batch against `range`.
    fn fits_inside(&self, range: NanoRange) -> Fits {
        if self.ts1s.is_empty() {
            Fits::Empty
        } else {
            let t1 = *self.ts1s.first().unwrap();
            let t2 = *self.ts2s.last().unwrap();
            if t2 <= range.beg {
                Fits::Lower
            } else if t1 >= range.end {
                Fits::Greater
            } else if t1 < range.beg && t2 > range.end {
                Fits::PartlyLowerAndGreater
            } else if t1 < range.beg {
                Fits::PartlyLower
            } else if t2 > range.end {
                Fits::PartlyGreater
            } else {
                Fits::Inside
            }
        }
    }
}

impl<NTY> FilterFittingInside for MinMaxAvgDim1Bins<NTY> {
    /// Keep the batch only if it overlaps `fit_range` at all.
    fn filter_fitting_inside(self, fit_range: NanoRange) -> Option<Self> {
        match self.fits_inside(fit_range) {
            Fits::Inside | Fits::PartlyGreater | Fits::PartlyLower | Fits::PartlyLowerAndGreater => Some(self),
            _ => None,
        }
    }
}
impl<NTY> RangeOverlapInfo for MinMaxAvgDim1Bins<NTY> {
    /// True if the last bin ends at or before the range begin; empty counts as "before".
    fn ends_before(&self, range: NanoRange) -> bool {
        match self.ts2s.last() {
            Some(&ts) => ts <= range.beg,
            None => true,
        }
    }
    // NOTE(review): unlike ends_before, the two methods below panic on an
    // empty batch — presumably callers guarantee non-empty input; confirm.
    fn ends_after(&self, range: NanoRange) -> bool {
        match self.ts2s.last() {
            Some(&ts) => ts > range.end,
            None => panic!(),
        }
    }
    fn starts_after(&self, range: NanoRange) -> bool {
        match self.ts1s.first() {
            Some(&ts) => ts >= range.end,
            None => panic!(),
        }
    }
}

/// Expose the bin edge vectors.
impl<NTY> TimeBins for MinMaxAvgDim1Bins<NTY>
where
    NTY: NumOps,
{
    fn ts1s(&self) -> &Vec<u64> {
        &self.ts1s
    }
    fn ts2s(&self) -> &Vec<u64> {
        &self.ts2s
    }
}

/// Length is the number of bins.
impl<NTY> WithLen for MinMaxAvgDim1Bins<NTY> {
    fn len(&self) -> usize {
        self.ts1s.len()
    }
}
impl<NTY> Appendable for MinMaxAvgDim1Bins<NTY>
where
    NTY: NumOps,
{
    fn empty() -> Self {
        Self::empty()
    }
    /// Concatenate all parallel columns of `src` onto `self`.
    fn append(&mut self, src: &Self) {
        self.ts1s.extend_from_slice(&src.ts1s);
        self.ts2s.extend_from_slice(&src.ts2s);
        self.counts.extend_from_slice(&src.counts);
        self.mins.extend_from_slice(&src.mins);
        self.maxs.extend_from_slice(&src.maxs);
        self.avgs.extend_from_slice(&src.avgs);
    }
}

impl<NTY> ReadableFromFile for MinMaxAvgDim1Bins<NTY>
where
    NTY: NumOps,
{
    // TODO this function is not needed in the trait:
    fn read_from_file(file: File) -> Result<ReadPbv<Self>, Error> {
        Ok(ReadPbv::new(file))
    }
    /// Decode a batch from CBOR bytes (the pre-binned cache file format).
    fn from_buf(buf: &[u8]) -> Result<Self, Error> {
        let dec = serde_cbor::from_slice(&buf)?;
        Ok(dec)
    }
}

/// Bins re-binned over time yield bins of the same type.
impl<NTY> TimeBinnableType for MinMaxAvgDim1Bins<NTY>
where
    NTY: NumOps,
{
    type Output = MinMaxAvgDim1Bins<NTY>;
    type Aggregator = MinMaxAvgDim1BinsAggregator<NTY>;
    fn aggregator(range: NanoRange, x_bin_count: usize) -> Self::Aggregator {
        Self::Aggregator::new(range, x_bin_count)
    }
}
impl<NTY> ToJsonResult for Sitemty<MinMaxAvgDim1Bins<NTY>>
where
    NTY: NumOps,
{
    /// Placeholder JSON view: stream items of this type carry no direct JSON
    /// representation, so a marker string is emitted instead.
    fn to_json_result(&self) -> Result<Box<dyn ToJsonBytes>, Error> {
        // `format!` with no interpolation (clippy::useless_format) replaced by
        // a plain owned string; the emitted text is unchanged.
        Ok(Box::new(serde_json::Value::String(
            "MinMaxAvgDim1Bins/non-json-item".to_string(),
        )))
    }
}
/// Marker/holder for collected dim-1 bins.
/// NOTE(review): carries no data besides the type parameter — possibly vestigial.
pub struct MinMaxAvgDim1BinsCollected<NTY> {
    _m1: PhantomData<NTY>,
}

impl<NTY> MinMaxAvgDim1BinsCollected<NTY> {
    pub fn new() -> Self {
        Self { _m1: PhantomData }
    }
}

/// Final JSON-shaped result of collecting dim-1 bins.
#[derive(Serialize)]
pub struct MinMaxAvgDim1BinsCollectedResult<NTY> {
    /// All bin edges: every bin start plus the final bin end.
    ts_bin_edges: Vec<IsoDateTime>,
    counts: Vec<u64>,
    mins: Vec<Option<Vec<NTY>>>,
    maxs: Vec<Option<Vec<NTY>>>,
    avgs: Vec<Option<Vec<f32>>>,
    #[serde(skip_serializing_if = "Bool::is_false", rename = "finalisedRange")]
    finalised_range: bool,
    #[serde(skip_serializing_if = "Zero::is_zero", rename = "missingBins")]
    missing_bins: u32,
    #[serde(skip_serializing_if = "Option::is_none", rename = "continueAt")]
    continue_at: Option<IsoDateTime>,
}

/// Collector that appends incoming bin batches and tracks completion state.
pub struct MinMaxAvgDim1BinsCollector<NTY> {
    /// Expected number of bins; used to compute missing_bins / continue_at.
    bin_count_exp: u32,
    timed_out: bool,
    range_complete: bool,
    vals: MinMaxAvgDim1Bins<NTY>,
    _m1: PhantomData<NTY>,
}

impl<NTY> MinMaxAvgDim1BinsCollector<NTY> {
    pub fn new(bin_count_exp: u32) -> Self {
        Self {
            bin_count_exp,
            timed_out: false,
            range_complete: false,
            vals: MinMaxAvgDim1Bins::<NTY>::empty(),
            _m1: PhantomData,
        }
    }
}

/// Length is the number of bins collected so far.
impl<NTY> WithLen for MinMaxAvgDim1BinsCollector<NTY>
where
    NTY: NumOps + Serialize,
{
    fn len(&self) -> usize {
        self.vals.ts1s.len()
    }
}
impl<NTY> Collector for MinMaxAvgDim1BinsCollector<NTY>
where
    NTY: NumOps + Serialize,
{
    type Input = MinMaxAvgDim1Bins<NTY>;
    type Output = MinMaxAvgDim1BinsCollectedResult<NTY>;

    fn ingest(&mut self, src: &Self::Input) {
        Appendable::append(&mut self.vals, src);
    }

    fn set_range_complete(&mut self) {
        self.range_complete = true;
    }

    fn set_timed_out(&mut self) {
        self.timed_out = true;
    }

    /// Build the final result: ISO bin edges plus a `continue_at` cursor when
    /// fewer bins than expected were delivered.
    fn result(self) -> Result<Self::Output, Error> {
        let bin_count = self.vals.ts1s.len() as u32;
        // Edges are every bin start, plus the end of the last bin.
        let mut tsa: Vec<_> = self
            .vals
            .ts1s
            .iter()
            .map(|&k| IsoDateTime(Utc.timestamp_nanos(k as i64)))
            .collect();
        if let Some(&z) = self.vals.ts2s.last() {
            tsa.push(IsoDateTime(Utc.timestamp_nanos(z as i64)));
        }
        let tsa = tsa;
        let continue_at = if self.vals.ts1s.len() < self.bin_count_exp as usize {
            match tsa.last() {
                Some(k) => Some(k.clone()),
                None => Err(Error::with_msg("partial_content but no bin in result"))?,
            }
        } else {
            None
        };
        let ret = MinMaxAvgDim1BinsCollectedResult::<NTY> {
            ts_bin_edges: tsa,
            counts: self.vals.counts,
            mins: self.vals.mins,
            maxs: self.vals.maxs,
            avgs: self.vals.avgs,
            finalised_range: self.range_complete,
            // saturating_sub: if more bins than expected arrive, report zero
            // missing instead of underflowing (panic in debug, wrap in release).
            missing_bins: self.bin_count_exp.saturating_sub(bin_count),
            continue_at,
        };
        Ok(ret)
    }
}
impl<NTY> Collectable for MinMaxAvgDim1Bins<NTY>
where
    NTY: NumOps + Serialize,
{
    type Collector = MinMaxAvgDim1BinsCollector<NTY>;
    fn new_collector(bin_count_exp: u32) -> Self::Collector {
        Self::Collector::new(bin_count_exp)
    }
}

/// Re-bins dim-1 bins into a single output bin covering `range`.
/// Per-x-bin min/max/sum are held as `Option<Vec<_>>`, `None` until the first
/// contributing item arrives.
pub struct MinMaxAvgDim1BinsAggregator<NTY> {
    range: NanoRange,
    count: u64,
    min: Option<Vec<NTY>>,
    max: Option<Vec<NTY>>,
    /// Number of ingested items that contributed an avgs vector.
    sumc: u64,
    sum: Option<Vec<f32>>,
}

impl<NTY> MinMaxAvgDim1BinsAggregator<NTY> {
    /// `_x_bin_count` is currently unused here.
    pub fn new(range: NanoRange, _x_bin_count: usize) -> Self {
        Self {
            range,
            count: 0,
            // TODO get rid of Option
            // NOTE(review): err::todoval() is a placeholder — presumably panics
            // or yields a sentinel at runtime; confirm before relying on `min`.
            min: err::todoval(),
            max: None,
            sumc: 0,
            sum: None,
        }
    }
}
impl<NTY> TimeBinnableTypeAggregator for MinMaxAvgDim1BinsAggregator<NTY>
where
    NTY: NumOps,
{
    type Input = MinMaxAvgDim1Bins<NTY>;
    type Output = MinMaxAvgDim1Bins<NTY>;
    fn range(&self) -> &NanoRange {
        &self.range
    }
    /// Fold every input bin overlapping `self.range` into the running
    /// element-wise min/max/sum. Bins fully outside the range are skipped.
    fn ingest(&mut self, item: &Self::Input) {
        for i1 in 0..item.ts1s.len() {
            if item.ts2s[i1] <= self.range.beg {
                // Bin ends before our range: skip.
                continue;
            } else if item.ts1s[i1] >= self.range.end {
                // Bin starts after our range: skip.
                continue;
            } else {
                // First contribution initializes the vector; later ones fold
                // element-wise. A `None` in the input leaves state unchanged.
                match self.min.as_mut() {
                    None => self.min = item.mins[i1].clone(),
                    Some(min) => match item.mins[i1].as_ref() {
                        None => {}
                        Some(v) => {
                            for (a, b) in min.iter_mut().zip(v.iter()) {
                                if *b < *a {
                                    *a = *b;
                                }
                            }
                        }
                    },
                };
                match self.max.as_mut() {
                    None => self.max = item.maxs[i1].clone(),
                    Some(max) => match item.maxs[i1].as_ref() {
                        None => {}
                        Some(v) => {
                            for (a, b) in max.iter_mut().zip(v.iter()) {
                                if *b > *a {
                                    *a = *b;
                                }
                            }
                        }
                    },
                };
                match self.sum.as_mut() {
                    None => {
                        self.sum = item.avgs[i1].clone();
                    }
                    Some(sum) => match item.avgs[i1].as_ref() {
                        None => {}
                        Some(v) => {
                            for (a, b) in sum.iter_mut().zip(v.iter()) {
                                // NaN contributions are skipped element-wise.
                                if (*b).is_nan() {
                                } else {
                                    *a += *b;
                                }
                            }
                            // sumc counts contributing bins (not elements);
                            // NOTE(review): the first bin (None -> clone above)
                            // is not counted here — confirm intended.
                            self.sumc += 1;
                        }
                    },
                }
                self.count += item.counts[i1];
            }
        }
    }
    /// Emit one output bin spanning exactly `self.range`, with avgs computed
    /// as sum / number-of-contributing-bins.
    fn result(self) -> Self::Output {
        let avg = if self.sumc == 0 {
            None
        } else {
            let avg = self
                .sum
                .as_ref()
                .unwrap()
                .iter()
                .map(|k| k / self.sumc as f32)
                .collect();
            Some(avg)
        };
        Self::Output {
            ts1s: vec![self.range.beg],
            ts2s: vec![self.range.end],
            counts: vec![self.count],
            mins: vec![self.min],
            maxs: vec![self.max],
            avgs: vec![avg],
        }
    }
}
/// JSON-shaped result for collected waveform events: an anchor second plus
/// per-event millisecond/nanosecond offsets and the waveform values.
#[derive(Serialize)]
pub struct WaveEventsCollectedResult<NTY> {
    #[serde(rename = "tsAnchor")]
    ts_anchor_sec: u64,
    #[serde(rename = "tsMs")]
    ts_off_ms: Vec<u64>,
    #[serde(rename = "tsNs")]
    ts_off_ns: Vec<u64>,
    values: Vec<Vec<NTY>>,
    #[serde(skip_serializing_if = "Bool::is_false", rename = "finalisedRange")]
    range_complete: bool,
    #[serde(skip_serializing_if = "Bool::is_false", rename = "timedOut")]
    timed_out: bool,
}

/// Collector that appends incoming waveform event batches.
pub struct WaveEventsCollector<NTY> {
    vals: WaveEvents<NTY>,
    range_complete: bool,
    timed_out: bool,
}

impl<NTY> WaveEventsCollector<NTY> {
    /// `_bin_count_exp` is unused for event (non-binned) collection.
    pub fn new(_bin_count_exp: u32) -> Self {
        // NOTE(review): leftover debug logging — consider removing or demoting.
        info!("\n\nWaveEventsCollector\n\n");
        Self {
            vals: WaveEvents::empty(),
            range_complete: false,
            timed_out: false,
        }
    }
}

/// Length is the number of events collected so far.
impl<NTY> WithLen for WaveEventsCollector<NTY> {
    fn len(&self) -> usize {
        self.vals.tss.len()
    }
}
impl<NTY> Collector for WaveEventsCollector<NTY>
where
    NTY: NumOps,
{
    type Input = WaveEvents<NTY>;
    type Output = WaveEventsCollectedResult<NTY>;
    fn ingest(&mut self, src: &Self::Input) {
        self.vals.append(src);
    }
    fn set_range_complete(&mut self) {
        self.range_complete = true;
    }
    fn set_timed_out(&mut self) {
        self.timed_out = true;
    }
    /// Split absolute timestamps into (anchor sec, ms offsets, ns offsets)
    /// and move the collected values into the output.
    fn result(self) -> Result<Self::Output, Error> {
        let tst = ts_offs_from_abs(&self.vals.tss);
        let ret = Self::Output {
            ts_anchor_sec: tst.0,
            ts_off_ms: tst.1,
            ts_off_ns: tst.2,
            values: self.vals.vals,
            range_complete: self.range_complete,
            timed_out: self.timed_out,
        };
        Ok(ret)
    }
}

impl<NTY> Collectable for WaveEvents<NTY>
where
    NTY: NumOps,
{
    type Collector = WaveEventsCollector<NTY>;
    fn new_collector(bin_count_exp: u32) -> Self::Collector {
        Self::Collector::new(bin_count_exp)
    }
}

View File

@@ -1,8 +1,7 @@
use crate::agg::binnedt::{TBinnerStream, TimeBinnableType};
use crate::agg::streams::Appendable;
use crate::agg::binnedt::TBinnerStream;
use crate::binned::binnedfrompbv::FetchedPreBinned;
use crate::binned::query::{CacheUsage, PreBinnedQuery};
use crate::binned::{EventsNodeProcessor, NumOps, PushableIndex, ReadableFromFile, WithLen};
use crate::binned::WithLen;
use crate::cache::{write_pb_cache_min_max_avg_scalar, CacheFileDesc, WrittenPbCache};
use crate::decode::{Endianness, EventValueFromBytes, EventValueShape, NumFromBytes};
use crate::merge::mergedfromremotes::MergedFromRemotes;
@@ -10,7 +9,11 @@ use crate::streamlog::Streamlog;
use err::Error;
use futures_core::Stream;
use futures_util::{FutureExt, StreamExt};
use items::{FrameType, RangeCompletableItem, Sitemty, StreamItem};
use items::numops::NumOps;
use items::{
Appendable, EventsNodeProcessor, FrameType, PushableIndex, RangeCompletableItem, ReadableFromFile, Sitemty,
StreamItem, TimeBinnableType,
};
use netpod::log::*;
use netpod::query::RawEventsQuery;
use netpod::{

View File

@@ -1,8 +1,5 @@
use crate::agg::binnedt::TimeBinnableType;
use crate::agg::streams::Appendable;
use crate::binned::pbv::PreBinnedValueStream;
use crate::binned::query::PreBinnedQuery;
use crate::binned::{EventsNodeProcessor, NumOps, PushableIndex};
use crate::cache::node_ix_for_patch;
use crate::decode::{
BigEndian, Endianness, EventValueFromBytes, EventValueShape, EventValuesDim0Case, EventValuesDim1Case,
@@ -12,8 +9,9 @@ use bytes::Bytes;
use err::Error;
use futures_core::Stream;
use futures_util::StreamExt;
use items::{Framable, FrameType, Sitemty};
use netpod::{AggKind, BoolNum, ByteOrder, NodeConfigCached, ScalarType, Shape};
use items::numops::{BoolNum, NumOps};
use items::{Appendable, EventsNodeProcessor, Framable, FrameType, PushableIndex, Sitemty, TimeBinnableType};
use netpod::{AggKind, ByteOrder, NodeConfigCached, ScalarType, Shape};
use parse::channelconfig::{extract_matching_config_entry, read_local_config, MatchingConfigEntry};
use serde::de::DeserializeOwned;
use serde::Serialize;

View File

@@ -1,9 +1,6 @@
use crate::agg::binnedt::TimeBinnableType;
use crate::agg::enp::Identity;
use crate::agg::streams::{Collectable, Collector};
use crate::binned::{EventsNodeProcessor, NumOps, PushableIndex};
use crate::decode::{
BigEndian, Endianness, EventValueFromBytes, EventValueShape, EventValues, EventValuesDim0Case, EventValuesDim1Case,
BigEndian, Endianness, EventValueFromBytes, EventValueShape, EventValuesDim0Case, EventValuesDim1Case,
LittleEndian, NumFromBytes,
};
use crate::merge::mergedfromremotes::MergedFromRemotes;
@@ -12,10 +9,16 @@ use err::Error;
use futures_core::Stream;
use futures_util::future::FutureExt;
use futures_util::StreamExt;
use items::{Framable, FrameType, RangeCompletableItem, Sitemty, StreamItem};
use items::eventvalues::EventValues;
use items::numops::{BoolNum, NumOps};
use items::streams::{Collectable, Collector};
use items::{
EventsNodeProcessor, Framable, FrameType, PushableIndex, RangeCompletableItem, Sitemty, StreamItem,
TimeBinnableType,
};
use netpod::log::*;
use netpod::query::RawEventsQuery;
use netpod::{AggKind, BoolNum, ByteOrder, Channel, NanoRange, NodeConfigCached, PerfOpts, ScalarType, Shape};
use netpod::{AggKind, ByteOrder, Channel, NanoRange, NodeConfigCached, PerfOpts, ScalarType, Shape};
use parse::channelconfig::{extract_matching_config_entry, read_local_config, MatchingConfigEntry};
use serde::de::DeserializeOwned;
use serde_json::Value as JsonValue;

View File

@@ -1,24 +1,17 @@
use crate::agg::binnedt::TimeBinnableType;
use crate::agg::enp::{ts_offs_from_abs, Identity, WaveNBinner, WavePlainProc, WaveXBinner};
use crate::agg::streams::{Appendable, Collectable, Collector};
use crate::agg::{Fits, FitsInside};
use crate::binned::{
Bool, EventValuesAggregator, EventsNodeProcessor, FilterFittingInside, MinMaxAvgBins, NumOps, PushableIndex,
RangeOverlapInfo, ReadPbv, ReadableFromFile, WithLen, WithTimestamps,
};
use crate::agg::enp::Identity;
use crate::eventblobs::EventChunkerMultifile;
use crate::eventchunker::EventFull;
use err::Error;
use futures_core::Stream;
use futures_util::StreamExt;
use items::{RangeCompletableItem, SitemtyFrameType, StreamItem};
use netpod::{BoolNum, NanoRange};
use serde::{Deserialize, Serialize};
use items::eventvalues::EventValues;
use items::numops::{BoolNum, NumOps};
use items::waveevents::{WaveNBinner, WavePlainProc, WaveXBinner};
use items::{EventsNodeProcessor, RangeCompletableItem, StreamItem};
use std::marker::PhantomData;
use std::mem::size_of;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::fs::File;
pub trait Endianness: Send + Unpin {
fn is_big() -> bool;
@@ -183,246 +176,6 @@ where
type NumXAggPlain = WavePlainProc<NTY>;
}
// TODO add pulse.
// TODO change name, it's not only about values, but more like batch of whole events.
/// Batch of events for a scalar-per-event channel: parallel vectors of
/// timestamps and values, indexed by event.
#[derive(Serialize, Deserialize)]
pub struct EventValues<VT> {
    /// Event timestamps (nanoseconds).
    pub tss: Vec<u64>,
    /// One value per event.
    pub values: Vec<VT>,
}

/// Frame type id: 0x500 block, offset by the scalar sub-type id.
impl<NTY> SitemtyFrameType for EventValues<NTY>
where
    NTY: NumOps,
{
    const FRAME_TYPE_ID: u32 = 0x500 + NTY::SUB;
}

impl<VT> EventValues<VT> {
    /// A batch with zero events.
    pub fn empty() -> Self {
        Self {
            tss: vec![],
            values: vec![],
        }
    }
}
impl<VT> std::fmt::Debug for EventValues<VT>
where
    VT: std::fmt::Debug,
{
    // Bounded dump: only count plus first/last timestamp and value.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            fmt,
            "count {} ts {:?} .. {:?} vals {:?} .. {:?}",
            self.tss.len(),
            self.tss.first(),
            self.tss.last(),
            self.values.first(),
            self.values.last(),
        )
    }
}

/// Length is the number of events.
impl<VT> WithLen for EventValues<VT> {
    fn len(&self) -> usize {
        self.tss.len()
    }
}

/// Timestamp of the event at index `ix` (panics if out of bounds).
impl<VT> WithTimestamps for EventValues<VT> {
    fn ts(&self, ix: usize) -> u64 {
        self.tss[ix]
    }
}
impl<VT> RangeOverlapInfo for EventValues<VT> {
    /// True if the last event is strictly before the range begin.
    /// NOTE(review): events use strict `<` here while the bin types use `<=`
    /// — presumably because an event at exactly `beg` belongs to the range.
    fn ends_before(&self, range: NanoRange) -> bool {
        match self.tss.last() {
            Some(&ts) => ts < range.beg,
            None => true,
        }
    }
    // NOTE(review): the two methods below panic on an empty batch —
    // presumably callers guarantee non-empty input; confirm.
    fn ends_after(&self, range: NanoRange) -> bool {
        match self.tss.last() {
            Some(&ts) => ts >= range.end,
            None => panic!(),
        }
    }
    fn starts_after(&self, range: NanoRange) -> bool {
        match self.tss.first() {
            Some(&ts) => ts >= range.end,
            None => panic!(),
        }
    }
}

impl<VT> FitsInside for EventValues<VT> {
    /// Classify the span [first ts, last ts] of this batch against `range`.
    fn fits_inside(&self, range: NanoRange) -> Fits {
        if self.tss.is_empty() {
            Fits::Empty
        } else {
            let t1 = *self.tss.first().unwrap();
            let t2 = *self.tss.last().unwrap();
            if t2 < range.beg {
                Fits::Lower
            } else if t1 > range.end {
                Fits::Greater
            } else if t1 < range.beg && t2 > range.end {
                Fits::PartlyLowerAndGreater
            } else if t1 < range.beg {
                Fits::PartlyLower
            } else if t2 > range.end {
                Fits::PartlyGreater
            } else {
                Fits::Inside
            }
        }
    }
}

impl<VT> FilterFittingInside for EventValues<VT> {
    /// Keep the batch only if it overlaps `fit_range` at all.
    fn filter_fitting_inside(self, fit_range: NanoRange) -> Option<Self> {
        match self.fits_inside(fit_range) {
            Fits::Inside | Fits::PartlyGreater | Fits::PartlyLower | Fits::PartlyLowerAndGreater => Some(self),
            _ => None,
        }
    }
}
/// Copy the single event at `ix` from `src` onto the end of `self`.
impl<NTY> PushableIndex for EventValues<NTY>
where
    NTY: NumOps,
{
    fn push_index(&mut self, src: &Self, ix: usize) {
        self.tss.push(src.tss[ix]);
        self.values.push(src.values[ix]);
    }
}

impl<NTY> Appendable for EventValues<NTY>
where
    NTY: NumOps,
{
    fn empty() -> Self {
        Self::empty()
    }
    /// Concatenate both parallel columns of `src` onto `self`.
    fn append(&mut self, src: &Self) {
        self.tss.extend_from_slice(&src.tss);
        self.values.extend_from_slice(&src.values);
    }
}

/// Events are never read back from cache files; both methods panic.
impl<NTY> ReadableFromFile for EventValues<NTY>
where
    NTY: NumOps,
{
    fn read_from_file(_file: File) -> Result<ReadPbv<Self>, Error> {
        // TODO refactor types such that this can be removed.
        panic!()
    }
    fn from_buf(_buf: &[u8]) -> Result<Self, Error> {
        panic!()
    }
}

/// Time-binning events produces min/max/avg bins; `_bin_count` is unused.
impl<NTY> TimeBinnableType for EventValues<NTY>
where
    NTY: NumOps,
{
    type Output = MinMaxAvgBins<NTY>;
    type Aggregator = EventValuesAggregator<NTY>;
    fn aggregator(range: NanoRange, _bin_count: usize) -> Self::Aggregator {
        Self::Aggregator::new(range)
    }
}
/// Collector that appends incoming scalar event batches.
pub struct EventValuesCollector<NTY> {
    vals: EventValues<NTY>,
    range_complete: bool,
    timed_out: bool,
}

impl<NTY> EventValuesCollector<NTY> {
    pub fn new() -> Self {
        Self {
            vals: EventValues::empty(),
            range_complete: false,
            timed_out: false,
        }
    }
}

/// Length is the number of events collected so far.
impl<NTY> WithLen for EventValuesCollector<NTY> {
    fn len(&self) -> usize {
        self.vals.tss.len()
    }
}

/// JSON-shaped result for collected scalar events: anchor second plus
/// per-event millisecond/nanosecond offsets and the values.
#[derive(Serialize)]
pub struct EventValuesCollectorOutput<NTY> {
    #[serde(rename = "tsAnchor")]
    ts_anchor_sec: u64,
    #[serde(rename = "tsMs")]
    ts_off_ms: Vec<u64>,
    #[serde(rename = "tsNs")]
    ts_off_ns: Vec<u64>,
    values: Vec<NTY>,
    #[serde(skip_serializing_if = "Bool::is_false", rename = "finalisedRange")]
    range_complete: bool,
    #[serde(skip_serializing_if = "Bool::is_false", rename = "timedOut")]
    timed_out: bool,
}
impl<NTY> Collector for EventValuesCollector<NTY>
where
    NTY: NumOps,
{
    type Input = EventValues<NTY>;
    type Output = EventValuesCollectorOutput<NTY>;
    fn ingest(&mut self, src: &Self::Input) {
        self.vals.append(src);
    }
    fn set_range_complete(&mut self) {
        self.range_complete = true;
    }
    fn set_timed_out(&mut self) {
        self.timed_out = true;
    }
    /// Split absolute timestamps into (anchor sec, ms offsets, ns offsets)
    /// and move the collected values into the output.
    fn result(self) -> Result<Self::Output, Error> {
        let tst = ts_offs_from_abs(&self.vals.tss);
        let ret = Self::Output {
            ts_anchor_sec: tst.0,
            ts_off_ms: tst.1,
            ts_off_ns: tst.2,
            values: self.vals.values,
            range_complete: self.range_complete,
            timed_out: self.timed_out,
        };
        Ok(ret)
    }
}

/// `_bin_count_exp` is unused for event (non-binned) collection.
impl<NTY> Collectable for EventValues<NTY>
where
    NTY: NumOps,
{
    type Collector = EventValuesCollector<NTY>;
    fn new_collector(_bin_count_exp: u32) -> Self::Collector {
        Self::Collector::new()
    }
}
pub struct EventsDecodedStream<NTY, END, EVS>
where
NTY: NumOps + NumFromBytes<NTY, END>,

View File

@@ -1,7 +1,8 @@
use bytes::{BufMut, Bytes, BytesMut};
use bytes::{BufMut, BytesMut};
use err::Error;
use futures_core::Stream;
use futures_util::pin_mut;
use items::inmem::InMemoryFrame;
use items::StreamItem;
use items::{INMEM_FRAME_FOOT, INMEM_FRAME_HEAD, INMEM_FRAME_MAGIC};
use netpod::log::*;
@@ -206,38 +207,6 @@ where
}
}
/// A decoded frame from the in-memory wire protocol: header fields plus the
/// raw payload bytes (see `make_frame` for the on-wire layout).
pub struct InMemoryFrame {
    /// Encoder id (expected to match INMEM_FRAME_ENCID).
    encid: u32,
    /// Frame type id, matched against `FrameType::FRAME_TYPE_ID` on decode.
    tyid: u32,
    /// Declared payload length in bytes.
    len: u32,
    /// Payload bytes.
    buf: Bytes,
}

impl InMemoryFrame {
    pub fn encid(&self) -> u32 {
        self.encid
    }
    pub fn tyid(&self) -> u32 {
        self.tyid
    }
    pub fn len(&self) -> u32 {
        self.len
    }
    pub fn buf(&self) -> &Bytes {
        &self.buf
    }
}

impl std::fmt::Debug for InMemoryFrame {
    // Header-only dump (ids in hex); the payload is omitted.
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(
            fmt,
            "InMemoryFrame {{ encid: {:x} tyid: {:x} len {} }}",
            self.encid, self.tyid, self.len
        )
    }
}
impl<T> Stream for InMemoryFrameAsyncReadStream<T>
where
T: AsyncRead + Unpin,

View File

@@ -1,89 +1 @@
use crate::frame::inmem::InMemoryFrame;
use bytes::{BufMut, BytesMut};
use err::Error;
use items::{FrameType, INMEM_FRAME_ENCID, INMEM_FRAME_HEAD, INMEM_FRAME_MAGIC};
use serde::{de::DeserializeOwned, Serialize};
/// Serialize `item` with bincode and wrap it in the in-memory frame layout:
/// MAGIC, ENCID, frame type id, payload length, payload CRC32, payload,
/// then a trailing CRC32 over everything written so far.
///
/// Errors if bincode serialization fails or the payload exceeds u32::MAX bytes.
pub fn make_frame<FT>(item: &FT) -> Result<BytesMut, Error>
where
    FT: FrameType + Serialize,
{
    match bincode::serialize(item) {
        Ok(enc) => {
            // The length field is a u32 on the wire.
            if enc.len() > u32::MAX as usize {
                return Err(Error::with_msg(format!("too long payload {}", enc.len())));
            }
            let mut h = crc32fast::Hasher::new();
            h.update(&enc);
            let payload_crc = h.finalize();
            let mut buf = BytesMut::with_capacity(enc.len() + INMEM_FRAME_HEAD);
            buf.put_u32_le(INMEM_FRAME_MAGIC);
            buf.put_u32_le(INMEM_FRAME_ENCID);
            buf.put_u32_le(FT::FRAME_TYPE_ID);
            buf.put_u32_le(enc.len() as u32);
            buf.put_u32_le(payload_crc);
            buf.put(enc.as_ref());
            // Whole-frame CRC covers header + payload-crc + payload.
            let mut h = crc32fast::Hasher::new();
            h.update(&buf);
            let frame_crc = h.finalize();
            buf.put_u32_le(frame_crc);
            Ok(buf)
        }
        Err(e) => Err(e)?,
    }
}
/// Build the stream-terminator frame: type id 0x01 with an empty payload,
/// framed with the same CRC scheme as `make_frame`.
pub fn make_term_frame() -> BytesMut {
    // CRC of the empty payload.
    let mut h = crc32fast::Hasher::new();
    h.update(&[]);
    let payload_crc = h.finalize();
    let mut buf = BytesMut::with_capacity(INMEM_FRAME_HEAD);
    buf.put_u32_le(INMEM_FRAME_MAGIC);
    buf.put_u32_le(INMEM_FRAME_ENCID);
    // Terminator type id.
    buf.put_u32_le(0x01);
    buf.put_u32_le(0);
    buf.put_u32_le(payload_crc);
    let mut h = crc32fast::Hasher::new();
    h.update(&buf);
    let frame_crc = h.finalize();
    buf.put_u32_le(frame_crc);
    buf
}
/// Decode a typed item from an in-memory frame. Validates the encoder id, the
/// frame type id against `T::FRAME_TYPE_ID`, and the declared payload length,
/// then bincode-deserializes the payload.
///
/// NOTE(review): the payload/frame CRCs are not re-verified here — presumably
/// checked by the frame reader; confirm.
pub fn decode_frame<T>(frame: &InMemoryFrame) -> Result<T, Error>
where
    T: FrameType + DeserializeOwned,
{
    if frame.encid() != INMEM_FRAME_ENCID {
        return Err(Error::with_msg(format!("unknown encoder id {:?}", frame)));
    }
    if frame.tyid() != <T as FrameType>::FRAME_TYPE_ID {
        return Err(Error::with_msg(format!(
            "type id mismatch expect {:x} found {:?}",
            <T as FrameType>::FRAME_TYPE_ID,
            frame
        )));
    }
    if frame.len() as usize != frame.buf().len() {
        return Err(Error::with_msg(format!(
            "buf mismatch {} vs {} in {:?}",
            frame.len(),
            frame.buf().len(),
            frame
        )));
    }
    match bincode::deserialize(frame.buf()) {
        Ok(item) => Ok(item),
        Err(e) => Err(e.into()),
    }
}
/// CRC32 of `t` as a lowercase hex string, zero-padded to eight digits.
pub fn crchex<T>(t: T) -> String
where
    T: AsRef<[u8]>,
{
    let mut hasher = crc32fast::Hasher::new();
    hasher.update(t.as_ref());
    format!("{:08x}", hasher.finalize())
}

View File

@@ -1,9 +1,10 @@
use crate::agg::streams::Appendable;
use crate::binned::{EventsNodeProcessor, PushableIndex, WithLen, WithTimestamps};
use err::Error;
use futures_core::Stream;
use futures_util::StreamExt;
use items::{LogItem, RangeCompletableItem, Sitemty, StatsItem, StreamItem};
use items::{
Appendable, EventsNodeProcessor, LogItem, PushableIndex, RangeCompletableItem, Sitemty, StatsItem, StreamItem,
WithLen, WithTimestamps,
};
use netpod::log::*;
use netpod::EventDataReadStats;
use std::collections::VecDeque;

View File

@@ -1,11 +1,9 @@
use crate::agg::streams::Appendable;
use crate::binned::{EventsNodeProcessor, PushableIndex};
use crate::merge::MergedStream;
use crate::raw::client::x_processed_stream_from_node;
use err::Error;
use futures_core::Stream;
use futures_util::{pin_mut, StreamExt};
use items::{FrameType, Sitemty};
use items::{Appendable, EventsNodeProcessor, FrameType, PushableIndex, Sitemty};
use netpod::log::*;
use netpod::query::RawEventsQuery;
use netpod::{Cluster, PerfOpts};

View File

@@ -5,13 +5,12 @@ Delivers event data (not yet time-binned) from local storage and provides client
to request such data from nodes.
*/
use crate::binned::EventsNodeProcessor;
use crate::frame::inmem::InMemoryFrameAsyncReadStream;
use crate::frame::makeframe::{make_frame, make_term_frame};
use crate::raw::eventsfromframes::EventsFromFrames;
use err::Error;
use futures_core::Stream;
use items::{FrameType, RangeCompletableItem, Sitemty, StreamItem};
use items::frame::{make_frame, make_term_frame};
use items::{EventsNodeProcessor, FrameType, RangeCompletableItem, Sitemty, StreamItem};
use netpod::query::RawEventsQuery;
use netpod::{EventQueryJsonStringFrame, Node, PerfOpts};
use std::pin::Pin;

View File

@@ -1,4 +1,3 @@
use crate::binned::{EventsNodeProcessor, NumOps};
use crate::decode::{
BigEndian, Endianness, EventValueFromBytes, EventValueShape, EventValuesDim0Case, EventValuesDim1Case,
EventsDecodedStream, LittleEndian, NumFromBytes,
@@ -8,9 +7,10 @@ use crate::eventchunker::EventChunkerConf;
use err::Error;
use futures_core::Stream;
use futures_util::StreamExt;
use items::{Framable, RangeCompletableItem, Sitemty, StreamItem};
use items::numops::{BoolNum, NumOps};
use items::{EventsNodeProcessor, Framable, RangeCompletableItem, Sitemty, StreamItem};
use netpod::query::RawEventsQuery;
use netpod::{AggKind, BoolNum, ByteOrder, ByteSize, NodeConfigCached, ScalarType, Shape};
use netpod::{AggKind, ByteOrder, ByteSize, NodeConfigCached, ScalarType, Shape};
use parse::channelconfig::{extract_matching_config_entry, read_local_config, MatchingConfigEntry};
use std::pin::Pin;

View File

@@ -1,7 +1,7 @@
use crate::frame::inmem::InMemoryFrameAsyncReadStream;
use crate::frame::makeframe::decode_frame;
use futures_core::Stream;
use futures_util::StreamExt;
use items::frame::decode_frame;
use items::{FrameType, Sitemty, StreamItem};
use netpod::log::*;
use serde::de::DeserializeOwned;