Restructure Scylla caching

Author: Dominik Werder
Date: 2022-06-21 15:57:52 +02:00
Parent: a8c7f281fc
Commit: 9161b7cc1d
11 changed files with 382 additions and 120 deletions

View File

@@ -336,7 +336,7 @@ impl ChannelExecFunction for BinnedJsonChannelExec {
let perf_opts = PerfOpts { inmem_bufcap: 512 };
let souter = match PreBinnedPatchRange::covering_range(self.query.range().clone(), self.query.bin_count()) {
Ok(Some(pre_range)) => {
debug!("BinnedJsonChannelExec found pre_range: {pre_range:?}");
info!("BinnedJsonChannelExec found pre_range: {pre_range:?}");
if range.grid_spec.bin_t_len() < pre_range.grid_spec.bin_t_len() {
let msg = format!(
"BinnedJsonChannelExec incompatible ranges:\npre_range: {pre_range:?}\nrange: {range:?}"
@@ -364,7 +364,7 @@ impl ChannelExecFunction for BinnedJsonChannelExec {
Ok(Box::pin(s) as Pin<Box<dyn Stream<Item = Result<Bytes, Error>> + Send>>)
}
Ok(None) => {
debug!("BinnedJsonChannelExec no covering range for prebinned, merge from remotes instead {range:?}");
info!("BinnedJsonChannelExec no covering range for prebinned, merge from remotes instead {range:?}");
// TODO let BinnedQuery provide the DiskIoTune and pass to RawEventsQuery:
let evq = RawEventsQuery::new(
self.query.channel().clone(),
@@ -392,6 +392,7 @@ impl ChannelExecFunction for BinnedJsonChannelExec {
}
fn empty() -> Self::Output {
info!("BinnedJsonChannelExec fn empty");
Box::pin(futures_util::stream::empty())
}
}

View File

@@ -32,14 +32,9 @@ pub struct FetchedPreBinned<TBT> {
}
impl<TBT> FetchedPreBinned<TBT> {
pub fn new(query: &PreBinnedQuery, node_config: &NodeConfigCached) -> Result<Self, Error> {
let nodeix = node_ix_for_patch(&query.patch(), &query.channel(), &node_config.node_config.cluster);
let node = &node_config.node_config.cluster.nodes[nodeix as usize];
let mut url = {
let host = &node.host;
let port = node.port;
Url::parse(&format!("http://{host}:{port}/api/4/prebinned"))?
};
pub fn new(query: &PreBinnedQuery, host: String, port: u16) -> Result<Self, Error> {
// TODO should not assume http:
let mut url = Url::parse(&format!("http://{host}:{port}/api/4/prebinned"))?;
query.append_to_url(&mut url);
let ret = Self {
uri: Uri::from_str(&url.to_string()).map_err(Error::from_string)?,
@@ -201,8 +196,10 @@ where
disk_stats_every.clone(),
report_error,
);
let nodeix = node_ix_for_patch(&query.patch(), &query.channel(), &node_config.node_config.cluster);
let node = &node_config.node_config.cluster.nodes[nodeix as usize];
let ret: Pin<Box<dyn Stream<Item = _> + Send>> =
match FetchedPreBinned::<TBT>::new(&query, &node_config) {
match FetchedPreBinned::<TBT>::new(&query, node.host.clone(), node.port.clone()) {
Ok(stream) => Box::pin(stream.map(move |q| (pix, q))),
Err(e) => {
error!("error from PreBinnedValueFetchedStream::new {e:?}");
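
The hunk above changes FetchedPreBinned::new to take a host and port directly instead of the whole NodeConfigCached; the caller now resolves the target node via node_ix_for_patch and passes only those two values. A minimal standalone sketch of that call pattern follows, using simplified stand-in types (the real Cluster, Node, PreBinnedQuery and the hashing in crate::cache::node_ix_for_patch are only partly visible in this diff):

// Sketch only: Node/Cluster and the modulo-based node_ix_for_patch below are
// simplified stand-ins for the real daqbuffer types and node selection logic.
struct Node {
    host: String,
    port: u16,
}

struct Cluster {
    nodes: Vec<Node>,
}

// Stand-in for node_ix_for_patch(&query.patch(), &query.channel(), &cluster):
// here the node is picked by patch index modulo node count, for illustration.
fn node_ix_for_patch(patch_ix: u64, cluster: &Cluster) -> u32 {
    (patch_ix % cluster.nodes.len() as u64) as u32
}

fn prebinned_url(host: &str, port: u16) -> Result<url::Url, url::ParseError> {
    // TODO should not assume http (same caveat as noted in the diff):
    url::Url::parse(&format!("http://{host}:{port}/api/4/prebinned"))
}

fn main() -> Result<(), url::ParseError> {
    // Hypothetical cluster layout, for illustration only.
    let cluster = Cluster {
        nodes: vec![Node { host: "node0.example".into(), port: 8371 }],
    };
    let nodeix = node_ix_for_patch(0, &cluster);
    let node = &cluster.nodes[nodeix as usize];
    let url = prebinned_url(&node.host, node.port)?;
    println!("{url}");
    Ok(())
}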

View File

@@ -185,10 +185,17 @@ where
disk_stats_every.clone(),
report_error,
);
let nodeix = crate::cache::node_ix_for_patch(
&query.patch(),
&query.channel(),
&node_config.node_config.cluster,
);
let node = &node_config.node_config.cluster.nodes[nodeix as usize];
let ret =
FetchedPreBinned::<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>::new(
&query,
&node_config,
node.host.clone(),
node.port.clone(),
)?;
Ok(ret)
}

View File

@@ -6,6 +6,7 @@ use crate::decode::{
LittleEndian, NumFromBytes,
};
use bytes::Bytes;
use dbconn::bincache::pre_binned_value_stream;
use err::Error;
use futures_core::Stream;
use futures_util::StreamExt;
@@ -13,19 +14,21 @@ use items::numops::{BoolNum, NumOps, StringNum};
use items::{
Appendable, Clearable, EventsNodeProcessor, Framable, FrameType, PushableIndex, Sitemty, TimeBinnableType,
};
use netpod::{AggKind, ByteOrder, NodeConfigCached, ScalarType, Shape};
use netpod::log::*;
use netpod::{AggKind, ByteOrder, ChannelTyped, NodeConfigCached, ScalarType, Shape};
use serde::de::DeserializeOwned;
use serde::Serialize;
use std::pin::Pin;
fn make_num_pipeline_nty_end_evs_enp<NTY, END, EVS, ENP>(
_shape: Shape,
async fn make_num_pipeline_nty_end_evs_enp<NTY, END, EVS, ENP>(
scalar_type: ScalarType,
shape: Shape,
agg_kind: AggKind,
_event_value_shape: EVS,
_events_node_proc: ENP,
query: PreBinnedQuery,
node_config: &NodeConfigCached,
) -> Pin<Box<dyn Stream<Item = Box<dyn Framable>> + Send>>
) -> Result<Pin<Box<dyn Stream<Item = Box<dyn Framable>> + Send>>, Error>
where
NTY: NumOps + NumFromBytes<NTY, END> + Serialize + 'static,
END: Endianness + 'static,
@@ -36,17 +39,43 @@ where
Sitemty<<<ENP as EventsNodeProcessor>::Output as TimeBinnableType>::Output>:
Framable + FrameType + DeserializeOwned,
{
let ret = PreBinnedValueStream::<NTY, END, EVS, ENP>::new(query, agg_kind, node_config);
let ret = StreamExt::map(ret, |item| Box::new(item) as Box<dyn Framable>);
Box::pin(ret)
if let Some(scyconf) = &node_config.node_config.cluster.cache_scylla {
info!("~~~~~~~~~~~~~~~ make_num_pipeline_nty_end_evs_enp using scylla as cache");
let chn = ChannelTyped {
channel: query.channel().clone(),
scalar_type,
shape,
};
let stream = pre_binned_value_stream(&chn, query.patch(), scyconf).await?;
let stream = stream.map(|x| {
//
match x {
Ok(k) => {
let g = Box::new(k) as Box<dyn Framable>;
g
}
Err(e) => {
let u: Sitemty<items::scalarevents::ScalarEvents<f32>> = Err(e);
Box::new(u) as Box<dyn Framable>
}
}
});
let stream = Box::pin(stream) as Pin<Box<dyn Stream<Item = Box<dyn Framable>> + Send>>;
Ok(stream)
} else {
let ret = PreBinnedValueStream::<NTY, END, EVS, ENP>::new(query, agg_kind, node_config);
let ret = StreamExt::map(ret, |item| Box::new(item) as Box<dyn Framable>);
Ok(Box::pin(ret))
}
}
fn make_num_pipeline_nty_end<NTY, END>(
async fn make_num_pipeline_nty_end<NTY, END>(
scalar_type: ScalarType,
shape: Shape,
agg_kind: AggKind,
query: PreBinnedQuery,
node_config: &NodeConfigCached,
) -> Pin<Box<dyn Stream<Item = Box<dyn Framable>> + Send>>
) -> Result<Pin<Box<dyn Stream<Item = Box<dyn Framable>> + Send>>, Error>
where
NTY: NumOps + NumFromBytes<NTY, END> + Serialize + 'static,
END: Endianness + 'static,
@@ -59,6 +88,7 @@ where
AggKind::TimeWeightedScalar | AggKind::DimXBins1 => {
let events_node_proc = <<EventValuesDim0Case<NTY> as EventValueShape<NTY, END>>::NumXAggToSingleBin as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
make_num_pipeline_nty_end_evs_enp::<NTY, END, _, _>(
scalar_type,
shape,
agg_kind,
evs,
@@ -66,10 +96,12 @@ where
query,
node_config,
)
.await
}
AggKind::DimXBinsN(_) => {
let events_node_proc = <<EventValuesDim0Case<NTY> as EventValueShape<NTY, END>>::NumXAggToNBins as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
make_num_pipeline_nty_end_evs_enp::<NTY, END, _, _>(
scalar_type,
shape,
agg_kind,
evs,
@@ -77,6 +109,7 @@ where
query,
node_config,
)
.await
}
AggKind::Plain => {
panic!();
@@ -94,6 +127,7 @@ where
AggKind::TimeWeightedScalar | AggKind::DimXBins1 => {
let events_node_proc = <<EventValuesDim1Case<NTY> as EventValueShape<NTY, END>>::NumXAggToSingleBin as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
make_num_pipeline_nty_end_evs_enp::<NTY, END, _, _>(
scalar_type,
shape,
agg_kind,
evs,
@@ -101,10 +135,12 @@ where
query,
node_config,
)
.await
}
AggKind::DimXBinsN(_) => {
let events_node_proc = <<EventValuesDim1Case<NTY> as EventValueShape<NTY, END>>::NumXAggToNBins as EventsNodeProcessor>::create(shape.clone(), agg_kind.clone());
make_num_pipeline_nty_end_evs_enp::<NTY, END, _, _>(
scalar_type,
shape,
agg_kind,
evs,
@@ -112,6 +148,7 @@ where
query,
node_config,
)
.await
}
AggKind::Plain => {
panic!();
@@ -130,35 +167,41 @@ where
}
macro_rules! match_end {
($nty:ident, $end:expr, $shape:expr, $agg_kind:expr, $query:expr, $node_config:expr) => {
($nty:ident, $end:expr, $scalar_type:expr, $shape:expr, $agg_kind:expr, $query:expr, $node_config:expr) => {
match $end {
ByteOrder::LE => make_num_pipeline_nty_end::<$nty, LittleEndian>($shape, $agg_kind, $query, $node_config),
ByteOrder::BE => make_num_pipeline_nty_end::<$nty, BigEndian>($shape, $agg_kind, $query, $node_config),
ByteOrder::LE => {
make_num_pipeline_nty_end::<$nty, LittleEndian>($scalar_type, $shape, $agg_kind, $query, $node_config)
.await
}
ByteOrder::BE => {
make_num_pipeline_nty_end::<$nty, BigEndian>($scalar_type, $shape, $agg_kind, $query, $node_config)
.await
}
}
};
}
fn make_num_pipeline(
async fn make_num_pipeline(
scalar_type: ScalarType,
byte_order: ByteOrder,
shape: Shape,
agg_kind: AggKind,
query: PreBinnedQuery,
node_config: &NodeConfigCached,
) -> Pin<Box<dyn Stream<Item = Box<dyn Framable>> + Send>> {
) -> Result<Pin<Box<dyn Stream<Item = Box<dyn Framable>> + Send>>, Error> {
match scalar_type {
ScalarType::U8 => match_end!(u8, byte_order, shape, agg_kind, query, node_config),
ScalarType::U16 => match_end!(u16, byte_order, shape, agg_kind, query, node_config),
ScalarType::U32 => match_end!(u32, byte_order, shape, agg_kind, query, node_config),
ScalarType::U64 => match_end!(u64, byte_order, shape, agg_kind, query, node_config),
ScalarType::I8 => match_end!(i8, byte_order, shape, agg_kind, query, node_config),
ScalarType::I16 => match_end!(i16, byte_order, shape, agg_kind, query, node_config),
ScalarType::I32 => match_end!(i32, byte_order, shape, agg_kind, query, node_config),
ScalarType::I64 => match_end!(i64, byte_order, shape, agg_kind, query, node_config),
ScalarType::F32 => match_end!(f32, byte_order, shape, agg_kind, query, node_config),
ScalarType::F64 => match_end!(f64, byte_order, shape, agg_kind, query, node_config),
ScalarType::BOOL => match_end!(BoolNum, byte_order, shape, agg_kind, query, node_config),
ScalarType::STRING => match_end!(StringNum, byte_order, shape, agg_kind, query, node_config),
ScalarType::U8 => match_end!(u8, byte_order, scalar_type, shape, agg_kind, query, node_config),
ScalarType::U16 => match_end!(u16, byte_order, scalar_type, shape, agg_kind, query, node_config),
ScalarType::U32 => match_end!(u32, byte_order, scalar_type, shape, agg_kind, query, node_config),
ScalarType::U64 => match_end!(u64, byte_order, scalar_type, shape, agg_kind, query, node_config),
ScalarType::I8 => match_end!(i8, byte_order, scalar_type, shape, agg_kind, query, node_config),
ScalarType::I16 => match_end!(i16, byte_order, scalar_type, shape, agg_kind, query, node_config),
ScalarType::I32 => match_end!(i32, byte_order, scalar_type, shape, agg_kind, query, node_config),
ScalarType::I64 => match_end!(i64, byte_order, scalar_type, shape, agg_kind, query, node_config),
ScalarType::F32 => match_end!(f32, byte_order, scalar_type, shape, agg_kind, query, node_config),
ScalarType::F64 => match_end!(f64, byte_order, scalar_type, shape, agg_kind, query, node_config),
ScalarType::BOOL => match_end!(BoolNum, byte_order, scalar_type, shape, agg_kind, query, node_config),
ScalarType::STRING => match_end!(StringNum, byte_order, scalar_type, shape, agg_kind, query, node_config),
}
}
@@ -191,6 +234,7 @@ pub async fn pre_binned_bytes_for_http(
query.clone(),
node_config,
)
.await?
.map(|item| match item.make_frame() {
Ok(item) => Ok(item.freeze()),
Err(e) => Err(e),
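
The pipeline builders above become async because the Scylla-backed path awaits dbconn::bincache::pre_binned_value_stream when the cluster config carries cache_scylla; otherwise the existing PreBinnedValueStream path is kept. Below is a rough sketch of that cache-selection shape with stand-in types (Frame, ScyllaConfig, ClusterConfig and the two stream constructors replace the real netpod/dbconn types, which this diff shows only in part); futures_executor::block_on is used here just to drive the sketch:

use futures_core::Stream;
use std::pin::Pin;

type Frame = Vec<u8>; // stand-in for Box<dyn Framable>

struct ScyllaConfig; // stand-in for the cache_scylla cluster setting
struct ClusterConfig {
    cache_scylla: Option<ScyllaConfig>,
}

// Stand-in for pre_binned_value_stream(&chn, query.patch(), scyconf).await
async fn scylla_backed_stream(
    _scy: &ScyllaConfig,
) -> Result<Pin<Box<dyn Stream<Item = Frame> + Send>>, String> {
    Ok(Box::pin(futures_util::stream::empty::<Frame>()))
}

// Stand-in for PreBinnedValueStream::new(query, agg_kind, node_config)
fn local_cache_stream() -> Pin<Box<dyn Stream<Item = Frame> + Send>> {
    Box::pin(futures_util::stream::empty::<Frame>())
}

async fn make_pipeline(
    cluster: &ClusterConfig,
) -> Result<Pin<Box<dyn Stream<Item = Frame> + Send>>, String> {
    if let Some(scyconf) = &cluster.cache_scylla {
        // Scylla configured: serve pre-binned values from the Scylla cache.
        scylla_backed_stream(scyconf).await
    } else {
        // Otherwise keep the existing local pre-binned cache path.
        Ok(local_cache_stream())
    }
}

fn main() {
    let cfg = ClusterConfig { cache_scylla: Some(ScyllaConfig) };
    let _stream = futures_executor::block_on(make_pipeline(&cfg)).unwrap();
}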

View File

@@ -1,6 +1,7 @@
use err::Error;
use http::request::Parts;
use netpod::query::{agg_kind_from_binning_scheme, binning_scheme_append_to_url, CacheUsage};
use netpod::timeunits::SEC;
use netpod::{
channel_append_to_url, channel_from_pairs, AggKind, AppendToUrl, ByteSize, Channel, PreBinnedPatchCoord,
ScalarType, Shape,
@@ -52,11 +53,11 @@ impl PreBinnedQuery {
pairs.insert(j.to_string(), k.to_string());
}
let pairs = pairs;
let bin_t_len = pairs
let bin_t_len: u64 = pairs
.get("binTlen")
.ok_or_else(|| Error::with_msg("missing binTlen"))?
.parse()?;
let patch_t_len = pairs
let patch_t_len: u64 = pairs
.get("patchTlen")
.ok_or_else(|| Error::with_msg("missing patchTlen"))?
.parse()?;
@@ -79,7 +80,7 @@ impl PreBinnedQuery {
.ok_or_else(|| Error::with_msg("missing shape"))
.map(|x| Shape::from_url_str(&x))??;
let ret = Self {
patch: PreBinnedPatchCoord::new(bin_t_len, patch_t_len, patch_ix),
patch: PreBinnedPatchCoord::new(bin_t_len * SEC, patch_t_len * SEC, patch_ix),
channel: channel_from_pairs(&pairs)?,
scalar_type,
shape,
@@ -148,9 +149,10 @@ impl AppendToUrl for PreBinnedQuery {
self.patch.append_to_url(url);
binning_scheme_append_to_url(&self.agg_kind, url);
channel_append_to_url(url, &self.channel);
self.shape.append_to_url(url);
self.scalar_type.append_to_url(url);
let mut g = url.query_pairs_mut();
g.append_pair("scalarType", &format!("{:?}", self.scalar_type));
g.append_pair("shape", &format!("{:?}", self.shape));
// TODO add also impl AppendToUrl for these if applicable:
g.append_pair("cacheUsage", &format!("{}", self.cache_usage.query_param_value()));
g.append_pair("diskIoBufferSize", &format!("{}", self.disk_io_buffer_size));
g.append_pair("diskStatsEveryKb", &format!("{}", self.disk_stats_every.bytes() / 1024));