Restructure the node config to separate the different kinds of backends

This commit is contained in:
Dominik Werder
2022-02-18 19:24:14 +01:00
parent b7aaad7a7b
commit 96fa8b5b09
20 changed files with 195 additions and 138 deletions

View File

@@ -1,7 +1,7 @@
use crate::eventblobs::EventChunkerMultifile;
use crate::eventchunker::EventChunkerConf;
use netpod::timeunits::*;
use netpod::{test_data_base_path_databuffer, FileIoBufferSize};
use netpod::{timeunits::*, SfDatabuffer};
use netpod::{ByteOrder, ByteSize, Channel, ChannelConfig, NanoRange, Nanos, Node, ScalarType, Shape};
#[allow(unused_imports)]
use tracing::{debug, error, info, trace, warn};
@@ -13,11 +13,13 @@ pub fn make_test_node(id: u32) -> Node {
port: 8800 + id as u16,
port_raw: 8800 + id as u16 + 100,
// TODO use a common function to supply the tmp path.
data_base_path: test_data_base_path_databuffer().join(format!("node{:02}", id)),
cache_base_path: test_data_base_path_databuffer().join(format!("node{:02}", id)),
ksprefix: "ks".into(),
backend: "testbackend".into(),
splits: None,
sf_databuffer: Some(SfDatabuffer {
data_base_path: test_data_base_path_databuffer().join(format!("node{:02}", id)),
ksprefix: "ks".into(),
splits: None,
}),
archiver_appliance: None,
channel_archiver: None,
}

View File

@@ -2,7 +2,7 @@ use crate::ChannelConfigExt;
use bitshuffle::bitshuffle_compress;
use bytes::{BufMut, BytesMut};
use err::Error;
use netpod::{timeunits::*, ByteOrder, Channel, ChannelConfig, GenVar, Node, Shape};
use netpod::{timeunits::*, ByteOrder, Channel, ChannelConfig, GenVar, Node, SfDatabuffer, Shape};
use netpod::{Nanos, ScalarType};
use std::path::{Path, PathBuf};
use tokio::fs::{File, OpenOptions};
@@ -123,11 +123,13 @@ pub async fn gen_test_data() -> Result<(), Error> {
listen: "0.0.0.0".into(),
port: 7780 + i1 as u16,
port_raw: 7780 + i1 as u16 + 100,
data_base_path: data_base_path.join(format!("node{:02}", i1)),
cache_base_path: data_base_path.join(format!("node{:02}", i1)),
ksprefix: ksprefix.clone(),
backend: "testbackend".into(),
splits: None,
sf_databuffer: Some(SfDatabuffer {
data_base_path: data_base_path.join(format!("node{:02}", i1)),
ksprefix: ksprefix.clone(),
splits: None,
}),
archiver_appliance: None,
channel_archiver: None,
};
@@ -158,10 +160,11 @@ async fn gen_node(split: u32, node: &Node, ensemble: &Ensemble) -> Result<(), Er
}
async fn gen_channel(chn: &ChannelGenProps, split: u32, node: &Node, ensemble: &Ensemble) -> Result<(), Error> {
let config_path = node.data_base_path.join("config").join(&chn.config.channel.name);
let channel_path = node
let sfc = node.sf_databuffer.as_ref().unwrap();
let config_path = sfc.data_base_path.join("config").join(&chn.config.channel.name);
let channel_path = sfc
.data_base_path
.join(format!("{}_{}", node.ksprefix, chn.config.keyspace))
.join(format!("{}_{}", sfc.ksprefix, chn.config.keyspace))
.join("byTime")
.join(&chn.config.channel.name);
tokio::fs::create_dir_all(&channel_path).await?;

View File

@@ -6,8 +6,15 @@ use std::path::PathBuf;
// TODO remove/replace this
pub fn datapath(timebin: u64, config: &netpod::ChannelConfig, split: u32, node: &Node) -> PathBuf {
node.data_base_path
.join(format!("{}_{}", node.ksprefix, config.keyspace))
node.sf_databuffer
.as_ref()
.unwrap()
.data_base_path
.join(format!(
"{}_{}",
node.sf_databuffer.as_ref().unwrap().ksprefix,
config.keyspace
))
.join("byTime")
.join(config.channel.name.clone())
.join(format!("{:019}", timebin))
@@ -26,9 +33,10 @@ pub async fn datapaths_for_timebin(
config: &netpod::ChannelConfig,
node: &Node,
) -> Result<Vec<PathBuf>, Error> {
let timebin_path = node
let sfc = node.sf_databuffer.as_ref().unwrap();
let timebin_path = sfc
.data_base_path
.join(format!("{}_{}", node.ksprefix, config.keyspace))
.join(format!("{}_{}", sfc.ksprefix, config.keyspace))
.join("byTime")
.join(config.channel.name.clone())
.join(format!("{:019}", timebin));
@@ -47,7 +55,7 @@ pub async fn datapaths_for_timebin(
let vv = dn.chars().fold(0, |a, x| if x.is_digit(10) { a + 1 } else { a });
if vv == 10 {
let split: u64 = dn.parse()?;
match &node.splits {
match &sfc.splits {
Some(sps) => {
if sps.contains(&split) {
splits.push(split);
@@ -61,9 +69,9 @@ pub async fn datapaths_for_timebin(
}
let mut ret = vec![];
for split in splits {
let path = node
let path = sfc
.data_base_path
.join(format!("{}_{}", node.ksprefix, config.keyspace))
.join(format!("{}_{}", sfc.ksprefix, config.keyspace))
.join("byTime")
.join(config.channel.name.clone())
.join(format!("{:019}", timebin))
@@ -75,9 +83,10 @@ pub async fn datapaths_for_timebin(
}
pub fn channel_timebins_dir_path(channel_config: &ChannelConfig, node: &Node) -> Result<PathBuf, Error> {
let ret = node
let sfc = node.sf_databuffer.as_ref().unwrap();
let ret = sfc
.data_base_path
.join(format!("{}_{}", node.ksprefix, channel_config.keyspace))
.join(format!("{}_{}", sfc.ksprefix, channel_config.keyspace))
.join("byTime")
.join(&channel_config.channel.name);
Ok(ret)
@@ -103,9 +112,10 @@ pub fn index_path(ts: Nanos, channel_config: &ChannelConfig, split: u32, node: &
}
pub fn data_dir_path_tb(ks: u32, channel_name: &str, tb: u32, split: u32, node: &Node) -> Result<PathBuf, Error> {
let ret = node
let sfc = node.sf_databuffer.as_ref().unwrap();
let ret = sfc
.data_base_path
.join(format!("{}_{}", node.ksprefix, ks))
.join(format!("{}_{}", sfc.ksprefix, ks))
.join("byTime")
.join(channel_name)
.join(format!("{:019}", tb))