Move workspace crates into subfolder
This commit is contained in:
@@ -0,0 +1,321 @@
|
||||
use crate::query::datetime::Datetime;
|
||||
use crate::{DiskIoTune, FileIoBufferSize, ReadSys};
|
||||
use err::Error;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
use std::time::Duration;
|
||||
|
||||
/// Serde default helper: fields annotated `default = "bool_true"` start as `true`.
fn bool_true() -> bool {
    true
}
|
||||
|
||||
/// Serde skip helper: used with `skip_serializing_if` to omit fields that
/// still hold their default value `true`.
fn bool_is_true(x: &bool) -> bool {
    *x
}
|
||||
|
||||
/// Time range of an Api1 query.
///
/// Serializes with the legacy Api1 field names `startDate`/`endDate`;
/// the optional `type` tag is omitted when empty.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Api1Range {
    // Legacy discriminator tag; left empty by `Api1Range::new`.
    #[serde(rename = "type", default, skip_serializing_if = "String::is_empty")]
    ty: String,
    // Start of the range (inclusive).
    #[serde(rename = "startDate")]
    beg: Datetime,
    // End of the range.
    #[serde(rename = "endDate")]
    end: Datetime,
}
|
||||
|
||||
impl Api1Range {
|
||||
pub fn new(beg: Datetime, end: Datetime) -> Result<Self, Error> {
|
||||
let ret = Self {
|
||||
ty: String::new(),
|
||||
beg,
|
||||
end,
|
||||
};
|
||||
Ok(ret)
|
||||
}
|
||||
|
||||
pub fn beg(&self) -> &Datetime {
|
||||
&self.beg
|
||||
}
|
||||
|
||||
pub fn end(&self) -> &Datetime {
|
||||
&self.end
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
fn serde_de_range_zulu() {
    // Both timestamps use the "Z" (Zulu) suffix, i.e. a zero UTC offset.
    let js = r#"{"startDate": "2022-11-22T10:15:12.412Z", "endDate": "2022-11-22T10:15:12.413556Z"}"#;
    let range: Api1Range = serde_json::from_str(js).unwrap();
    let (beg, end) = (range.beg(), range.end());
    assert_eq!(beg.offset().local_minus_utc(), 0);
    assert_eq!(beg.timestamp_subsec_micros(), 412000);
    assert_eq!(end.offset().local_minus_utc(), 0);
    assert_eq!(end.timestamp_subsec_micros(), 413556);
}
|
||||
|
||||
#[test]
fn serde_de_range_offset() {
    // Unlike `serde_de_range_zulu`, the end timestamp carries an explicit
    // non-zero UTC offset. Previously this test was a verbatim copy of the
    // zulu test and never exercised offset parsing at all.
    let s = r#"{"startDate": "2022-11-22T10:15:12.412Z", "endDate": "2022-11-22T10:15:12.413556+02:00"}"#;
    let range: Api1Range = serde_json::from_str(s).unwrap();
    assert_eq!(range.beg().offset().local_minus_utc(), 0);
    // +02:00 must survive deserialization as a 7200 s local-minus-utc offset.
    assert_eq!(range.end().offset().local_minus_utc(), 2 * 60 * 60);
    assert_eq!(range.beg().timestamp_subsec_micros(), 412000);
    assert_eq!(range.end().timestamp_subsec_micros(), 413556);
}
|
||||
|
||||
#[test]
fn serde_ser_range_offset() {
    use chrono::{FixedOffset, NaiveDate, TimeZone};
    // Begin at UTC+3 with a 16 ms subsecond part (expected to print ".016").
    let beg = FixedOffset::east_opt(60 * 60 * 3)
        .unwrap()
        .from_local_datetime(
            &NaiveDate::from_ymd_opt(2022, 11, 22)
                .unwrap()
                .and_hms_milli_opt(13, 14, 15, 16)
                .unwrap(),
        )
        .earliest()
        .unwrap();
    // End at UTC-1 with an 800 ms subsecond part.
    let end = FixedOffset::east_opt(-60 * 60 * 1)
        .unwrap()
        .from_local_datetime(
            &NaiveDate::from_ymd_opt(2022, 11, 22)
                .unwrap()
                .and_hms_milli_opt(13, 14, 15, 800)
                .unwrap(),
        )
        .earliest()
        .unwrap();
    let range = Api1Range::new(beg.into(), end.into()).unwrap();
    let js = serde_json::to_string(&range).unwrap();
    // Non-zero offsets must serialize as "+HH:MM"/"-HH:MM", never "Z".
    let exp = r#"{"startDate":"2022-11-22T13:14:15.016+03:00","endDate":"2022-11-22T13:14:15.800-01:00"}"#;
    assert_eq!(js, exp);
}
|
||||
|
||||
#[test]
fn serde_ser_range_01() -> Result<(), Error> {
    // Whole-second begin plus millisecond-precision end, both UTC.
    let range = Api1Range::new(
        Datetime::try_from("2022-11-22T02:03:04Z")?,
        Datetime::try_from("2022-11-22T02:03:04.123Z")?,
    )?;
    let js = serde_json::to_string(&range).unwrap();
    let exp = r#"{"startDate":"2022-11-22T02:03:04Z","endDate":"2022-11-22T02:03:04.123Z"}"#;
    assert_eq!(js, exp);
    Ok(())
}
|
||||
|
||||
#[test]
fn serde_ser_range_02() -> Result<(), Error> {
    // Microsecond-precision begin; the end's trailing zero microseconds
    // should collapse to millisecond precision (".777") on output.
    let range = Api1Range::new(
        Datetime::try_from("2022-11-22T02:03:04.987654Z")?,
        Datetime::try_from("2022-11-22T02:03:04.777000Z")?,
    )?;
    let js = serde_json::to_string(&range).unwrap();
    let exp = r#"{"startDate":"2022-11-22T02:03:04.987654Z","endDate":"2022-11-22T02:03:04.777Z"}"#;
    assert_eq!(js, exp);
    Ok(())
}
|
||||
|
||||
/// In Api1, the list of channels consists of either `BACKEND/CHANNELNAME`
/// or just `CHANNELNAME`.
///
/// Serialized as a single string (see `serde_channel_tuple` below), with
/// the backend prefix present only when `backend` is `Some`.
#[derive(Debug, PartialEq)]
pub struct ChannelTuple {
    // Optional backend prefix; `None` means "name only".
    backend: Option<String>,
    // Channel name (never includes the backend prefix).
    name: String,
}
|
||||
|
||||
impl ChannelTuple {
|
||||
pub fn new(backend: String, name: String) -> Self {
|
||||
Self {
|
||||
backend: Some(backend),
|
||||
name,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_name(name: String) -> Self {
|
||||
Self { backend: None, name }
|
||||
}
|
||||
|
||||
pub fn backend(&self) -> Option<&String> {
|
||||
self.backend.as_ref()
|
||||
}
|
||||
|
||||
pub fn name(&self) -> &str {
|
||||
&self.name
|
||||
}
|
||||
}
|
||||
|
||||
/// Custom serde representation for [`ChannelTuple`]: a single string of the
/// form `BACKEND/CHANNELNAME`, or just `CHANNELNAME` when no backend is set.
mod serde_channel_tuple {
    use super::*;
    use serde::de::{Deserialize, Deserializer, Visitor};
    use serde::ser::{Serialize, Serializer};

    impl Serialize for ChannelTuple {
        // Emit "backend/name" when a backend is present, otherwise just "name".
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            if let Some(backend) = self.backend.as_ref() {
                serializer.serialize_str(&format!("{}/{}", backend, self.name))
            } else {
                serializer.serialize_str(&self.name)
            }
        }
    }

    // Visitor splitting the string form back into (backend, name).
    struct Vis;

    impl<'de> Visitor<'de> for Vis {
        type Value = ChannelTuple;

        fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
            write!(fmt, "[Backendname/]Channelname")
        }

        // NOTE(review): only the first '/' is honored; a name containing
        // further slashes keeps them as part of the channel name only if it
        // had no backend prefix — "a/b/c" parses as backend "a", name "b"
        // (the "c" segment is dropped by the second `next()`). Confirm
        // whether multi-slash input can occur.
        fn visit_str<E>(self, val: &str) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            let mut it = val.split("/");
            // Even empty string splits into one element of empty string
            let s0 = it.next().unwrap();
            if let Some(s1) = it.next() {
                let ret = ChannelTuple {
                    backend: Some(s0.into()),
                    name: s1.into(),
                };
                Ok(ret)
            } else {
                let ret = ChannelTuple {
                    backend: None,
                    name: s0.into(),
                };
                Ok(ret)
            }
        }
    }

    impl<'de> Deserialize<'de> for ChannelTuple {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            deserializer.deserialize_str(Vis)
        }
    }

    // Name-only tuple serializes without any slash.
    #[test]
    fn ser_name() {
        let x = ChannelTuple {
            backend: None,
            name: "temperature".into(),
        };
        let js = serde_json::to_string(&x).unwrap();
        assert_eq!(js, r#""temperature""#);
    }

    // Backend-qualified tuple serializes as "backend/name".
    #[test]
    fn ser_backend_name() {
        let x = ChannelTuple {
            backend: Some("beach".into()),
            name: "temperature".into(),
        };
        let js = serde_json::to_string(&x).unwrap();
        assert_eq!(js, r#""beach/temperature""#);
    }
}
|
||||
|
||||
/// Full Api1 event query: a time range, a list of channels, and a set of
/// optional tuning parameters that are omitted from the JSON when unset.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Api1Query {
    range: Api1Range,
    channels: Vec<ChannelTuple>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    timeout: Option<Duration>,
    // All following parameters are private and not to be used
    #[serde(default, skip_serializing_if = "Option::is_none")]
    file_io_buffer_size: Option<FileIoBufferSize>,
    // Defaults to `true` and is only serialized when explicitly disabled.
    #[serde(default = "bool_true", skip_serializing_if = "bool_is_true")]
    decompress: bool,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    events_max: Option<u64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    io_queue_len: Option<u32>,
    #[serde(default, skip_serializing_if = "String::is_empty")]
    log_level: String,
    // Raw read-system selector; converted via `ReadSys::from` in `disk_io_tune`.
    #[serde(default, skip_serializing_if = "String::is_empty")]
    read_sys: String,
}
|
||||
|
||||
impl Api1Query {
|
||||
pub fn new(range: Api1Range, channels: Vec<ChannelTuple>) -> Self {
|
||||
Self {
|
||||
range,
|
||||
channels,
|
||||
timeout: None,
|
||||
decompress: true,
|
||||
events_max: None,
|
||||
file_io_buffer_size: None,
|
||||
io_queue_len: None,
|
||||
log_level: String::new(),
|
||||
read_sys: String::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn disk_io_tune(&self) -> DiskIoTune {
|
||||
let mut k = DiskIoTune::default();
|
||||
if let Some(x) = &self.file_io_buffer_size {
|
||||
k.read_buffer_len = x.0;
|
||||
}
|
||||
if let Some(io_queue_len) = self.io_queue_len {
|
||||
k.read_queue_len = io_queue_len as usize;
|
||||
}
|
||||
let read_sys: ReadSys = self.read_sys.as_str().into();
|
||||
k.read_sys = read_sys;
|
||||
k
|
||||
}
|
||||
|
||||
pub fn range(&self) -> &Api1Range {
|
||||
&self.range
|
||||
}
|
||||
|
||||
pub fn channels(&self) -> &[ChannelTuple] {
|
||||
&self.channels
|
||||
}
|
||||
|
||||
pub fn timeout(&self) -> Option<Duration> {
|
||||
self.timeout
|
||||
}
|
||||
|
||||
pub fn timeout_or_default(&self) -> Duration {
|
||||
Duration::from_secs(60 * 30)
|
||||
}
|
||||
|
||||
pub fn log_level(&self) -> &str {
|
||||
&self.log_level
|
||||
}
|
||||
|
||||
pub fn decompress(&self) -> bool {
|
||||
self.decompress
|
||||
}
|
||||
|
||||
pub fn events_max(&self) -> Option<u64> {
|
||||
self.events_max
|
||||
}
|
||||
|
||||
pub fn set_decompress(&mut self, v: bool) {
|
||||
self.decompress = v;
|
||||
}
|
||||
}
|
||||
|
||||
// Round-trip check of the wire format: default tuning options must be
// omitted entirely, and channels serialize as plain strings.
#[test]
fn serde_api1_query() -> Result<(), Error> {
    let beg = Datetime::try_from("2022-11-22T08:09:10Z")?;
    let end = Datetime::try_from("2022-11-23T08:11:05.455009+02:00")?;
    let range = Api1Range::new(beg, end).unwrap();
    let ch0 = ChannelTuple::from_name("nameonly".into());
    let ch1 = ChannelTuple::new("somebackend".into(), "somechan".into());
    let qu = Api1Query::new(range, vec![ch0, ch1]);
    let js = serde_json::to_string(&qu).unwrap();
    assert_eq!(
        js,
        r#"{"range":{"startDate":"2022-11-22T08:09:10Z","endDate":"2022-11-23T08:11:05.455009+02:00"},"channels":["nameonly","somebackend/somechan"]}"#
    );
    Ok(())
}
|
||||
@@ -0,0 +1,191 @@
|
||||
use chrono::DateTime;
|
||||
use chrono::FixedOffset;
|
||||
use err::Error;
|
||||
use serde::de::Visitor;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::fmt;
|
||||
use std::ops;
|
||||
|
||||
/// Timestamp with an explicit UTC offset, exchanged in RFC 3339 text form.
///
/// Newtype around `chrono::DateTime<FixedOffset>` so this crate controls the
/// serde representation (see the `Serialize`/`Deserialize` impls below).
#[derive(Clone, Debug, PartialEq)]
pub struct Datetime(DateTime<FixedOffset>);
|
||||
|
||||
impl From<DateTime<FixedOffset>> for Datetime {
|
||||
fn from(x: DateTime<FixedOffset>) -> Self {
|
||||
Datetime(x)
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for Datetime {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(val: &str) -> Result<Self, Self::Error> {
|
||||
let dt =
|
||||
DateTime::<FixedOffset>::parse_from_rfc3339(val).map_err(|e| Error::with_msg_no_trace(format!("{e}")))?;
|
||||
Ok(Datetime(dt))
|
||||
}
|
||||
}
|
||||
|
||||
// Deref to the inner chrono type so callers can use its full API
// (`offset()`, `timestamp_subsec_micros()`, ...) directly on `Datetime`.
impl ops::Deref for Datetime {
    type Target = DateTime<FixedOffset>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
|
||||
|
||||
// RFC 3339 (subset of ISO 8601)
|
||||
|
||||
impl Serialize for Datetime {
    /// Serializes as RFC 3339 text with the coarsest subsecond precision
    /// that is still exact: 9 digits if there are sub-microsecond nanos,
    /// 6 if there are sub-millisecond micros, 3 if there are any millis,
    /// none otherwise. A zero offset prints as "Z", anything else as "±HH:MM".
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use fmt::Write;
        use serde::ser::Error;
        let val = &self.0;
        let mut s = String::with_capacity(64);
        // Date and whole-second time first; fraction and offset appended below.
        write!(&mut s, "{}", val.format("%Y-%m-%dT%H:%M:%S")).map_err(|_| Error::custom("fmt"))?;
        let ns = val.timestamp_subsec_nanos();
        let mus = val.timestamp_subsec_micros();
        if ns % 1000 != 0 {
            // Nanosecond remainder: full 9-digit fraction.
            write!(&mut s, "{}", val.format(".%9f")).map_err(|_| Error::custom("fmt"))?;
        } else if mus % 1000 != 0 {
            // Microsecond remainder: 6 digits.
            write!(&mut s, "{}", val.format(".%6f")).map_err(|_| Error::custom("fmt"))?;
        } else if mus != 0 {
            // Millisecond precision only: 3 digits.
            write!(&mut s, "{}", val.format(".%3f")).map_err(|_| Error::custom("fmt"))?;
        }
        // (A zero subsecond part prints no fraction at all.)
        if val.offset().local_minus_utc() == 0 {
            write!(&mut s, "Z").map_err(|_| Error::custom("fmt"))?;
        } else {
            write!(&mut s, "{}", val.format("%:z")).map_err(|_| Error::custom("fmt"))?;
        }
        serializer.collect_str(&s)
    }
}
|
||||
|
||||
mod ser_impl_2 {
|
||||
use super::Datetime;
|
||||
use crate::DATETIME_FMT_0MS;
|
||||
use crate::DATETIME_FMT_3MS;
|
||||
use crate::DATETIME_FMT_6MS;
|
||||
use crate::DATETIME_FMT_9MS;
|
||||
use fmt::Write;
|
||||
use serde::ser::Error;
|
||||
use std::fmt;
|
||||
|
||||
#[allow(unused)]
|
||||
fn serialize<S>(obj: &Datetime, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let val = &obj.0;
|
||||
let mut s = String::with_capacity(64);
|
||||
write!(&mut s, "{}", val.format("%Y-%m-%dT%H:%M:%S")).map_err(|_| Error::custom("fmt"))?;
|
||||
let ns = val.timestamp_subsec_nanos();
|
||||
let s = if ns % 1000 != 0 {
|
||||
val.format(DATETIME_FMT_9MS)
|
||||
} else {
|
||||
let mus = val.timestamp_subsec_micros();
|
||||
if mus % 1000 != 0 {
|
||||
val.format(DATETIME_FMT_6MS)
|
||||
} else {
|
||||
let ms = val.timestamp_subsec_millis();
|
||||
if ms != 0 {
|
||||
val.format(DATETIME_FMT_3MS)
|
||||
} else {
|
||||
val.format(DATETIME_FMT_0MS)
|
||||
}
|
||||
}
|
||||
};
|
||||
serializer.collect_str(&s)
|
||||
}
|
||||
}
|
||||
|
||||
// Visitor that accepts a string and parses it via `Datetime::try_from`.
struct Vis1;

impl<'de> Visitor<'de> for Vis1 {
    type Value = Datetime;

    fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "Datetime")
    }

    fn visit_str<E>(self, val: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        // Delegate to the RFC 3339 parser; map its error into a serde error.
        Datetime::try_from(val).map_err(|e| serde::de::Error::custom(format!("{e}")))
    }
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Datetime {
    // Expects a single RFC 3339 string; see `Vis1` above.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserializer.deserialize_str(Vis1)
    }
}
|
||||
|
||||
// Zero offset, zero subseconds: no fractional digits, "Z" suffix.
#[test]
fn ser_00() {
    use chrono::TimeZone;
    let x = FixedOffset::east_opt(0)
        .unwrap()
        .with_ymd_and_hms(2023, 2, 3, 15, 12, 40)
        .earliest()
        .unwrap();
    let x = Datetime(x);
    let s = serde_json::to_string(&x).unwrap();

    assert_eq!(s, r#""2023-02-03T15:12:40Z""#);
}
|
||||
|
||||
// Millisecond-only subseconds: expect exactly three fractional digits.
#[test]
fn ser_01() {
    use chrono::TimeZone;
    let x = FixedOffset::east_opt(0)
        .unwrap()
        .with_ymd_and_hms(2023, 2, 3, 15, 12, 40)
        .earliest()
        .unwrap()
        .checked_add_signed(chrono::Duration::milliseconds(876))
        .unwrap();
    let x = Datetime(x);
    let s = serde_json::to_string(&x).unwrap();

    assert_eq!(s, r#""2023-02-03T15:12:40.876Z""#);
}
|
||||
|
||||
// Microsecond-precision subseconds (nanos are a multiple of 1000):
// expect six fractional digits.
#[test]
fn ser_02() {
    use chrono::TimeZone;
    let x = FixedOffset::east_opt(0)
        .unwrap()
        .with_ymd_and_hms(2023, 2, 3, 15, 12, 40)
        .earliest()
        .unwrap()
        .checked_add_signed(chrono::Duration::nanoseconds(543430000))
        .unwrap();
    let x = Datetime(x);
    let s = serde_json::to_string(&x).unwrap();

    assert_eq!(s, r#""2023-02-03T15:12:40.543430Z""#);
}
|
||||
|
||||
// Nanosecond-precision subseconds: expect the full nine fractional digits.
#[test]
fn ser_03() {
    use chrono::TimeZone;
    let x = FixedOffset::east_opt(0)
        .unwrap()
        .with_ymd_and_hms(2023, 2, 3, 15, 12, 40)
        .earliest()
        .unwrap()
        .checked_add_signed(chrono::Duration::nanoseconds(543432321))
        .unwrap();
    let x = Datetime(x);
    let s = serde_json::to_string(&x).unwrap();

    assert_eq!(s, r#""2023-02-03T15:12:40.543432321Z""#);
}
|
||||
@@ -0,0 +1,141 @@
|
||||
use super::agg_kind_from_binning_scheme;
|
||||
use super::binning_scheme_append_to_url;
|
||||
use super::CacheUsage;
|
||||
use crate::AggKind;
|
||||
use crate::AppendToUrl;
|
||||
use crate::ByteSize;
|
||||
use crate::FromUrl;
|
||||
use crate::PreBinnedPatchCoordEnum;
|
||||
use crate::ScalarType;
|
||||
use crate::SfDbChannel;
|
||||
use crate::Shape;
|
||||
use err::Error;
|
||||
use std::collections::BTreeMap;
|
||||
use url::Url;
|
||||
|
||||
/// Query for one pre-binned patch of a channel.
///
/// The `Option` fields are tuning knobs; their effective defaults live in
/// the accessors (`cache_usage`, `buf_len_disk_io`, `disk_stats_every`).
/// Round-trips through a URL query string via `from_url`/`append_to_url`.
#[derive(Clone, Debug)]
pub struct PreBinnedQuery {
    patch: PreBinnedPatchCoordEnum,
    channel: SfDbChannel,
    scalar_type: ScalarType,
    shape: Shape,
    agg_kind: Option<AggKind>,
    cache_usage: Option<CacheUsage>,
    // Disk read buffer length in bytes; default applied in `buf_len_disk_io`.
    buf_len_disk_io: Option<usize>,
    // Interval between disk-stats emissions; default in `disk_stats_every`.
    disk_stats_every: Option<ByteSize>,
}
|
||||
|
||||
impl PreBinnedQuery {
    /// Assembles a query from already-validated parts; `None` options fall
    /// back to defaults in the corresponding accessors below.
    pub fn new(
        patch: PreBinnedPatchCoordEnum,
        channel: SfDbChannel,
        scalar_type: ScalarType,
        shape: Shape,
        agg_kind: Option<AggKind>,
        cache_usage: Option<CacheUsage>,
        buf_len_disk_io: Option<usize>,
        disk_stats_every: Option<ByteSize>,
    ) -> Self {
        Self {
            patch,
            channel,
            scalar_type,
            shape,
            agg_kind,
            cache_usage,
            buf_len_disk_io,
            disk_stats_every,
        }
    }

    /// Parses the query string of `url` into a `PreBinnedQuery`.
    ///
    /// `scalarType` and `shape` are required; everything else is optional.
    /// Inverse of the `AppendToUrl` impl below.
    pub fn from_url(url: &Url) -> Result<Self, Error> {
        let mut pairs = BTreeMap::new();
        for (j, k) in url.query_pairs() {
            pairs.insert(j.to_string(), k.to_string());
        }
        // Re-bind immutably: only read access from here on.
        let pairs = pairs;
        let scalar_type = pairs
            .get("scalarType")
            .ok_or_else(|| Error::with_msg("missing scalarType"))
            .map(|x| ScalarType::from_url_str(&x))??;
        let shape = pairs
            .get("shape")
            .ok_or_else(|| Error::with_msg("missing shape"))
            .map(|x| Shape::from_url_str(&x))??;
        let ret = Self {
            patch: PreBinnedPatchCoordEnum::from_pairs(&pairs)?,
            channel: SfDbChannel::from_pairs(&pairs)?,
            scalar_type,
            shape,
            agg_kind: agg_kind_from_binning_scheme(&pairs)?,
            cache_usage: CacheUsage::from_pairs(&pairs)?,
            // A malformed bufLenDiskIo is a hard error...
            buf_len_disk_io: pairs
                .get("bufLenDiskIo")
                .map_or(Ok(None), |k| k.parse().map(|k| Some(k)))?,
            // ...while a malformed diskStatsEveryKb is silently dropped.
            // NOTE(review): inconsistent error handling vs bufLenDiskIo —
            // confirm whether ignoring bad values here is intentional.
            disk_stats_every: pairs
                .get("diskStatsEveryKb")
                .map(|k| k.parse().ok())
                .unwrap_or(None)
                .map(ByteSize::from_kb),
        };
        Ok(ret)
    }

    /// Patch coordinate addressed by this query.
    pub fn patch(&self) -> &PreBinnedPatchCoordEnum {
        &self.patch
    }

    /// Channel addressed by this query.
    pub fn channel(&self) -> &SfDbChannel {
        &self.channel
    }

    /// Scalar type of the channel data.
    pub fn scalar_type(&self) -> &ScalarType {
        &self.scalar_type
    }

    /// Shape of the channel data.
    pub fn shape(&self) -> &Shape {
        &self.shape
    }

    /// Aggregation kind, if one was requested.
    pub fn agg_kind(&self) -> &Option<AggKind> {
        &self.agg_kind
    }

    /// Disk-stats interval; defaults to 4 MiB when not specified.
    pub fn disk_stats_every(&self) -> ByteSize {
        match &self.disk_stats_every {
            Some(x) => x.clone(),
            None => ByteSize(1024 * 1024 * 4),
        }
    }

    /// Cache usage policy; defaults to `CacheUsage::Use`.
    pub fn cache_usage(&self) -> CacheUsage {
        self.cache_usage.as_ref().map_or(CacheUsage::Use, |x| x.clone())
    }

    /// Disk read buffer length; defaults to 8 KiB.
    pub fn buf_len_disk_io(&self) -> usize {
        self.buf_len_disk_io.unwrap_or(1024 * 8)
    }
}
|
||||
|
||||
impl AppendToUrl for PreBinnedQuery {
    /// Writes this query into `url`'s query string; counterpart of `from_url`.
    ///
    /// Only set (`Some`) optional parameters are emitted, matching the
    /// optional handling in `from_url`.
    fn append_to_url(&self, url: &mut Url) {
        self.patch.append_to_url(url);
        self.channel.append_to_url(url);
        self.shape.append_to_url(url);
        self.scalar_type.append_to_url(url);
        if let Some(x) = &self.agg_kind {
            binning_scheme_append_to_url(x, url);
        }
        // `query_pairs_mut` holds a mutable borrow of `url`, so the
        // sub-appenders above must run before this point.
        let mut g = url.query_pairs_mut();
        // TODO add also impl AppendToUrl for these if applicable:
        if let Some(x) = &self.cache_usage {
            g.append_pair("cacheUsage", &x.query_param_value());
        }
        if let Some(x) = self.buf_len_disk_io {
            g.append_pair("bufLenDiskIo", &format!("{}", x));
        }
        if let Some(x) = &self.disk_stats_every {
            // Stored as bytes, transmitted in whole KiB (truncating division).
            g.append_pair("diskStatsEveryKb", &format!("{}", x.bytes() / 1024));
        }
    }
}
|
||||
Reference in New Issue
Block a user