Move workspace crates into subfolder

This commit is contained in:
Dominik Werder
2023-07-10 14:45:25 +02:00
parent 8938e55f86
commit 30c7fcb1e5
212 changed files with 246 additions and 41 deletions

View File

@@ -0,0 +1,30 @@
/// Power-of-two histogram: counts values by bit length, shifted down by `sub`.
#[derive(Debug)]
pub struct HistoLog2 {
    // One bucket per log2 magnitude; the top bucket also collects overflow.
    histo: [u64; 16],
    // Number of low log2 magnitudes folded into bucket 0.
    sub: usize,
}
impl HistoLog2 {
    /// Creates an empty histogram. `sub` shifts bucket assignment down so the
    /// first `sub` powers of two all land in bucket 0.
    pub fn new(sub: usize) -> Self {
        Self { histo: [0; 16], sub }
    }
    /// Counts `v` in the bucket for its bit length (0 for v == 0), shifted by `sub`.
    #[inline]
    pub fn ingest(&mut self, v: u32) {
        // Bit length of v: 0 for v == 0, otherwise floor(log2(v)) + 1.
        let po = if v == 0 { 0 } else { (32 - v.leading_zeros()) as usize };
        // Clamp so that out-of-range values land in the top bucket.
        // BUGFIX: the previous shift loop capped the raw bit length at 15
        // BEFORE subtracting `sub`, which made the top `sub` buckets
        // unreachable whenever sub > 0. For sub == 0 behavior is unchanged.
        let po = po.min(self.histo.len() - 1 + self.sub);
        self.histo[po.saturating_sub(self.sub)] += 1;
    }
}

2989
crates/netpod/src/netpod.rs Normal file

File diff suppressed because it is too large Load Diff

333
crates/netpod/src/query.rs Normal file
View File

@@ -0,0 +1,333 @@
pub mod api1;
pub mod datetime;
pub mod prebinned;
use crate::get_url_query_pairs;
use crate::is_false;
use crate::log::*;
use crate::AggKind;
use crate::AppendToUrl;
use crate::ByteSize;
use crate::FromUrl;
use crate::HasBackend;
use crate::HasTimeout;
use crate::NanoRange;
use crate::PulseRange;
use crate::SeriesRange;
use crate::SfDbChannel;
use crate::ToNanos;
use crate::DATETIME_FMT_6MS;
use chrono::DateTime;
use chrono::TimeZone;
use chrono::Utc;
use err::Error;
use serde::Deserialize;
use serde::Serialize;
use std::collections::BTreeMap;
use std::fmt;
use std::time::Duration;
use url::Url;
/// Cache behavior requested via the `cacheUsage` URL query parameter.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum CacheUsage {
    // Read from and write to the cache.
    Use,
    // Bypass the cache entirely.
    Ignore,
    // Recompute and overwrite cached entries.
    Recreate,
}
impl CacheUsage {
pub fn query_param_value(&self) -> String {
match self {
CacheUsage::Use => "use",
CacheUsage::Ignore => "ignore",
CacheUsage::Recreate => "recreate",
}
.into()
}
// Missing query parameter is not an error
pub fn from_pairs(pairs: &BTreeMap<String, String>) -> Result<Option<Self>, Error> {
pairs
.get("cacheUsage")
.map(|k| {
if k == "use" {
Ok(Some(CacheUsage::Use))
} else if k == "ignore" {
Ok(Some(CacheUsage::Ignore))
} else if k == "recreate" {
Ok(Some(CacheUsage::Recreate))
} else {
Err(Error::with_msg(format!("unexpected cacheUsage {:?}", k)))?
}
})
.unwrap_or(Ok(None))
}
pub fn from_string(s: &str) -> Result<Self, Error> {
let ret = if s == "ignore" {
CacheUsage::Ignore
} else if s == "recreate" {
CacheUsage::Recreate
} else if s == "use" {
CacheUsage::Use
} else {
return Err(Error::with_msg(format!("can not interpret cache usage string: {}", s)));
};
Ok(ret)
}
}
impl fmt::Display for CacheUsage {
    // Displays the same token that is used in URLs.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str(&self.query_param_value())
    }
}
/// URL query carrying only a time range (`begDate`/`endDate` or `begNs`/`endNs`).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct TimeRangeQuery {
    range: NanoRange,
}
impl FromUrl for TimeRangeQuery {
    fn from_url(url: &Url) -> Result<Self, Error> {
        Self::from_pairs(&get_url_query_pairs(url))
    }
    /// Accepts either `begDate`/`endDate` (RFC 3339) or `begNs`/`endNs` (raw
    /// nanoseconds); the date form takes precedence when both are present.
    fn from_pairs(pairs: &BTreeMap<String, String>) -> Result<Self, Error> {
        if let Some((beg, end)) = pairs.get("begDate").zip(pairs.get("endDate")) {
            let beg = beg.parse::<DateTime<Utc>>()?.to_nanos();
            let end = end.parse::<DateTime<Utc>>()?.to_nanos();
            return Ok(Self {
                range: NanoRange { beg, end },
            });
        }
        if let Some((beg, end)) = pairs.get("begNs").zip(pairs.get("endNs")) {
            let range = NanoRange {
                beg: beg.parse()?,
                end: end.parse()?,
            };
            return Ok(Self { range });
        }
        Err(Error::with_public_msg("missing date range"))
    }
}
impl AppendToUrl for TimeRangeQuery {
    /// Writes the range as `begDate`/`endDate` with microsecond precision.
    fn append_to_url(&self, url: &mut Url) {
        let fmt_ts = |ns: u64| {
            Utc.timestamp_nanos(ns as i64)
                .format(DATETIME_FMT_6MS)
                .to_string()
        };
        let mut g = url.query_pairs_mut();
        g.append_pair("begDate", &fmt_ts(self.range.beg));
        g.append_pair("endDate", &fmt_ts(self.range.end));
    }
}
impl From<TimeRangeQuery> for NanoRange {
    // Unwraps the query into its underlying time range.
    fn from(k: TimeRangeQuery) -> Self {
        k.range
    }
}
impl From<&NanoRange> for TimeRangeQuery {
    // Copies the u64 bounds into a fresh query.
    fn from(k: &NanoRange) -> Self {
        let range = NanoRange { beg: k.beg, end: k.end };
        Self { range }
    }
}
impl From<&PulseRange> for PulseRangeQuery {
    // Copies the u64 bounds into a fresh query.
    fn from(k: &PulseRange) -> Self {
        let range = PulseRange { beg: k.beg, end: k.end };
        Self { range }
    }
}
/// URL query carrying only a pulse-id range (`begPulse`/`endPulse`).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PulseRangeQuery {
    range: PulseRange,
}
impl FromUrl for PulseRangeQuery {
fn from_url(url: &Url) -> Result<Self, Error> {
let pairs = get_url_query_pairs(url);
Self::from_pairs(&pairs)
}
fn from_pairs(pairs: &BTreeMap<String, String>) -> Result<Self, Error> {
if let (Some(beg), Some(end)) = (pairs.get("begPulse"), pairs.get("endPulse")) {
let ret = Self {
range: PulseRange {
beg: beg.parse()?,
end: end.parse()?,
},
};
Ok(ret)
} else {
Err(Error::with_public_msg("missing pulse range"))
}
}
}
impl AppendToUrl for PulseRangeQuery {
    // Serializes the pulse bounds as decimal query parameters.
    fn append_to_url(&self, url: &mut Url) {
        let mut pairs = url.query_pairs_mut();
        pairs
            .append_pair("begPulse", &self.range.beg.to_string())
            .append_pair("endPulse", &self.range.end.to_string());
    }
}
impl From<PulseRangeQuery> for PulseRange {
    // Unwraps the query into its underlying pulse range.
    fn from(k: PulseRangeQuery) -> Self {
        k.range
    }
}
/// Appends the `binningScheme` query parameter for `agg_kind`, plus the
/// `binnedXcount` parameter for the X-binned variant.
pub fn binning_scheme_append_to_url(agg_kind: &AggKind, url: &mut Url) {
    let scheme = match agg_kind {
        AggKind::EventBlobs => "eventBlobs",
        AggKind::TimeWeightedScalar => "timeWeightedScalar",
        AggKind::Plain => "fullValue",
        AggKind::DimXBins1 => "unweightedScalar",
        AggKind::DimXBinsN(_) => "binnedX",
        AggKind::PulseIdDiff => "pulseIdDiff",
    };
    let mut g = url.query_pairs_mut();
    g.append_pair("binningScheme", scheme);
    // The bin count accompanies only the X-binned scheme.
    if let AggKind::DimXBinsN(n) = agg_kind {
        g.append_pair("binnedXcount", &format!("{}", n));
    }
}
// Absent AggKind is not considered an error.
pub fn agg_kind_from_binning_scheme(pairs: &BTreeMap<String, String>) -> Result<Option<AggKind>, Error> {
let key = "binningScheme";
if let Some(s) = pairs.get(key) {
let ret = if s == "eventBlobs" {
AggKind::EventBlobs
} else if s == "fullValue" {
AggKind::Plain
} else if s == "timeWeightedScalar" {
AggKind::TimeWeightedScalar
} else if s == "unweightedScalar" {
AggKind::DimXBins1
} else if s == "binnedX" {
let u = pairs.get("binnedXcount").map_or("1", |k| k).parse()?;
AggKind::DimXBinsN(u)
} else if s == "pulseIdDiff" {
AggKind::PulseIdDiff
} else {
return Err(Error::with_msg("can not extract binningScheme"));
};
Ok(Some(ret))
} else {
Ok(None)
}
}
/// Query for state events of a single channel over a time range.
#[derive(Clone, Debug)]
pub struct ChannelStateEventsQuery {
    channel: SfDbChannel,
    range: NanoRange,
}
impl ChannelStateEventsQuery {
    /// Creates a query for the given channel and time range.
    pub fn new(channel: SfDbChannel, range: NanoRange) -> Self {
        Self { channel, range }
    }
    /// The queried time range.
    pub fn range(&self) -> &NanoRange {
        &self.range
    }
    /// The queried channel.
    pub fn channel(&self) -> &SfDbChannel {
        &self.channel
    }
    /// Overrides the series id on the contained channel.
    pub fn set_series_id(&mut self, series: u64) {
        self.channel.series = Some(series);
    }
    /// Mutable access to the contained channel.
    pub fn channel_mut(&mut self) -> &mut SfDbChannel {
        &mut self.channel
    }
}
impl HasBackend for ChannelStateEventsQuery {
    // The backend is taken from the contained channel.
    fn backend(&self) -> &str {
        &self.channel.backend
    }
}
impl HasTimeout for ChannelStateEventsQuery {
    // Fixed 6 s timeout for this query type.
    fn timeout(&self) -> Duration {
        Duration::from_millis(6000)
    }
}
impl FromUrl for ChannelStateEventsQuery {
fn from_url(url: &Url) -> Result<Self, Error> {
let pairs = get_url_query_pairs(url);
Self::from_pairs(&pairs)
}
fn from_pairs(pairs: &BTreeMap<String, String>) -> Result<Self, Error> {
let beg_date = pairs.get("begDate").ok_or(Error::with_msg("missing begDate"))?;
let end_date = pairs.get("endDate").ok_or(Error::with_msg("missing endDate"))?;
let ret = Self {
channel: SfDbChannel::from_pairs(&pairs)?,
range: NanoRange {
beg: beg_date.parse::<DateTime<Utc>>()?.to_nanos(),
end: end_date.parse::<DateTime<Utc>>()?.to_nanos(),
},
};
let self_name = std::any::type_name::<Self>();
info!("{self_name}::from_url {ret:?}");
Ok(ret)
}
}
impl AppendToUrl for ChannelStateEventsQuery {
    /// Appends the channel parameters, then the range with microsecond precision.
    fn append_to_url(&self, url: &mut Url) {
        self.channel.append_to_url(url);
        let fmt_ts = |ns: u64| {
            Utc.timestamp_nanos(ns as i64)
                .format(DATETIME_FMT_6MS)
                .to_string()
        };
        let mut g = url.query_pairs_mut();
        g.append_pair("begDate", &fmt_ts(self.range.beg));
        g.append_pair("endDate", &fmt_ts(self.range.end));
    }
}

View File

@@ -0,0 +1,321 @@
use crate::query::datetime::Datetime;
use crate::{DiskIoTune, FileIoBufferSize, ReadSys};
use err::Error;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::time::Duration;
// serde default helper: boolean fields default to `true`.
fn bool_true() -> bool {
    true
}
// serde skip helper: omit the field when it still has the default `true`.
fn bool_is_true(x: &bool) -> bool {
    *x
}
/// Time range of an API v1 query, serialized as `startDate`/`endDate`.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Api1Range {
    // Legacy `type` discriminator; kept for wire compatibility, omitted when empty.
    #[serde(rename = "type", default, skip_serializing_if = "String::is_empty")]
    ty: String,
    #[serde(rename = "startDate")]
    beg: Datetime,
    #[serde(rename = "endDate")]
    end: Datetime,
}
impl Api1Range {
    /// Builds a range; the legacy `type` field stays empty and is therefore
    /// omitted on serialization.
    pub fn new(beg: Datetime, end: Datetime) -> Result<Self, Error> {
        Ok(Self {
            ty: String::new(),
            beg,
            end,
        })
    }
    /// Start of the range.
    pub fn beg(&self) -> &Datetime {
        &self.beg
    }
    /// End of the range.
    pub fn end(&self) -> &Datetime {
        &self.end
    }
}
// Deserialization keeps UTC ("Z") offsets and subsecond precision intact.
#[test]
fn serde_de_range_zulu() {
    let s = r#"{"startDate": "2022-11-22T10:15:12.412Z", "endDate": "2022-11-22T10:15:12.413556Z"}"#;
    let range: Api1Range = serde_json::from_str(s).unwrap();
    assert_eq!(range.beg().offset().local_minus_utc(), 0);
    assert_eq!(range.end().offset().local_minus_utc(), 0);
    assert_eq!(range.beg().timestamp_subsec_micros(), 412000);
    assert_eq!(range.end().timestamp_subsec_micros(), 413556);
}
// Deserialization preserves non-UTC fixed offsets.
// BUGFIX: this test previously used the exact same Zulu inputs and assertions
// as serde_de_range_zulu above, so it never exercised non-zero offsets.
#[test]
fn serde_de_range_offset() {
    let s = r#"{"startDate": "2022-11-22T10:15:12.412+03:00", "endDate": "2022-11-22T10:15:12.413556-01:30"}"#;
    let range: Api1Range = serde_json::from_str(s).unwrap();
    assert_eq!(range.beg().offset().local_minus_utc(), 3 * 3600);
    assert_eq!(range.end().offset().local_minus_utc(), -(3600 + 1800));
    assert_eq!(range.beg().timestamp_subsec_micros(), 412000);
    assert_eq!(range.end().timestamp_subsec_micros(), 413556);
}
// Serialization renders each bound with its own fixed offset (+03:00 / -01:00)
// and millisecond precision.
#[test]
fn serde_ser_range_offset() {
    use chrono::{FixedOffset, NaiveDate, TimeZone};
    // 2022-11-22 13:14:15.016 at UTC+3
    let beg = FixedOffset::east_opt(60 * 60 * 3)
        .unwrap()
        .from_local_datetime(
            &NaiveDate::from_ymd_opt(2022, 11, 22)
                .unwrap()
                .and_hms_milli_opt(13, 14, 15, 16)
                .unwrap(),
        )
        .earliest()
        .unwrap();
    // 2022-11-22 13:14:15.800 at UTC-1
    let end = FixedOffset::east_opt(-60 * 60 * 1)
        .unwrap()
        .from_local_datetime(
            &NaiveDate::from_ymd_opt(2022, 11, 22)
                .unwrap()
                .and_hms_milli_opt(13, 14, 15, 800)
                .unwrap(),
        )
        .earliest()
        .unwrap();
    let range = Api1Range::new(beg.into(), end.into()).unwrap();
    let js = serde_json::to_string(&range).unwrap();
    let exp = r#"{"startDate":"2022-11-22T13:14:15.016+03:00","endDate":"2022-11-22T13:14:15.800-01:00"}"#;
    assert_eq!(js, exp);
}
// Whole seconds serialize without any fractional part; milliseconds with 3 digits.
#[test]
fn serde_ser_range_01() -> Result<(), Error> {
    let beg = Datetime::try_from("2022-11-22T02:03:04Z")?;
    let end = Datetime::try_from("2022-11-22T02:03:04.123Z")?;
    let range = Api1Range::new(beg, end).unwrap();
    let js = serde_json::to_string(&range).unwrap();
    let exp = r#"{"startDate":"2022-11-22T02:03:04Z","endDate":"2022-11-22T02:03:04.123Z"}"#;
    assert_eq!(js, exp);
    Ok(())
}
// Microsecond precision is kept (6 digits), but trailing zero micros collapse
// back to millisecond precision (.777000 -> .777).
#[test]
fn serde_ser_range_02() -> Result<(), Error> {
    let beg = Datetime::try_from("2022-11-22T02:03:04.987654Z")?;
    let end = Datetime::try_from("2022-11-22T02:03:04.777000Z")?;
    let range = Api1Range::new(beg, end).unwrap();
    let js = serde_json::to_string(&range).unwrap();
    let exp = r#"{"startDate":"2022-11-22T02:03:04.987654Z","endDate":"2022-11-22T02:03:04.777Z"}"#;
    assert_eq!(js, exp);
    Ok(())
}
/// In Api1, the list of channels consists of either `BACKEND/CHANNELNAME`
/// or just `CHANNELNAME`.
#[derive(Debug, PartialEq)]
pub struct ChannelTuple {
    // None when the channel string had no `BACKEND/` prefix.
    backend: Option<String>,
    name: String,
}
impl ChannelTuple {
    /// Channel qualified with an explicit backend.
    pub fn new(backend: String, name: String) -> Self {
        Self {
            backend: Some(backend),
            name,
        }
    }
    /// Channel without a backend qualifier.
    pub fn from_name(name: String) -> Self {
        Self { backend: None, name }
    }
    /// The backend, if one was given.
    pub fn backend(&self) -> Option<&String> {
        self.backend.as_ref()
    }
    /// The channel name (never includes the backend).
    pub fn name(&self) -> &str {
        &self.name
    }
}
// Custom serde for ChannelTuple: serialized as a single string, either
// "BACKEND/NAME" or just "NAME". NOTE: on input, only the first two
// '/'-separated segments are used; anything after a second '/' is dropped.
mod serde_channel_tuple {
    use super::*;
    use serde::de::{Deserialize, Deserializer, Visitor};
    use serde::ser::{Serialize, Serializer};
    impl Serialize for ChannelTuple {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            // Join with '/' only when a backend is present.
            if let Some(backend) = self.backend.as_ref() {
                serializer.serialize_str(&format!("{}/{}", backend, self.name))
            } else {
                serializer.serialize_str(&self.name)
            }
        }
    }
    struct Vis;
    impl<'de> Visitor<'de> for Vis {
        type Value = ChannelTuple;
        fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
            write!(fmt, "[Backendname/]Channelname")
        }
        fn visit_str<E>(self, val: &str) -> Result<Self::Value, E>
        where
            E: serde::de::Error,
        {
            let mut it = val.split("/");
            // Even empty string splits into one element of empty string
            let s0 = it.next().unwrap();
            if let Some(s1) = it.next() {
                // Two (or more) segments: first is the backend, second the name.
                let ret = ChannelTuple {
                    backend: Some(s0.into()),
                    name: s1.into(),
                };
                Ok(ret)
            } else {
                // Single segment: plain channel name without backend.
                let ret = ChannelTuple {
                    backend: None,
                    name: s0.into(),
                };
                Ok(ret)
            }
        }
    }
    impl<'de> Deserialize<'de> for ChannelTuple {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            deserializer.deserialize_str(Vis)
        }
    }
    // Round-trip check: name only.
    #[test]
    fn ser_name() {
        let x = ChannelTuple {
            backend: None,
            name: "temperature".into(),
        };
        let js = serde_json::to_string(&x).unwrap();
        assert_eq!(js, r#""temperature""#);
    }
    // Round-trip check: backend-qualified name.
    #[test]
    fn ser_backend_name() {
        let x = ChannelTuple {
            backend: Some("beach".into()),
            name: "temperature".into(),
        };
        let js = serde_json::to_string(&x).unwrap();
        assert_eq!(js, r#""beach/temperature""#);
    }
}
/// Body of an API v1 event data query: range, channel list, and private
/// tuning parameters (all optional and omitted from serialization by default).
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Api1Query {
    range: Api1Range,
    channels: Vec<ChannelTuple>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    timeout: Option<Duration>,
    // All following parameters are private and not to be used
    #[serde(default, skip_serializing_if = "Option::is_none")]
    file_io_buffer_size: Option<FileIoBufferSize>,
    // Defaults to true; serialized only when disabled.
    #[serde(default = "bool_true", skip_serializing_if = "bool_is_true")]
    decompress: bool,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    events_max: Option<u64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    io_queue_len: Option<u32>,
    #[serde(default, skip_serializing_if = "String::is_empty")]
    log_level: String,
    #[serde(default, skip_serializing_if = "String::is_empty")]
    read_sys: String,
}
impl Api1Query {
    /// Creates a query with defaults for all private tuning parameters.
    pub fn new(range: Api1Range, channels: Vec<ChannelTuple>) -> Self {
        Self {
            range,
            channels,
            timeout: None,
            decompress: true,
            events_max: None,
            file_io_buffer_size: None,
            io_queue_len: None,
            log_level: String::new(),
            read_sys: String::new(),
        }
    }
    /// Builds a DiskIoTune from the optional tuning fields, falling back to
    /// the DiskIoTune defaults for anything unset.
    pub fn disk_io_tune(&self) -> DiskIoTune {
        let mut k = DiskIoTune::default();
        if let Some(x) = &self.file_io_buffer_size {
            k.read_buffer_len = x.0;
        }
        if let Some(io_queue_len) = self.io_queue_len {
            k.read_queue_len = io_queue_len as usize;
        }
        let read_sys: ReadSys = self.read_sys.as_str().into();
        k.read_sys = read_sys;
        k
    }
    pub fn range(&self) -> &Api1Range {
        &self.range
    }
    pub fn channels(&self) -> &[ChannelTuple] {
        &self.channels
    }
    pub fn timeout(&self) -> Option<Duration> {
        self.timeout
    }
    /// The requested timeout, or 30 minutes when none was given.
    pub fn timeout_or_default(&self) -> Duration {
        // BUGFIX: previously ignored `self.timeout` and always returned the
        // 30-minute default, despite what the name promises.
        self.timeout.unwrap_or(Duration::from_secs(60 * 30))
    }
    pub fn log_level(&self) -> &str {
        &self.log_level
    }
    pub fn decompress(&self) -> bool {
        self.decompress
    }
    pub fn events_max(&self) -> Option<u64> {
        self.events_max
    }
    pub fn set_decompress(&mut self, v: bool) {
        self.decompress = v;
    }
}
// A query with only defaults serializes to just `range` and `channels`;
// all optional tuning fields are omitted.
#[test]
fn serde_api1_query() -> Result<(), Error> {
    let beg = Datetime::try_from("2022-11-22T08:09:10Z")?;
    let end = Datetime::try_from("2022-11-23T08:11:05.455009+02:00")?;
    let range = Api1Range::new(beg, end).unwrap();
    let ch0 = ChannelTuple::from_name("nameonly".into());
    let ch1 = ChannelTuple::new("somebackend".into(), "somechan".into());
    let qu = Api1Query::new(range, vec![ch0, ch1]);
    let js = serde_json::to_string(&qu).unwrap();
    assert_eq!(
        js,
        r#"{"range":{"startDate":"2022-11-22T08:09:10Z","endDate":"2022-11-23T08:11:05.455009+02:00"},"channels":["nameonly","somebackend/somechan"]}"#
    );
    Ok(())
}

View File

@@ -0,0 +1,191 @@
use chrono::DateTime;
use chrono::FixedOffset;
use err::Error;
use serde::de::Visitor;
use serde::Deserialize;
use serde::Serialize;
use std::fmt;
use std::ops;
/// Wrapper around `chrono::DateTime<FixedOffset>` with custom RFC 3339 serde
/// (shortest lossless subsecond precision, `Z` for UTC).
#[derive(Clone, Debug, PartialEq)]
pub struct Datetime(DateTime<FixedOffset>);
// Plain newtype wrap; the offset is preserved as-is.
impl From<DateTime<FixedOffset>> for Datetime {
    fn from(x: DateTime<FixedOffset>) -> Self {
        Datetime(x)
    }
}
impl TryFrom<&str> for Datetime {
    type Error = Error;
    /// Parses an RFC 3339 string, e.g. "2022-11-22T10:15:12.412+03:00".
    fn try_from(val: &str) -> Result<Self, Self::Error> {
        let dt =
            DateTime::<FixedOffset>::parse_from_rfc3339(val).map_err(|e| Error::with_msg_no_trace(format!("{e}")))?;
        Ok(Datetime(dt))
    }
}
// Expose the full chrono API on the wrapper via deref.
impl ops::Deref for Datetime {
    type Target = DateTime<FixedOffset>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// RFC 3339 (subset of ISO 8601)
impl Serialize for Datetime {
    /// Formats with the shortest subsecond precision (none / ms / us / ns)
    /// that is lossless, and `Z` instead of `+00:00` for a zero offset.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use fmt::Write;
        use serde::ser::Error;
        let val = &self.0;
        let ns = val.timestamp_subsec_nanos();
        // Select the narrowest fraction that round-trips the value.
        let frac_fmt = if ns % 1_000 != 0 {
            Some(".%9f")
        } else if ns % 1_000_000 != 0 {
            Some(".%6f")
        } else if ns != 0 {
            Some(".%3f")
        } else {
            None
        };
        let mut s = String::with_capacity(64);
        write!(&mut s, "{}", val.format("%Y-%m-%dT%H:%M:%S")).map_err(|_| Error::custom("fmt"))?;
        if let Some(f) = frac_fmt {
            write!(&mut s, "{}", val.format(f)).map_err(|_| Error::custom("fmt"))?;
        }
        if val.offset().local_minus_utc() == 0 {
            s.push('Z');
        } else {
            write!(&mut s, "{}", val.format("%:z")).map_err(|_| Error::custom("fmt"))?;
        }
        serializer.collect_str(&s)
    }
}
// Alternative serializer using the shared DATETIME_FMT_* constants.
// NOTE(review): currently dead code (the fn is private and #[allow(unused)]).
// Also, the first `write!` into `s` is discarded because `s` is shadowed by
// the `let s = ...` below — the date/time prefix presumably lives in the
// DATETIME_FMT_* formats; confirm before enabling this implementation.
mod ser_impl_2 {
    use super::Datetime;
    use crate::DATETIME_FMT_0MS;
    use crate::DATETIME_FMT_3MS;
    use crate::DATETIME_FMT_6MS;
    use crate::DATETIME_FMT_9MS;
    use fmt::Write;
    use serde::ser::Error;
    use std::fmt;
    #[allow(unused)]
    fn serialize<S>(obj: &Datetime, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let val = &obj.0;
        let mut s = String::with_capacity(64);
        write!(&mut s, "{}", val.format("%Y-%m-%dT%H:%M:%S")).map_err(|_| Error::custom("fmt"))?;
        let ns = val.timestamp_subsec_nanos();
        // Pick the narrowest format that keeps all subsecond digits.
        let s = if ns % 1000 != 0 {
            val.format(DATETIME_FMT_9MS)
        } else {
            let mus = val.timestamp_subsec_micros();
            if mus % 1000 != 0 {
                val.format(DATETIME_FMT_6MS)
            } else {
                let ms = val.timestamp_subsec_millis();
                if ms != 0 {
                    val.format(DATETIME_FMT_3MS)
                } else {
                    val.format(DATETIME_FMT_0MS)
                }
            }
        };
        serializer.collect_str(&s)
    }
}
// Visitor that deserializes Datetime from an RFC 3339 string.
struct Vis1;
impl<'de> Visitor<'de> for Vis1 {
    type Value = Datetime;
    fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "Datetime")
    }
    fn visit_str<E>(self, val: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        // Delegate to TryFrom<&str>, converting the error into a serde error.
        Datetime::try_from(val).map_err(|e| serde::de::Error::custom(format!("{e}")))
    }
}
impl<'de> Deserialize<'de> for Datetime {
    // Deserializes from a string via Vis1.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserializer.deserialize_str(Vis1)
    }
}
// Whole seconds at UTC: no fraction, 'Z' suffix.
#[test]
fn ser_00() {
    use chrono::TimeZone;
    let x = FixedOffset::east_opt(0)
        .unwrap()
        .with_ymd_and_hms(2023, 2, 3, 15, 12, 40)
        .earliest()
        .unwrap();
    let x = Datetime(x);
    let s = serde_json::to_string(&x).unwrap();
    assert_eq!(s, r#""2023-02-03T15:12:40Z""#);
}
// Millisecond remainder: 3 fractional digits.
#[test]
fn ser_01() {
    use chrono::TimeZone;
    let x = FixedOffset::east_opt(0)
        .unwrap()
        .with_ymd_and_hms(2023, 2, 3, 15, 12, 40)
        .earliest()
        .unwrap()
        .checked_add_signed(chrono::Duration::milliseconds(876))
        .unwrap();
    let x = Datetime(x);
    let s = serde_json::to_string(&x).unwrap();
    assert_eq!(s, r#""2023-02-03T15:12:40.876Z""#);
}
// Microsecond remainder (nanos are a multiple of 1000): 6 fractional digits.
#[test]
fn ser_02() {
    use chrono::TimeZone;
    let x = FixedOffset::east_opt(0)
        .unwrap()
        .with_ymd_and_hms(2023, 2, 3, 15, 12, 40)
        .earliest()
        .unwrap()
        .checked_add_signed(chrono::Duration::nanoseconds(543430000))
        .unwrap();
    let x = Datetime(x);
    let s = serde_json::to_string(&x).unwrap();
    assert_eq!(s, r#""2023-02-03T15:12:40.543430Z""#);
}
// Nanosecond remainder: full 9 fractional digits.
#[test]
fn ser_03() {
    use chrono::TimeZone;
    let x = FixedOffset::east_opt(0)
        .unwrap()
        .with_ymd_and_hms(2023, 2, 3, 15, 12, 40)
        .earliest()
        .unwrap()
        .checked_add_signed(chrono::Duration::nanoseconds(543432321))
        .unwrap();
    let x = Datetime(x);
    let s = serde_json::to_string(&x).unwrap();
    assert_eq!(s, r#""2023-02-03T15:12:40.543432321Z""#);
}

View File

@@ -0,0 +1,141 @@
use super::agg_kind_from_binning_scheme;
use super::binning_scheme_append_to_url;
use super::CacheUsage;
use crate::AggKind;
use crate::AppendToUrl;
use crate::ByteSize;
use crate::FromUrl;
use crate::PreBinnedPatchCoordEnum;
use crate::ScalarType;
use crate::SfDbChannel;
use crate::Shape;
use err::Error;
use std::collections::BTreeMap;
use url::Url;
/// Query for one pre-binned patch of a channel, with optional caching and
/// disk I/O tuning parameters.
#[derive(Clone, Debug)]
pub struct PreBinnedQuery {
    patch: PreBinnedPatchCoordEnum,
    channel: SfDbChannel,
    scalar_type: ScalarType,
    shape: Shape,
    agg_kind: Option<AggKind>,
    cache_usage: Option<CacheUsage>,
    buf_len_disk_io: Option<usize>,
    disk_stats_every: Option<ByteSize>,
}
impl PreBinnedQuery {
pub fn new(
patch: PreBinnedPatchCoordEnum,
channel: SfDbChannel,
scalar_type: ScalarType,
shape: Shape,
agg_kind: Option<AggKind>,
cache_usage: Option<CacheUsage>,
buf_len_disk_io: Option<usize>,
disk_stats_every: Option<ByteSize>,
) -> Self {
Self {
patch,
channel,
scalar_type,
shape,
agg_kind,
cache_usage,
buf_len_disk_io,
disk_stats_every,
}
}
pub fn from_url(url: &Url) -> Result<Self, Error> {
let mut pairs = BTreeMap::new();
for (j, k) in url.query_pairs() {
pairs.insert(j.to_string(), k.to_string());
}
let pairs = pairs;
let scalar_type = pairs
.get("scalarType")
.ok_or_else(|| Error::with_msg("missing scalarType"))
.map(|x| ScalarType::from_url_str(&x))??;
let shape = pairs
.get("shape")
.ok_or_else(|| Error::with_msg("missing shape"))
.map(|x| Shape::from_url_str(&x))??;
let ret = Self {
patch: PreBinnedPatchCoordEnum::from_pairs(&pairs)?,
channel: SfDbChannel::from_pairs(&pairs)?,
scalar_type,
shape,
agg_kind: agg_kind_from_binning_scheme(&pairs)?,
cache_usage: CacheUsage::from_pairs(&pairs)?,
buf_len_disk_io: pairs
.get("bufLenDiskIo")
.map_or(Ok(None), |k| k.parse().map(|k| Some(k)))?,
disk_stats_every: pairs
.get("diskStatsEveryKb")
.map(|k| k.parse().ok())
.unwrap_or(None)
.map(ByteSize::from_kb),
};
Ok(ret)
}
pub fn patch(&self) -> &PreBinnedPatchCoordEnum {
&self.patch
}
pub fn channel(&self) -> &SfDbChannel {
&self.channel
}
pub fn scalar_type(&self) -> &ScalarType {
&self.scalar_type
}
pub fn shape(&self) -> &Shape {
&self.shape
}
pub fn agg_kind(&self) -> &Option<AggKind> {
&self.agg_kind
}
pub fn disk_stats_every(&self) -> ByteSize {
match &self.disk_stats_every {
Some(x) => x.clone(),
None => ByteSize(1024 * 1024 * 4),
}
}
pub fn cache_usage(&self) -> CacheUsage {
self.cache_usage.as_ref().map_or(CacheUsage::Use, |x| x.clone())
}
pub fn buf_len_disk_io(&self) -> usize {
self.buf_len_disk_io.unwrap_or(1024 * 8)
}
}
impl AppendToUrl for PreBinnedQuery {
    /// Appends all components; optional fields are emitted only when present.
    fn append_to_url(&self, url: &mut Url) {
        self.patch.append_to_url(url);
        self.channel.append_to_url(url);
        self.shape.append_to_url(url);
        self.scalar_type.append_to_url(url);
        if let Some(agg) = &self.agg_kind {
            binning_scheme_append_to_url(agg, url);
        }
        // TODO add also impl AppendToUrl for these if applicable:
        let mut g = url.query_pairs_mut();
        if let Some(x) = &self.cache_usage {
            g.append_pair("cacheUsage", &x.query_param_value());
        }
        if let Some(x) = self.buf_len_disk_io {
            g.append_pair("bufLenDiskIo", &x.to_string());
        }
        if let Some(x) = &self.disk_stats_every {
            g.append_pair("diskStatsEveryKb", &(x.bytes() / 1024).to_string());
        }
    }
}

View File

@@ -0,0 +1,2 @@
pub mod binrange;
pub mod evrange;

View File

@@ -0,0 +1,86 @@
use super::evrange::NanoRange;
use super::evrange::SeriesRange;
use crate::timeunits::SEC;
use crate::BinnedRangeEnum;
use crate::Dim0Kind;
use crate::TsNano;
use chrono::DateTime;
use chrono::Utc;
// 10-minute range, at most 9 bins requested -> covered by 10 one-minute bins.
#[test]
fn test_binned_range_covering_00() {
    let range = SeriesRange::TimeRange(NanoRange::from_date_time(
        DateTime::parse_from_rfc3339("1970-01-01T10:10:00Z").unwrap().into(),
        DateTime::parse_from_rfc3339("1970-01-01T10:20:00Z").unwrap().into(),
    ));
    let r = BinnedRangeEnum::covering_range(range, 9).unwrap();
    assert_eq!(r.bin_count(), 10);
    if let Dim0Kind::Time = r.dim0kind() {
    } else {
        panic!()
    }
    let r2 = r.binned_range_time();
    let a = r2.edges();
    // Edges are bin_count + 1 fence posts, one minute apart.
    assert_eq!(a.len(), 1 + r.bin_count() as usize);
    assert_eq!(a[0], TsNano((((10 * 60) + 10) * 60 + 0) * SEC));
    assert_eq!(a[1], TsNano((((10 * 60) + 11) * 60 + 0) * SEC));
    assert_eq!(a[10], TsNano((((10 * 60) + 20) * 60 + 0) * SEC));
    // Bin 2 spans [10:12, 10:13).
    let x = r.range_at(2).unwrap();
    let y = SeriesRange::TimeRange(NanoRange {
        beg: (((10 * 60) + 12) * 60 + 0) * SEC,
        end: (((10 * 60) + 13) * 60 + 0) * SEC,
    });
    assert_eq!(x, y);
}
// 66-second range, at most 9 bins -> covered by 14 five-second bins aligned
// to 00:20:00 (the range edges are not bin-aligned).
#[test]
fn test_binned_range_covering_01() {
    let range = SeriesRange::TimeRange(NanoRange::from_date_time(
        DateTime::parse_from_rfc3339("1970-01-01T00:20:04Z").unwrap().into(),
        DateTime::parse_from_rfc3339("1970-01-01T00:21:10Z").unwrap().into(),
    ));
    let r = BinnedRangeEnum::covering_range(range, 9).unwrap();
    assert_eq!(r.bin_count(), 14);
    if let Dim0Kind::Time = r.dim0kind() {
    } else {
        panic!()
    }
    let r2 = r.binned_range_time();
    let a = r2.edges();
    assert_eq!(a.len(), 1 + r.bin_count() as usize);
    assert_eq!(a[0], TsNano((((0 * 60) + 20) * 60 + 0) * SEC));
    assert_eq!(a[1], TsNano((((0 * 60) + 20) * 60 + 5) * SEC));
    assert_eq!(a[14], TsNano((((0 * 60) + 21) * 60 + 10) * SEC));
    // First bin spans [00:20:00, 00:20:05).
    let x = r.range_at(0).unwrap();
    let y = SeriesRange::TimeRange(NanoRange {
        beg: (((0 * 60) + 20) * 60 + 0) * SEC,
        end: (((0 * 60) + 20) * 60 + 5) * SEC,
    });
    assert_eq!(x, y);
}
// 126-second range, at most 25 bins -> covered by 26 five-second bins; shares
// the same first edges as the previous test since the grid is identical.
#[test]
fn test_binned_range_covering_02() {
    let range = SeriesRange::TimeRange(NanoRange::from_date_time(
        DateTime::parse_from_rfc3339("1970-01-01T00:20:04Z").unwrap().into(),
        DateTime::parse_from_rfc3339("1970-01-01T00:22:10Z").unwrap().into(),
    ));
    let r = BinnedRangeEnum::covering_range(range, 25).unwrap();
    assert_eq!(r.bin_count(), 26);
    if let Dim0Kind::Time = r.dim0kind() {
    } else {
        panic!()
    }
    let r2 = r.binned_range_time();
    let a = r2.edges();
    assert_eq!(a.len(), 1 + r.bin_count() as usize);
    assert_eq!(a[0], TsNano((((0 * 60) + 20) * 60 + 0) * SEC));
    assert_eq!(a[1], TsNano((((0 * 60) + 20) * 60 + 5) * SEC));
    // Edge 14 is 70 s past the first edge: 00:21:10.
    assert_eq!(a[14], TsNano((((0 * 60) + 21) * 60 + 10) * SEC));
    let x = r.range_at(0).unwrap();
    let y = SeriesRange::TimeRange(NanoRange {
        beg: (((0 * 60) + 20) * 60 + 0) * SEC,
        end: (((0 * 60) + 20) * 60 + 5) * SEC,
    });
    assert_eq!(x, y);
}

View File

@@ -0,0 +1,151 @@
use crate::timeunits::SEC;
use crate::Dim0Kind;
use crate::TsNano;
use chrono::DateTime;
use chrono::TimeZone;
use chrono::Utc;
use err::Error;
use serde::Deserialize;
use serde::Serialize;
use std::fmt;
/// A request range in one of the supported axis encodings.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum TimeRange {
    // Wall-clock bounds.
    Time { beg: DateTime<Utc>, end: DateTime<Utc> },
    // Pulse-id bounds.
    Pulse { beg: u64, end: u64 },
    // Nanoseconds since the UNIX epoch.
    Nano { beg: u64, end: u64 },
}
/// Half-open time range in nanoseconds since the UNIX epoch.
#[derive(Clone, Serialize, Deserialize, PartialEq)]
pub struct NanoRange {
    pub beg: u64,
    pub end: u64,
}
impl fmt::Debug for NanoRange {
    /// Debug-prints the bounds wrapped in `TsNano`.
    ///
    /// An alternative rendering via chrono timestamps used to sit behind a
    /// constant `if true`, making it unreachable dead code; it has been removed.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("NanoRange")
            .field("beg", &TsNano(self.beg))
            .field("end", &TsNano(self.end))
            .finish()
    }
}
impl NanoRange {
    /// Converts both bounds to nanoseconds since the UNIX epoch.
    pub fn from_date_time(beg: DateTime<Utc>, end: DateTime<Utc>) -> Self {
        let beg = beg.timestamp_nanos() as u64;
        let end = end.timestamp_nanos() as u64;
        Self { beg, end }
    }
    /// Width of the range in nanoseconds.
    /// NOTE: underflows (panics in debug builds) if end < beg.
    pub fn delta(&self) -> u64 {
        self.end - self.beg
    }
    pub fn beg(&self) -> u64 {
        self.beg
    }
    pub fn end(&self) -> u64 {
        self.end
    }
}
impl TryFrom<&SeriesRange> for NanoRange {
type Error = Error;
fn try_from(val: &SeriesRange) -> Result<NanoRange, Self::Error> {
match val {
SeriesRange::TimeRange(x) => Ok(x.clone()),
SeriesRange::PulseRange(_) => Err(Error::with_msg_no_trace("not a Time range")),
}
}
}
/// Half-open range of pulse ids.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct PulseRange {
    pub beg: u64,
    pub end: u64,
}
/// A range along either the time axis or the pulse-id axis.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum SeriesRange {
    TimeRange(NanoRange),
    PulseRange(PulseRange),
}
impl SeriesRange {
    /// The axis kind this range is expressed in.
    pub fn dim0kind(&self) -> Dim0Kind {
        match self {
            SeriesRange::TimeRange(_) => Dim0Kind::Time,
            SeriesRange::PulseRange(_) => Dim0Kind::Pulse,
        }
    }
    pub fn is_time(&self) -> bool {
        matches!(self, SeriesRange::TimeRange(_))
    }
    pub fn is_pulse(&self) -> bool {
        matches!(self, SeriesRange::PulseRange(_))
    }
    /// Lower bound, regardless of axis kind.
    pub fn beg_u64(&self) -> u64 {
        match self {
            SeriesRange::TimeRange(x) => x.beg,
            SeriesRange::PulseRange(x) => x.beg,
        }
    }
    /// Upper bound, regardless of axis kind.
    pub fn end_u64(&self) -> u64 {
        match self {
            SeriesRange::TimeRange(x) => x.end,
            SeriesRange::PulseRange(x) => x.end,
        }
    }
    /// Width of the range in the axis' native unit.
    pub fn delta_u64(&self) -> u64 {
        self.end_u64() - self.beg_u64()
    }
}
// A time range trivially lifts into the series-range enum.
impl From<NanoRange> for SeriesRange {
    fn from(k: NanoRange) -> Self {
        Self::TimeRange(k)
    }
}
// A pulse range trivially lifts into the series-range enum.
impl From<PulseRange> for SeriesRange {
    fn from(k: PulseRange) -> Self {
        Self::PulseRange(k)
    }
}

View File

@@ -0,0 +1,4 @@
use serde::{Deserialize, Serialize};
// Placeholder for system statistics; no fields yet.
#[derive(Debug, Serialize, Deserialize)]
pub struct SystemStats {}

View File

@@ -0,0 +1,71 @@
use err::Error;
use futures_util::{Stream, StreamExt};
use std::pin::Pin;
use std::task::{Context, Poll};
// Stream wrapper enforcing a termination contract: after the inner stream
// yields an Err or ends, emit exactly one None; polling again panics.
// NOTE(review): the acronym "SCC" is not expanded anywhere visible — confirm
// its meaning (presumably "stream completion check" or similar).
pub struct SCC<S>
where
    S: Stream,
{
    inp: S,
    // Set once the inner stream yielded an Err; the next poll returns None.
    errored: bool,
    // Set once this stream has terminated; polling after that panics.
    completed: bool,
}
impl<S> SCC<S>
where
    S: Stream,
{
    /// Wraps `inp` in its initial (not errored, not completed) state.
    pub fn new(inp: S) -> Self {
        Self {
            inp,
            errored: false,
            completed: false,
        }
    }
}
impl<S, I> Stream for SCC<S>
where
    S: Stream<Item = Result<I, Error>> + Unpin,
{
    type Item = <S as Stream>::Item;
    /// Forwards items from the inner stream, terminating after the first Err
    /// and panicking if polled again after termination.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
        if self.completed {
            panic!("SCC poll_next on completed");
        }
        if self.errored {
            // The error was already delivered; end the stream now.
            self.completed = true;
            return Poll::Ready(None);
        }
        match self.inp.poll_next_unpin(cx) {
            Poll::Ready(Some(Ok(item))) => Poll::Ready(Some(Ok(item))),
            Poll::Ready(Some(Err(e))) => {
                // Deliver the error once, then terminate on the next poll.
                self.errored = true;
                Poll::Ready(Some(Err(e)))
            }
            Poll::Ready(None) => {
                self.completed = true;
                Poll::Ready(None)
            }
            Poll::Pending => Poll::Pending,
        }
    }
}
/// Extension trait to wrap any stream in SCC via method syntax.
pub trait IntoSCC<S>
where
    S: Stream,
{
    fn into_scc(self) -> SCC<S>;
}
// Blanket impl: every stream gets `.into_scc()`.
impl<S> IntoSCC<S> for S
where
    S: Stream,
{
    fn into_scc(self) -> SCC<S> {
        SCC::new(self)
    }
}