Moved err crate

Dominik Werder
2024-11-07 18:26:02 +01:00
parent 8fd7e72796
commit 2f89c969cd
124 changed files with 191 additions and 738 deletions
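
Every hunk below follows the same two-step pattern: the manifest dependency err = { path = "../err" } becomes daqbuf-err = { path = "../../../daqbuf-err" }, and the Rust sources either import daqbuf_err::Error directly or alias the crate back to its old name so existing err:: paths keep compiling. A minimal sketch of that alias pattern, assuming the moved crate exposes the same Error API; the parse_port helper is hypothetical and not part of this commit:

use daqbuf_err as err;
use err::Error;

// Hypothetical example: std errors still convert through the crate's from_string helper.
fn parse_port(s: &str) -> Result<u16, Error> {
    s.parse().map_err(Error::from_string)
}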


@@ -17,7 +17,7 @@ chrono = "0.4"
async-channel = "1.9.0"
parking_lot = "0.12"
crc32fast = "1.2"
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }
taskrun = { path = "../taskrun" }
netpod = { path = "../netpod" }
items_0 = { path = "../items_0" }


@@ -1,6 +1,7 @@
pub mod ringbuf;
use async_channel::Sender;
use daqbuf_err as err;
use err::ErrStr;
use err::Error;
use futures_util::StreamExt;


@@ -1,6 +1,7 @@
use crate::read;
use crate::seek;
use crate::StatsChannel;
use daqbuf_err as err;
use err::Error;
use netpod::log::*;
use std::borrow::BorrowMut;


@@ -7,6 +7,6 @@ edition = "2021"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }
taskrun = { path = "../taskrun" }
redis = { version = "0.26.1", features = [] }


@@ -14,7 +14,7 @@ serde_yaml = "0.9.27"
chrono = "0.4.31"
url = "2.5.0"
clap = { version = "4.5.7", features = ["derive", "cargo"] }
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }
taskrun = { path = "../taskrun" }
netpod = { path = "../netpod" }
disk = { path = "../disk" }


@@ -2,10 +2,10 @@ use chrono::DateTime;
use chrono::Duration;
use chrono::Utc;
use clap::Parser;
use daqbuf_err::Error;
use daqbuffer::cli::ClientType;
use daqbuffer::cli::Opts;
use daqbuffer::cli::SubCmd;
use err::Error;
use netpod::log::*;
use netpod::query::CacheUsage;
use netpod::NodeConfig;


@@ -1,14 +1,14 @@
pub trait ErrConv<T> {
fn ec(self) -> Result<T, ::err::Error>;
fn ec(self) -> Result<T, daqbuf_err::Error>;
}
pub trait Convable: ToString {}
impl<T, E: Convable> ErrConv<T> for Result<T, E> {
fn ec(self) -> Result<T, err::Error> {
fn ec(self) -> Result<T, daqbuf_err::Error> {
match self {
Ok(x) => Ok(x),
Err(e) => Err(::err::Error::from_string(e.to_string())),
Err(e) => Err(daqbuf_err::Error::from_string(e.to_string())),
}
}
}
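
A hedged usage sketch for the ErrConv shim in the hunk above: an external error type is opted in through the Convable marker, after which any Result carrying it can be mapped into daqbuf_err::Error with .ec(). UpstreamError and fetch are hypothetical names introduced only for illustration:

struct UpstreamError(String);

impl std::fmt::Display for UpstreamError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "upstream: {}", self.0)
    }
}

// Display provides ToString, which satisfies the Convable bound.
impl Convable for UpstreamError {}

fn fetch() -> Result<u32, daqbuf_err::Error> {
    let r: Result<u32, UpstreamError> = Err(UpstreamError("timeout".into()));
    // ec() stringifies the source error via daqbuf_err::Error::from_string.
    r.ec()
}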


@@ -1,5 +1,5 @@
use err::thiserror;
use err::ThisError;
use daqbuf_err::thiserror;
use daqbuf_err::ThisError;
use futures_util::future;
use futures_util::StreamExt;
use http::header;


@@ -21,7 +21,7 @@ serde_json = "1.0"
chrono = "0.4"
url = "2.2.2"
lazy_static = "1.4.0"
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }
taskrun = { path = "../taskrun" }
netpod = { path = "../netpod" }
query = { path = "../query" }


@@ -1,8 +1,8 @@
use crate::err::ErrConv;
use chrono::DateTime;
use chrono::Utc;
use daqbuf_err::Error;
use disk::streamlog::Streamlog;
use err::Error;
use futures_util::TryStreamExt;
use http::StatusCode;
use http::Uri;
@@ -118,7 +118,7 @@ pub async fn get_binned(
StreamItem::DataItem(_frame) => {
// TODO
// The expected type nowadays depends on the channel and agg-kind.
err::todo();
daqbuf_err::todo();
Some(Ok(()))
}
},


@@ -4,7 +4,7 @@ pub mod nodes;
#[cfg(test)]
pub mod test;
use ::err::Error;
use daqbuf_err::Error;
use futures_util::TryFutureExt;
use netpod::Cluster;
use netpod::NodeConfig;


@@ -1,14 +1,14 @@
pub trait ErrConv<T> {
fn ec(self) -> Result<T, ::err::Error>;
fn ec(self) -> Result<T, daqbuf_err::Error>;
}
pub trait Convable: ToString {}
impl<T, E: Convable> ErrConv<T> for Result<T, E> {
fn ec(self) -> Result<T, ::err::Error> {
fn ec(self) -> Result<T, daqbuf_err::Error> {
match self {
Ok(x) => Ok(x),
Err(e) => Err(::err::Error::from_string(e.to_string())),
Err(e) => Err(daqbuf_err::Error::from_string(e.to_string())),
}
}
}


@@ -1,5 +1,5 @@
use crate::spawn_test_hosts;
use err::Error;
use daqbuf_err::Error;
use netpod::log::*;
use netpod::Cluster;
use std::sync::{Arc, Mutex};


@@ -1,7 +1,7 @@
mod data_api_python;
use crate::nodes::require_test_hosts_running;
use err::Error;
use daqbuf_err::Error;
use futures_util::Future;
use httpclient::http_post;
use netpod::log::*;


@@ -1,6 +1,6 @@
use crate::nodes::require_test_hosts_running;
use chrono::Utc;
use err::Error;
use daqbuf_err::Error;
use netpod::log::*;
use netpod::range::evrange::NanoRange;
use netpod::timeunits::MS;


@@ -1,6 +1,6 @@
use crate::nodes::require_test_hosts_running;
use chrono::Utc;
use err::Error;
use daqbuf_err::Error;
use items_0::test::f32_iter_cmp_near;
use items_0::test::f64_iter_cmp_near;
use items_0::WithLen;


@@ -1,5 +1,5 @@
use chrono::Utc;
use err::Error;
use daqbuf_err::Error;
use netpod::log::*;
use netpod::AppendToUrl;
use netpod::Cluster;


@@ -1,7 +1,7 @@
use crate::nodes::require_test_hosts_running;
use crate::test::api4::common::fetch_events_json;
use chrono::Utc;
use err::Error;
use daqbuf_err::Error;
use items_0::WithLen;
use items_2::eventsdim0::EventsDim0CollectorOutput;
use netpod::log::*;


@@ -1,6 +1,6 @@
use crate::nodes::require_test_hosts_running;
use crate::test::api4::common::fetch_events_json;
use err::Error;
use daqbuf_err::Error;
use items_0::test::f32_iter_cmp_near;
use items_0::WithLen;
use items_2::eventsdim0::EventsDim0CollectorOutput;


@@ -1,6 +1,6 @@
#![allow(unused)]
use crate::nodes::require_archapp_test_host_running;
use err::Error;
use daqbuf_err::Error;
use netpod::f64_close;
use netpod::log::*;


@@ -1,6 +1,6 @@
mod channelarchiver;
use err::Error;
use daqbuf_err::Error;
#[test]
fn get_sls_archive_1() -> Result<(), Error> {


@@ -1,6 +1,6 @@
use chrono::DateTime;
use chrono::Utc;
use err::Error;
use daqbuf_err::Error;
use netpod::log::*;
use netpod::query::CacheUsage;
use netpod::range::evrange::NanoRange;


@@ -10,7 +10,7 @@ path = "src/dbconn.rs"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tokio-postgres = { version = "0.7.10", features = ["with-chrono-0_4", "with-serde_json-1"] }
tokio-postgres = { version = "0.7.12", features = ["with-chrono-0_4", "with-serde_json-1"] }
crc32fast = "1.3.2"
byteorder = "1.4"
futures-util = "0.3.30"
@@ -20,7 +20,7 @@ pin-project = "1"
async-channel = "1.9.0"
chrono = "0.4.38"
regex = "1.10.4"
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }
netpod = { path = "../netpod" }
parse = { path = "../parse" }
taskrun = { path = "../taskrun" }


@@ -1,5 +1,6 @@
use chrono::DateTime;
use chrono::Utc;
use daqbuf_err as err;
use err::thiserror;
use err::ThisError;
use netpod::log::*;


@@ -1,3 +1,4 @@
use daqbuf_err as err;
use err::thiserror;
use err::ThisError;
use netpod::ScalarType;


@@ -12,6 +12,7 @@ pub mod pg {
pub use tokio_postgres::Statement;
}
use daqbuf_err as err;
use err::anyhow;
use err::thiserror;
use err::Error;


@@ -6,6 +6,7 @@ use async_channel::bounded;
use async_channel::Receiver;
use chrono::DateTime;
use chrono::Utc;
use daqbuf_err as err;
use err::Error;
use futures_util::FutureExt;
use futures_util::Stream;


@@ -1,6 +1,7 @@
use crate::create_connection;
use crate::worker::PgQueue;
use crate::ErrConv;
use daqbuf_err as err;
use err::Error;
use netpod::log::*;
use netpod::ChannelArchiver;


@@ -2,6 +2,7 @@ use crate::create_connection;
use async_channel::Receiver;
use async_channel::RecvError;
use async_channel::Sender;
use daqbuf_err as err;
use err::thiserror;
use err::ThisError;
use netpod::log::*;


@@ -30,7 +30,7 @@ num-traits = "0.2.14"
num-derive = "0.4.0"
url = "2.5.0"
tiny-keccak = { version = "2.0", features = ["sha3"] }
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }
taskrun = { path = "../taskrun" }
netpod = { path = "../netpod" }
query = { path = "../query" }


@@ -2,6 +2,7 @@ use crate::eventchunker::EventChunkerConf;
use crate::eventchunkermultifile::EventChunkerMultifile;
use crate::AggQuerySingleChannel;
use crate::SfDbChConf;
use daqbuf_err as err;
use err::Error;
use netpod::range::evrange::NanoRange;
use netpod::test_data_base_path_databuffer;


@@ -1,3 +1,4 @@
use daqbuf_err as err;
use err::Error;
use futures_util::Stream;
use futures_util::StreamExt;


@@ -1,4 +1,5 @@
use crate::SfDbChConf;
use daqbuf_err as err;
use err::*;
#[allow(unused)]
use netpod::log::*;


@@ -1,5 +1,6 @@
use super::paths;
use bytes::BytesMut;
use daqbuf_err as err;
use err::ErrStr;
use err::Error;
use futures_util::StreamExt;


@@ -1,3 +1,4 @@
use daqbuf_err as err;
use err::Error;
use futures_util::Stream;
use futures_util::StreamExt;


@@ -22,6 +22,7 @@ pub use parse;
use async_channel::Receiver;
use async_channel::Sender;
use bytes::BytesMut;
use daqbuf_err as err;
use err::Error;
use futures_util::future::FusedFuture;
use futures_util::FutureExt;


@@ -1,5 +1,6 @@
use bytes::Buf;
use bytes::BytesMut;
use daqbuf_err as err;
use err::thiserror;
use err::Error;
use err::ThisError;


@@ -3,6 +3,7 @@ use crate::dataopen::open_files;
use crate::dataopen::OpenedFileSet;
use crate::eventchunker::EventChunker;
use crate::eventchunker::EventChunkerConf;
use daqbuf_err as err;
use err::Error;
use futures_util::Stream;
use futures_util::StreamExt;


@@ -3,6 +3,7 @@ use crate::SfDbChConf;
use bitshuffle::bitshuffle_compress;
use bytes::BufMut;
use bytes::BytesMut;
use daqbuf_err as err;
use err::Error;
use netpod::log::*;
use netpod::timeunits::*;


@@ -1,4 +1,5 @@
use arrayref::array_ref;
use daqbuf_err as err;
use err::Error;
use netpod::log::*;
use netpod::range::evrange::NanoRange;


@@ -1,4 +1,5 @@
use crate::SfDbChConf;
use daqbuf_err as err;
use err::Error;
use futures_util::StreamExt;
use netpod::timeunits::MS;


@@ -2,6 +2,7 @@ use crate::eventchunker::EventChunkerConf;
use crate::eventchunkermultifile::EventChunkerMultifile;
use crate::raw::generated::EventBlobsGeneratorI32Test00;
use crate::raw::generated::EventBlobsGeneratorI32Test01;
use daqbuf_err as err;
use err::Error;
use futures_util::stream;
use futures_util::Stream;


@@ -1,4 +1,5 @@
use bytes::BytesMut;
use daqbuf_err as err;
use err::Error;
use netpod::log::*;
use std::os::unix::prelude::RawFd;


@@ -1,4 +1,5 @@
use bytes::BytesMut;
use daqbuf_err as err;
use err::Error;
use netpod::log::*;
use std::os::unix::prelude::RawFd;


@@ -8,12 +8,12 @@ edition = "2021"
path = "src/dq.rs"
[dependencies]
tokio = { version = "1.21.1", features = ["rt-multi-thread", "io-util", "net", "time", "sync", "fs"] }
tokio = { version = "1.41.1", features = ["rt-multi-thread", "io-util", "net", "time", "sync", "fs"] }
futures-util = "0.3.14"
clap = { version = "4.0", features = ["derive", "cargo"] }
chrono = "0.4.19"
bytes = "1.0.1"
err = { path = "../err" }
bytes = "1.7"
daqbuf-err = { path = "../../../daqbuf-err" }
taskrun = { path = "../taskrun" }
netpod = { path = "../netpod" }
parse = { path = "../parse" }


@@ -1,4 +1,5 @@
use clap::{ArgAction, Parser};
use daqbuf_err as err;
use err::Error;
use netpod::timeunits::*;
use std::path::PathBuf;


@@ -1,5 +1,6 @@
use clap::ArgAction;
use clap::Parser;
use daqbuf_err as err;
use disk::eventchunker::EventChunker;
use disk::eventchunker::EventChunkerConf;
use err::Error;


@@ -1,28 +0,0 @@
[package]
name = "err"
version = "0.0.5"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"
[lib]
doctest = false
[dependencies]
backtrace = "0.3.68"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_cbor = "0.11.2"
rmp-serde = "1.1.1"
async-channel = "1.9.0"
async_channel_2 = { package = "async-channel", version = "2.0.0" }
chrono = { version = "0.4.26", features = ["serde"] }
url = "2.4.0"
regex = "1.9.1"
http = "1.0.0"
#hyper = "1.0.1"
thiserror = "=0.0.1"
anyhow = "1.0"
#tokio = "1"
[patch.crates-io]
thiserror = { git = "https://github.com/dominikwerder/thiserror.git", branch = "cstm" }


@@ -1,610 +0,0 @@
//! Error handling and reporting.
#[macro_export]
macro_rules! err_dbg_dis {
($tt:ty, $nn:expr) => {
impl ::core::fmt::Display for $tt {
fn fmt(&self, fmt: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
write!(fmt, "{}::{:?}", $nn, self)
}
}
};
}
pub use anyhow;
pub use thiserror;
pub use thiserror::Error as ThisError;
// pub use thiserror::UserErrorClass;
// pub use thiserror::UserErrorContent;
pub mod bt {
pub use backtrace::Backtrace;
}
use serde::Deserialize;
use serde::Serialize;
use std::array::TryFromSliceError;
use std::convert::Infallible;
use std::fmt;
use std::net::AddrParseError;
use std::num::ParseFloatError;
use std::num::ParseIntError;
use std::string::FromUtf8Error;
use std::sync::PoisonError;
pub type Res2<T> = anyhow::Result<T>;
#[derive(Debug, ThisError)]
pub enum ErrA {
#[error("bad-A")]
Bad,
}
#[derive(Debug, ThisError)]
pub enum ErrB {
#[error("worse-B")]
Worse,
#[error("FromArrA")]
ErrA(#[from] ErrA),
}
fn f_a() -> Result<u32, ErrA> {
Err(ErrA::Bad)
}
fn f_b() -> Result<u32, ErrB> {
if true {
let res = f_a()?;
Ok(res)
} else {
Err(ErrB::Worse)
}
}
#[allow(unused)]
fn f_c() -> Result<u32, anyhow::Error> {
return Ok(f_b()?);
}
#[test]
fn test_fc() {
assert!(f_c().is_err());
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Reason {
InternalError,
BadRequest,
IoError,
}
/// The common error type for this application.
#[derive(Clone, PartialEq, Serialize, Deserialize)]
pub struct Error {
msg: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
trace_str: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
public_msg: Option<Vec<String>>,
#[serde(default, skip_serializing_if = "Option::is_none")]
reason: Option<Reason>,
#[serde(default, skip_serializing_if = "Option::is_none")]
parent: Option<Box<Error>>,
}
impl Error {
pub fn with_msg_no_trace<S: Into<String>>(s: S) -> Self {
Self {
msg: s.into(),
trace_str: None,
public_msg: None,
reason: None,
parent: None,
}
}
pub fn with_msg<S: Into<String>>(s: S) -> Self {
Self::with_msg_no_trace(s).add_backtrace()
}
pub fn with_public_msg_no_trace<S: Into<String>>(s: S) -> Self {
let s = s.into();
let ret = Self::with_msg_no_trace(&s);
let ret = ret.add_public_msg(s);
ret
}
pub fn with_public_msg<S: Into<String>>(s: S) -> Self {
let s = s.into();
let ret = Self::with_msg_no_trace(String::new());
let ret = ret.add_backtrace();
let ret = ret.add_public_msg(s);
ret
}
pub fn from_string<E>(e: E) -> Self
where
E: ToString,
{
Self::with_msg_no_trace(e.to_string())
}
pub fn add_backtrace(mut self) -> Self {
self.msg.extend(" (add_backtrace DISABLED)".chars());
// ret.trace_str = Some(fmt_backtrace(&backtrace::Backtrace::new()));
self
}
pub fn mark_bad_request(mut self) -> Self {
self.reason = Some(Reason::BadRequest);
self
}
pub fn mark_io_error(mut self) -> Self {
self.reason = Some(Reason::IoError);
self
}
pub fn add_public_msg(mut self, msg: impl Into<String>) -> Self {
if self.public_msg.is_none() {
self.public_msg = Some(Vec::new());
}
self.public_msg.as_mut().unwrap().push(msg.into());
self
}
pub fn msg(&self) -> &str {
&self.msg
}
pub fn public_msg(&self) -> Option<&Vec<String>> {
self.public_msg.as_ref()
}
pub fn reason(&self) -> Option<Reason> {
self.reason.clone()
}
}
#[allow(unused)]
fn fmt_backtrace(trace: &backtrace::Backtrace) -> String {
if true {
return String::from("fmt_backtrace DISABLED");
}
use std::io::Write;
let mut buf = Vec::new();
let mut c1 = 0;
'outer: for fr in trace.frames() {
for sy in fr.symbols() {
let is_ours = match sy.filename() {
None => false,
Some(s) => {
let s = s.to_str().unwrap();
s.contains("/dev/daqbuffer/") || s.contains("/build/daqbuffer/")
}
};
let name = match sy.name() {
Some(k) => k.to_string(),
_ => "[err]".into(),
};
let filename = match sy.filename() {
Some(k) => match k.to_str() {
Some(k) => k,
_ => "[err]",
},
_ => "[err]",
};
let lineno = match sy.lineno() {
Some(k) => k,
_ => 0,
};
if is_ours {
write!(&mut buf, "\n {name}\n {filename} {lineno}").unwrap();
c1 += 1;
if c1 >= 10 {
break 'outer;
}
}
}
}
String::from_utf8(buf).unwrap()
}
impl fmt::Debug for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let trace_str = if let Some(s) = &self.trace_str {
s.into()
} else {
String::new()
};
write!(fmt, "msg: {}", self.msg)?;
if let Some(msgs) = self.public_msg() {
for (i, msg) in msgs.iter().enumerate() {
write!(fmt, "; pub({i}): {msg}")?;
}
}
if !trace_str.is_empty() {
write!(fmt, "\nTrace:\n{}", trace_str)?;
}
Ok(())
}
}
impl fmt::Display for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self, fmt)
}
}
impl std::error::Error for Error {}
pub trait ErrConv<T> {
fn err_conv(self) -> Result<T, Error>;
}
impl<T, E> ErrConv<T> for Result<T, E>
where
E: Into<Error>,
{
fn err_conv(self) -> Result<T, Error> {
match self {
Ok(k) => Ok(k),
Err(e) => Err(e.into()),
}
}
}
pub trait ErrStr<T> {
fn errstr(self) -> Result<T, Error>;
}
impl<T, E> ErrStr<T> for Result<T, E>
where
E: ToString,
{
fn errstr(self) -> Result<T, Error> {
match self {
Ok(k) => Ok(k),
Err(e) => Err(Error::with_msg_no_trace(e.to_string())),
}
}
}
pub trait ToErr {
fn to_err(self) -> Error;
}
impl<T: ToErr> From<T> for Error {
fn from(k: T) -> Self {
k.to_err()
}
}
impl From<PublicError> for Error {
fn from(k: PublicError) -> Self {
Self {
msg: String::new(),
trace_str: None,
public_msg: Some(k.msg.clone()),
reason: k.reason(),
parent: None,
}
}
}
impl ToErr for Infallible {
fn to_err(self) -> Error {
Error::with_msg_no_trace(String::new())
}
}
impl From<String> for Error {
fn from(k: String) -> Self {
Self::from_string(k)
}
}
impl From<&str> for Error {
fn from(k: &str) -> Self {
Self::from_string(k)
}
}
impl From<std::io::Error> for Error {
fn from(k: std::io::Error) -> Self {
Self::from_string(k)
}
}
impl From<AddrParseError> for Error {
fn from(k: AddrParseError) -> Self {
Self::from_string(k)
}
}
impl From<serde_json::Error> for Error {
fn from(k: serde_json::Error) -> Self {
Self::from_string(k)
}
}
impl<T> From<async_channel::SendError<T>> for Error {
fn from(k: async_channel::SendError<T>) -> Self {
Self::from_string(k)
}
}
impl From<async_channel::RecvError> for Error {
fn from(k: async_channel::RecvError) -> Self {
Self::from_string(k)
}
}
impl<T> From<async_channel_2::SendError<T>> for Error {
fn from(k: async_channel_2::SendError<T>) -> Self {
Self::from_string(k)
}
}
impl From<async_channel_2::RecvError> for Error {
fn from(k: async_channel_2::RecvError) -> Self {
Self::from_string(k)
}
}
impl From<chrono::format::ParseError> for Error {
fn from(k: chrono::format::ParseError) -> Self {
Self::from_string(k)
}
}
impl From<ParseIntError> for Error {
fn from(k: ParseIntError) -> Self {
Self::from_string(k)
}
}
impl From<ParseFloatError> for Error {
fn from(k: ParseFloatError) -> Self {
Self::from_string(k)
}
}
impl From<FromUtf8Error> for Error {
fn from(k: FromUtf8Error) -> Self {
Self::from_string(k)
}
}
impl From<std::str::Utf8Error> for Error {
fn from(k: std::str::Utf8Error) -> Self {
Self::from_string(k)
}
}
impl From<serde_cbor::Error> for Error {
fn from(k: serde_cbor::Error) -> Self {
Self::from_string(k)
}
}
impl From<std::fmt::Error> for Error {
fn from(k: std::fmt::Error) -> Self {
Self::from_string(k)
}
}
impl From<regex::Error> for Error {
fn from(k: regex::Error) -> Self {
Self::from_string(k)
}
}
impl<T> From<PoisonError<T>> for Error {
fn from(_: PoisonError<T>) -> Self {
Self::from_string("PoisonError")
}
}
impl From<url::ParseError> for Error {
fn from(k: url::ParseError) -> Self {
Self::from_string(format!("{:?}", k))
}
}
impl From<TryFromSliceError> for Error {
fn from(k: TryFromSliceError) -> Self {
Self::from_string(format!("{:?}", k))
}
}
impl From<rmp_serde::encode::Error> for Error {
fn from(k: rmp_serde::encode::Error) -> Self {
Self::from_string(format!("{:?}", k))
}
}
impl From<rmp_serde::decode::Error> for Error {
fn from(k: rmp_serde::decode::Error) -> Self {
Self::from_string(format!("{:?}", k))
}
}
impl From<anyhow::Error> for Error {
fn from(k: anyhow::Error) -> Self {
Self::from_string(format!("{k}"))
}
}
impl From<http::Error> for Error {
fn from(k: http::Error) -> Self {
Self::from_string(k)
}
}
impl From<http::uri::InvalidUri> for Error {
fn from(k: http::uri::InvalidUri) -> Self {
Self::from_string(k)
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct PublicError {
reason: Option<Reason>,
msg: Vec<String>,
}
impl PublicError {
pub fn reason(&self) -> Option<Reason> {
self.reason.clone()
}
pub fn msg(&self) -> &Vec<String> {
&self.msg
}
}
impl From<String> for PublicError {
fn from(value: String) -> Self {
Self {
reason: None,
msg: vec![value],
}
}
}
impl From<Error> for PublicError {
fn from(k: Error) -> Self {
Self {
reason: k.reason(),
msg: k.public_msg().map(Clone::clone).unwrap_or(Vec::new()),
}
}
}
impl From<&Error> for PublicError {
fn from(k: &Error) -> Self {
Self {
reason: k.reason(),
msg: vec![k.msg().into()],
}
}
}
impl fmt::Display for PublicError {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{:?}", self.msg)
}
}
impl ToPublicError for Error {
fn to_public_error(&self) -> PublicError {
PublicError::from(self)
}
}
pub fn todo() {
let bt = backtrace::Backtrace::new();
eprintln!("TODO\n{bt:?}");
todo!("TODO\n{bt:?}");
}
pub fn todoval<T>() -> T {
let bt = backtrace::Backtrace::new();
eprintln!("TODO\n{bt:?}");
todo!("TODO todoval\n{bt:?}")
}
pub trait ToPublicError: std::error::Error + Send {
fn to_public_error(&self) -> PublicError;
}
#[cfg(test)]
mod test {
use super::*;
#[derive(Debug, ThisError, Serialize, Deserialize)]
#[cstm(name = "SomeErrorEnumA")]
enum SomeErrorEnumA {
BadCase,
WithStringContent(String),
// #[error("bad: {0}")]
WithStringContentFmt(String),
}
#[derive(Debug, ThisError, Serialize, Deserialize)]
#[cstm(name = "SomeErrorEnumB0")]
enum SomeErrorEnumB0 {
FromA(#[from] SomeErrorEnumA),
}
#[derive(Debug, ThisError, Serialize, Deserialize)]
#[cstm(name = "SomeErrorEnumB1")]
enum SomeErrorEnumB1 {
FromA(#[from] SomeErrorEnumA),
#[error("caffe")]
Caffe(SomeErrorEnumA),
}
fn failing_a_00() -> Result<(), SomeErrorEnumA> {
Err(SomeErrorEnumA::BadCase)
}
fn failing_b0_00() -> Result<(), SomeErrorEnumB0> {
let ret = failing_a_00()?;
Ok(ret)
}
fn failing_b1_00() -> Result<(), SomeErrorEnumB1> {
let ret = failing_a_00()?;
Ok(ret)
}
#[test]
fn error_handle_a_00() {
assert_eq!(format!("{}", SomeErrorEnumA::BadCase), "SomeErrorEnumA::BadCase");
}
#[test]
fn error_handle_a_01() {
assert_eq!(
SomeErrorEnumA::WithStringContent(format!("inner")).to_string(),
"SomeErrorEnumA::WithStringContent"
);
}
#[test]
fn error_handle_a_02() {
assert_eq!(
SomeErrorEnumA::WithStringContentFmt(format!("inner failure \"quoted\"")).to_string(),
"bad: inner failure \"quoted\""
);
}
#[test]
fn error_handle_b0_00() {
let e = failing_b0_00().unwrap_err();
let s = e.to_string();
assert_eq!(s, "SomeErrorEnumB0::FromA(SomeErrorEnumA::BadCase)");
}
#[test]
fn error_handle_b0_user_00() {
use thiserror::UserErrorClass;
use thiserror::UserErrorInfo;
let e = failing_b0_00().unwrap_err();
let s = e.class();
if let UserErrorClass::Unspecified = s {
()
} else {
panic!()
}
}
#[test]
fn error_handle_b1_00() {
let e = failing_b1_00().unwrap_err();
let s = e.to_string();
assert_eq!(s, "SomeErrorEnumB1::FromA(SomeErrorEnumA::BadCase)");
}
}
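
The removed file above (moved, per this commit, into the external daqbuf-err crate) separates internal detail from client-facing text: msg() carries the full message, while the by-value conversion into PublicError keeps only reason() and the strings added through the public-msg helpers. A minimal sketch under that assumption; lookup_channel and report are hypothetical:

fn lookup_channel(name: &str) -> Result<u64, Error> {
    if name.is_empty() {
        // This text is meant for clients and the error is flagged with Reason::BadRequest.
        Err(Error::with_public_msg_no_trace("missing channel name").mark_bad_request())
    } else {
        // Internal-only detail: stored in msg(), not copied into PublicError by the by-value From.
        Err(Error::with_msg_no_trace(format!("backend lookup failed for {name}")))
    }
}

fn report(e: Error) {
    // From<Error> keeps reason() plus public_msg(); From<&Error> would expose msg() instead.
    let pe = PublicError::from(e);
    eprintln!("reason {:?} public {:?}", pe.reason(), pe.msg());
}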


@@ -19,7 +19,7 @@ hyper = { version = "1.0.1", features = ["http1", "http2", "client", "server"] }
hyper-util = { version = "0.1.1", features = ["full"] }
bytes = "1.5.0"
async-channel = "1.9.0"
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }
netpod = { path = "../netpod" }
parse = { path = "../parse" }
streams = { path = "../streams" }


@@ -5,6 +5,7 @@ pub use hyper_util;
use bytes::BufMut;
use bytes::Bytes;
use bytes::BytesMut;
use daqbuf_err as err;
use futures_util::Stream;
use futures_util::StreamExt;
use http::header;


@@ -28,7 +28,7 @@ rand = "0.8.5"
ciborium = "0.2.1"
flate2 = "1"
brotli = "3.4.0"
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }
netpod = { path = "../netpod" }
query = { path = "../query" }
dbconn = { path = "../dbconn" }


@@ -11,6 +11,7 @@ use crate::ServiceSharedResources;
use bytes::BufMut;
use bytes::Bytes;
use bytes::BytesMut;
use daqbuf_err as err;
use disk::merge::mergedblobsfromremotes::MergedBlobsFromRemotes;
use futures_util::Stream;
use futures_util::StreamExt;


@@ -3,6 +3,7 @@ use crate::err::Error;
use crate::requests::accepts_json_or_all;
use crate::ReqCtx;
use crate::ServiceSharedResources;
use daqbuf_err as err;
use dbconn::worker::PgQueue;
use err::ToPublicError;
use http::Method;


@@ -5,6 +5,7 @@ use crate::requests::accepts_json_framed;
use crate::requests::accepts_json_or_all;
use crate::requests::accepts_octets;
use crate::ServiceSharedResources;
use daqbuf_err as err;
use dbconn::worker::PgQueue;
use err::thiserror;
use err::ThisError;
@@ -55,7 +56,7 @@ pub enum Error {
EventsCbor(#[from] streams::plaineventscbor::Error),
EventsJson(#[from] streams::plaineventsjson::Error),
ServerError,
BinnedStream(::err::Error),
BinnedStream(err::Error),
TimebinnedJson(#[from] streams::timebinnedjson::Error),
}


@@ -2,6 +2,7 @@ use crate::bodystream::response;
use async_channel::Receiver;
use async_channel::Sender;
use bytes::Bytes;
use daqbuf_err as err;
use err::thiserror;
use err::PublicError;
use err::ThisError;


@@ -1,6 +1,7 @@
use crate::response;
use crate::ReqCtx;
use crate::ServiceSharedResources;
use daqbuf_err as err;
use err::thiserror;
use err::PublicError;
use err::ThisError;


@@ -6,6 +6,7 @@ use crate::response;
use crate::ServiceSharedResources;
use bytes::Bytes;
use bytes::BytesMut;
use daqbuf_err as err;
use dbconn::worker::PgQueue;
use err::thiserror;
use err::ThisError;


@@ -1,4 +1,5 @@
use crate::err::Error;
use daqbuf_err as err;
use err::ToPublicError;
use http::Response;
use http::StatusCode;
@@ -26,7 +27,7 @@ impl ToPublicResponse for Error {
}
}
impl ToPublicResponse for ::err::Error {
impl ToPublicResponse for daqbuf_err::Error {
fn to_public_response(&self) -> StreamResponse {
use err::Reason;
let e = self.to_public_error();


@@ -48,7 +48,7 @@ pub enum Error {
Http(crate::Error),
HttpCrate(http::Error),
// TODO create dedicated error type for query parsing
BadQuery(err::Error),
BadQuery(daqbuf_err::Error),
MissingBackend,
MissingScalarType,
MissingShape,
@@ -56,12 +56,12 @@ pub enum Error {
MissingEdge,
MissingTimerange,
Uri(netpod::UriError),
ChannelConfigQuery(err::Error),
ChannelConfigQuery(daqbuf_err::Error),
ExpectScyllaBackend,
Pg(dbconn::pg::Error),
Scylla(String),
Join,
OtherErr(err::Error),
OtherErr(daqbuf_err::Error),
PgWorker(dbconn::worker::Error),
Async(netpod::AsyncChannelError),
ChannelConfig(dbconn::channelconfig::Error),
@@ -102,7 +102,7 @@ impl fmt::Display for Error {
}
}
fn other_err_error(e: err::Error) -> Error {
fn other_err_error(e: daqbuf_err::Error) -> Error {
Error::OtherErr(e)
}
@@ -446,7 +446,7 @@ pub struct ChannelsWithTypeQuery {
}
impl FromUrl for ChannelsWithTypeQuery {
type Error = err::Error;
type Error = daqbuf_err::Error;
fn from_url(url: &Url) -> Result<Self, Self::Error> {
let pairs = get_url_query_pairs(url);
@@ -456,12 +456,12 @@ impl FromUrl for ChannelsWithTypeQuery {
fn from_pairs(pairs: &BTreeMap<String, String>) -> Result<Self, Self::Error> {
let s = pairs
.get("scalar_type")
.ok_or_else(|| err::Error::with_public_msg_no_trace("missing scalar_type"))?;
.ok_or_else(|| daqbuf_err::Error::with_public_msg_no_trace("missing scalar_type"))?;
//let scalar_type = ScalarType::from_bsread_str(s)?;
let scalar_type: ScalarType = serde_json::from_str(&format!("\"{s}\""))?;
let s = pairs
.get("shape")
.ok_or_else(|| err::Error::with_public_msg_no_trace("missing shape"))?;
.ok_or_else(|| daqbuf_err::Error::with_public_msg_no_trace("missing shape"))?;
let shape = Shape::from_dims_str(s)?;
Ok(Self { scalar_type, shape })
}
@@ -484,29 +484,29 @@ fn bool_false(x: &bool) -> bool {
}
impl FromUrl for ScyllaChannelEventSeriesIdQuery {
type Error = err::Error;
type Error = daqbuf_err::Error;
fn from_url(url: &Url) -> Result<Self, err::Error> {
fn from_url(url: &Url) -> Result<Self, daqbuf_err::Error> {
let pairs = get_url_query_pairs(url);
Self::from_pairs(&pairs)
}
fn from_pairs(pairs: &BTreeMap<String, String>) -> Result<Self, err::Error> {
fn from_pairs(pairs: &BTreeMap<String, String>) -> Result<Self, daqbuf_err::Error> {
let backend = pairs
.get("backend")
.ok_or_else(|| err::Error::with_public_msg_no_trace("missing backend"))?
.ok_or_else(|| daqbuf_err::Error::with_public_msg_no_trace("missing backend"))?
.into();
let name = pairs
.get("channelName")
.ok_or_else(|| err::Error::with_public_msg_no_trace("missing channelName"))?
.ok_or_else(|| daqbuf_err::Error::with_public_msg_no_trace("missing channelName"))?
.into();
let s = pairs
.get("scalarType")
.ok_or_else(|| err::Error::with_public_msg_no_trace("missing scalarType"))?;
.ok_or_else(|| daqbuf_err::Error::with_public_msg_no_trace("missing scalarType"))?;
let scalar_type: ScalarType = serde_json::from_str(&format!("\"{s}\""))?;
let s = pairs
.get("shape")
.ok_or_else(|| err::Error::with_public_msg_no_trace("missing shape"))?;
.ok_or_else(|| daqbuf_err::Error::with_public_msg_no_trace("missing shape"))?;
let shape = Shape::from_dims_str(s)?;
let do_create = pairs.get("doCreate").map_or("false", |x| x.as_str()) == "true";
Ok(Self {
@@ -535,25 +535,25 @@ pub struct ScyllaChannelsActiveQuery {
}
impl FromUrl for ScyllaChannelsActiveQuery {
type Error = err::Error;
type Error = daqbuf_err::Error;
fn from_url(url: &Url) -> Result<Self, err::Error> {
fn from_url(url: &Url) -> Result<Self, daqbuf_err::Error> {
let pairs = get_url_query_pairs(url);
Self::from_pairs(&pairs)
}
fn from_pairs(pairs: &BTreeMap<String, String>) -> Result<Self, err::Error> {
fn from_pairs(pairs: &BTreeMap<String, String>) -> Result<Self, daqbuf_err::Error> {
let s = pairs
.get("tsedge")
.ok_or_else(|| err::Error::with_public_msg_no_trace("missing tsedge"))?;
.ok_or_else(|| daqbuf_err::Error::with_public_msg_no_trace("missing tsedge"))?;
let tsedge: u64 = s.parse()?;
let s = pairs
.get("shapeKind")
.ok_or_else(|| err::Error::with_public_msg_no_trace("missing shapeKind"))?;
.ok_or_else(|| daqbuf_err::Error::with_public_msg_no_trace("missing shapeKind"))?;
let shape_kind: u32 = s.parse()?;
let s = pairs
.get("scalarType")
.ok_or_else(|| err::Error::with_public_msg_no_trace("missing scalarType"))?;
.ok_or_else(|| daqbuf_err::Error::with_public_msg_no_trace("missing scalarType"))?;
let scalar_type: ScalarType = serde_json::from_str(&format!("\"{s}\""))?;
info!("parsed scalar type inp: {s:?} val: {scalar_type:?}");
Ok(Self {
@@ -643,7 +643,7 @@ pub struct IocForChannelQuery {
}
impl FromUrl for IocForChannelQuery {
type Error = err::Error;
type Error = daqbuf_err::Error;
fn from_url(url: &Url) -> Result<Self, Self::Error> {
let pairs = get_url_query_pairs(url);
@@ -653,11 +653,11 @@ impl FromUrl for IocForChannelQuery {
fn from_pairs(pairs: &BTreeMap<String, String>) -> Result<Self, Self::Error> {
let backend = pairs
.get("backend")
.ok_or_else(|| err::Error::with_public_msg_no_trace("missing backend"))?
.ok_or_else(|| daqbuf_err::Error::with_public_msg_no_trace("missing backend"))?
.into();
let name = pairs
.get("channelName")
.ok_or_else(|| err::Error::with_public_msg_no_trace("missing channelName"))?
.ok_or_else(|| daqbuf_err::Error::with_public_msg_no_trace("missing channelName"))?
.into();
Ok(Self { backend, name })
}


@@ -1,3 +1,4 @@
use daqbuf_err as err;
use err::ToPublicError;
use serde::Deserialize;
use serde::Serialize;
@@ -28,7 +29,7 @@ impl Error {
self.0.msg()
}
pub fn reason(&self) -> Option<::err::Reason> {
pub fn reason(&self) -> Option<err::Reason> {
self.0.reason()
}
@@ -84,7 +85,7 @@ where
T: ToString,
{
fn from(x: T) -> Self {
Self(::err::Error::from_string(x))
Self(err::Error::from_string(x))
}
}


@@ -16,8 +16,9 @@ pub mod settings;
use crate::bodystream::response;
use crate::err::Error;
use ::err::thiserror;
use ::err::ThisError;
use daqbuf_err;
use daqbuf_err::thiserror;
use daqbuf_err::ThisError;
use dbconn::worker::PgQueue;
use dbconn::worker::PgWorker;
use futures_util::Future;
@@ -63,7 +64,7 @@ use tracing::Instrument;
#[derive(Debug, ThisError, Serialize, Deserialize)]
#[cstm(name = "Retrieval")]
pub enum RetrievalError {
Error(#[from] ::err::Error),
Error(#[from] daqbuf_err::Error),
Error2(#[from] crate::err::Error),
TextError(String),
#[serde(skip)]
@@ -99,9 +100,9 @@ where
}
}
impl ::err::ToErr for RetrievalError {
fn to_err(self) -> ::err::Error {
::err::Error::with_msg_no_trace(self.to_string())
impl daqbuf_err::ToErr for RetrievalError {
fn to_err(self) -> daqbuf_err::Error {
daqbuf_err::Error::with_msg_no_trace(self.to_string())
}
}


@@ -233,14 +233,14 @@ impl StatusNodesRecursive {
for (tag, sr) in all {
match sr {
Ok(sr) => {
let s: Result<NodeStatus, _> = serde_json::from_value(sr.val).map_err(err::Error::from);
let s: Result<NodeStatus, _> = serde_json::from_value(sr.val).map_err(daqbuf_err::Error::from);
let sub = NodeStatusSub { url: tag.0, status: s };
subs.push_back(sub);
}
Err(e) => {
let sub = NodeStatusSub {
url: tag.0,
status: Err(err::Error::from(e)),
status: Err(daqbuf_err::Error::from(e)),
};
subs.push_back(sub);
}


@@ -889,24 +889,24 @@ struct LocalMap {
}
pub trait ErrConv<T> {
fn err_conv(self) -> Result<T, err::Error>;
fn err_conv(self) -> Result<T, daqbuf_err::Error>;
}
impl<T> ErrConv<T> for Result<T, scylla::transport::errors::NewSessionError> {
fn err_conv(self) -> Result<T, err::Error> {
self.map_err(|e| err::Error::with_msg_no_trace(format!("{e:?}")))
fn err_conv(self) -> Result<T, daqbuf_err::Error> {
self.map_err(|e| daqbuf_err::Error::with_msg_no_trace(format!("{e:?}")))
}
}
impl<T> ErrConv<T> for Result<T, scylla::transport::errors::QueryError> {
fn err_conv(self) -> Result<T, err::Error> {
self.map_err(|e| err::Error::with_msg_no_trace(format!("{e:?}")))
fn err_conv(self) -> Result<T, daqbuf_err::Error> {
self.map_err(|e| daqbuf_err::Error::with_msg_no_trace(format!("{e:?}")))
}
}
impl<T> ErrConv<T> for Result<T, scylla::transport::query_result::RowsExpectedError> {
fn err_conv(self) -> Result<T, err::Error> {
self.map_err(|e| err::Error::with_msg_no_trace(format!("{e:?}")))
fn err_conv(self) -> Result<T, daqbuf_err::Error> {
self.map_err(|e| daqbuf_err::Error::with_msg_no_trace(format!("{e:?}")))
}
}


@@ -17,4 +17,4 @@ bytes = "1.2.1"
futures-util = "0.3.24"
chrono = { version = "0.4.19", features = ["serde"] }
netpod = { path = "../netpod" }
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }


@@ -5,6 +5,7 @@ use crate::AsAnyRef;
use crate::Events;
use crate::TypeName;
use crate::WithLen;
use daqbuf_err as err;
use err::Error;
use netpod::log::*;
use netpod::range::evrange::SeriesRange;


@@ -19,6 +19,7 @@ pub use futures_util;
use collect_s::CollectableDyn;
use container::ByteEstimate;
use daqbuf_err as err;
use std::any::Any;
use std::collections::VecDeque;
use std::fmt;


@@ -1,5 +1,6 @@
use crate::container::ByteEstimate;
use crate::subfr::SubFrId;
use daqbuf_err as err;
use netpod::EnumVariant;
use netpod::StringFix;
use serde::Serialize;


@@ -1,3 +1,4 @@
use daqbuf_err as err;
use netpod::log::Level;
use netpod::DiskStats;
use netpod::EventDataReadStats;


@@ -4,6 +4,7 @@ use crate::streamitem::RangeCompletableItem;
use crate::streamitem::Sitemty;
use crate::streamitem::StreamItem;
use crate::Events;
use daqbuf_err as err;
use err::Error;
use futures_util::stream;
use futures_util::Future;


@@ -23,7 +23,7 @@ crc32fast = "1.3.2"
futures-util = "0.3.24"
humantime-serde = "1.1.1"
thiserror = "0.0.1"
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }
items_0 = { path = "../items_0" }
items_proc = { path = "../items_proc" }
netpod = { path = "../netpod" }


@@ -5,6 +5,7 @@ use super::___;
use crate::ts_offs_from_abs;
use crate::ts_offs_from_abs_with_anchor;
use core::fmt;
use daqbuf_err as err;
use err::thiserror;
use err::ThisError;
use items_0::collect_s::CollectableDyn;


@@ -3,6 +3,7 @@ use super::aggregator::AggregatorNumeric;
use super::aggregator::AggregatorTimeWeight;
use super::timeweight::timeweight_events_dyn::BinnedEventsTimeweightDynbox;
use core::fmt;
use daqbuf_err as err;
use err::thiserror;
use err::ThisError;
use items_0::timebin::BinningggContainerEventsDyn;


@@ -1,6 +1,7 @@
use crate::binning::container_bins::ContainerBins;
use crate::binning::container_events::ContainerEvents;
use crate::binning::timeweight::timeweight_events::BinnedEventsTimeweight;
use daqbuf_err as err;
use err::thiserror;
use err::ThisError;
use netpod::log::*;


@@ -5,6 +5,7 @@ use crate::binning::container_events::ContainerEvents;
use crate::binning::container_events::ContainerEventsTakeUpTo;
use crate::binning::container_events::EventSingle;
use core::fmt;
use daqbuf_err as err;
use err::thiserror;
use err::ThisError;
use netpod::log::*;


@@ -2,6 +2,7 @@ use super::timeweight_events::BinnedEventsTimeweight;
use crate::binning::container_events::ContainerEvents;
use crate::binning::container_events::EventValueType;
use crate::channelevents::ChannelEvents;
use daqbuf_err as err;
use err::thiserror;
use err::ThisError;
use futures_util::Stream;
@@ -183,11 +184,11 @@ impl BinnedEventsTimeweightStream {
}
}
Ok(None) => Continue(()),
Err(e) => Break(Ready(Some(Err(::err::Error::from_string(e))))),
Err(e) => Break(Ready(Some(Err(err::Error::from_string(e))))),
}
// Continue(())
}
Err(e) => Break(Ready(Some(Err(::err::Error::from_string(e))))),
Err(e) => Break(Ready(Some(Err(err::Error::from_string(e))))),
},
ChannelEvents::Status(_) => {
// TODO use the status
@@ -218,13 +219,13 @@ impl BinnedEventsTimeweightStream {
if self.range_complete {
self.binned_events
.input_done_range_final()
.map_err(::err::Error::from_string)?;
.map_err(err::Error::from_string)?;
} else {
self.binned_events
.input_done_range_open()
.map_err(::err::Error::from_string)?;
.map_err(err::Error::from_string)?;
}
match self.binned_events.output().map_err(::err::Error::from_string)? {
match self.binned_events.output().map_err(err::Error::from_string)? {
Some(x) => {
trace_emit!("seeing ready bins {:?}", x);
Ready(Some(Ok(DataItem(Data(x)))))


@@ -1,6 +1,7 @@
use crate::ts_offs_from_abs;
use crate::ts_offs_from_abs_with_anchor;
use crate::IsoDateTime;
use daqbuf_err as err;
use err::Error;
use items_0::collect_s::CollectableDyn;
use items_0::collect_s::CollectableType;


@@ -1,6 +1,7 @@
use crate::ts_offs_from_abs;
use crate::ts_offs_from_abs_with_anchor;
use crate::IsoDateTime;
use daqbuf_err as err;
use err::Error;
use items_0::collect_s::CollectableDyn;
use items_0::collect_s::CollectableType;


@@ -1,6 +1,7 @@
use crate::framable::FrameType;
use crate::merger::Mergeable;
use crate::Events;
use daqbuf_err as err;
use items_0::collect_s::CollectableDyn;
use items_0::collect_s::CollectedDyn;
use items_0::collect_s::CollectorDyn;


@@ -1,6 +1,7 @@
use crate::eventsdim0::EventsDim0;
use crate::eventsdim1::EventsDim1;
use crate::Error;
use daqbuf_err as err;
use items_0::Empty;
use items_0::Events;
use netpod::log::*;


@@ -1,6 +1,7 @@
use crate::framable::FrameType;
use crate::merger::Mergeable;
use bytes::BytesMut;
use daqbuf_err as err;
use err::thiserror;
use err::ThisError;
use items_0::container::ByteEstimate;


@@ -1,4 +1,5 @@
use crate::IsoDateTime;
use daqbuf_err as err;
use err::Error;
use items_0::collect_s::CollectableDyn;
use items_0::collect_s::CollectedDyn;


@@ -1,3 +1,4 @@
use daqbuf_err as err;
use err::Error;
use items_0::collect_s::CollectableDyn;
use items_0::collect_s::CollectedDyn;


@@ -1,6 +1,7 @@
use crate::binsdim0::BinsDim0;
use crate::eventsxbindim0::EventsXbinDim0;
use crate::IsoDateTime;
use daqbuf_err as err;
use err::Error;
use items_0::collect_s::CollectableDyn;
use items_0::collect_s::CollectableType;


@@ -1,5 +1,6 @@
use crate::binsxbindim0::BinsXbinDim0;
use crate::IsoDateTime;
use daqbuf_err as err;
use err::Error;
use items_0::collect_s::CollectableDyn;
use items_0::collect_s::CollectableType;


@@ -4,6 +4,7 @@ use crate::frame::make_log_frame;
use crate::frame::make_range_complete_frame;
use crate::frame::make_stats_frame;
use bytes::BytesMut;
use daqbuf_err as err;
use items_0::framable::FrameTypeInnerDyn;
use items_0::framable::FrameTypeInnerStatic;
use items_0::streamitem::LogItem;


@@ -13,6 +13,7 @@ use bincode::config::WithOtherTrailing;
use bincode::DefaultOptions;
use bytes::BufMut;
use bytes::BytesMut;
use daqbuf_err as err;
use items_0::bincode;
use items_0::streamitem::LogItem;
use items_0::streamitem::StatsItem;


@@ -21,6 +21,7 @@ pub mod testgen;
pub mod transform;
use channelevents::ChannelEvents;
use daqbuf_err as err;
use futures_util::Stream;
use items_0::isodate::IsoDateTime;
use items_0::streamitem::Sitemty;


@@ -22,7 +22,7 @@ num-traits = "0.2.16"
hex = "0.4.3"
rand = "0.8.5"
thiserror = "0.0.1"
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }
[patch.crates-io]
thiserror = { git = "https://github.com/dominikwerder/thiserror.git", branch = "cstm" }


@@ -1,3 +1,5 @@
use daqbuf_err as err;
#[derive(Debug, Clone)]
pub enum ChannelStatusClosedReason {
ShutdownCommand,


@@ -60,6 +60,8 @@ pub mod log_ {
pub use tracing::{self, event, span, Level};
}
use daqbuf_err as err;
use bytes::Bytes;
use chrono::DateTime;
use chrono::TimeZone;
@@ -4103,7 +4105,7 @@ pub struct StatusBoardEntry {
// #[serde(skip_serializing_if = "is_false")]
done: bool,
// #[serde(skip_serializing_if = "Vec::is_empty")]
errors: Vec<::err::Error>,
errors: Vec<err::Error>,
// TODO make this a better Stats container and remove pub access.
// #[serde(default, skip_serializing_if = "CmpZero::is_zero")]
error_count: usize,
@@ -4170,7 +4172,7 @@ pub struct StatusBoardEntryUser {
// #[serde(default, skip_serializing_if = "CmpZero::is_zero")]
channel_not_found: usize,
#[serde(skip_serializing_if = "Vec::is_empty")]
errors: Vec<::err::PublicError>,
errors: Vec<err::PublicError>,
}
impl StatusBoardEntryUser {
@@ -4257,7 +4259,7 @@ impl StatusBoard {
}
}
pub fn add_error(&mut self, status_id: &str, err: ::err::Error) {
pub fn add_error(&mut self, status_id: &str, err: err::Error) {
match self.entries.get_mut(status_id) {
Some(e) => {
e.ts_updated = SystemTime::now();


@@ -2,6 +2,8 @@ pub mod api1;
pub mod datetime;
pub mod prebinned;
use daqbuf_err as err;
use crate::get_url_query_pairs;
use crate::log::*;
use crate::AggKind;


@@ -1,7 +1,11 @@
use crate::query::datetime::Datetime;
use crate::{DiskIoTune, FileIoBufferSize, ReadSys};
use crate::DiskIoTune;
use crate::FileIoBufferSize;
use crate::ReadSys;
use daqbuf_err as err;
use err::Error;
use serde::{Deserialize, Serialize};
use serde::Deserialize;
use serde::Serialize;
use std::fmt;
use std::time::Duration;


@@ -1,5 +1,6 @@
use chrono::DateTime;
use chrono::FixedOffset;
use daqbuf_err as err;
use err::Error;
use serde::de::Visitor;
use serde::Deserialize;


@@ -9,6 +9,7 @@ use crate::TsNano;
use chrono::DateTime;
use chrono::TimeZone;
use chrono::Utc;
use daqbuf_err as err;
use err::Error;
use serde::Deserialize;
use serde::Serialize;


@@ -1,3 +1,4 @@
use daqbuf_err as err;
use err::Error;
use futures_util::{Stream, StreamExt};
use std::pin::Pin;


@@ -1,4 +1,5 @@
use core::fmt;
use daqbuf_err as err;
use err::thiserror;
use err::ThisError;
use serde::Deserialize;


@@ -17,7 +17,7 @@ byteorder = "1.4.3"
futures-util = "0.3.14"
tracing = "0.1.25"
hex = "0.4.3"
err = { path = "../err" }
daqbuf-err = { path = "../../../daqbuf-err" }
netpod = { path = "../netpod" }
query = { path = "../query" }
disk = { path = "../disk" }


@@ -1,3 +1,4 @@
use daqbuf_err as err;
use dbconn::worker::PgQueue;
use err::thiserror;
use err::ThisError;


@@ -1,3 +1,4 @@
use daqbuf_err as err;
use err::Error;
use futures_util::Future;
use futures_util::TryFutureExt;

Some files were not shown because too many files have changed in this diff.