Refactor AsAny handling

Dominik Werder
2022-12-12 15:53:53 +01:00
parent 87dde4712e
commit e81337c22f
29 changed files with 524 additions and 269 deletions
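
The core of the refactor: the ad-hoc `fn as_any(&self) -> &dyn Any` methods on traits such as `TimeBinnable`, `TimeBinnableDyn`, `Collectable` and `ToJsonResult` are replaced by two supertraits in `items_0`, `AsAnyRef` and `AsAnyMut`, together with blanket impls for `Box<T>` so that a boxed trait object downcasts to the inner concrete value. Below is a minimal, self-contained sketch of that pattern; the `Item` and `ScalarItem` names are illustrative only, not types from this repository.

    use std::any::Any;
    use std::fmt;

    // The two helper traits added to items_0 (sketch; the real crate wires them
    // in as supertraits of TimeBinnable, Collectable, ToJsonResult, ...).
    pub trait AsAnyRef {
        fn as_any_ref(&self) -> &dyn Any;
    }

    pub trait AsAnyMut {
        fn as_any_mut(&mut self) -> &mut dyn Any;
    }

    // Blanket impls for Box<T>, as in the items_0 hunks below: they forward to the
    // inner value, so a Box<dyn Trait> downcasts to the concrete payload rather
    // than to the Box itself.
    impl<T: AsAnyRef + ?Sized> AsAnyRef for Box<T> {
        fn as_any_ref(&self) -> &dyn Any {
            self.as_ref().as_any_ref()
        }
    }

    impl<T: AsAnyMut + ?Sized> AsAnyMut for Box<T> {
        fn as_any_mut(&mut self) -> &mut dyn Any {
            self.as_mut().as_any_mut()
        }
    }

    // Illustrative consumer trait and concrete type (hypothetical).
    trait Item: fmt::Debug + AsAnyRef + AsAnyMut {}

    #[derive(Debug)]
    struct ScalarItem(f32);

    impl AsAnyRef for ScalarItem {
        fn as_any_ref(&self) -> &dyn Any {
            self
        }
    }

    impl AsAnyMut for ScalarItem {
        fn as_any_mut(&mut self) -> &mut dyn Any {
            self
        }
    }

    impl Item for ScalarItem {}

    fn main() {
        let mut boxed: Box<dyn Item> = Box::new(ScalarItem(1.5));
        // Shared downcast through the trait object, no per-trait `fn as_any` needed.
        let got = boxed.as_any_ref().downcast_ref::<ScalarItem>().unwrap();
        println!("read back {:?}", got);
        // Mutable downcast goes through the AsAnyMut blanket impl the same way.
        boxed.as_any_mut().downcast_mut::<ScalarItem>().unwrap().0 = 2.5;
        println!("after update {:?}", boxed);
    }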

View File

@@ -382,7 +382,7 @@ dependencies = [
[[package]]
name = "commonio"
version = "0.0.1-a.dev.4"
version = "0.0.2"
dependencies = [
"async-channel",
"bytes",
@@ -637,7 +637,7 @@ dependencies = [
[[package]]
name = "daqbufp2"
version = "0.0.1-a.dev.12"
version = "0.0.2"
dependencies = [
"bytes",
"chrono",
@@ -681,7 +681,7 @@ dependencies = [
[[package]]
name = "dbconn"
version = "0.0.1-a.0"
version = "0.0.2"
dependencies = [
"arrayref",
"async-channel",
@@ -728,7 +728,7 @@ dependencies = [
[[package]]
name = "disk"
version = "0.0.1-a.1"
version = "0.0.2"
dependencies = [
"arrayref",
"async-channel",
@@ -1229,7 +1229,7 @@ checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
[[package]]
name = "httpret"
version = "0.0.1-a.0"
version = "0.0.2"
dependencies = [
"async-channel",
"bytes",
@@ -1396,9 +1396,8 @@ dependencies = [
[[package]]
name = "items"
version = "0.0.1-a.dev.4"
version = "0.0.2"
dependencies = [
"bincode",
"bson",
"bytes",
"chrono",
@@ -1420,8 +1419,9 @@ dependencies = [
[[package]]
name = "items_0"
version = "0.0.1"
version = "0.0.2"
dependencies = [
"bincode",
"erased-serde",
"err",
"netpod",
@@ -1431,7 +1431,7 @@ dependencies = [
[[package]]
name = "items_2"
version = "0.0.1"
version = "0.0.2"
dependencies = [
"bytes",
"chrono",
@@ -1674,7 +1674,7 @@ dependencies = [
[[package]]
name = "nodenet"
version = "0.0.1-a.1"
version = "0.0.2"
dependencies = [
"arrayref",
"async-channel",
@@ -1881,7 +1881,7 @@ checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
dependencies = [
"instant",
"lock_api",
"parking_lot_core 0.8.5",
"parking_lot_core 0.8.6",
]
[[package]]
@@ -1896,9 +1896,9 @@ dependencies = [
[[package]]
name = "parking_lot_core"
version = "0.8.5"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
dependencies = [
"cfg-if",
"instant",
@@ -1923,7 +1923,7 @@ dependencies = [
[[package]]
name = "parse"
version = "0.0.1-a.0"
version = "0.0.2"
dependencies = [
"byteorder",
"bytes",
@@ -1941,9 +1941,9 @@ dependencies = [
[[package]]
name = "paste"
version = "1.0.9"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1"
checksum = "cf1c2c742266c2f1041c914ba65355a83ae8747b05f208319784083583494b4b"
[[package]]
name = "percent-encoding"
@@ -2391,9 +2391,9 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.149"
version = "1.0.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "256b9932320c590e707b94576e3cc1f7c9024d0ee6612dfbcf1cb106cbe8e055"
checksum = "e326c9ec8042f1b5da33252c8a37e9ffbd2c9bef0155215b6e6c80c790e05f91"
dependencies = [
"serde_derive",
]
@@ -2419,9 +2419,9 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.149"
version = "1.0.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4eae9b04cbffdfd550eb462ed33bc6a1b68c935127d008b27444d08380f94e4"
checksum = "42a3df25b0713732468deadad63ab9da1f1fd75a48a15024b50363f128db627e"
dependencies = [
"proc-macro2",
"quote",
@@ -2505,7 +2505,7 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "streams"
version = "0.0.1-a.dev.4"
version = "0.0.2"
dependencies = [
"arrayref",
"bitshuffle",
@@ -2589,7 +2589,7 @@ checksum = "20518fe4a4c9acf048008599e464deb21beeae3d3578418951a189c235a7a9a8"
[[package]]
name = "taskrun"
version = "0.0.1-a.0"
version = "0.0.3"
dependencies = [
"backtrace",
"chrono",

View File

@@ -1,6 +1,6 @@
[package]
name = "commonio"
version = "0.0.1-a.dev.4"
version = "0.0.2"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"

View File

@@ -1,6 +1,6 @@
[package]
name = "daqbufp2"
version = "0.0.1-a.dev.12"
version = "0.0.2"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"

View File

@@ -1,6 +1,6 @@
[package]
name = "dbconn"
version = "0.0.1-a.0"
version = "0.0.2"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"

View File

@@ -1,6 +1,6 @@
[package]
name = "disk"
version = "0.0.1-a.1"
version = "0.0.2"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"

View File

@@ -1,6 +1,6 @@
[package]
name = "httpret"
version = "0.0.1-a.0"
version = "0.0.2"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"

View File

@@ -1,6 +1,6 @@
[package]
name = "items"
version = "0.0.1-a.dev.4"
version = "0.0.2"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"
@@ -15,7 +15,6 @@ serde_json = "1.0"
ciborium = "0.2"
rmp-serde = "1.1.1"
bson = "2.4.0"
bincode = "1.3.3"
erased-serde = "0.3"
bytes = "1.2.1"
num-traits = "0.2.15"

View File

@@ -18,6 +18,7 @@ use crate::{TimeBinned, TimeBinnerDyn, TimeBins};
use chrono::{TimeZone, Utc};
use err::Error;
use items_0::subfr::SubFrId;
use items_0::AsAnyRef;
use netpod::log::*;
use netpod::timeunits::SEC;
use netpod::{NanoRange, Shape};
@@ -74,6 +75,15 @@ where
}
}
impl<NTY> AsAnyRef for MinMaxAvgDim0Bins<NTY>
where
NTY: NumOps,
{
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl<NTY> MinMaxAvgDim0Bins<NTY> {
pub fn empty() -> Self {
Self {
@@ -458,10 +468,6 @@ impl<NTY: NumOps + 'static> TimeBinnableDyn for MinMaxAvgDim0Bins<NTY> {
let ret = MinMaxAvgDim0BinsTimeBinner::<NTY>::new(edges.into(), do_time_weight);
Box::new(ret)
}
fn as_any(&self) -> &dyn Any {
self as &dyn Any
}
}
pub struct MinMaxAvgDim0BinsTimeBinner<NTY: NumOps> {
@@ -547,13 +553,13 @@ impl<NTY: NumOps + 'static> TimeBinnerDyn for MinMaxAvgDim0BinsTimeBinner<NTY> {
self.agg.as_mut().unwrap()
};
if let Some(item) = item
.as_any()
.as_any_ref()
// TODO make statically sure that we attempt to cast to the correct type here:
.downcast_ref::<<MinMaxAvgDim0BinsAggregator<NTY> as TimeBinnableTypeAggregator>::Input>()
{
agg.ingest(item);
} else {
let tyid_item = std::any::Any::type_id(item.as_any());
let tyid_item = std::any::Any::type_id(item.as_any_ref());
error!("not correct item type {:?}", tyid_item);
};
if item.ends_after(agg.range().clone()) {

View File

@@ -18,11 +18,13 @@ use crate::{Fits, FitsInside, NewEmpty, ReadPbv, Sitemty, TimeBinned, WithLen};
use chrono::{TimeZone, Utc};
use err::Error;
use items_0::subfr::SubFrId;
use items_0::AsAnyRef;
use netpod::log::*;
use netpod::timeunits::SEC;
use netpod::{NanoRange, Shape};
use num_traits::Zero;
use serde::{Deserialize, Serialize};
use std::any::Any;
use std::fmt;
use std::marker::PhantomData;
use tokio::fs::File;
@@ -72,6 +74,15 @@ where
}
}
impl<NTY> AsAnyRef for MinMaxAvgDim1Bins<NTY>
where
NTY: NumOps,
{
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl<NTY> MinMaxAvgDim1Bins<NTY> {
pub fn empty() -> Self {
Self {

View File

@@ -7,6 +7,7 @@ use bincode::config::{WithOtherEndian, WithOtherIntEncoding, WithOtherTrailing};
use bincode::DefaultOptions;
use bytes::{BufMut, BytesMut};
use err::Error;
use items_0::bincode;
#[allow(unused)]
use netpod::log::*;
use serde::Serialize;
@@ -86,7 +87,7 @@ pub fn encode_to_vec<S>(item: S) -> Result<Vec<u8>, Error>
where
S: Serialize,
{
if true {
if false {
serde_json::to_vec(&item).map_err(|e| e.into())
} else {
bincode_to_vec(&item)
@@ -97,7 +98,7 @@ pub fn decode_from_slice<T>(buf: &[u8]) -> Result<T, Error>
where
T: for<'de> serde::Deserialize<'de>,
{
if true {
if false {
serde_json::from_slice(buf).map_err(|e| e.into())
} else {
bincode_from_slice(buf)
@@ -112,10 +113,10 @@ where
let mut out = Vec::new();
//let mut ser = rmp_serde::Serializer::new(&mut out).with_struct_map();
//let writer = ciborium::ser::into_writer(&item, &mut out).unwrap();
//let mut ser = bincode_ser(&mut out);
//let mut ser2 = <dyn erased_serde::Serializer>::erase(&mut ser);
let mut ser = serde_json::Serializer::new(&mut out);
let mut ser = bincode_ser(&mut out);
let mut ser2 = <dyn erased_serde::Serializer>::erase(&mut ser);
//let mut ser = serde_json::Serializer::new(&mut out);
//let mut ser2 = <dyn erased_serde::Serializer>::erase(&mut ser);
match item.erased_serialize(&mut ser2) {
Ok(_) => {
let enc = out;
@@ -333,7 +334,10 @@ where
)))
} else {
match decode_from_slice(frame.buf()) {
Ok(item) => Ok(item),
Ok(item) => {
info!("decode_from_slice {} success", std::any::type_name::<T>());
Ok(item)
}
Err(e) => {
error!("decode_frame T = {}", std::any::type_name::<T>());
error!("ERROR deserialize len {} tyid {:x}", frame.buf().len(), frame.tyid());

View File

@@ -19,6 +19,7 @@ use bytes::BytesMut;
use chrono::{TimeZone, Utc};
use err::Error;
use frame::{make_error_frame, make_log_frame, make_range_complete_frame, make_stats_frame};
use items_0::AsAnyRef;
#[allow(unused)]
use netpod::log::*;
use netpod::timeunits::{MS, SEC};
@@ -508,32 +509,23 @@ pub trait TimeBinnableType:
// TODO should not require Sync!
// TODO SitemtyFrameType is already supertrait of FramableInner.
pub trait TimeBinnableDyn:
std::fmt::Debug
fmt::Debug
+ FramableInner
+ FrameType
+ FrameTypeInnerDyn
+ WithLen
+ RangeOverlapInfo
+ Any
+ AsAnyRef
+ Sync
+ Send
+ 'static
{
fn time_binner_new(&self, edges: Vec<u64>, do_time_weight: bool) -> Box<dyn TimeBinnerDyn>;
fn as_any(&self) -> &dyn Any;
}
pub trait TimeBinnableDynStub:
std::fmt::Debug
+ FramableInner
+ FrameType
+ FrameTypeInnerDyn
+ WithLen
+ RangeOverlapInfo
+ Any
+ Sync
+ Send
+ 'static
fmt::Debug + FramableInner + FrameType + FrameTypeInnerDyn + WithLen + RangeOverlapInfo + Any + AsAnyRef + Sync + Send + 'static
{
}
@@ -546,10 +538,6 @@ where
error!("TODO impl time_binner_new for T {}", std::any::type_name::<T>());
err::todoval()
}
fn as_any(&self) -> &dyn Any {
self as &dyn Any
}
}
// TODO maybe this is no longer needed:
@@ -606,10 +594,6 @@ impl TimeBinnableDyn for Box<dyn TimeBinned> {
fn time_binner_new(&self, edges: Vec<u64>, do_time_weight: bool) -> Box<dyn TimeBinnerDyn> {
self.as_time_binnable_dyn().time_binner_new(edges, do_time_weight)
}
fn as_any(&self) -> &dyn Any {
self as &dyn Any
}
}
// TODO should get I/O and tokio dependence out of this crate

View File

@@ -8,6 +8,7 @@ use crate::{
TimeBinnableTypeAggregator, TimeBinnerDyn, WithLen, WithTimestamps,
};
use err::Error;
use items_0::AsAnyRef;
use netpod::log::*;
use netpod::{NanoRange, Shape};
use serde::{Deserialize, Serialize};
@@ -93,6 +94,15 @@ where
}
}
impl<NTY> AsAnyRef for ScalarEvents<NTY>
where
NTY: NumOps,
{
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl<NTY> WithLen for ScalarEvents<NTY>
where
NTY: NumOps,
@@ -601,10 +611,6 @@ impl<NTY: NumOps + 'static> TimeBinnableDyn for ScalarEvents<NTY> {
let ret = ScalarEventsTimeBinner::<NTY>::new(edges.into(), do_time_weight);
Box::new(ret)
}
fn as_any(&self) -> &dyn Any {
self as &dyn Any
}
}
impl<NTY: NumOps + 'static> EventsDyn for ScalarEvents<NTY> {
@@ -738,7 +744,7 @@ impl<NTY: NumOps + 'static> TimeBinnerDyn for ScalarEventsTimeBinner<NTY> {
self.agg.as_mut().unwrap()
};
if let Some(item) = item
.as_any()
.as_any_ref()
// TODO make statically sure that we attempt to cast to the correct type here:
.downcast_ref::<<EventValuesAggregator<NTY> as TimeBinnableTypeAggregator>::Input>()
{

View File

@@ -9,6 +9,7 @@ use crate::{
};
use err::Error;
use items_0::subfr::SubFrId;
use items_0::AsAnyRef;
use netpod::log::*;
use netpod::{x_bin_count, AggKind, NanoRange, Shape};
use serde::{Deserialize, Serialize};
@@ -67,6 +68,12 @@ where
}
}
impl<NTY> AsAnyRef for WaveEvents<NTY>
where
NTY: NumOps,
{
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl<NTY> WithLen for WaveEvents<NTY> {
fn len(&self) -> usize {
self.tss.len()

View File

@@ -1,6 +1,6 @@
[package]
name = "items_0"
version = "0.0.1"
version = "0.0.2"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"
@@ -11,5 +11,6 @@ path = "src/items_0.rs"
serde = { version = "1.0", features = ["derive"] }
erased-serde = "0.3"
serde_json = "1.0"
bincode = "1.3.3"
netpod = { path = "../netpod" }
err = { path = "../err" }

View File

@@ -1,9 +1,9 @@
use crate::collect_s::ToJsonBytes;
use crate::collect_s::ToJsonResult;
use crate::AsAnyMut;
use crate::AsAnyRef;
use crate::Events;
use err::Error;
use std::any::Any;
use std::fmt;
pub trait Collector: fmt::Debug + Send {
@@ -14,7 +14,7 @@ pub trait Collector: fmt::Debug + Send {
fn result(&mut self) -> Result<Box<dyn Collected>, Error>;
}
pub trait Collectable: fmt::Debug + crate::AsAnyMut {
pub trait Collectable: fmt::Debug + AsAnyMut {
fn new_collector(&self) -> Box<dyn Collector>;
}
@@ -23,20 +23,10 @@ pub trait Collected: fmt::Debug + ToJsonResult + AsAnyRef + Send {}
erased_serde::serialize_trait_object!(Collected);
impl AsAnyRef for Box<dyn Collected> {
fn as_any_ref(&self) -> &dyn Any {
self.as_ref().as_any_ref()
}
}
impl ToJsonResult for Box<dyn Collected> {
fn to_json_result(&self) -> Result<Box<dyn ToJsonBytes>, Error> {
self.as_ref().to_json_result()
}
fn as_any(&self) -> &dyn Any {
self
}
}
impl Collected for Box<dyn Collected> {}
@@ -57,15 +47,8 @@ pub trait CollectorDyn: fmt::Debug + Send {
fn result(&mut self) -> Result<Box<dyn Collected>, Error>;
}
pub trait CollectableWithDefault {
pub trait CollectableWithDefault: AsAnyMut {
fn new_collector(&self) -> Box<dyn CollectorDyn>;
fn as_any_mut(&mut self) -> &mut dyn Any;
}
impl crate::AsAnyMut for Box<dyn Events> {
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl Collectable for Box<dyn Events> {
@@ -99,12 +82,6 @@ impl Collector for TimeBinnedCollector {
}
}
impl crate::AsAnyMut for Box<dyn crate::TimeBinned> {
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl Collectable for Box<dyn crate::TimeBinned> {
fn new_collector(&self) -> Box<dyn Collector> {
self.as_ref().new_collector()

View File

@@ -1,5 +1,5 @@
use super::collect_c::Collected;
use crate::WithLen;
use crate::{AsAnyMut, AsAnyRef, WithLen};
use err::Error;
use serde::Serialize;
use std::any::Any;
@@ -26,14 +26,13 @@ pub trait Collector: Send + Unpin + WithLen {
}
// TODO rename to `Typed`
pub trait CollectableType {
pub trait CollectableType: AsAnyRef + AsAnyMut {
type Collector: CollectorType<Input = Self>;
fn new_collector() -> Self::Collector;
}
pub trait Collectable: Any {
pub trait Collectable: AsAnyRef + AsAnyMut + Any {
fn new_collector(&self) -> Box<dyn Collector>;
fn as_any_mut(&mut self) -> &mut dyn Any;
}
impl<T: CollectorType + 'static> Collector for T {
@@ -56,15 +55,13 @@ impl<T: CollectorType + 'static> Collector for T {
}
}
impl<T: CollectableType + 'static> Collectable for T {
impl<T> Collectable for T
where
T: CollectableType + 'static,
{
fn new_collector(&self) -> Box<dyn Collector> {
Box::new(T::new_collector()) as _
}
fn as_any_mut(&mut self) -> &mut dyn Any {
// TODO interesting: why exactly does returning `&mut self` not work here?
self
}
}
// TODO check usage of this trait
@@ -73,21 +70,28 @@ pub trait ToJsonBytes {
}
// TODO check usage of this trait
pub trait ToJsonResult: erased_serde::Serialize + fmt::Debug + Send {
pub trait ToJsonResult: erased_serde::Serialize + fmt::Debug + AsAnyRef + AsAnyMut + Send {
fn to_json_result(&self) -> Result<Box<dyn ToJsonBytes>, Error>;
fn as_any(&self) -> &dyn Any;
}
erased_serde::serialize_trait_object!(ToJsonResult);
impl AsAnyRef for serde_json::Value {
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl AsAnyMut for serde_json::Value {
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl ToJsonResult for serde_json::Value {
fn to_json_result(&self) -> Result<Box<dyn ToJsonBytes>, Error> {
Ok(Box::new(self.clone()))
}
fn as_any(&self) -> &dyn Any {
self
}
}
impl ToJsonBytes for serde_json::Value {
@@ -101,8 +105,4 @@ impl Collectable for Box<dyn Collectable> {
fn new_collector(&self) -> Box<dyn Collector> {
Collectable::new_collector(self.as_ref())
}
fn as_any_mut(&mut self) -> &mut dyn Any {
Collectable::as_any_mut(self.as_mut())
}
}

View File

@@ -3,6 +3,10 @@ pub mod collect_s;
pub mod scalar_ops;
pub mod subfr;
pub mod bincode {
pub use bincode::*;
}
use collect_c::CollectableWithDefault;
use collect_s::Collectable;
use collect_s::ToJsonResult;
@@ -61,6 +65,24 @@ pub trait AsAnyMut {
fn as_any_mut(&mut self) -> &mut dyn Any;
}
impl<T> AsAnyRef for Box<T>
where
T: AsAnyRef + ?Sized,
{
fn as_any_ref(&self) -> &dyn Any {
self.as_ref().as_any_ref()
}
}
impl<T> AsAnyMut for Box<T>
where
T: AsAnyMut + ?Sized,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self.as_mut().as_any_mut()
}
}
/// Data in time-binned form.
pub trait TimeBinned: Any + TimeBinnable + crate::collect_c::Collectable {
fn as_time_binnable_dyn(&self) -> &dyn TimeBinnable;
@@ -94,11 +116,9 @@ pub trait TimeBinner: Send {
/// Provides a time-binned representation of the implementing type.
/// In contrast to `TimeBinnableType` this is meant for trait objects.
pub trait TimeBinnable: fmt::Debug + WithLen + RangeOverlapInfo + Any + Send {
pub trait TimeBinnable: fmt::Debug + WithLen + RangeOverlapInfo + Any + AsAnyRef + AsAnyMut + Send {
// TODO implementors may fail if edges contain not at least 2 entries.
fn time_binner_new(&self, edges: Vec<u64>, do_time_weight: bool) -> Box<dyn TimeBinner>;
fn as_any(&self) -> &dyn Any;
// TODO just a helper for the empty result.
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult>;
}

View File

@@ -1,6 +1,6 @@
[package]
name = "items_2"
version = "0.0.1"
version = "0.0.2"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"

View File

@@ -5,11 +5,11 @@ use chrono::{TimeZone, Utc};
use err::Error;
use items_0::collect_s::{Collectable, CollectableType, CollectorType, ToJsonResult};
use items_0::scalar_ops::ScalarOps;
use items_0::AppendEmptyBin;
use items_0::Empty;
use items_0::TimeBinned;
use items_0::TimeBins;
use items_0::WithLen;
use items_0::{AppendEmptyBin, AsAnyRef};
use items_0::{AsAnyMut, Empty};
use netpod::log::*;
use netpod::timeunits::SEC;
use netpod::NanoRange;
@@ -114,6 +114,24 @@ impl<NTY: ScalarOps> BinsDim0<NTY> {
}
}
impl<NTY> AsAnyRef for BinsDim0<NTY>
where
NTY: ScalarOps,
{
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl<NTY> AsAnyMut for BinsDim0<NTY>
where
NTY: ScalarOps,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY> Empty for BinsDim0<NTY> {
fn empty() -> Self {
Self {
@@ -235,12 +253,24 @@ pub struct BinsDim0CollectedResult<NTY> {
finished_at: Option<IsoDateTime>,
}
impl<NTY: ScalarOps> items_0::AsAnyRef for BinsDim0CollectedResult<NTY> {
impl<NTY> AsAnyRef for BinsDim0CollectedResult<NTY>
where
NTY: ScalarOps,
{
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl<NTY> AsAnyMut for BinsDim0CollectedResult<NTY>
where
NTY: ScalarOps,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collected for BinsDim0CollectedResult<NTY> {}
impl<NTY> BinsDim0CollectedResult<NTY> {
@@ -294,10 +324,6 @@ impl<NTY: ScalarOps> ToJsonResult for BinsDim0CollectedResult<NTY> {
let k = serde_json::to_value(self)?;
Ok(Box::new(k))
}
fn as_any(&self) -> &dyn Any {
self
}
}
#[derive(Debug)]
@@ -553,10 +579,6 @@ impl<NTY: ScalarOps> TimeBinnable for BinsDim0<NTY> {
Box::new(ret)
}
fn as_any(&self) -> &dyn Any {
self as &dyn Any
}
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult> {
let k = serde_json::to_value(self).unwrap();
Box::new(k) as _
@@ -640,13 +662,13 @@ impl<NTY: ScalarOps> TimeBinner for BinsDim0TimeBinner<NTY> {
self.agg.as_mut().unwrap()
};
if let Some(item) = item
.as_any()
.as_any_ref()
// TODO make statically sure that we attempt to cast to the correct type here:
.downcast_ref::<<BinsDim0Aggregator<NTY> as TimeBinnableTypeAggregator>::Input>()
{
agg.ingest(item);
} else {
let tyid_item = std::any::Any::type_id(item.as_any());
let tyid_item = std::any::Any::type_id(item.as_any_ref());
error!("not correct item type {:?}", tyid_item);
};
if item.ends_after(agg.range().clone()) {
@@ -785,15 +807,6 @@ impl<NTY: ScalarOps> TimeBinned for BinsDim0<NTY> {
}
}
impl<NTY> items_0::AsAnyMut for BinsDim0<NTY>
where
NTY: 'static,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY> items_0::collect_c::Collectable for BinsDim0<NTY>
where
NTY: ScalarOps,

View File

@@ -5,11 +5,11 @@ use chrono::{TimeZone, Utc};
use err::Error;
use items_0::collect_s::{Collectable, CollectableType, CollectorType, ToJsonResult};
use items_0::scalar_ops::ScalarOps;
use items_0::AppendEmptyBin;
use items_0::Empty;
use items_0::TimeBinned;
use items_0::TimeBins;
use items_0::WithLen;
use items_0::{AppendEmptyBin, AsAnyMut, AsAnyRef};
use netpod::log::*;
use netpod::timeunits::SEC;
use netpod::NanoRange;
@@ -133,6 +133,24 @@ impl<NTY: ScalarOps> BinsXbinDim0<NTY> {
}
}
impl<NTY> AsAnyRef for BinsXbinDim0<NTY>
where
NTY: ScalarOps,
{
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl<NTY> AsAnyMut for BinsXbinDim0<NTY>
where
NTY: ScalarOps,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY> Empty for BinsXbinDim0<NTY> {
fn empty() -> Self {
Self {
@@ -254,12 +272,24 @@ pub struct BinsXbinDim0CollectedResult<NTY> {
finished_at: Option<IsoDateTime>,
}
impl<NTY: ScalarOps> items_0::AsAnyRef for BinsXbinDim0CollectedResult<NTY> {
impl<NTY> AsAnyRef for BinsXbinDim0CollectedResult<NTY>
where
NTY: ScalarOps,
{
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl<NTY> AsAnyMut for BinsXbinDim0CollectedResult<NTY>
where
NTY: ScalarOps,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collected for BinsXbinDim0CollectedResult<NTY> {}
impl<NTY> BinsXbinDim0CollectedResult<NTY> {
@@ -309,10 +339,6 @@ impl<NTY: ScalarOps> ToJsonResult for BinsXbinDim0CollectedResult<NTY> {
let k = serde_json::to_value(self)?;
Ok(Box::new(k))
}
fn as_any(&self) -> &dyn Any {
self
}
}
#[derive(Debug)]
@@ -568,10 +594,6 @@ impl<NTY: ScalarOps> TimeBinnable for BinsXbinDim0<NTY> {
Box::new(ret)
}
fn as_any(&self) -> &dyn Any {
self as &dyn Any
}
fn to_box_to_json_result(&self) -> Box<dyn ToJsonResult> {
let k = serde_json::to_value(self).unwrap();
Box::new(k) as _
@@ -655,13 +677,13 @@ impl<NTY: ScalarOps> TimeBinner for BinsXbinDim0TimeBinner<NTY> {
self.agg.as_mut().unwrap()
};
if let Some(item) = item
.as_any()
.as_any_ref()
// TODO make statically sure that we attempt to cast to the correct type here:
.downcast_ref::<<BinsXbinDim0Aggregator<NTY> as TimeBinnableTypeAggregator>::Input>()
{
agg.ingest(item);
} else {
let tyid_item = std::any::Any::type_id(item.as_any());
let tyid_item = std::any::Any::type_id(item.as_any_ref());
error!("not correct item type {:?}", tyid_item);
};
if item.ends_after(agg.range().clone()) {
@@ -800,15 +822,6 @@ impl<NTY: ScalarOps> TimeBinned for BinsXbinDim0<NTY> {
}
}
impl<NTY> items_0::AsAnyMut for BinsXbinDim0<NTY>
where
NTY: 'static,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY> items_0::collect_c::Collectable for BinsXbinDim0<NTY>
where
NTY: ScalarOps,

View File

@@ -5,6 +5,8 @@ use items::FrameType;
use items::FrameTypeInnerStatic;
use items_0::collect_s::Collectable;
use items_0::collect_s::Collector;
use items_0::AsAnyMut;
use items_0::AsAnyRef;
use netpod::log::*;
use serde::{Deserialize, Serialize};
use std::any::Any;
@@ -57,88 +59,182 @@ impl Clone for ChannelEvents {
}
}
impl AsAnyRef for ChannelEvents {
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl AsAnyMut for ChannelEvents {
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
mod serde_channel_events {
use super::{ChannelEvents, Events};
use crate::channelevents::ConnStatusEvent;
use crate::eventsdim0::EventsDim0;
use items_0::subfr::SubFrId;
use serde::de::{self, EnumAccess, VariantAccess, Visitor};
use serde::ser::SerializeSeq;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
struct EvRef<'a>(&'a dyn Events);
struct EvBox(Box<dyn Events>);
impl<'a> Serialize for EvRef<'a> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut ser = serializer.serialize_seq(Some(3))?;
ser.serialize_element(self.0.serde_id())?;
ser.serialize_element(&self.0.nty_id())?;
ser.serialize_element(self.0)?;
ser.end()
}
}
struct EvBoxVis;
impl EvBoxVis {
fn name() -> &'static str {
"Events"
}
}
impl<'de> Visitor<'de> for EvBoxVis {
type Value = EvBox;
fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{}", Self::name())
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: de::SeqAccess<'de>,
{
let e0: &str = seq.next_element()?.ok_or(de::Error::missing_field("[0] cty"))?;
let e1: u32 = seq.next_element()?.ok_or(de::Error::missing_field("[1] nty"))?;
if e0 == EventsDim0::<u8>::serde_id() {
match e1 {
i32::SUB => {
let obj: EventsDim0<i32> = seq.next_element()?.ok_or(de::Error::missing_field("[2] obj"))?;
Ok(EvBox(Box::new(obj)))
}
f32::SUB => {
let obj: EventsDim0<f32> = seq.next_element()?.ok_or(de::Error::missing_field("[2] obj"))?;
Ok(EvBox(Box::new(obj)))
}
f64::SUB => {
let obj: EventsDim0<f64> = seq.next_element()?.ok_or(de::Error::missing_field("[2] obj"))?;
Ok(EvBox(Box::new(obj)))
}
_ => Err(de::Error::custom(&format!("unknown nty {e1}"))),
}
} else {
Err(de::Error::custom(&format!("unknown cty {e0}")))
}
}
}
impl<'de> Deserialize<'de> for EvBox {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_seq(EvBoxVis)
}
}
impl Serialize for ChannelEvents {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let name = "ChannelEvents";
let vars = ChannelEventsVis::allowed_variants();
match self {
ChannelEvents::Events(obj) => {
use serde::ser::SerializeTupleVariant;
let mut ser = serializer.serialize_tuple_variant(name, 0, "Events", 3)?;
ser.serialize_field(obj.serde_id())?;
ser.serialize_field(&obj.nty_id())?;
ser.serialize_field(obj)?;
ser.end()
serializer.serialize_newtype_variant(name, 0, vars[0], &EvRef(obj.as_ref()))
}
ChannelEvents::Status(val) => serializer.serialize_newtype_variant(name, 1, "Status", val),
ChannelEvents::Status(val) => serializer.serialize_newtype_variant(name, 1, vars[1], val),
}
}
}
struct EventsBoxVisitor;
enum VarId {
Events,
Status,
}
impl<'de> Visitor<'de> for EventsBoxVisitor {
type Value = Box<dyn Events>;
struct VarIdVis;
impl<'de> Visitor<'de> for VarIdVis {
type Value = VarId;
fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "Events object")
write!(fmt, "variant identifier")
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
fn visit_u64<E>(self, val: u64) -> Result<Self::Value, E>
where
A: de::SeqAccess<'de>,
E: de::Error,
{
use items_0::subfr::SubFrId;
let e0: &str = seq.next_element()?.ok_or(de::Error::missing_field("ty .0"))?;
let e1: u32 = seq.next_element()?.ok_or(de::Error::missing_field("nty .1"))?;
if e0 == EventsDim0::<u8>::serde_id() {
match e1 {
i32::SUB => {
let obj: EventsDim0<i32> = seq.next_element()?.ok_or(de::Error::missing_field("obj .2"))?;
Ok(Box::new(obj))
}
f32::SUB => {
let obj: EventsDim0<f32> = seq.next_element()?.ok_or(de::Error::missing_field("obj .2"))?;
Ok(Box::new(obj))
}
f64::SUB => {
let obj: EventsDim0<f64> = seq.next_element()?.ok_or(de::Error::missing_field("obj .2"))?;
Ok(Box::new(obj))
}
_ => Err(de::Error::custom(&format!("unknown nty {e1}"))),
}
match val {
0 => Ok(VarId::Events),
1 => Ok(VarId::Status),
_ => Err(de::Error::invalid_value(
de::Unexpected::Unsigned(val),
&"variant index 0..2",
)),
}
}
fn visit_str<E>(self, val: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let vars = ChannelEventsVis::allowed_variants();
if val == vars[0] {
Ok(VarId::Events)
} else if val == vars[1] {
Ok(VarId::Status)
} else {
Err(de::Error::custom(&format!("unknown ty {e0}")))
Err(de::Error::unknown_variant(val, ChannelEventsVis::allowed_variants()))
}
}
}
pub struct ChannelEventsVisitor;
impl<'de> Deserialize<'de> for VarId {
fn deserialize<D>(de: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
de.deserialize_identifier(VarIdVis)
}
}
impl ChannelEventsVisitor {
pub struct ChannelEventsVis;
impl ChannelEventsVis {
fn name() -> &'static str {
"ChannelEvents"
}
fn allowed_variants() -> &'static [&'static str] {
&["Events", "Status", "RangeComplete"]
&["Events", "Status"]
}
}
impl<'de> Visitor<'de> for ChannelEventsVisitor {
impl<'de> Visitor<'de> for ChannelEventsVis {
type Value = ChannelEvents;
fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "ChannelEvents")
write!(fmt, "{}", Self::name())
}
fn visit_enum<A>(self, data: A) -> Result<Self::Value, A::Error>
@@ -147,11 +243,14 @@ mod serde_channel_events {
{
let (id, var) = data.variant()?;
match id {
"Events" => {
let c = var.tuple_variant(3, EventsBoxVisitor)?;
Ok(Self::Value::Events(c))
VarId::Events => {
let x: EvBox = var.newtype_variant()?;
Ok(Self::Value::Events(x.0))
}
VarId::Status => {
let x: ConnStatusEvent = var.newtype_variant()?;
Ok(Self::Value::Status(x))
}
_ => return Err(de::Error::unknown_variant(id, Self::allowed_variants())),
}
}
}
@@ -162,9 +261,9 @@ mod serde_channel_events {
D: Deserializer<'de>,
{
de.deserialize_enum(
ChannelEventsVisitor::name(),
ChannelEventsVisitor::allowed_variants(),
ChannelEventsVisitor,
ChannelEventsVis::name(),
ChannelEventsVis::allowed_variants(),
ChannelEventsVis,
)
}
}
@@ -173,8 +272,18 @@ mod serde_channel_events {
#[cfg(test)]
mod test_channel_events_serde {
use super::ChannelEvents;
use crate::channelevents::ConnStatusEvent;
use crate::eventsdim0::EventsDim0;
use bincode::config::FixintEncoding;
use bincode::config::LittleEndian;
use bincode::config::RejectTrailing;
use bincode::config::WithOtherEndian;
use bincode::config::WithOtherIntEncoding;
use bincode::config::WithOtherTrailing;
use bincode::DefaultOptions;
use items_0::bincode;
use items_0::Empty;
use serde::{Deserialize, Serialize};
#[test]
fn channel_events() {
@@ -187,6 +296,68 @@ mod test_channel_events_serde {
let w: ChannelEvents = serde_json::from_str(&s).unwrap();
eprintln!("{w:?}");
}
type OptsTy = WithOtherTrailing<
WithOtherIntEncoding<WithOtherEndian<DefaultOptions, LittleEndian>, FixintEncoding>,
RejectTrailing,
>;
fn bincode_opts() -> OptsTy {
use bincode::Options;
let opts = bincode::DefaultOptions::new()
.with_little_endian()
.with_fixint_encoding()
.reject_trailing_bytes();
opts
}
#[test]
fn channel_events_bincode() {
let mut evs = EventsDim0::empty();
evs.push(8, 2, 3.0f32);
evs.push(12, 3, 3.2f32);
let item = ChannelEvents::Events(Box::new(evs));
let opts = bincode_opts();
let mut out = Vec::new();
let mut ser = bincode::Serializer::new(&mut out, opts);
item.serialize(&mut ser).unwrap();
eprintln!("serialized into {} bytes", out.len());
let mut de = bincode::Deserializer::from_slice(&out, opts);
let item = <ChannelEvents as Deserialize>::deserialize(&mut de).unwrap();
let item = if let ChannelEvents::Events(x) = item {
x
} else {
panic!()
};
let item: &EventsDim0<f32> = item.as_any_ref().downcast_ref().unwrap();
assert_eq!(item.tss().len(), 2);
assert_eq!(item.tss()[1], 12);
}
#[test]
fn channel_status_bincode() {
let mut evs = EventsDim0::empty();
evs.push(8, 2, 3.0f32);
evs.push(12, 3, 3.2f32);
let status = ConnStatusEvent {
ts: 567,
status: crate::channelevents::ConnStatus::Connect,
};
let item = ChannelEvents::Status(status);
let opts = bincode_opts();
let mut out = Vec::new();
let mut ser = bincode::Serializer::new(&mut out, opts);
item.serialize(&mut ser).unwrap();
eprintln!("serialized into {} bytes", out.len());
let mut de = bincode::Deserializer::from_slice(&out, opts);
let item = <ChannelEvents as Deserialize>::deserialize(&mut de).unwrap();
let item = if let ChannelEvents::Status(x) = item {
x
} else {
panic!()
};
assert_eq!(item.ts, 567);
}
}
impl PartialEq for ChannelEvents {
@@ -286,10 +457,6 @@ impl Collectable for ChannelEvents {
ChannelEvents::Status(_) => todo!(),
}
}
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
pub struct ChannelEventsTimeBinner {
@@ -390,20 +557,22 @@ impl crate::timebin::TimeBinnable for ChannelEvents {
#[derive(Debug, Serialize, Deserialize)]
pub struct ChannelEventsCollectorOutput {}
impl items_0::AsAnyRef for ChannelEventsCollectorOutput {
impl AsAnyRef for ChannelEventsCollectorOutput {
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl AsAnyMut for ChannelEventsCollectorOutput {
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl crate::ToJsonResult for ChannelEventsCollectorOutput {
fn to_json_result(&self) -> Result<Box<dyn items_0::collect_s::ToJsonBytes>, err::Error> {
todo!()
}
fn as_any(&self) -> &dyn Any {
self
}
}
impl items_0::collect_c::Collected for ChannelEventsCollectorOutput {}
@@ -484,12 +653,6 @@ impl items_0::collect_c::Collector for ChannelEventsCollector {
}
}
impl items_0::AsAnyMut for ChannelEvents {
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl items_0::collect_c::Collectable for ChannelEvents {
fn new_collector(&self) -> Box<dyn items_0::collect_c::Collector> {
Box::new(ChannelEventsCollector::new())
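
For context on the serde rewrite in this file: the hand-written impls move `ChannelEvents` to newtype variants with an explicit variant identifier so that it survives bincode framing as well as JSON (see the new `channel_events_bincode` and `channel_status_bincode` tests). bincode is not self-describing and reports the variant by index, while serde_json reports it by name, which is why `VarIdVis` implements both `visit_u64` and `visit_str`; the `Events` payload itself travels as a `(serde_id, nty_id, object)` triple so the concrete `EventsDim0<T>` can be rebuilt. Below is a compact, self-contained sketch of the same pattern on a hypothetical two-variant `Msg` enum (not a type from this repo), assuming serde with derive, serde_json, and bincode 1.3.

    use serde::de::{self, EnumAccess, VariantAccess, Visitor};
    use serde::{Deserialize, Deserializer, Serialize, Serializer};
    use std::fmt;

    // Hypothetical stand-ins; not types from this repository.
    #[derive(Debug, PartialEq, Serialize, Deserialize)]
    struct Status {
        ts: u64,
    }

    #[derive(Debug, PartialEq)]
    enum Msg {
        Events(Vec<f32>),
        Status(Status),
    }

    impl Serialize for Msg {
        fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
            // Newtype variants with explicit indices, mirroring ChannelEvents above.
            match self {
                Msg::Events(x) => serializer.serialize_newtype_variant("Msg", 0, "Events", x),
                Msg::Status(x) => serializer.serialize_newtype_variant("Msg", 1, "Status", x),
            }
        }
    }

    enum VarId {
        Events,
        Status,
    }

    impl<'de> Deserialize<'de> for VarId {
        fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
            struct Vis;
            impl<'de> Visitor<'de> for Vis {
                type Value = VarId;
                fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
                    write!(f, "variant identifier")
                }
                // Compact formats like bincode report the variant by index...
                fn visit_u64<E: de::Error>(self, v: u64) -> Result<VarId, E> {
                    match v {
                        0 => Ok(VarId::Events),
                        1 => Ok(VarId::Status),
                        _ => Err(de::Error::invalid_value(de::Unexpected::Unsigned(v), &"variant index 0..2")),
                    }
                }
                // ...self-describing formats like serde_json report it by name.
                fn visit_str<E: de::Error>(self, v: &str) -> Result<VarId, E> {
                    match v {
                        "Events" => Ok(VarId::Events),
                        "Status" => Ok(VarId::Status),
                        _ => Err(de::Error::unknown_variant(v, &["Events", "Status"])),
                    }
                }
            }
            de.deserialize_identifier(Vis)
        }
    }

    impl<'de> Deserialize<'de> for Msg {
        fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
            struct Vis;
            impl<'de> Visitor<'de> for Vis {
                type Value = Msg;
                fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
                    write!(f, "Msg")
                }
                fn visit_enum<A: EnumAccess<'de>>(self, data: A) -> Result<Msg, A::Error> {
                    let (id, var) = data.variant()?;
                    match id {
                        VarId::Events => Ok(Msg::Events(var.newtype_variant()?)),
                        VarId::Status => Ok(Msg::Status(var.newtype_variant()?)),
                    }
                }
            }
            de.deserialize_enum("Msg", &["Events", "Status"], Vis)
        }
    }

    fn main() {
        use bincode::Options;
        let opts = bincode::DefaultOptions::new()
            .with_little_endian()
            .with_fixint_encoding()
            .reject_trailing_bytes();
        // bincode round trip: the variant travels as an index.
        let item = Msg::Events(vec![3.0, 3.2]);
        let buf = opts.serialize(&item).unwrap();
        let back: Msg = opts.deserialize(&buf).unwrap();
        assert_eq!(item, back);
        // serde_json round trip: the variant travels as a name.
        let item = Msg::Status(Status { ts: 567 });
        let js = serde_json::to_string(&item).unwrap();
        let back: Msg = serde_json::from_str(&js).unwrap();
        assert_eq!(item, back);
    }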

View File

@@ -4,7 +4,7 @@ use crate::{IsoDateTime, RangeOverlapInfo};
use crate::{TimeBinnable, TimeBinnableType, TimeBinnableTypeAggregator, TimeBinner};
use err::Error;
use items_0::scalar_ops::ScalarOps;
use items_0::{Empty, Events, WithLen};
use items_0::{AsAnyMut, AsAnyRef, Empty, Events, WithLen};
use netpod::log::*;
use netpod::timeunits::SEC;
use netpod::NanoRange;
@@ -38,6 +38,28 @@ impl<NTY> EventsDim0<NTY> {
pub fn serde_id() -> &'static str {
"EventsDim0"
}
pub fn tss(&self) -> &VecDeque<u64> {
&self.tss
}
}
impl<NTY> AsAnyRef for EventsDim0<NTY>
where
NTY: ScalarOps,
{
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl<NTY> AsAnyMut for EventsDim0<NTY>
where
NTY: ScalarOps,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY> Empty for EventsDim0<NTY> {
@@ -206,21 +228,29 @@ impl<NTY: ScalarOps> EventsDim0CollectorOutput<NTY> {
}
}
impl<NTY: ScalarOps> items_0::AsAnyRef for EventsDim0CollectorOutput<NTY> {
impl<NTY> AsAnyRef for EventsDim0CollectorOutput<NTY>
where
NTY: ScalarOps,
{
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl<NTY> AsAnyMut for EventsDim0CollectorOutput<NTY>
where
NTY: ScalarOps,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY: ScalarOps> items_0::collect_s::ToJsonResult for EventsDim0CollectorOutput<NTY> {
fn to_json_result(&self) -> Result<Box<dyn items_0::collect_s::ToJsonBytes>, Error> {
let k = serde_json::to_value(self)?;
Ok(Box::new(k))
}
fn as_any(&self) -> &dyn Any {
self
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collected for EventsDim0CollectorOutput<NTY> {}
@@ -593,10 +623,6 @@ impl<NTY: ScalarOps> TimeBinnable for EventsDim0<NTY> {
Box::new(ret)
}
fn as_any(&self) -> &dyn Any {
self as &dyn Any
}
fn to_box_to_json_result(&self) -> Box<dyn items_0::collect_s::ToJsonResult> {
let k = serde_json::to_value(self).unwrap();
Box::new(k) as _
@@ -678,7 +704,7 @@ impl<NTY: ScalarOps> Events for EventsDim0<NTY> {
fn move_into_existing(&mut self, tgt: &mut Box<dyn Events>, ts_end: u64) -> Result<(), ()> {
// TODO as_any and as_any_mut are declared on unrealted traits. Simplify.
if let Some(tgt) = items_0::collect_s::Collectable::as_any_mut(tgt.as_mut()).downcast_mut::<Self>() {
if let Some(tgt) = tgt.as_mut().as_any_mut().downcast_mut::<Self>() {
// TODO improve the search
let n1 = self.tss.iter().take_while(|&&x| x <= ts_end).count();
// TODO make it harder to forget new members when the struct may get modified in the future
@@ -701,7 +727,7 @@ impl<NTY: ScalarOps> Events for EventsDim0<NTY> {
}
fn partial_eq_dyn(&self, other: &dyn Events) -> bool {
if let Some(other) = other.as_any().downcast_ref::<Self>() {
if let Some(other) = other.as_any_ref().downcast_ref::<Self>() {
self == other
} else {
false
@@ -821,7 +847,7 @@ impl<NTY: ScalarOps> TimeBinner for EventsDim0TimeBinner<NTY> {
return;
} else {
if let Some(item) = item
.as_any()
.as_any_ref()
// TODO make statically sure that we attempt to cast to the correct type here:
.downcast_ref::<<EventsDim0Aggregator<NTY> as TimeBinnableTypeAggregator>::Input>()
{
@@ -984,16 +1010,6 @@ impl<NTY: ScalarOps> items_0::collect_c::CollectableWithDefault for EventsDim0<N
let coll = EventsDim0Collector::<NTY>::new();
Box::new(coll)
}
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY: ScalarOps> items_0::AsAnyMut for EventsDim0<NTY> {
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY: ScalarOps> items_0::collect_c::Collectable for EventsDim0<NTY> {
@@ -1013,14 +1029,16 @@ mod test_frame {
use items::StreamItem;
use items_0::AsAnyMut;
use items_0::Empty;
use items_0::Events;
#[test]
fn events_bincode() {
taskrun::tracing_init().unwrap();
// core::result::Result<items::StreamItem<items::RangeCompletableItem<items_2::channelevents::ChannelEvents>>, err::Error>
let mut events = EventsDim0::empty();
events.push(123, 234, 55f32);
let events = events;
let events: Box<dyn items_0::Events> = Box::new(events);
let events: Box<dyn Events> = Box::new(events);
let item = ChannelEvents::Events(events);
let item = Ok::<_, Error>(StreamItem::DataItem(RangeCompletableItem::Data(item)));
let mut buf = item.make_frame().unwrap();
@@ -1050,16 +1068,35 @@ mod test_frame {
} else {
panic!()
};
let item = if let Some(item) = item.as_any_mut().downcast_mut::<Box<dyn items_0::Events>>() {
let item = if let Some(item) = item.as_any_mut().downcast_mut::<EventsDim0<f32>>() {
item
} else {
panic!()
};
eprintln!("NOW WE SEE: {:?}", item);
let item = if let Some(item) = item.as_any_mut().downcast_ref::<Box<EventsDim0<f32>>>() {
// type_name_of_val alloc::boxed::Box<dyn items_0::Events>
eprintln!("0 {:22?}", item.as_any_mut().type_id());
eprintln!("A {:22?}", std::any::TypeId::of::<Box<dyn items_0::Events>>());
eprintln!("B {:22?}", std::any::TypeId::of::<dyn items_0::Events>());
eprintln!("C {:22?}", std::any::TypeId::of::<&dyn items_0::Events>());
eprintln!("D {:22?}", std::any::TypeId::of::<&mut dyn items_0::Events>());
eprintln!("E {:22?}", std::any::TypeId::of::<&mut Box<dyn items_0::Events>>());
eprintln!("F {:22?}", std::any::TypeId::of::<Box<EventsDim0<f32>>>());
eprintln!("G {:22?}", std::any::TypeId::of::<&EventsDim0<f32>>());
eprintln!("H {:22?}", std::any::TypeId::of::<&mut EventsDim0<f32>>());
eprintln!("I {:22?}", std::any::TypeId::of::<Box<Box<EventsDim0<f32>>>>());
//let item = item.as_mut();
//eprintln!("1 {:22?}", item.type_id());
/*
let item = if let Some(item) =
items_0::collect_s::Collectable::as_any_mut(item).downcast_ref::<Box<EventsDim0<f32>>>()
{
item
} else {
panic!()
};
*/
eprintln!("Final value: {item:?}");
assert_eq!(item.tss(), &[123]);
}
}

View File

@@ -4,8 +4,8 @@ use crate::{pulse_offs_from_abs, ts_offs_from_abs};
use crate::{TimeBinnableType, TimeBinnableTypeAggregator};
use err::Error;
use items_0::scalar_ops::ScalarOps;
use items_0::Empty;
use items_0::WithLen;
use items_0::{AsAnyMut, WithLen};
use items_0::{AsAnyRef, Empty};
use netpod::log::*;
use netpod::NanoRange;
use serde::{Deserialize, Serialize};
@@ -76,6 +76,24 @@ impl<NTY> Empty for EventsXbinDim0<NTY> {
}
}
impl<NTY> AsAnyRef for EventsXbinDim0<NTY>
where
NTY: ScalarOps,
{
fn as_any_ref(&self) -> &dyn Any {
self
}
}
impl<NTY> AsAnyMut for EventsXbinDim0<NTY>
where
NTY: ScalarOps,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY> WithLen for EventsXbinDim0<NTY> {
fn len(&self) -> usize {
self.tss.len()
@@ -347,7 +365,7 @@ pub struct EventsXbinDim0CollectorOutput<NTY> {
// TODO add continue-at
}
impl<NTY> items_0::AsAnyRef for EventsXbinDim0CollectorOutput<NTY>
impl<NTY> AsAnyRef for EventsXbinDim0CollectorOutput<NTY>
where
NTY: ScalarOps,
{
@@ -356,6 +374,15 @@ where
}
}
impl<NTY> AsAnyMut for EventsXbinDim0CollectorOutput<NTY>
where
NTY: ScalarOps,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY> items_0::collect_s::ToJsonResult for EventsXbinDim0CollectorOutput<NTY>
where
NTY: ScalarOps,
@@ -364,10 +391,6 @@ where
let k = serde_json::to_value(self)?;
Ok(Box::new(k))
}
fn as_any(&self) -> &dyn Any {
self
}
}
impl<NTY> items_0::collect_c::Collected for EventsXbinDim0CollectorOutput<NTY> where NTY: ScalarOps {}
@@ -454,15 +477,6 @@ where
}
}
impl<NTY> items_0::AsAnyMut for EventsXbinDim0<NTY>
where
NTY: ScalarOps,
{
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
impl<NTY> items_0::collect_c::Collector for EventsXbinDim0Collector<NTY>
where
NTY: ScalarOps,

View File

@@ -474,7 +474,7 @@ fn binned_timeout_01() {
inp1,
)
.await?;
let r2: &BinsDim0CollectedResult<f32> = res.as_any().downcast_ref().expect("res seems wrong type");
let r2: &BinsDim0CollectedResult<f32> = res.as_any_ref().downcast_ref().expect("res seems wrong type");
assert_eq!(SEC * r2.ts_anchor_sec(), TSBASE + SEC);
assert_eq!(r2.counts(), &[10, 10, 10]);
assert_eq!(r2.mins(), &[3.0, 2.0, 3.0]);

View File

@@ -1,6 +1,6 @@
[package]
name = "nodenet"
version = "0.0.1-a.1"
version = "0.0.2"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"

View File

@@ -1,6 +1,6 @@
[package]
name = "parse"
version = "0.0.1-a.0"
version = "0.0.2"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"

View File

@@ -1,6 +1,6 @@
[package]
name = "streams"
version = "0.0.1-a.dev.4"
version = "0.0.2"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"

View File

@@ -46,7 +46,7 @@ fn time_bin_00() {
Ok(item) => match item {
StreamItem::DataItem(item) => match item {
RangeCompletableItem::Data(item) => {
if let Some(item) = item.as_any().downcast_ref::<BinsDim0<f32>>() {
if let Some(item) = item.as_any_ref().downcast_ref::<BinsDim0<f32>>() {
let exp = exps.pop_front().unwrap();
if !item.equal_slack(&exp) {
return Err(Error::with_msg_no_trace(format!("bad, content not equal")));
@@ -104,7 +104,7 @@ fn time_bin_01() {
Ok(item) => match item {
StreamItem::DataItem(item) => match item {
RangeCompletableItem::Data(item) => {
if let Some(_) = item.as_any().downcast_ref::<BinsDim0<f32>>() {
if let Some(_) = item.as_any_ref().downcast_ref::<BinsDim0<f32>>() {
} else {
return Err(Error::with_msg_no_trace(format!("bad, got item with unexpected type")));
}

View File

@@ -1,6 +1,6 @@
[package]
name = "taskrun"
version = "0.0.1-a.0"
version = "0.0.3"
authors = ["Dominik Werder <dominik.werder@gmail.com>"]
edition = "2021"