Download small image sample
@@ -22,8 +22,8 @@ byteorder = "1.4.3"
futures-core = "0.3.14"
futures-util = "0.3.14"
async-stream = "0.3.0"
tracing = "0.1.25"
#tracing-futures = "0.2.5"
tracing = { version = "0.1.25", features = [] }
tracing-futures = { version = "0.2.5", features = ["futures-01", "futures-03", "std-future"] }
fs2 = "0.4.3"
libc = "0.2.93"
hex = "0.4.3"
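The dependency hunk above swaps the plain `tracing` requirement for one with an explicit feature list and re-enables `tracing-futures`, which provides the `Instrument` combinator for attaching spans to futures and streams. A minimal sketch of that pattern follows; the function name and the tokio runtime are assumptions for illustration, not part of this commit:

    // Sketch only: the tracing-futures instrumentation pattern enabled by the
    // dependency change above. fetch_blobs and the tokio runtime are hypothetical.
    use tracing::{info, info_span};
    use tracing_futures::Instrument;

    async fn fetch_blobs() {
        info!("runs inside the span attached below");
    }

    #[tokio::main]
    async fn main() {
        // Every poll of the instrumented future enters the span first.
        fetch_blobs().instrument(info_span!("fetch_blobs")).await;
    }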
@@ -112,6 +112,10 @@ where
                }
            }
        }
        Shape::Image(..) => {
            // TODO image binning/aggregation
            err::todoval()
        }
    }
}

@@ -142,6 +142,10 @@ where
                }
            }
        }
        Shape::Image(..) => {
            // TODO needed for binning or json event retrieval
            err::todoval()
        }
    }
}
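Both hunks above add a `Shape::Image(..)` arm that falls back to `err::todoval()`. Because matches on `Shape` are exhaustive, introducing the `Image` variant forces every call site to take a position, even if only with a placeholder. A self-contained sketch of that pattern; the enum and `todoval` here are stand-ins for `netpod::Shape` and `err::todoval`:

    // Sketch of the exhaustive-match pattern used above. Shape stands in for
    // netpod::Shape; todoval is a panicking placeholder like err::todoval().
    enum Shape {
        Scalar,
        Wave(u32),
        Image(u32, u32),
    }

    fn todoval<T>() -> T {
        todo!("image binning/aggregation not implemented yet")
    }

    fn bin_events(shape: &Shape) -> usize {
        match shape {
            Shape::Scalar => 1,
            Shape::Wave(n) => *n as usize,
            // The compiler forces this arm to exist; the commit fills it with
            // a placeholder until image handling is implemented.
            Shape::Image(..) => todoval(),
        }
    }

    fn main() {
        println!("{}", bin_events(&Shape::Wave(16)));
    }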
@@ -90,103 +90,120 @@ impl Stream for EventChunkerMultifile {
    type Item = Result<StreamItem<RangeCompletableItem<EventFull>>, Error>;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
        use Poll::*;
        'outer: loop {
            break if self.completed {
                panic!("EventBlobsComplete poll_next on completed");
            } else if self.errored {
                self.completed = true;
                return Ready(None);
            } else if self.data_completed {
                self.completed = true;
                return Ready(None);
            } else {
                match &mut self.evs {
                    Some(evs) => match evs.poll_next_unpin(cx) {
                        Ready(Some(k)) => Ready(Some(k)),
                        Ready(None) => {
                            self.seen_before_range_count += evs.seen_before_range_count();
                            self.evs = None;
                            continue 'outer;
                        }
                        Pending => Pending,
                    },
                    None => match self.file_chan.poll_next_unpin(cx) {
                        Ready(Some(k)) => match k {
                            Ok(ofs) => {
                                self.files_count += ofs.files.len() as u32;
                                if ofs.files.len() == 1 {
                                    let mut ofs = ofs;
                                    let file = ofs.files.pop().unwrap();
                                    let path = file.path;
                                    let item = LogItem::quick(Level::INFO, format!("handle OFS {:?}", ofs));
                                    match file.file {
                                        Some(file) => {
                                            let inp = Box::pin(file_content_stream(file, self.buffer_size as usize));
                                            let chunker = EventChunker::from_event_boundary(
                                                inp,
                                                self.channel_config.clone(),
                                                self.range.clone(),
                                                self.event_chunker_conf.clone(),
                                                path,
                                                self.max_ts.clone(),
                                                self.expand,
                                            );
                                            self.evs = Some(Box::pin(chunker));
                                        }
                                        None => {}
                                    }
                                    Ready(Some(Ok(StreamItem::Log(item))))
                                } else if ofs.files.len() > 1 {
                                    let item = LogItem::quick(Level::INFO, format!("handle OFS MULTIPLE {:?}", ofs));
                                    let mut chunkers = vec![];
                                    for of in ofs.files {
                                        if let Some(file) = of.file {
                                            let inp = Box::pin(file_content_stream(file, self.buffer_size as usize));
                                            let chunker = EventChunker::from_event_boundary(
                                                inp,
                                                self.channel_config.clone(),
                                                self.range.clone(),
                                                self.event_chunker_conf.clone(),
                                                of.path,
                                                self.max_ts.clone(),
                                                self.expand,
                                            );
                                            chunkers.push(chunker);
                                        }
                                    }
                                    let merged = MergedBlobsStream::new(chunkers);
                                    self.evs = Some(Box::pin(merged));
                                    Ready(Some(Ok(StreamItem::Log(item))))
                                } else {
                                    let item = LogItem::quick(Level::INFO, format!("handle OFS {:?} NO FILES", ofs));
                                    Ready(Some(Ok(StreamItem::Log(item))))
        let span1 = span!(Level::INFO, "EventChunkerMultifile", desc = tracing::field::Empty);
        span1.record("desc", &"");
        span1.in_scope(|| {
            use Poll::*;
            'outer: loop {
                break if self.completed {
                    panic!("EventBlobsComplete poll_next on completed");
                } else if self.errored {
                    self.completed = true;
                    return Ready(None);
                } else if self.data_completed {
                    self.completed = true;
                    return Ready(None);
                } else {
                    match &mut self.evs {
                        Some(evs) => match evs.poll_next_unpin(cx) {
                            Ready(Some(k)) => {
                                if let Ok(StreamItem::DataItem(RangeCompletableItem::Data(h))) = &k {
                                    info!("EventChunkerMultifile emit {} events", h.tss.len());
                                }
                                Ready(Some(k))
                            }
                            Err(e) => {
                                self.errored = true;
                                Ready(Some(Err(e)))
                            Ready(None) => {
                                self.seen_before_range_count += evs.seen_before_range_count();
                                self.evs = None;
                                continue 'outer;
                            }
                            Pending => Pending,
                        },
                        Ready(None) => {
                            self.data_completed = true;
                            let item = LogItem::quick(
                                Level::INFO,
                                format!(
                                    "EventBlobsComplete used {} datafiles beg {} end {} node_ix {}",
                                    self.files_count,
                                    self.range.beg / SEC,
                                    self.range.end / SEC,
                                    self.node_ix
                                ),
                            );
                            Ready(Some(Ok(StreamItem::Log(item))))
                        }
                        Pending => Pending,
                        },
                        }
                        };
                        }
                        None => match self.file_chan.poll_next_unpin(cx) {
                            Ready(Some(k)) => match k {
                                Ok(ofs) => {
                                    self.files_count += ofs.files.len() as u32;
                                    if ofs.files.len() == 1 {
                                        let mut ofs = ofs;
                                        let file = ofs.files.pop().unwrap();
                                        let path = file.path;
                                        let msg = format!("handle OFS {:?}", ofs);
                                        info!("{}", msg);
                                        let item = LogItem::quick(Level::INFO, msg);
                                        match file.file {
                                            Some(file) => {
                                                let inp =
                                                    Box::pin(file_content_stream(file, self.buffer_size as usize));
                                                let chunker = EventChunker::from_event_boundary(
                                                    inp,
                                                    self.channel_config.clone(),
                                                    self.range.clone(),
                                                    self.event_chunker_conf.clone(),
                                                    path,
                                                    self.max_ts.clone(),
                                                    self.expand,
                                                );
                                                self.evs = Some(Box::pin(chunker));
                                            }
                                            None => {}
                                        }
                                        Ready(Some(Ok(StreamItem::Log(item))))
                                    } else if ofs.files.len() > 1 {
                                        let msg = format!("handle OFS MULTIPLE {:?}", ofs);
                                        warn!("{}", msg);
                                        let item = LogItem::quick(Level::INFO, msg);
                                        let mut chunkers = vec![];
                                        for of in ofs.files {
                                            if let Some(file) = of.file {
                                                let inp =
                                                    Box::pin(file_content_stream(file, self.buffer_size as usize));
                                                let chunker = EventChunker::from_event_boundary(
                                                    inp,
                                                    self.channel_config.clone(),
                                                    self.range.clone(),
                                                    self.event_chunker_conf.clone(),
                                                    of.path,
                                                    self.max_ts.clone(),
                                                    self.expand,
                                                );
                                                chunkers.push(chunker);
                                            }
                                        }
                                        let merged = MergedBlobsStream::new(chunkers);
                                        self.evs = Some(Box::pin(merged));
                                        Ready(Some(Ok(StreamItem::Log(item))))
                                    } else {
                                        let msg = format!("handle OFS {:?} NO FILES", ofs);
                                        info!("{}", msg);
                                        let item = LogItem::quick(Level::INFO, msg);
                                        Ready(Some(Ok(StreamItem::Log(item))))
                                    }
                                }
                                Err(e) => {
                                    self.errored = true;
                                    Ready(Some(Err(e)))
                                }
                            },
                            Ready(None) => {
                                self.data_completed = true;
                                let item = LogItem::quick(
                                    Level::INFO,
                                    format!(
                                        "EventBlobsComplete used {} datafiles beg {} end {} node_ix {}",
                                        self.files_count,
                                        self.range.beg / SEC,
                                        self.range.end / SEC,
                                        self.node_ix
                                    ),
                                );
                                Ready(Some(Ok(StreamItem::Log(item))))
                            }
                            Pending => Pending,
                        },
                    }
                };
            }
        })
    }
}
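The rewritten `poll_next` above wraps its whole body in a manually constructed span, records the `desc` field that was declared `Empty`, and enters the span with `in_scope`, so every `info!`/`warn!` emitted while polling carries the stream's context. A compilable sketch of the same shape on a trivial stream; `CountStream` is a stand-in, while the span calls mirror the diff:

    // Sketch of the span-per-poll pattern used in the new poll_next above.
    use std::pin::Pin;
    use std::task::{Context, Poll};
    use futures_core::Stream;
    use tracing::{info, span, Level};

    struct CountStream {
        n: u32,
    }

    impl Stream for CountStream {
        type Item = u32;

        fn poll_next(mut self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Option<Self::Item>> {
            let span1 = span!(Level::INFO, "CountStream", desc = tracing::field::Empty);
            span1.record("desc", &"demo");
            // Everything inside the closure logs with the span context attached.
            span1.in_scope(|| {
                if self.n == 0 {
                    Poll::Ready(None)
                } else {
                    self.n -= 1;
                    info!("emit one item");
                    Poll::Ready(Some(self.n))
                }
            })
        }
    }

    #[tokio::main]
    async fn main() {
        use futures_util::StreamExt;
        let mut s = CountStream { n: 3 };
        while let Some(x) = s.next().await {
            println!("got {}", x);
        }
    }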
@@ -10,6 +10,7 @@ use items::{
use netpod::log::*;
use netpod::timeunits::SEC;
use netpod::{ByteSize, ChannelConfig, EventDataReadStats, NanoRange, ScalarType, Shape};
use parse::channelconfig::CompressionMethod;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::path::PathBuf;
use std::pin::Pin;

@@ -155,9 +156,10 @@ impl EventChunker {
            }
        }
        DataFileState::Event => {
            let p0 = 0;
            let mut sl = std::io::Cursor::new(buf.as_ref());
            let len = sl.read_i32::<BE>().unwrap();
            if len < 20 || len > 1024 * 1024 * 10 {
            if len < 20 || len > 1024 * 1024 * 20 {
                Err(Error::with_msg("unexpected large event chunk"))?;
            }
            let len = len as u32;

@@ -256,6 +258,8 @@ impl EventChunker {
            if is_shaped {
                if shape_dim == 1 {
                    Shape::Wave(shape_lens[0])
                } else if shape_dim == 2 {
                    Shape::Image(shape_lens[0], shape_lens[1])
                } else {
                    err::todoval()
                }

@@ -263,27 +267,38 @@ impl EventChunker {
                    Shape::Scalar
                }
            };
            let comp_this = if is_compressed {
                if compression_method == 0 {
                    Some(CompressionMethod::BitshuffleLZ4)
                } else {
                    err::todoval()
                }
            } else {
                None
            };
            let p1 = sl.position();
            let k1 = len as u64 - (p1 - p0) - 4;
            if is_compressed {
                //debug!("event ts {} is_compressed {}", ts, is_compressed);
                let value_bytes = sl.read_u64::<BE>().unwrap();
                let block_size = sl.read_u32::<BE>().unwrap();
                let p1 = sl.position() as u32;
                let k1 = len as u32 - p1 - 4;
                //debug!("event len {} ts {} is_compressed {} shape_dim {} len-dim-0 {} value_bytes {} block_size {}", len, ts, is_compressed, shape_dim, shape_lens[0], value_bytes, block_size);
                assert!(value_bytes < 1024 * 256);
                assert!(block_size < 1024 * 32);
                match self.channel_config.shape {
                    Shape::Scalar => {
                        assert!(value_bytes < 1024 * 1);
                    }
                    Shape::Wave(_) => {
                        assert!(value_bytes < 1024 * 64);
                    }
                    Shape::Image(_, _) => {
                        assert!(value_bytes < 1024 * 1024 * 20);
                    }
                }
                assert!(block_size <= 1024 * 32);
                let type_size = scalar_type.bytes() as u32;
                let ele_count = value_bytes / type_size as u64;
                let ele_size = type_size;
                match self.channel_config.shape {
                    Shape::Wave(dim1count) => {
                        if dim1count != ele_count as u32 {
                            Err(Error::with_msg(format!(
                                "ChannelConfig expects {:?} but event has {:?}",
                                self.channel_config.shape, ele_count,
                            )))?;
                        }
                    }
                    Shape::Scalar => {
                        if is_array {
                            Err(Error::with_msg(format!(

@@ -291,28 +306,48 @@ impl EventChunker {
                            )))?;
                        }
                    }
                    Shape::Wave(dim1count) => {
                        if dim1count != ele_count as u32 {
                            Err(Error::with_msg(format!(
                                "ChannelConfig expects {:?} but event has ele_count {}",
                                self.channel_config.shape, ele_count,
                            )))?;
                        }
                    }
                    Shape::Image(n1, n2) => {
                        let nt = n1 as usize * n2 as usize;
                        if nt != ele_count as usize {
                            Err(Error::with_msg(format!(
                                "ChannelConfig expects {:?} but event has ele_count {}",
                                self.channel_config.shape, ele_count,
                            )))?;
                        }
                    }
                }
                let decomp_bytes = (type_size * ele_count as u32) as usize;
                let mut decomp = BytesMut::with_capacity(decomp_bytes);
                unsafe {
                    decomp.set_len(decomp_bytes);
                }
                // TODO limit the buf slice range
                match bitshuffle_decompress(
                    &buf.as_ref()[p1 as usize..],
                    &buf.as_ref()[(p1 as usize + 12)..(p1 as usize + k1 as usize)],
                    &mut decomp,
                    ele_count as usize,
                    ele_size as usize,
                    0,
                ) {
                    Ok(c1) => {
                        assert!(c1 as u32 == k1);
                        assert!(c1 as u64 + 12 == k1);
                        ret.add_event(
                            ts,
                            pulse,
                            buf.as_ref()[(p1 as usize)..(p1 as usize + k1 as usize)].to_vec(),
                            Some(decomp),
                            ScalarType::from_dtype_index(type_index)?,
                            is_big_endian,
                            shape_this,
                            comp_this,
                        );
                    }
                    Err(e) => {

@@ -320,7 +355,6 @@ impl EventChunker {
                    }
                };
            } else {
                let p1 = sl.position();
                if len < p1 as u32 + 4 {
                    let msg = format!("uncomp len: {} p1: {}", len, p1);
                    Err(Error::with_msg(msg))?;

@@ -330,10 +364,12 @@ impl EventChunker {
                ret.add_event(
                    ts,
                    pulse,
                    buf.as_ref()[(p1 as usize)..(p1 as usize + k1 as usize)].to_vec(),
                    Some(decomp),
                    ScalarType::from_dtype_index(type_index)?,
                    is_big_endian,
                    shape_this,
                    comp_this,
                );
            }
            buf.advance(len as usize);

@@ -358,11 +394,13 @@ impl EventChunker {
pub struct EventFull {
    pub tss: Vec<u64>,
    pub pulses: Vec<u64>,
    pub blobs: Vec<Vec<u8>>,
    #[serde(serialize_with = "decomps_ser", deserialize_with = "decomps_de")]
    pub decomps: Vec<Option<BytesMut>>,
    pub scalar_types: Vec<ScalarType>,
    pub be: Vec<bool>,
    pub shapes: Vec<Shape>,
    pub comps: Vec<Option<CompressionMethod>>,
}

fn decomps_ser<S>(t: &Vec<Option<BytesMut>>, s: S) -> Result<S::Ok, S::Error>

@@ -403,10 +441,12 @@ impl EventFull {
        Self {
            tss: vec![],
            pulses: vec![],
            blobs: vec![],
            decomps: vec![],
            scalar_types: vec![],
            be: vec![],
            shapes: vec![],
            comps: vec![],
        }
    }

@@ -414,17 +454,21 @@ impl EventFull {
        &mut self,
        ts: u64,
        pulse: u64,
        blob: Vec<u8>,
        decomp: Option<BytesMut>,
        scalar_type: ScalarType,
        be: bool,
        shape: Shape,
        comp: Option<CompressionMethod>,
    ) {
        self.tss.push(ts);
        self.pulses.push(pulse);
        self.blobs.push(blob);
        self.decomps.push(decomp);
        self.scalar_types.push(scalar_type);
        self.be.push(be);
        self.shapes.push(shape);
        self.comps.push(comp);
    }
}

@@ -447,10 +491,12 @@ impl Appendable for EventFull {
    fn append(&mut self, src: &Self) {
        self.tss.extend_from_slice(&src.tss);
        self.pulses.extend_from_slice(&src.pulses);
        self.blobs.extend_from_slice(&src.blobs);
        self.decomps.extend_from_slice(&src.decomps);
        self.scalar_types.extend_from_slice(&src.scalar_types);
        self.be.extend_from_slice(&src.be);
        self.shapes.extend_from_slice(&src.shapes);
        self.comps.extend_from_slice(&src.comps);
    }
}

@@ -465,10 +511,12 @@ impl PushableIndex for EventFull {
    fn push_index(&mut self, src: &Self, ix: usize) {
        self.tss.push(src.tss[ix]);
        self.pulses.push(src.pulses[ix]);
        self.blobs.push(src.blobs[ix].clone());
        self.decomps.push(src.decomps[ix].clone());
        self.scalar_types.push(src.scalar_types[ix].clone());
        self.be.push(src.be[ix]);
        self.shapes.push(src.shapes[ix].clone());
        self.comps.push(src.comps[ix].clone());
    }
}

@@ -523,16 +571,43 @@ impl Stream for EventChunker {
        // TODO gather stats about this:
        self.inp.put_back(fcr);
    }
    if self.need_min > 1024 * 8 {
        let msg = format!("spurious EventChunker asks for need_min {}", self.need_min);
        self.errored = true;
        Ready(Some(Err(Error::with_msg(msg))))
    } else {
        let x = self.need_min;
        self.inp.set_need_min(x);
        let ret = StreamItem::DataItem(RangeCompletableItem::Data(res.events));
        Ready(Some(Ok(ret)))
    match self.channel_config.shape {
        Shape::Scalar => {
            if self.need_min > 1024 * 8 {
                let msg =
                    format!("spurious EventChunker asks for need_min {}", self.need_min);
                self.errored = true;
                return Ready(Some(Err(Error::with_msg(msg))));
            }
        }
        Shape::Wave(_) => {
            if self.need_min > 1024 * 32 {
                let msg =
                    format!("spurious EventChunker asks for need_min {}", self.need_min);
                self.errored = true;
                return Ready(Some(Err(Error::with_msg(msg))));
            }
        }
        Shape::Image(_, _) => {
            if self.need_min > 1024 * 1024 * 20 {
                let msg =
                    format!("spurious EventChunker asks for need_min {}", self.need_min);
                self.errored = true;
                return Ready(Some(Err(Error::with_msg(msg))));
            }
        }
    }
    let x = self.need_min;
    self.inp.set_need_min(x);
    {
        info!(
            "EventChunker emits {} events tss {:?}",
            res.events.len(),
            res.events.tss
        );
    };
    let ret = StreamItem::DataItem(RangeCompletableItem::Data(res.events));
    Ready(Some(Ok(ret)))
}
Err(e) => {
    self.errored = true;
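In the compressed branch above, each event payload begins with a 12-byte header: an 8-byte big-endian decompressed size (`value_bytes`) followed by a 4-byte bitshuffle block size. That is why the compressed slice starts at `p1 + 12` and the assertion checks `c1 + 12 == k1`. A standalone sketch of just that header parse; the buffer contents are invented for illustration:

    // Sketch: parse the 12-byte header that precedes the bitshuffle-LZ4
    // payload, as read in the chunker above (both fields big-endian).
    use byteorder::{ReadBytesExt, BE};
    use std::io::Cursor;

    fn main() {
        let mut buf = vec![];
        buf.extend_from_slice(&64u64.to_be_bytes()); // decompressed value bytes
        buf.extend_from_slice(&32u32.to_be_bytes()); // bitshuffle block size
        buf.extend_from_slice(&[0u8; 64]); // compressed payload would follow

        let mut sl = Cursor::new(&buf[..]);
        let value_bytes = sl.read_u64::<BE>().unwrap();
        let block_size = sl.read_u32::<BE>().unwrap();
        // The compressed payload starts 12 bytes in, matching the `p1 + 12`
        // slice offset used by the chunker.
        assert_eq!(sl.position(), 12);
        println!("value_bytes {} block_size {}", value_bytes, block_size);
    }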
@@ -129,15 +129,12 @@ where
    (Some(None), buf, wp)
} else {
    if len > 1024 * 1024 * 50 {
        error!("InMemoryFrameAsyncReadStream too long len {}", len);
        return (
            Some(Some(Err(Error::with_msg(format!(
                "InMemoryFrameAsyncReadStream tryparse huge buffer len {} self.inp_bytes_consumed {}",
                len, self.inp_bytes_consumed
            ))))),
            buf,
            wp,
        let msg = format!(
            "InMemoryFrameAsyncReadStream tryparse huge buffer len {} self.inp_bytes_consumed {}",
            len, self.inp_bytes_consumed
        );
        error!("{}", msg);
        return (Some(Some(Err(Error::with_msg(msg)))), buf, wp);
    } else if len > 1024 * 1024 * 1 {
        // TODO
        //warn!("InMemoryFrameAsyncReadStream big len received {}", len);
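The hunk above replaces an inline `format!` inside the return expression with a message built once, logged, and then moved into the error, so the log line and the error text cannot drift apart. A sketch of the same pattern with a stand-in error type:

    // Sketch of the refactor above: build the message once, log it, reuse it.
    // Error::with_msg is a stand-in for the err crate's constructor.
    #[derive(Debug)]
    struct Error {
        msg: String,
    }

    impl Error {
        fn with_msg<S: Into<String>>(s: S) -> Self {
            Error { msg: s.into() }
        }
    }

    fn check_len(len: usize, consumed: u64) -> Result<(), Error> {
        if len > 1024 * 1024 * 50 {
            let msg = format!("tryparse huge buffer len {} consumed {}", len, consumed);
            eprintln!("{}", msg); // error!("{}", msg) in the real code
            return Err(Error::with_msg(msg));
        }
        Ok(())
    }

    fn main() {
        println!("{:?}", check_len(1024 * 1024 * 60, 0));
    }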
@@ -249,6 +249,10 @@ async fn gen_config(
            buf.put_i8(1);
            buf.put_i32(k as i32);
        }
        Shape::Image(_, _) => {
            // TODO test data
            err::todoval()
        }
    }
    let len = buf.len() - p3 - 4;
    buf.as_mut()[p3..].as_mut().put_i32(len as i32);
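`gen_config` writes length-prefixed test records: it reserves a 4-byte slot, appends the body, then back-patches the length (bytes after the slot, minus the slot's own 4 bytes) into the slot; `Shape::Image` still hits `err::todoval()` because no image test data exists yet. A runnable sketch of the back-patch using the bytes crate; the record body here is made up:

    // Sketch of the length back-patch used by gen_config above.
    use bytes::{BufMut, BytesMut};

    fn main() {
        let mut buf = BytesMut::new();
        let p3 = buf.len(); // remember where the length slot starts
        buf.put_i32(0); // placeholder length
        buf.put_i8(1);
        buf.put_i32(42);
        // Length convention from the diff: bytes after the slot, minus 4.
        let len = buf.len() - p3 - 4;
        buf.as_mut()[p3..].as_mut().put_i32(len as i32);
        assert_eq!(buf[..4], (len as i32).to_be_bytes()[..]);
    }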
@@ -503,6 +503,9 @@ impl ChannelConfigExt for ChannelConfig {
    Shape::Wave(_) => {
        ret |= SHAPE;
    }
    Shape::Image(_, _) => {
        ret |= SHAPE;
    }
}
if self.byte_order.is_be() {
    ret |= BIG_ENDIAN;
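In the flag hunk above, `Shape::Image` sets the same `SHAPE` bit that `Wave` already set, and big-endian byte order ORs in `BIG_ENDIAN`; the config word is an ordinary bitmask. A sketch with stand-in constants; the real bit values live in the surrounding module:

    // Sketch of the flag word assembled above; constants are illustrative.
    const SHAPE: u32 = 0x01;
    const BIG_ENDIAN: u32 = 0x02;

    enum Shape {
        Scalar,
        Wave(u32),
        Image(u32, u32),
    }

    fn flags(shape: &Shape, is_be: bool) -> u32 {
        let mut ret = 0;
        match shape {
            Shape::Scalar => {}
            // Wave and Image both mark the channel as shaped.
            Shape::Wave(_) | Shape::Image(_, _) => ret |= SHAPE,
        }
        if is_be {
            ret |= BIG_ENDIAN;
        }
        ret
    }

    fn main() {
        println!("{:#x}", flags(&Shape::Image(2, 2), true));
    }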
@@ -62,7 +62,7 @@ where
    range_complete_observed_all: false,
    range_complete_observed_all_emitted: false,
    data_emit_complete: false,
    batch_size: 64,
    batch_size: 1,
    logitems: VecDeque::new(),
    event_data_read_stats_items: VecDeque::new(),
}
@@ -27,6 +27,7 @@ impl MergedBlobsFromRemotes {
    pub fn new(evq: RawEventsQuery, perf_opts: PerfOpts, cluster: Cluster) -> Self {
        info!("MergedBlobsFromRemotes evq {:?}", evq);
        let mut tcp_establish_futs = vec![];

        for node in &cluster.nodes {
            let f = x_processed_event_blobs_stream_from_node(evq.clone(), perf_opts.clone(), node.clone());
            let f: T002<EventFull> = Box::pin(f);
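`MergedBlobsFromRemotes::new` now logs the incoming query, then eagerly creates one boxed event-blob stream future per cluster node before merging them. A sketch of that per-node fan-out; `Node` and `connect_node` are stand-ins for the cluster types and `x_processed_event_blobs_stream_from_node`:

    // Sketch of the per-node fan-out in MergedBlobsFromRemotes::new.
    #[derive(Clone, Debug)]
    struct Node {
        host: String,
    }

    async fn connect_node(node: Node) -> String {
        format!("stream from {}", node.host)
    }

    #[tokio::main]
    async fn main() {
        let nodes = vec![Node { host: "n0".into() }, Node { host: "n1".into() }];
        let mut tcp_establish_futs = vec![];
        for node in &nodes {
            // One boxed connect future per node, established eagerly.
            let f = Box::pin(connect_node(node.clone()));
            tcp_establish_futs.push(f);
        }
        for f in tcp_establish_futs {
            println!("{}", f.await);
        }
    }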
@@ -58,7 +58,7 @@ where
    range_complete_observed_all: false,
    range_complete_observed_all_emitted: false,
    data_emit_complete: false,
    batch_size: 64,
    batch_size: 1,
    logitems: VecDeque::new(),
    event_data_read_stats_items: VecDeque::new(),
}

@@ -188,6 +188,13 @@ where
    let emp = I::empty();
    let ret = std::mem::replace(&mut self.batch, emp);
    self.data_emit_complete = true;
    {
        let mut aa = vec![];
        for ii in 0..ret.len() {
            aa.push(ret.ts(ii));
        }
        info!("MergedBlobsStream A emits {} events tss {:?}", ret.len(), aa);
    };
    Ready(Some(Ok(StreamItem::DataItem(RangeCompletableItem::Data(ret)))))
} else {
    self.data_emit_complete = true;

@@ -220,6 +227,13 @@ where
    if self.batch.len() >= self.batch_size {
        let emp = I::empty();
        let ret = std::mem::replace(&mut self.batch, emp);
        {
            let mut aa = vec![];
            for ii in 0..ret.len() {
                aa.push(ret.ts(ii));
            }
            info!("MergedBlobsStream B emits {} events tss {:?}", ret.len(), aa);
        };
        Ready(Some(Ok(StreamItem::DataItem(RangeCompletableItem::Data(ret)))))
    } else {
        continue 'outer;
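Before emitting a batch, the merged stream now swaps the accumulated batch out with `std::mem::replace` and collects the batch's timestamps into a vector purely for the `info!` line; together with `batch_size` dropping from 64 to 1, this traces every event while debugging. A sketch of that swap-and-log step; `Batch` stands in for the generic container `I`:

    // Sketch of the batch swap-and-log pattern added above.
    struct Batch {
        tss: Vec<u64>,
    }

    impl Batch {
        fn empty() -> Self {
            Batch { tss: vec![] }
        }
        fn len(&self) -> usize {
            self.tss.len()
        }
        fn ts(&self, ix: usize) -> u64 {
            self.tss[ix]
        }
    }

    fn emit(batch: &mut Batch) -> Batch {
        // Swap the accumulated batch out, leaving an empty one behind.
        let ret = std::mem::replace(batch, Batch::empty());
        let mut aa = vec![];
        for ii in 0..ret.len() {
            aa.push(ret.ts(ii));
        }
        println!("emits {} events tss {:?}", ret.len(), aa); // info! in the real code
        ret
    }

    fn main() {
        let mut b = Batch { tss: vec![1, 2, 3] };
        emit(&mut b);
        assert_eq!(b.len(), 0);
    }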
@@ -3,15 +3,16 @@ use crate::decode::{
    EventsDecodedStream, LittleEndian, NumFromBytes,
};
use crate::eventblobs::EventChunkerMultifile;
use crate::eventchunker::EventChunkerConf;
use crate::eventchunker::{EventChunkerConf, EventFull};
use err::Error;
use futures_core::Stream;
use futures_util::StreamExt;
use items::numops::{BoolNum, NumOps};
use items::{EventsNodeProcessor, Framable, RangeCompletableItem, Sitemty, StreamItem};
use netpod::query::RawEventsQuery;
use netpod::{AggKind, ByteOrder, ByteSize, NodeConfigCached, ScalarType, Shape};
use parse::channelconfig::{extract_matching_config_entry, read_local_config, MatchingConfigEntry};
use netpod::{AggKind, ByteOrder, ByteSize, Channel, NanoRange, NodeConfigCached, ScalarType, Shape};

use parse::channelconfig::{extract_matching_config_entry, read_local_config, ConfigEntry, MatchingConfigEntry};
use std::pin::Pin;

fn make_num_pipeline_stream_evs<NTY, END, EVS, ENP>(

@@ -96,6 +97,10 @@ macro_rules! pipe3 {
            $event_blobs
        )
    }
    Shape::Image(_, _) => {
        // TODO not needed for python data api v3 protocol, but later for api4.
        err::todoval()
    }
}
};
}

@@ -193,6 +198,94 @@ pub async fn make_event_pipe(
    Ok(pipe)
}

pub async fn get_applicable_entry(
    range: &NanoRange,
    channel: Channel,
    node_config: &NodeConfigCached,
) -> Result<ConfigEntry, Error> {
    let channel_config = read_local_config(channel, node_config.node.clone()).await?;
    let entry_res = match extract_matching_config_entry(range, &channel_config) {
        Ok(k) => k,
        Err(e) => return Err(e)?,
    };
    let entry = match entry_res {
        MatchingConfigEntry::None => return Err(Error::with_msg("no config entry found"))?,
        MatchingConfigEntry::Multiple => return Err(Error::with_msg("multiple config entries found"))?,
        MatchingConfigEntry::Entry(entry) => entry,
    };
    Ok(entry.clone())
}

pub fn make_local_event_blobs_stream(
    range: NanoRange,
    channel: Channel,
    entry: &ConfigEntry,
    expand: bool,
    event_chunker_conf: EventChunkerConf,
    disk_io_buffer_size: usize,
    node_config: &NodeConfigCached,
) -> Result<EventChunkerMultifile, Error> {
    let shape = match entry.to_shape() {
        Ok(k) => k,
        Err(e) => return Err(e)?,
    };
    let channel_config = netpod::ChannelConfig {
        channel,
        keyspace: entry.ks as u8,
        time_bin_size: entry.bs,
        shape: shape,
        scalar_type: entry.scalar_type.clone(),
        byte_order: entry.byte_order.clone(),
        array: entry.is_array,
        compression: entry.is_compressed,
    };
    let event_blobs = EventChunkerMultifile::new(
        range,
        channel_config.clone(),
        node_config.node.clone(),
        node_config.ix,
        disk_io_buffer_size,
        event_chunker_conf,
        expand,
    );
    Ok(event_blobs)
}

pub fn make_remote_event_blobs_stream(
    range: NanoRange,
    channel: Channel,
    entry: &ConfigEntry,
    expand: bool,
    event_chunker_conf: EventChunkerConf,
    disk_io_buffer_size: usize,
    node_config: &NodeConfigCached,
) -> Result<impl Stream<Item = Sitemty<EventFull>>, Error> {
    let shape = match entry.to_shape() {
        Ok(k) => k,
        Err(e) => return Err(e)?,
    };
    let channel_config = netpod::ChannelConfig {
        channel,
        keyspace: entry.ks as u8,
        time_bin_size: entry.bs,
        shape: shape,
        scalar_type: entry.scalar_type.clone(),
        byte_order: entry.byte_order.clone(),
        array: entry.is_array,
        compression: entry.is_compressed,
    };
    let event_blobs = EventChunkerMultifile::new(
        range,
        channel_config.clone(),
        node_config.node.clone(),
        node_config.ix,
        disk_io_buffer_size,
        event_chunker_conf,
        expand,
    );
    Ok(event_blobs)
}

pub async fn make_event_blobs_pipe(
    evq: &RawEventsQuery,
    node_config: &NodeConfigCached,

@@ -203,53 +296,40 @@ pub async fn make_event_blobs_pipe(
        Err(e) => return Err(e)?,
    }
}
let expand = evq.agg_kind.need_expand();
let range = &evq.range;
let channel_config = match read_local_config(evq.channel.clone(), node_config.node.clone()).await {
    Ok(k) => k,
    Err(e) => {
        if e.msg().contains("ErrorKind::NotFound") {
            let s = futures_util::stream::empty();
            return Ok(Box::pin(s));
        } else {
            return Err(e)?;
        }
    }
};
let entry_res = match extract_matching_config_entry(range, &channel_config) {
    Ok(k) => k,
    Err(e) => return Err(e)?,
};
let entry = match entry_res {
    MatchingConfigEntry::None => return Err(Error::with_msg("no config entry found"))?,
    MatchingConfigEntry::Multiple => return Err(Error::with_msg("multiple config entries found"))?,
    MatchingConfigEntry::Entry(entry) => entry,
};
let shape = match entry.to_shape() {
    Ok(k) => k,
    Err(e) => return Err(e)?,
};
let channel_config = netpod::ChannelConfig {
    channel: evq.channel.clone(),
    keyspace: entry.ks as u8,
    time_bin_size: entry.bs,
    shape: shape,
    scalar_type: entry.scalar_type.clone(),
    byte_order: entry.byte_order.clone(),
    array: entry.is_array,
    compression: entry.is_compressed,
};
let entry = get_applicable_entry(&evq.range, evq.channel.clone(), node_config).await?;
let event_chunker_conf = EventChunkerConf::new(ByteSize::kb(1024));
let event_blobs = EventChunkerMultifile::new(
    range.clone(),
    channel_config.clone(),
    node_config.node.clone(),
    node_config.ix,
    evq.disk_io_buffer_size,
    event_chunker_conf,
    true,
);
let s = event_blobs.map(|item| Box::new(item) as Box<dyn Framable>);
let pipe: Pin<Box<dyn Stream<Item = Box<dyn Framable>> + Send>>;
pipe = Box::pin(s);
let pipe = if true {
    let event_blobs = make_remote_event_blobs_stream(
        range.clone(),
        evq.channel.clone(),
        &entry,
        expand,
        event_chunker_conf,
        evq.disk_io_buffer_size,
        node_config,
    )?;
    let s = event_blobs.map(|item| Box::new(item) as Box<dyn Framable>);
    //let s = tracing_futures::Instrumented::instrument(s, tracing::info_span!("make_event_blobs_pipe"));
    let pipe: Pin<Box<dyn Stream<Item = Box<dyn Framable>> + Send>>;
    pipe = Box::pin(s);
    pipe
} else {
    let event_blobs = make_local_event_blobs_stream(
        range.clone(),
        evq.channel.clone(),
        &entry,
        expand,
        event_chunker_conf,
        evq.disk_io_buffer_size,
        node_config,
    )?;
    let s = event_blobs.map(|item| Box::new(item) as Box<dyn Framable>);
    //let s = tracing_futures::Instrumented::instrument(s, tracing::info_span!("make_event_blobs_pipe"));
    let pipe: Pin<Box<dyn Stream<Item = Box<dyn Framable>> + Send>>;
    pipe = Box::pin(s);
    pipe
};
Ok(pipe)
}
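The final hunk replaces the inline config lookup and stream construction in `make_event_blobs_pipe` with the new helpers, currently hard-wired to the remote path via `if true`. A hypothetical call sequence using those helpers, assuming this crate's own types; it is not runnable outside the crate and only illustrates the intended order:

    // Hypothetical usage of the helpers introduced above (RawEventsQuery,
    // NodeConfigCached, ByteSize, EventChunkerConf are this crate's types).
    // Not part of the commit; shown only to illustrate the call order.
    async fn example(evq: &RawEventsQuery, node_config: &NodeConfigCached) -> Result<(), Error> {
        let entry = get_applicable_entry(&evq.range, evq.channel.clone(), node_config).await?;
        let event_chunker_conf = EventChunkerConf::new(ByteSize::kb(1024));
        let expand = evq.agg_kind.need_expand();
        let blobs = make_remote_event_blobs_stream(
            evq.range.clone(),
            evq.channel.clone(),
            &entry,
            expand,
            event_chunker_conf,
            evq.disk_io_buffer_size,
            node_config,
        )?;
        // blobs implements Stream<Item = Sitemty<EventFull>> and can be boxed
        // and framed exactly as make_event_blobs_pipe does.
        drop(blobs);
        Ok(())
    }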