Proxy events and binned queries, and serve sf-databuffer and hipa-archive

Dominik Werder
2021-06-21 21:45:40 +02:00
parent e7891fee13
commit c8c81470fc
16 changed files with 354 additions and 172 deletions

View File

@@ -11,11 +11,13 @@ http = "0.2"
url = "2.2"
tokio = { version = "1.5.0", features = ["rt-multi-thread", "io-util", "net", "time", "sync", "fs"] }
hyper = { version = "0.14", features = ["http1", "http2", "client", "server", "tcp", "stream"] }
hyper-tls = { version="0.5.0" }
bytes = "1.0.1"
futures-core = "0.3.14"
futures-util = "0.3.14"
tracing = "0.1.25"
async-channel = "1.6"
itertools = "0.10.1"
err = { path = "../err" }
netpod = { path = "../netpod" }
dbconn = { path = "../dbconn" }

View File

@@ -3,6 +3,7 @@ use err::Error;
use futures_util::{select, FutureExt};
use http::{Method, StatusCode};
use hyper::{Body, Client, Request, Response};
use hyper_tls::HttpsConnector;
use netpod::{Node, NodeConfigCached};
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
@@ -176,7 +177,9 @@ where
let spawned: Vec<_> = urls
.into_iter()
.map(move |url| {
let req = Request::builder().method(method.clone()).uri(url.as_str());
let url_str = url.as_str();
let is_tls = url_str.starts_with("https://");
let req = Request::builder().method(method.clone()).uri(url_str);
//let req = req.header("x-log-from-node-name", format!("{}", node_config.node_config.name));
let req = req.header(http::header::ACCEPT, "application/json");
let req = req.body(Body::empty());
@@ -185,7 +188,17 @@ where
_ = sleep(timeout).fuse() => {
Err(Error::with_msg("timeout"))
}
res = Client::new().request(req?).fuse() => Ok(nt(res?).await?)
res = {
if is_tls {
let https = HttpsConnector::new();
let client = Client::builder().build::<_, hyper::Body>(https);
client.request(req?).fuse()
}
else {
let client = Client::new();
client.request(req?).fuse()
}
} => Ok(nt(res?).await?)
}
});
(url, task)
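
Side note, not part of the diff above: hyper-tls's HttpsConnector wraps an HttpConnector and, unless https_only is enabled, it also accepts plain http URIs, so the is_tls branch could in principle be collapsed into one client. A minimal sketch under that assumption:

use hyper::client::HttpConnector;
use hyper::{Body, Client};
use hyper_tls::HttpsConnector;

// One client for both schemes: HttpsConnector defaults to https_only(false),
// so it dials plain http targets as well as https ones.
fn https_capable_client() -> Client<HttpsConnector<HttpConnector>> {
    Client::builder().build::<_, Body>(HttpsConnector::new())
}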

View File

@@ -2,7 +2,7 @@ use crate::gather::gather_get_json;
use bytes::Bytes;
use disk::binned::prebinned::pre_binned_bytes_for_http;
use disk::binned::query::{BinnedQuery, PreBinnedQuery};
use disk::events::{PlainEventsJsonQuery, PlainEventsQuery};
use disk::events::{PlainEventsBinaryQuery, PlainEventsJsonQuery};
use disk::raw::conn::events_service;
use err::Error;
use future::Future;
@@ -13,7 +13,7 @@ use hyper::service::{make_service_fn, service_fn};
use hyper::{server::Server, Body, Request, Response};
use net::SocketAddr;
use netpod::log::*;
use netpod::{AggKind, Channel, NodeConfigCached};
use netpod::{AggKind, Channel, FromUrl, NodeConfigCached};
use panic::{AssertUnwindSafe, UnwindSafe};
use pin::Pin;
use serde::{Deserialize, Serialize};
@@ -21,6 +21,7 @@ use std::{future, net, panic, pin, task};
use task::{Context, Poll};
use tracing::field::Empty;
use tracing::Instrument;
use url::Url;
pub mod api1;
pub mod gather;
@@ -150,15 +151,9 @@ async fn http_service_try(req: Request<Body>, node_config: &NodeConfigCached) ->
} else {
Ok(response(StatusCode::METHOD_NOT_ALLOWED).body(Body::empty())?)
}
} else if path == "/api/4/table_sizes" {
} else if path == "/api/4/events" {
if req.method() == Method::GET {
Ok(table_sizes(req, &node_config).await?)
} else {
Ok(response(StatusCode::METHOD_NOT_ALLOWED).body(Body::empty())?)
}
} else if path == "/api/4/random_channel" {
if req.method() == Method::GET {
Ok(random_channel(req, &node_config).await?)
Ok(plain_events(req, &node_config).await?)
} else {
Ok(response(StatusCode::METHOD_NOT_ALLOWED).body(Body::empty())?)
}
@@ -174,9 +169,15 @@ async fn http_service_try(req: Request<Body>, node_config: &NodeConfigCached) ->
} else {
Ok(response(StatusCode::METHOD_NOT_ALLOWED).body(Body::empty())?)
}
} else if path == "/api/4/events" {
} else if path == "/api/4/table_sizes" {
if req.method() == Method::GET {
Ok(plain_events(req, &node_config).await?)
Ok(table_sizes(req, &node_config).await?)
} else {
Ok(response(StatusCode::METHOD_NOT_ALLOWED).body(Body::empty())?)
}
} else if path == "/api/4/random_channel" {
if req.method() == Method::GET {
Ok(random_channel(req, &node_config).await?)
} else {
Ok(response(StatusCode::METHOD_NOT_ALLOWED).body(Body::empty())?)
}
@@ -331,7 +332,8 @@ where
async fn binned(req: Request<Body>, node_config: &NodeConfigCached) -> Result<Response<Body>, Error> {
let (head, _body) = req.into_parts();
let query = BinnedQuery::from_request(&head)?;
let url = Url::parse(&format!("dummy:{}", head.uri))?;
let query = BinnedQuery::from_url(&url)?;
match head.headers.get("accept") {
Some(v) if v == "application/octet-stream" => binned_binary(query, node_config).await,
Some(v) if v == "application/json" => binned_json(query, node_config).await,
@@ -398,6 +400,7 @@ async fn prebinned(req: Request<Body>, node_config: &NodeConfigCached) -> Result
}
async fn plain_events(req: Request<Body>, node_config: &NodeConfigCached) -> Result<Response<Body>, Error> {
info!("httpret plain_events headers: {:?}", req.headers());
let accept_def = "";
let accept = req
.headers()
@@ -414,7 +417,7 @@ async fn plain_events(req: Request<Body>, node_config: &NodeConfigCached) -> Res
async fn plain_events_binary(req: Request<Body>, node_config: &NodeConfigCached) -> Result<Response<Body>, Error> {
let (head, _body) = req.into_parts();
let query = PlainEventsQuery::from_request(&head)?;
let query = PlainEventsBinaryQuery::from_request(&head)?;
let op = disk::channelexec::PlainEvents::new(query.channel().clone(), query.range().clone(), node_config.clone());
let s = disk::channelexec::channel_exec(op, query.channel(), query.range(), AggKind::Plain, node_config).await?;
let s = s.map(|item| item.make_frame());
@@ -424,7 +427,7 @@ async fn plain_events_binary(req: Request<Body>, node_config: &NodeConfigCached)
async fn plain_events_json(req: Request<Body>, node_config: &NodeConfigCached) -> Result<Response<Body>, Error> {
let (head, _body) = req.into_parts();
let query = PlainEventsJsonQuery::from_request(&head)?;
let query = PlainEventsJsonQuery::from_request_head(&head)?;
let op = disk::channelexec::PlainEventsJson::new(
query.channel().clone(),
query.range().clone(),
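
The "dummy:" prefix introduced above exists because url::Url cannot parse the origin-form URI of an incoming server request (a bare path plus query string); prepending a throwaway scheme makes it an absolute URL. A small sketch of that workaround, with an illustrative path and parameter:

use url::Url;

// Parse an origin-form request URI such as "/api/4/binned?binCount=10"
// (path and parameter are illustrative) by prefixing a throwaway scheme,
// mirroring the change to binned() above.
fn parse_origin_form(uri: &str) -> Result<Url, url::ParseError> {
    Url::parse(&format!("dummy:{}", uri))
}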

View File

@@ -1,12 +1,20 @@
use crate::api1::{channels_config_v1, channels_list_v1, gather_json_2_v1, gather_json_v1, proxy_distribute_v1};
use crate::gather::gather_get_json_generic;
use crate::{proxy_mark, response, Cont};
use disk::binned::query::BinnedQuery;
use disk::events::PlainEventsJsonQuery;
use err::Error;
use http::{HeaderValue, StatusCode};
use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Request, Response, Server};
use itertools::Itertools;
use netpod::log::*;
use netpod::{ChannelSearchQuery, ChannelSearchResult, ProxyConfig};
use netpod::{
AppendToUrl, ChannelConfigQuery, ChannelSearchQuery, ChannelSearchResult, FromUrl, HasBackend, HasTimeout,
ProxyConfig,
};
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use std::future::Future;
use std::net::SocketAddr;
use std::pin::Pin;
@@ -52,12 +60,20 @@ async fn proxy_http_service_try(req: Request<Body>, proxy_config: &ProxyConfig)
Ok(channels_config_v1(req, proxy_config).await?)
} else if path == "/api/1/stats/version" {
Ok(gather_json_v1(req, "/stats/version").await?)
} else if path.starts_with("/api/1/stats/") {
} else if path == "/api/1/stats/" {
Ok(gather_json_v1(req, path).await?)
} else if path.starts_with("/api/1/gather/") {
Ok(gather_json_2_v1(req, "/api/1/gather/", proxy_config).await?)
} else if path.starts_with("/api/4/search/channel") {
} else if path == "/api/4/backends" {
Ok(backends(req, proxy_config).await?)
} else if path == "/api/4/search/channel" {
Ok(channel_search(req, proxy_config).await?)
} else if path == "/api/4/events" {
Ok(proxy_single_backend_query::<PlainEventsJsonQuery>(req, proxy_config).await?)
} else if path == "/api/4/binned" {
Ok(proxy_single_backend_query::<BinnedQuery>(req, proxy_config).await?)
} else if path == "/api/4/channel/config" {
Ok(proxy_single_backend_query::<ChannelConfigQuery>(req, proxy_config).await?)
} else if path.starts_with("/distribute") {
proxy_distribute_v1(req).await
} else {
@@ -70,13 +86,25 @@ async fn proxy_http_service_try(req: Request<Body>, proxy_config: &ProxyConfig)
}
}
#[derive(Serialize, Deserialize)]
pub struct BackendsResponse {
backends: Vec<String>,
}
pub async fn backends(_req: Request<Body>, proxy_config: &ProxyConfig) -> Result<Response<Body>, Error> {
let backends: Vec<_> = proxy_config.backends.iter().map(|k| k.name.to_string()).collect();
let res = BackendsResponse { backends };
let ret = response(StatusCode::OK).body(Body::from(serde_json::to_vec(&res)?))?;
Ok(ret)
}
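
For illustration only, this is the JSON shape the new /api/4/backends endpoint produces, using a local mirror of the struct above and the backend names mentioned in the commit title:

use serde::Serialize;

#[derive(Serialize)]
struct BackendsResponse {
    backends: Vec<String>,
}

fn main() {
    let res = BackendsResponse {
        backends: vec!["sf-databuffer".into(), "hipa-archive".into()],
    };
    // Prints: {"backends":["sf-databuffer","hipa-archive"]}
    println!("{}", serde_json::to_string(&res).unwrap());
}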
pub async fn channel_search(req: Request<Body>, proxy_config: &ProxyConfig) -> Result<Response<Body>, Error> {
let (head, _body) = req.into_parts();
match head.headers.get("accept") {
Some(v) => {
if v == "application/json" {
let query = ChannelSearchQuery::from_query_string(head.uri.query())?;
let urls: Vec<Result<Url, Error>> = proxy_config
let urls = proxy_config
.search_hosts
.iter()
.map(|sh| match Url::parse(&format!("{}/api/4/search/channel", sh)) {
@@ -86,18 +114,10 @@ pub async fn channel_search(req: Request<Body>, proxy_config: &ProxyConfig) -> R
}
Err(_e) => Err(Error::with_msg(format!("parse error for: {:?}", sh))),
})
.collect();
for u in &urls {
match u {
Ok(url) => {
info!("URL: {}", url.as_str());
}
Err(_) => {
return Err(Error::with_msg("url parse error"));
}
}
}
let urls: Vec<_> = urls.into_iter().map(Result::unwrap).collect();
.fold_ok(vec![], |mut a, x| {
a.push(x);
a
})?;
let nt = |res| {
let fut = async {
let body = hyper::body::to_bytes(res).await?;
@@ -136,62 +156,54 @@ pub async fn channel_search(req: Request<Body>, proxy_config: &ProxyConfig) -> R
}
}
pub async fn events(req: Request<Body>, proxy_config: &ProxyConfig) -> Result<Response<Body>, Error> {
pub async fn proxy_single_backend_query<QT>(
req: Request<Body>,
proxy_config: &ProxyConfig,
) -> Result<Response<Body>, Error>
where
QT: FromUrl + HasBackend + AppendToUrl + HasTimeout,
{
let (head, _body) = req.into_parts();
match head.headers.get("accept") {
Some(v) => {
if v == "application/json" {
Url::parse(&format!("{}", head.uri))?;
let query = ChannelSearchQuery::from_query_string(head.uri.query())?;
let urls: Vec<Result<Url, Error>> = proxy_config
.search_hosts
let url = Url::parse(&format!("dummy:{}", head.uri))?;
let query = QT::from_url(&url)?;
let sh = get_query_host_for_backend(&query.backend(), proxy_config)?;
let urls = [sh]
.iter()
.map(|sh| match Url::parse(&format!("{}/api/4/search/channel", sh)) {
.map(|sh| match Url::parse(&format!("{}{}", sh, head.uri.path())) {
Ok(mut url) => {
query.append_to_url(&mut url);
Ok(url)
}
Err(_e) => Err(Error::with_msg(format!("parse error for: {:?}", sh))),
Err(e) => Err(Error::with_msg(format!("parse error for: {:?} {:?}", sh, e))),
})
.collect();
for u in &urls {
match u {
Ok(url) => {
info!("URL: {}", url.as_str());
}
Err(_) => {
return Err(Error::with_msg("url parse error"));
}
}
}
let urls: Vec<_> = urls.into_iter().map(Result::unwrap).collect();
.fold_ok(vec![], |mut a, x| {
a.push(x);
a
})?;
let nt = |res| {
let fut = async {
let body = hyper::body::to_bytes(res).await?;
info!("got a result {:?}", body);
let res: ChannelSearchResult = match serde_json::from_slice(&body) {
Ok(k) => k,
Err(_) => ChannelSearchResult { channels: vec![] },
};
Ok(res)
match serde_json::from_slice::<JsonValue>(&body) {
Ok(k) => Ok(k),
Err(e) => Err(e.into()),
}
};
Box::pin(fut) as Pin<Box<dyn Future<Output = _> + Send>>
};
let ft = |all: Vec<ChannelSearchResult>| {
let mut res = vec![];
for j in all {
for k in j.channels {
res.push(k);
}
}
let res = ChannelSearchResult { channels: res };
let ft = |all: Vec<JsonValue>| {
let res = match all.first() {
Some(item) => Ok(item),
None => Err(Error::with_msg("no response from upstream")),
}?;
let res = response(StatusCode::OK)
.header(http::header::CONTENT_TYPE, "application/json")
.body(Body::from(serde_json::to_string(&res)?))?;
.body(Body::from(serde_json::to_string(res)?))?;
Ok(res)
};
let mut ret =
gather_get_json_generic(http::Method::GET, urls, nt, ft, Duration::from_millis(3000)).await?;
let mut ret = gather_get_json_generic(http::Method::GET, urls, nt, ft, query.timeout()).await?;
ret.headers_mut()
.append("x-proxy-log-mark", HeaderValue::from_str(proxy_mark())?);
Ok(ret)
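
The itertools fold_ok used in both URL-building closures above accumulates the Ok values and short-circuits on the first Err. A self-contained sketch of that behavior, with placeholder item and error types:

use itertools::Itertools;

// Push every Ok item into the accumulator, return the first Err unchanged;
// equivalent here to collect::<Result<Vec<_>, _>>().
fn collect_ok(items: Vec<Result<u32, String>>) -> Result<Vec<u32>, String> {
    items.into_iter().fold_ok(Vec::new(), |mut acc, x| {
        acc.push(x);
        acc
    })
}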
@@ -202,3 +214,12 @@ pub async fn events(req: Request<Body>, proxy_config: &ProxyConfig) -> Result<Re
None => Ok(response(StatusCode::NOT_ACCEPTABLE).body(Body::empty())?),
}
}
fn get_query_host_for_backend(backend: &str, proxy_config: &ProxyConfig) -> Result<String, Error> {
for back in &proxy_config.backends {
if back.name == backend {
return Ok(back.url.clone());
}
}
Err(Error::with_msg(format!("host not found for backend {:?}", backend)))
}
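
The bounds on QT (FromUrl, HasBackend, AppendToUrl, HasTimeout) come from netpod and their definitions are not part of this diff. The following is only their shape as inferred from the call sites above (from_url, backend, append_to_url, timeout); the authoritative definitions may differ.

use err::Error; // workspace error crate, as imported at the top of these files
use std::time::Duration;
use url::Url;

// Inferred shapes only, reconstructed from how proxy_single_backend_query uses QT.
trait FromUrl: Sized {
    fn from_url(url: &Url) -> Result<Self, Error>;
}

trait HasBackend {
    fn backend(&self) -> String;
}

trait AppendToUrl {
    fn append_to_url(&self, url: &mut Url);
}

trait HasTimeout {
    fn timeout(&self) -> Duration;
}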