Simplify and fix channel search query parsing

This commit is contained in:
Dominik Werder
2021-06-22 16:57:51 +02:00
parent 5219b56488
commit d0318a17c2
6 changed files with 33 additions and 254 deletions

View File

@@ -4,22 +4,14 @@ use err::Error;
use http::{Method, StatusCode};
use hyper::{Body, Client, Request, Response};
use itertools::Itertools;
use netpod::log::*;
use netpod::{ChannelSearchQuery, ChannelSearchResult, ProxyBackend, ProxyConfig, APP_JSON};
use netpod::{ChannelSearchQuery, ChannelSearchResult, ProxyConfig, APP_JSON};
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use std::future::Future;
use std::pin::Pin;
use std::time::Duration;
use tokio::time::timeout_at;
use url::Url;
// Returns the static list of live (host, port) pairs to fan requests out to.
// Currently a stub: `err::todo()` panics before the empty slice is ever
// returned, so any caller reaching this will abort at runtime.
fn get_live_hosts() -> &'static [(&'static str, u16)] {
// TODO take from config.
err::todo();
&[]
}
pub trait BackendAware {
fn backend(&self) -> &str;
}
@@ -272,101 +264,6 @@ pub async fn channel_search_configs_v1(
}
}
// Aliases for the nested sub-request result type, built up layer by layer:
// TT0 is the raw payload from one backend; TT1 adds request failure; TT2 is
// the spawned task handle; TT3 adds the join error; TT4 adds the timeout;
// TT7 boxes the whole future; TT8 pairs it with the backend name.
// (The gap in numbering — no TT5/TT6 — appears intentional or historical;
// can't tell from here.)
type TT0 = (ProxyBackend, http::response::Parts, hyper::body::Bytes);
type TT1 = Result<TT0, Error>;
type TT2 = tokio::task::JoinHandle<TT1>;
type TT3 = Result<TT1, tokio::task::JoinError>;
type TT4 = Result<TT3, tokio::time::error::Elapsed>;
type TT7 = Pin<Box<dyn Future<Output = TT4> + Send>>;
type TT8 = (String, TT7);
// TODO try to get rid of this.
// Fans a JSON sub-query out to every configured backend whose name appears in
// `backends_req`. For each match it POSTs `subq_maker(name)` to
// http://host:port/api/1/<endp> on a spawned task wrapped in a 5 s timeout,
// and returns (backend name, boxed future) pairs for the caller to await.
// Errors from building the request or serializing the query are returned
// eagerly; network/join/timeout errors surface later through the TT4 layers.
fn subreq(
backends_req: &[&str],
endp: &str,
subq_maker: &dyn Fn(&str) -> JsonValue,
proxy_config: &ProxyConfig,
) -> Result<Vec<TT8>, Error> {
let backends = proxy_config.backends.clone();
let mut spawned = vec![];
for back in &backends {
if backends_req.contains(&back.name.as_str()) {
let back = back.clone();
let q = subq_maker(&back.name);
// Backends literally named "timeout" / "error500" are redirected to
// special endpoints — presumably fault-injection hooks for testing;
// TODO confirm against the backend implementations.
let endp = match back.name.as_str() {
"timeout" => "channels_timeout",
"error500" => "channels_error500",
_ => endp,
};
let uri = format!("http://{}:{}{}/{}", back.host, back.port, "/api/1", endp);
let req = Request::builder()
.method(Method::POST)
.uri(uri)
.header("content-type", "application/json")
.body(Body::from(serde_json::to_string(&q)?))?;
// Spawn the request so all backends are queried concurrently; the
// backend descriptor travels with the response for later matching.
let jh: TT2 = tokio::spawn({
let back = back.clone();
async move {
let res = Client::new().request(req).await?;
let (pre, body) = res.into_parts();
//info!("Answer from {} status {}", back.1, pre.status);
let body_all = hyper::body::to_bytes(body).await?;
//info!("Got {} bytes from {}", body_all.len(), back.1);
Ok::<_, Error>((back, pre, body_all))
}
});
// Cap each sub-request at 5 s; the timeout wraps the join handle, so
// an elapsed timer shows up as the outermost error layer (TT4).
let jh = tokio::time::timeout(std::time::Duration::from_millis(5000), jh);
let bx: TT7 = Box::pin(jh);
spawned.push((back.name.clone(), bx));
}
}
Ok(spawned)
}
//fn extr<'a, T: BackendAware + FromErrorCode + Deserialize<'a>>(results: Vec<(&str, TT4)>) -> Vec<T> {
// Collapses the awaited per-backend results into a flat list of T.
// The nesting is unwound outermost-in: timeout (Elapsed) -> task join error
// -> request error -> HTTP status / body parse. Each failure layer logs and
// pushes a T::from_error_code placeholder so the backend still appears in the
// output, except: a non-200 status and a >1-item answer are logged but add
// nothing (NOTE(review): likely intentional best-effort, but asymmetric with
// the other branches — confirm). Items whose self-reported backend doesn't
// match the responding backend are silently dropped.
fn extr<T: BackendAware + FromErrorCode + for<'a> Deserialize<'a>>(results: Vec<(String, TT4)>) -> Vec<T> {
let mut ret = vec![];
for (backend, r) in results {
// r: Ok = no timeout; Err = the 5 s timeout in subreq elapsed.
if let Ok(r20) = r {
// r20: Ok = task joined; Err = the spawned task panicked/was cancelled.
if let Ok(r30) = r20 {
// r30: Ok = HTTP round trip succeeded; Err = request-level error.
if let Ok(r2) = r30 {
if r2.1.status == 200 {
let inp_res: Result<Vec<T>, _> = serde_json::from_slice(&r2.2);
if let Ok(inp) = inp_res {
if inp.len() > 1 {
error!("more than one result item from {:?}", r2.0);
} else {
for inp2 in inp {
// Keep only items whose backend field matches the
// backend that actually answered.
if inp2.backend() == r2.0.name {
ret.push(inp2);
}
}
}
} else {
error!("malformed answer from {:?}", r2.0);
ret.push(T::from_error_code(backend.as_str(), ErrorCode::Error));
}
} else {
error!("bad answer from {:?}", r2.0);
ret.push(T::from_error_code(backend.as_str(), ErrorCode::Error));
}
} else {
error!("bad answer from {:?}", r30);
ret.push(T::from_error_code(backend.as_str(), ErrorCode::Error));
}
} else {
error!("subrequest join handle error {:?}", r20);
ret.push(T::from_error_code(backend.as_str(), ErrorCode::Error));
}
} else {
error!("subrequest timeout {:?}", r);
ret.push(T::from_error_code(backend.as_str(), ErrorCode::Timeout));
}
}
ret
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct ChannelConfigV1 {
pub backend: String,
@@ -423,109 +320,7 @@ impl FromErrorCode for ChannelBackendConfigsV1 {
}
}
/// Handles the v1 `channels/config` endpoint on the proxy: parses the incoming
/// JSON query, fans a single-backend copy of it out to every requested backend
/// via `subreq`, merges the answers with `extr`, and returns them as JSON.
pub async fn channels_config_v1(req: Request<Body>, proxy_config: &ProxyConfig) -> Result<Response<Body>, Error> {
    // Read the full request body and deserialize it into the v1 query type.
    let body_bytes = hyper::body::to_bytes(req.into_body()).await?;
    let query: ChannelConfigsQueryV1 = serde_json::from_slice(&body_bytes)?;
    // Per-backend sub-query: identical filters, restricted to one backend.
    let subq_maker = |backend: &str| -> JsonValue {
        serde_json::to_value(ChannelConfigsQueryV1 {
            regex: query.regex.clone(),
            source_regex: query.source_regex.clone(),
            description_regex: query.description_regex.clone(),
            backends: vec![backend.into()],
            ordering: query.ordering.clone(),
        })
        .unwrap()
    };
    let requested: Vec<&str> = query.backends.iter().map(String::as_str).collect();
    let spawned = subreq(&requested, "channels/config", &subq_maker, proxy_config)?;
    // Await each sub-request in turn, keeping the backend name alongside.
    let mut gathered = Vec::new();
    for (backend, fut) in spawned {
        gathered.push((backend, fut.await));
    }
    let merged = ChannelConfigsResponseV1(extr(gathered));
    let body = serde_json::to_string(&merged.0)?;
    Ok(response(StatusCode::OK).body(body.into())?)
}
// Fans a GET of `path` out to every (host, instance) pair from
// get_live_hosts() x {"00","01","02"}, with a 1 s deadline per request, and
// returns a JSON object keyed "host:port-inst" mapping to each answer (or the
// string "ERROR" on any failure). NOTE: `err::todo()` on the next-but-one line
// panics immediately, so everything below it is currently unreachable.
pub async fn gather_json_v1(req_m: Request<Body>, path: &str) -> Result<Response<Body>, Error> {
// TODO can this be removed?
err::todo();
let mut spawned = vec![];
let (req_h, _) = req_m.into_parts();
for host in get_live_hosts() {
for inst in &["00", "01", "02"] {
let req_hh = req_h.headers.clone();
// Optional "host_filter" header restricts fan-out to a single host.
let host_filter = if req_hh.contains_key("host_filter") {
Some(req_hh.get("host_filter").unwrap().to_str().unwrap())
} else {
None
};
let path = path.to_string();
let task = if host_filter.is_none() || host_filter.as_ref().unwrap() == &host.0 {
let task = (
host.clone(),
inst.to_string(),
tokio::spawn(async move {
let uri = format!("http://{}:{}{}", host.0, host.1, path);
let req = Request::builder().method(Method::GET).uri(uri);
// Dead branch: `false && ...` never forwards the caller's
// retrieval_instance header; kept disabled deliberately, it seems.
let req = if false && req_hh.contains_key("retrieval_instance") {
req.header("retrieval_instance", req_hh.get("retrieval_instance").unwrap())
} else {
req
};
// Always tag the request with the loop's instance id instead.
let req = req.header("retrieval_instance", *inst);
//.header("content-type", "application/json")
//.body(Body::from(serde_json::to_string(&q)?))?;
let req = req.body(Body::empty())?;
let deadline = tokio::time::Instant::now() + Duration::from_millis(1000);
let fut = async {
let res = Client::new().request(req).await?;
let (pre, body) = res.into_parts();
if pre.status != StatusCode::OK {
Err(Error::with_msg(format!("request failed, got {}", pre.status)))
} else {
// aggregate returns a hyper Buf which is not Read
let body_all = hyper::body::to_bytes(body).await?;
// If the body isn't valid JSON, fall back to wrapping the raw
// text in a JSON string (fails only on invalid UTF-8).
let val = match serde_json::from_slice(&body_all) {
Ok(k) => k,
Err(_e) => JsonValue::String(String::from_utf8(body_all.to_vec())?),
};
Ok(val)
}
};
// Double `?`: outer for the deadline, inner for the request result.
let ret = timeout_at(deadline, fut).await??;
Ok::<_, Error>(ret)
}),
);
Some(task)
} else {
None
};
if let Some(task) = task {
spawned.push(task);
}
}
}
use serde_json::Map;
let mut m = Map::new();
// Collect every spawned task; any join/request error becomes "ERROR".
for h in spawned {
let res = match h.2.await {
Ok(k) => match k {
Ok(k) => k,
Err(_e) => JsonValue::String(format!("ERROR")),
},
Err(_e) => JsonValue::String(format!("ERROR")),
};
m.insert(format!("{}:{}-{}", h.0 .0, h.0 .1, h.1), res);
}
let res = response(200)
.header("Content-Type", "application/json")
.body(serde_json::to_string(&m)?.into())?;
Ok(res)
}
// TODO replace usage of this by gather-generic
pub async fn gather_json_2_v1(
req: Request<Body>,
pathpre: &str,
@@ -546,7 +341,7 @@ pub async fn gather_json_2_v1(
} else {
req
};
let req = req.header(http::header::ACCEPT, "application/json");
let req = req.header(http::header::ACCEPT, APP_JSON);
//.body(Body::from(serde_json::to_string(&q)?))?;
let req = req.body(Body::empty());
let task = tokio::spawn(async move {
@@ -581,7 +376,7 @@ pub async fn gather_json_2_v1(
a.push(Hres { gh: tr.0, res });
}
let res = response(StatusCode::OK)
.header(http::header::CONTENT_TYPE, "application/json")
.header(http::header::CONTENT_TYPE, APP_JSON)
.body(serde_json::to_string(&Jres { hosts: a })?.into())?;
Ok(res)
}

View File

@@ -13,7 +13,7 @@ use hyper::service::{make_service_fn, service_fn};
use hyper::{server::Server, Body, Request, Response};
use net::SocketAddr;
use netpod::log::*;
use netpod::{AggKind, Channel, FromUrl, NodeConfigCached};
use netpod::{AggKind, Channel, FromUrl, NodeConfigCached, APP_JSON, APP_OCTET};
use panic::{AssertUnwindSafe, UnwindSafe};
use pin::Pin;
use serde::{Deserialize, Serialize};
@@ -335,9 +335,9 @@ async fn binned(req: Request<Body>, node_config: &NodeConfigCached) -> Result<Re
let (head, _body) = req.into_parts();
let url = Url::parse(&format!("dummy:{}", head.uri))?;
let query = BinnedQuery::from_url(&url)?;
match head.headers.get("accept") {
Some(v) if v == "application/octet-stream" => binned_binary(query, node_config).await,
Some(v) if v == "application/json" => binned_json(query, node_config).await,
match head.headers.get(http::header::ACCEPT) {
Some(v) if v == APP_OCTET => binned_binary(query, node_config).await,
Some(v) if v == APP_JSON => binned_json(query, node_config).await,
_ => Ok(response(StatusCode::NOT_ACCEPTABLE).body(Body::empty())?),
}
}
@@ -405,11 +405,11 @@ async fn plain_events(req: Request<Body>, node_config: &NodeConfigCached) -> Res
let accept_def = "";
let accept = req
.headers()
.get("Accept")
.get(http::header::ACCEPT)
.map_or(accept_def, |k| k.to_str().unwrap_or(accept_def));
if accept == "application/json" {
if accept == APP_JSON {
Ok(plain_events_json(req, node_config).await?)
} else if accept == "application/octet-stream" {
} else if accept == APP_OCTET {
Ok(plain_events_binary(req, node_config).await?)
} else {
Err(Error::with_msg(format!("unexpected Accept: {:?}", accept)))
@@ -482,7 +482,7 @@ pub async fn clear_cache_all(req: Request<Body>, node_config: &NodeConfigCached)
};
let res = disk::cache::clear_cache_all(node_config, dry).await?;
let ret = response(StatusCode::OK)
.header(http::header::CONTENT_TYPE, "application/json")
.header(http::header::CONTENT_TYPE, APP_JSON)
.body(Body::from(serde_json::to_string(&res)?))?;
Ok(ret)
}
@@ -563,7 +563,7 @@ pub async fn update_search_cache(req: Request<Body>, node_config: &NodeConfigCac
};
let res = dbconn::scan::update_search_cache(node_config).await?;
let ret = response(StatusCode::OK)
.header(http::header::CONTENT_TYPE, "application/json")
.header(http::header::CONTENT_TYPE, APP_JSON)
.body(Body::from(serde_json::to_string(&res)?))?;
Ok(ret)
}
@@ -581,7 +581,7 @@ pub async fn channel_config(req: Request<Body>, node_config: &NodeConfigCached)
};
let res = parse::channelconfig::read_local_config(&channel, &node_config.node).await?;
let ret = response(StatusCode::OK)
.header(http::header::CONTENT_TYPE, "application/json")
.header(http::header::CONTENT_TYPE, APP_JSON)
.body(Body::from(serde_json::to_string(&res)?))?;
Ok(ret)
}

View File

@@ -1,7 +1,4 @@
use crate::api1::{
channel_search_configs_v1, channel_search_list_v1, channels_config_v1, gather_json_2_v1, gather_json_v1,
proxy_distribute_v1,
};
use crate::api1::{channel_search_configs_v1, channel_search_list_v1, gather_json_2_v1, proxy_distribute_v1};
use crate::gather::{gather_get_json_generic, SubRes};
use crate::{response, Cont};
use disk::binned::query::BinnedQuery;
@@ -60,13 +57,11 @@ async fn proxy_http_service_try(req: Request<Body>, proxy_config: &ProxyConfig)
if path == "/api/1/channels" {
Ok(channel_search_list_v1(req, proxy_config).await?)
} else if path == "/api/1/channels/config" {
Ok(channels_config_v1(req, proxy_config).await?)
} else if path == "/api/1/channelsA/config" {
Ok(channel_search_configs_v1(req, proxy_config).await?)
} else if path == "/api/1/stats/version" {
Ok(gather_json_v1(req, "/stats/version").await?)
Err(Error::with_msg("todo"))
} else if path == "/api/1/stats/" {
Ok(gather_json_v1(req, path).await?)
Err(Error::with_msg("todo"))
} else if path.starts_with("/api/1/gather/") {
Ok(gather_json_2_v1(req, "/api/1/gather/", proxy_config).await?)
} else if path == "/api/4/backends" {
@@ -105,10 +100,11 @@ pub async fn backends(_req: Request<Body>, proxy_config: &ProxyConfig) -> Result
pub async fn channel_search(req: Request<Body>, proxy_config: &ProxyConfig) -> Result<Response<Body>, Error> {
let (head, _body) = req.into_parts();
match head.headers.get("accept") {
match head.headers.get(http::header::ACCEPT) {
Some(v) => {
if v == "application/json" {
let query = ChannelSearchQuery::from_query_string(head.uri.query())?;
if v == APP_JSON {
let url = Url::parse(&format!("dummy:{}", head.uri))?;
let query = ChannelSearchQuery::from_url(&url)?;
let urls = proxy_config
.search_hosts
.iter()
@@ -145,7 +141,7 @@ pub async fn channel_search(req: Request<Body>, proxy_config: &ProxyConfig) -> R
}
let res = ChannelSearchResult { channels: res };
let res = response(StatusCode::OK)
.header(http::header::CONTENT_TYPE, "application/json")
.header(http::header::CONTENT_TYPE, APP_JSON)
.body(Body::from(serde_json::to_string(&res)?))?;
Ok(res)
};
@@ -169,9 +165,9 @@ where
QT: FromUrl + HasBackend + AppendToUrl + HasTimeout,
{
let (head, _body) = req.into_parts();
match head.headers.get("accept") {
match head.headers.get(http::header::ACCEPT) {
Some(v) => {
if v == "application/json" {
if v == APP_JSON {
let url = Url::parse(&format!("dummy:{}", head.uri))?;
let query = QT::from_url(&url)?;
let sh = get_query_host_for_backend(&query.backend(), proxy_config)?;

View File

@@ -1,6 +1,7 @@
use crate::response;
use err::Error;
use hyper::{Body, Request, Response, StatusCode};
use netpod::log::*;
use netpod::{ChannelSearchQuery, NodeConfigCached};
use url::Url;
@@ -9,10 +10,10 @@ pub async fn channel_search(req: Request<Body>, node_config: &NodeConfigCached)
match head.headers.get("accept") {
Some(v) if v == "application/json" => {
let s1 = format!("dummy:{}", head.uri);
//netpod::log::info!("try to parse {}", s1);
info!("try to parse {:?}", s1);
let url = Url::parse(&s1)?;
let query = ChannelSearchQuery::from_url(&url)?;
//let query = ChannelSearchQuery::from_query_string(head.uri.query())?;
info!("search query: {:?}", query);
let res = dbconn::search::search_channel(query, node_config).await?;
let body = Body::from(serde_json::to_string(&res)?);
let ret = super::response(StatusCode::OK).body(body)?;

View File

@@ -19,7 +19,9 @@
<h2>Available backends</h2>
Currently available:
<ul>
<li>sf-databuffer</li>
<li>sf-databuffer</li>
<li>hipa-archive</li>
<li>gls-archive</li>
</ul>