forked from Deuxfleurs/tricot
refactor configuration to support redirects
parent 14325395f6
commit de72d6037f
4 changed files with 238 additions and 89 deletions
@@ -78,7 +78,7 @@ impl CertStore {

             let proxy_config: Arc<ProxyConfig> = rx_proxy_config.borrow().clone();
             for ent in proxy_config.entries.iter() {
-                if let HostDescription::Hostname(domain) = &ent.host {
+                if let HostDescription::Hostname(domain) = &ent.url_prefix.host {
                     if let Some((host, _port)) = domain.split_once(':') {
                         domains.insert(host.to_string());
                     } else {
@@ -121,7 +121,7 @@ impl CertStore {
             .borrow()
             .entries
             .iter()
-            .any(|ent| ent.host.matches(domain))
+            .any(|ent| ent.url_prefix.host.matches(domain))
         {
             bail!("Domain {} should not have a TLS certificate.", domain);
         }
@@ -234,8 +234,9 @@ async fn select_target_and_proxy(
         .iter()
         .filter(|ent| {
             ent.flags.healthy
-                && ent.host.matches(host)
+                && ent.url_prefix.host.matches(host)
                 && ent
+                    .url_prefix
                     .path_prefix
                     .as_ref()
                     .map(|prefix| path.starts_with(prefix))
@@ -244,7 +245,8 @@ async fn select_target_and_proxy(
         .max_by_key(|ent| {
             (
                 ent.priority,
-                ent.path_prefix
+                ent.url_prefix
+                    .path_prefix
                     .as_ref()
                     .map(|x| x.len() as i32)
                     .unwrap_or(0),
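The key used by .max_by_key above orders candidate rules first by priority and then by the length of the path prefix, so among rules of equal priority the more specific (longer) prefix wins. A minimal sketch of that tuple ordering, with hypothetical (priority, prefix length) pairs:

    let candidates = [(100u32, 0i32), (100, 4), (50, 10)];
    let best = candidates.iter().max_by_key(|(prio, len)| (*prio, *len));
    assert_eq!(best, Some(&(100, 4)));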
@@ -239,7 +239,7 @@ async fn dump_config_on_change(
     let mut cfg_map = BTreeMap::<_, Vec<_>>::new();
     for ent in cfg.entries.iter() {
         cfg_map
-            .entry((&ent.host, &ent.path_prefix))
+            .entry((&ent.url_prefix.host, &ent.url_prefix.path_prefix))
             .or_default()
             .push(ent);
     }
@@ -13,7 +13,7 @@ use crate::consul;

 // ---- Extract proxy config from Consul catalog ----

-#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
+#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
 pub enum HostDescription {
     Hostname(String),
     Pattern(glob::Pattern),
@@ -45,12 +45,48 @@ impl std::fmt::Display for HostDescription {
     }
 }

-#[derive(Debug)]
-pub struct ProxyEntry {
+#[derive(Debug, Clone)]
+pub struct UrlPrefix {
     /// Publicly exposed TLS hostnames for matching this rule
     pub host: HostDescription,

     /// Path prefix for matching this rule
     pub path_prefix: Option<String>,
+}
+
+impl PartialEq for UrlPrefix {
+    fn eq(&self, other: &Self) -> bool {
+        self.host == other.host && self.path_prefix == other.path_prefix
+    }
+}
+impl Eq for UrlPrefix {}
+
+impl UrlPrefix {
+    fn new(raw_prefix: &str) -> Option<Self> {
+        let (raw_host, path_prefix) = match raw_prefix.find('/') {
+            Some(i) => {
+                let (host, pp) = raw_prefix.split_at(i);
+                (host, Some(pp.to_string()))
+            }
+            None => (raw_prefix, None),
+        };
+
+        let host = match HostDescription::new(raw_host) {
+            Ok(h) => h,
+            Err(e) => {
+                warn!("Invalid hostname pattern {}: {}", raw_host, e);
+                return None;
+            }
+        };
+
+        Some(Self { host, path_prefix })
+    }
+}
+
+#[derive(Debug)]
+pub struct ProxyEntry {
+    /// An Url prefix is made of a host and maybe a path prefix
+    pub url_prefix: UrlPrefix,
     /// Priority with which this rule is considered (highest first)
     pub priority: u32,
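A minimal sketch of how UrlPrefix::new splits a raw prefix on the first '/' (illustrative only: new is private to this module, and the hostname is hypothetical):

    let p = UrlPrefix::new("example.com/api").expect("valid prefix");
    assert_eq!(p.path_prefix.as_deref(), Some("/api"));

    // Without a '/', the whole value is treated as the host and there is no path prefix.
    let q = UrlPrefix::new("example.com").expect("valid prefix");
    assert_eq!(q.path_prefix, None);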
@@ -68,6 +104,10 @@ pub struct ProxyEntry {
     /// when matching this rule
     pub add_headers: Vec<(String, String)>,

+    /// Try to match all these redirection before forwarding to the backend
+    /// when matching this rule
+    pub redirects: Vec<(UrlPrefix, UrlPrefix, u32)>,
+
     /// Number of calls in progress, used to deprioritize slow back-ends
     pub calls_in_progress: atomic::AtomicI64,
     /// Time of last call, used for round-robin selection
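Each redirects entry pairs a matching prefix with a replacement prefix and an HTTP status code (301 or 302, per the tag parser later in this diff). A sketch of one such tuple, built from the public fields with hypothetical hosts:

    let rule: (UrlPrefix, UrlPrefix, u32) = (
        UrlPrefix {
            host: HostDescription::Hostname("old.example.com".to_string()),
            path_prefix: Some("/doc".to_string()),
        },
        UrlPrefix {
            host: HostDescription::Hostname("new.example.com".to_string()),
            path_prefix: Some("/documentation".to_string()),
        },
        301,
    );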
@@ -76,8 +116,7 @@ pub struct ProxyEntry {

 impl PartialEq for ProxyEntry {
     fn eq(&self, other: &Self) -> bool {
-        self.host == other.host
-            && self.path_prefix == other.path_prefix
+        self.url_prefix == other.url_prefix
             && self.priority == other.priority
             && self.service_name == other.service_name
             && self.target_addr == other.target_addr
@@ -88,6 +127,58 @@ impl PartialEq for ProxyEntry {
 }
 impl Eq for ProxyEntry {}

+impl ProxyEntry {
+    fn new(
+        service_name: String,
+        frontend: MatchTag,
+        target_addr: SocketAddr,
+        middleware: &[ConfigTag],
+        flags: ProxyEntryFlags,
+    ) -> Self {
+        let (url_prefix, priority, https_target) = match frontend {
+            MatchTag::Http(u, p) => (u, p, false),
+            MatchTag::HttpWithTls(u, p) => (u, p, true),
+        };
+
+        let mut add_headers = vec![];
+        let mut redirects = vec![];
+        for mid in middleware.into_iter() {
+            match mid {
+                ConfigTag::AddHeader(k, v) => add_headers.push((k.to_string(), v.to_string())),
+                ConfigTag::AddRedirect(m, r, c) => redirects.push(((*m).clone(), (*r).clone(), *c)),
+                ConfigTag::LocalLb =>
+                /* handled in parent fx */
+                {
+                    ()
+                }
+                ConfigTag::GlobalLb =>
+                /* handled in parent fx */
+                {
+                    ()
+                }
+            };
+        }
+
+        ProxyEntry {
+            // id
+            service_name,
+            // frontend
+            url_prefix,
+            priority,
+            // backend
+            target_addr,
+            https_target,
+            // middleware
+            flags,
+            add_headers,
+            redirects,
+            // internal
+            last_call: atomic::AtomicI64::from(0),
+            calls_in_progress: atomic::AtomicI64::from(0),
+        }
+    }
+}
+
 #[derive(Debug, Clone, Copy, Eq, PartialEq)]
 pub struct ProxyEntryFlags {
     /// Is the target healthy?
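As the /* handled in parent fx */ comments indicate, ProxyEntry::new only records the AddHeader and AddRedirect middleware on the entry itself; the GlobalLb and LocalLb tags are left for parse_consul_service (further down in this diff), where they set flags.global_lb and flags.site_lb before the entries are built.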
@@ -115,8 +206,8 @@ impl std::fmt::Display for ProxyEntry {
         write!(
             f,
             "{}{} {}",
-            self.host,
-            self.path_prefix.as_deref().unwrap_or_default(),
+            self.url_prefix.host,
+            self.url_prefix.path_prefix.as_deref().unwrap_or_default(),
             self.priority
         )?;
         if !self.flags.healthy {
@@ -144,62 +235,103 @@ pub struct ProxyConfig {
     pub entries: Vec<ProxyEntry>,
 }

-fn parse_tricot_tag(
-    service_name: String,
-    tag: &str,
-    target_addr: SocketAddr,
-    add_headers: &[(String, String)],
-    flags: ProxyEntryFlags,
-) -> Option<ProxyEntry> {
-    let splits = tag.split(' ').collect::<Vec<_>>();
-    if (splits.len() != 2 && splits.len() != 3)
-        || (splits[0] != "tricot" && splits[0] != "tricot-https")
-    {
-        return None;
-    }
-
-    let (host, path_prefix) = match splits[1].find('/') {
-        Some(i) => {
-            let (host, pp) = splits[1].split_at(i);
-            (host, Some(pp.to_string()))
-        }
-        None => (splits[1], None),
-    };
-
-    let priority = match splits.len() {
-        3 => splits[2].parse().ok()?,
-        _ => 100,
-    };
-
-    let host = match HostDescription::new(host) {
-        Ok(h) => h,
-        Err(e) => {
-            warn!("Invalid hostname pattern {}: {}", host, e);
-            return None;
-        }
-    };
-
-    Some(ProxyEntry {
-        service_name,
-        target_addr,
-        https_target: (splits[0] == "tricot-https"),
-        host,
-        flags,
-        path_prefix,
-        priority,
-        add_headers: add_headers.to_vec(),
-        last_call: atomic::AtomicI64::from(0),
-        calls_in_progress: atomic::AtomicI64::from(0),
-    })
-}
-
-fn parse_tricot_add_header_tag(tag: &str) -> Option<(String, String)> {
-    let splits = tag.splitn(3, ' ').collect::<Vec<_>>();
-    if splits.len() == 3 && splits[0] == "tricot-add-header" {
-        Some((splits[1].to_string(), splits[2].to_string()))
-    } else {
-        None
-    }
-}
+#[derive(Debug)]
+enum ParsedTag<'a> {
+    Frontend(MatchTag),
+    Middleware(ConfigTag<'a>),
+}
+
+#[derive(Debug)]
+enum MatchTag {
+    /// HTTP backend (plain text)
+    Http(UrlPrefix, u32),
+    /// HTTPS backend (TLS encrypted)
+    HttpWithTls(UrlPrefix, u32),
+}
+
+#[derive(Debug)]
+enum ConfigTag<'a> {
+    AddHeader(&'a str, &'a str),
+    AddRedirect(UrlPrefix, UrlPrefix, u32),
+    GlobalLb,
+    LocalLb,
+}
+
+fn parse_tricot_tags(tag: &str) -> Option<ParsedTag> {
+    let splits = tag.splitn(4, ' ').collect::<Vec<_>>();
+    let parsed_tag = match splits.as_slice() {
+        ["tricot", raw_prefix, maybe_priority @ ..] => {
+            // priority is set to 100 when value is invalid or missing
+            let priority: u32 = maybe_priority
+                .iter()
+                .next()
+                .map_or(Ok(100), |x| x.parse::<u32>())
+                .unwrap_or(100);
+            UrlPrefix::new(raw_prefix)
+                .map(|prefix| ParsedTag::Frontend(MatchTag::Http(prefix, priority)))
+        }
+        ["tricot-https", raw_prefix, maybe_priority @ ..] => {
+            // priority is set to 100 when value is invalid or missing
+            let priority: u32 = maybe_priority
+                .iter()
+                .next()
+                .map_or(Ok(100), |x| x.parse::<u32>())
+                .unwrap_or(100);
+            UrlPrefix::new(raw_prefix)
+                .map(|prefix| ParsedTag::Frontend(MatchTag::HttpWithTls(prefix, priority)))
+        }
+        ["tricot-add-header", header_key, header_value] => Some(ParsedTag::Middleware(
+            ConfigTag::AddHeader(header_key, header_value),
+        )),
+        ["tricot-add-redirect", raw_match, raw_replace, maybe_raw_code @ ..] => {
+            let (p_match, p_replace) =
+                match (UrlPrefix::new(raw_match), UrlPrefix::new(raw_replace)) {
+                    (Some(m), Some(r)) => (m, r),
+                    _ => {
+                        debug!(
+                            "tag {} is ignored, one of the url prefix can't be parsed",
+                            tag
+                        );
+                        return None;
+                    }
+                };
+
+            if matches!(p_replace.host, HostDescription::Pattern(_)) {
+                debug!(
+                    "tag {} ignored as redirect to a glob pattern is not supported",
+                    tag
+                );
+                return None;
+            }
+
+            let maybe_parsed_code = maybe_raw_code
+                .iter()
+                .next()
+                .map(|c| c.parse::<u32>().ok())
+                .flatten();
+            let http_code = match maybe_parsed_code {
+                Some(301) => 301,
+                Some(302) => 302,
+                _ => {
+                    debug!(
+                        "tag {} has a missing or invalid http code, set it to 302",
+                        tag
+                    );
+                    302
+                }
+            };
+
+            Some(ParsedTag::Middleware(ConfigTag::AddRedirect(
+                p_match, p_replace, http_code,
+            )))
+        }
+        ["tricot-global-lb", ..] => Some(ParsedTag::Middleware(ConfigTag::GlobalLb)),
+        ["tricot-local-lb", ..] => Some(ParsedTag::Middleware(ConfigTag::LocalLb)),
+        _ => None,
+    };
+
+    trace!("tag {} parsed as {:?}", tag, parsed_tag);
+    parsed_tag
+}

 fn parse_consul_service(
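A minimal sketch of the tag formats accepted by parse_tricot_tags, using hypothetical hostnames (the function and its enums are private to this module, so the calls below are illustrative only):

    // "tricot <prefix> [priority]"                       -> frontend, plain-HTTP backend
    // "tricot-https <prefix> [priority]"                  -> frontend, TLS backend
    // "tricot-add-header <name> <value>"                  -> middleware, extra request header
    // "tricot-add-redirect <match> <replace> [301|302]"   -> middleware, redirect rule
    // "tricot-global-lb" / "tricot-local-lb"              -> middleware, load-balancing flags
    assert!(matches!(
        parse_tricot_tags("tricot example.com/app 50"),
        Some(ParsedTag::Frontend(MatchTag::Http(_, 50)))
    ));
    assert!(matches!(
        parse_tricot_tags("tricot-add-redirect old.example.com new.example.com 301"),
        Some(ParsedTag::Middleware(ConfigTag::AddRedirect(_, _, 301)))
    ));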
@@ -208,8 +340,6 @@ fn parse_consul_service(
 ) -> Vec<ProxyEntry> {
     trace!("Parsing service: {:#?}", s);

-    let mut entries = vec![];
-
     let ip_addr = match s.service.address.parse() {
         Ok(ip) => ip,
         _ => match s.node.address.parse() {
@@ -225,30 +355,47 @@ fn parse_consul_service(
     };
     let addr = SocketAddr::new(ip_addr, s.service.port);

-    if s.service.tags.contains(&"tricot-global-lb".into()) {
-        flags.global_lb = true;
-    } else if s.service.tags.contains(&"tricot-site-lb".into()) {
-        flags.site_lb = true;
-    };
-
-    let mut add_headers = vec![];
-    for tag in s.service.tags.iter() {
-        if let Some(pair) = parse_tricot_add_header_tag(tag) {
-            add_headers.push(pair);
-        }
-    }
-
-    for tag in s.service.tags.iter() {
-        if let Some(ent) = parse_tricot_tag(
-            s.service.service.clone(),
-            tag,
-            addr,
-            &add_headers[..],
-            flags,
-        ) {
-            entries.push(ent);
-        }
-    }
+    // tag parsing
+    let mut collected_middleware = vec![];
+    let mut collected_frontends = vec![];
+    for tag in s.service.tags.iter() {
+        match parse_tricot_tags(tag) {
+            Some(ParsedTag::Frontend(x)) => collected_frontends.push(x),
+            Some(ParsedTag::Middleware(y)) => collected_middleware.push(y),
+            _ => trace!(
+                "service {}: tag '{}' could not be parsed",
+                s.service.service,
+                tag
+            ),
+        }
+    }
+
+    // some legacy processing that would need a refactor later
+    for mid in collected_middleware.iter() {
+        match mid {
+            ConfigTag::AddHeader(_, _) | ConfigTag::AddRedirect(_, _, _) =>
+            /* not handled here */
+            {
+                ()
+            }
+            ConfigTag::GlobalLb => flags.global_lb = true,
+            ConfigTag::LocalLb => flags.site_lb = true,
+        };
+    }
+
+    // build proxy entries
+    let entries = collected_frontends
+        .into_iter()
+        .map(|frt| {
+            ProxyEntry::new(
+                s.service.service.clone(),
+                frt,
+                addr,
+                collected_middleware.as_ref(),
+                flags,
+            )
+        })
+        .collect::<Vec<_>>();

     trace!("Result of parsing service:");
     for ent in entries.iter() {
@ -347,8 +494,8 @@ impl ProxyConfigMetrics {
|
||||||
let mut patterns = HashMap::new();
|
let mut patterns = HashMap::new();
|
||||||
for ent in rx.borrow().entries.iter() {
|
for ent in rx.borrow().entries.iter() {
|
||||||
let attrs = (
|
let attrs = (
|
||||||
ent.host.to_string(),
|
ent.url_prefix.host.to_string(),
|
||||||
ent.path_prefix.clone().unwrap_or_default(),
|
ent.url_prefix.path_prefix.clone().unwrap_or_default(),
|
||||||
ent.service_name.clone(),
|
ent.service_name.clone(),
|
||||||
);
|
);
|
||||||
*patterns.entry(attrs).or_default() += 1;
|
*patterns.entry(attrs).or_default() += 1;
|
||||||