forked from Deuxfleurs/tricot

Try to clean up code and to fix WebSocket problems

This commit is contained in:
parent 6383d98772
commit 8c6114c3d3

3 changed files with 60 additions and 43 deletions

39  src/https.rs
@@ -194,25 +194,24 @@ async fn handle(
             handle_error(reverse_proxy::call(remote_addr.ip(), &to_addr, req).await)
         };

-        // Do further processing (compression, additionnal headers) only for 2xx responses
-        if !response.status().is_success() {
-            return Ok(response);
-        }
-
-        for (header, value) in proxy_to.add_headers.iter() {
-            response.headers_mut().insert(
-                HeaderName::from_bytes(header.as_bytes())?,
-                HeaderValue::from_str(value)?,
-            );
-        }
-        trace!("Response: {:?}", response);
-        info!("{} {} {}", method, response.status().as_u16(), uri);
-
-        if https_config.enable_compression {
-            try_compress(response, method, accept_encoding, &https_config).await
-        } else {
-            Ok(response)
-        }
+        if response.status().is_success() {
+            // (TODO: maybe we want to add these headers even if it's not a success?)
+            for (header, value) in proxy_to.add_headers.iter() {
+                response.headers_mut().insert(
+                    HeaderName::from_bytes(header.as_bytes())?,
+                    HeaderValue::from_str(value)?,
+                );
+            }
+        }
+
+        if https_config.enable_compression {
+            response =
+                try_compress(response, method.clone(), accept_encoding, &https_config).await?
+        };
+
+        trace!("Final response: {:?}", response);
+        info!("{} {} {}", method, response.status().as_u16(), uri);
+        Ok(response)
     } else {
         debug!("{}{} -> NOT FOUND", host, path);
         info!("{} 404 {}", method, uri);
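A side note on the `method.clone()` that appears in the new call, for illustration only: `try_compress` apparently takes the `Method` by value, and `method` is still needed for the `info!` line that now runs after compression, so it has to be cloned. A minimal stand-alone sketch of that move-versus-reuse pattern (the function name below is made up, not tricot's API):

use http::Method;

// Stand-in for a function that, like try_compress above, consumes the Method.
fn takes_method_by_value(m: Method) -> String {
    m.to_string()
}

fn main() {
    let method = Method::GET;
    let summary = takes_method_by_value(method.clone()); // clone, because...
    println!("{} {}", method, summary); // ...`method` is still used afterwards, like in the info! line
}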
@@ -240,10 +239,14 @@ async fn try_compress(
     https_config: &HttpsConfig,
 ) -> Result<Response<Body>> {
     // Don't bother compressing successfull responses for HEAD and PUT (they should have an empty body)
-    // Don't compress partial content, that would be wierd
-    // If already compressed, return as is
+    // Don't compress partial content as it causes issues
+    // Don't bother compressing non-2xx results
+    // Don't compress Upgrade responses (e.g. websockets)
+    // Don't compress responses that are already compressed
     if (response.status().is_success() && (method == Method::HEAD || method == Method::PUT))
         || response.status() == StatusCode::PARTIAL_CONTENT
+        || !response.status().is_success()
+        || response.headers().get(header::CONNECTION) == Some(&HeaderValue::from_static("Upgrade"))
         || response.headers().get(header::CONTENT_ENCODING).is_some()
     {
         return Ok(response);
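For illustration only, not part of the commit: the added guards mean a WebSocket handshake answer never goes through the compressor. Below is a small, self-contained sketch of the status/header part of this early-return condition, written against the `http` crate types rather than tricot's own code (the function name and simplified signature are made up):

use http::{header, HeaderValue, Response, StatusCode};

// Hypothetical stand-alone check mirroring part of the early-return above.
fn should_skip_compression<B>(resp: &Response<B>) -> bool {
    !resp.status().is_success()
        || resp.status() == StatusCode::PARTIAL_CONTENT
        || resp.headers().get(header::CONNECTION) == Some(&HeaderValue::from_static("Upgrade"))
        || resp.headers().get(header::CONTENT_ENCODING).is_some()
}

fn main() {
    // A WebSocket handshake answer: 101 Switching Protocols with Connection: Upgrade.
    let ws_handshake = Response::builder()
        .status(StatusCode::SWITCHING_PROTOCOLS)
        .header(header::CONNECTION, "Upgrade")
        .header(header::UPGRADE, "websocket")
        .body(())
        .unwrap();
    assert!(should_skip_compression(&ws_handshake));
}

A 101 response is skipped twice over here: it is not a 2xx status and it carries `Connection: Upgrade`, so it is passed through untouched instead of being wrapped in a compressed body, which would break the upgraded connection.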
(The following hunk appears to be from src/proxy_config.rs, where spawn_proxy_config_task and parse_tricot_add_header_tag live; only formatting changes here, the content shown is identical on both sides.)

@@ -355,16 +355,16 @@ pub fn spawn_proxy_config_task(

 #[cfg(test)]
 mod tests {
     use super::*;

     #[test]
     fn test_parse_tricot_add_header_tag() {
         match parse_tricot_add_header_tag("tricot-add-header Content-Security-Policy default-src 'none'; img-src 'self'; script-src 'self'; style-src 'self'") {
             Some((name, value)) => {
                 assert_eq!(name, "Content-Security-Policy");
                 assert_eq!(value, "default-src 'none'; img-src 'self'; script-src 'self'; style-src 'self'");
             }
             _ => panic!("Passed a valid tag but the function says it is not valid")
         }
     }
 }
(The remaining hunks appear to be from src/reverse_proxy.rs, the module behind the reverse_proxy::call seen above.)

@@ -48,9 +48,19 @@ fn remove_hop_headers(headers: &HeaderMap<HeaderValue>) -> HeaderMap<HeaderValue>
     result
 }

-fn create_proxied_response<B>(mut response: Response<B>) -> Response<B> {
-    *response.headers_mut() = remove_hop_headers(response.headers());
-    response
+fn copy_upgrade_headers(
+    old_headers: &HeaderMap<HeaderValue>,
+    new_headers: &mut HeaderMap<HeaderValue>,
+) -> Result<()> {
+    if let Some(conn) = old_headers.get(header::CONNECTION) {
+        if conn.to_str()?.to_lowercase() == "upgrade" {
+            if let Some(upgrade) = old_headers.get(header::UPGRADE) {
+                new_headers.insert(header::CONNECTION, "Upgrade".try_into()?);
+                new_headers.insert(header::UPGRADE, upgrade.clone());
+            }
+        }
+    }
+    Ok(())
 }

 fn forward_uri<B>(forward_url: &str, req: &Request<B>) -> Result<Uri> {
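For illustration only, not part of the commit: `remove_hop_headers` strips hop-by-hop headers, which include `Connection` and `Upgrade`, so a proxied WebSocket handshake would otherwise lose exactly the headers it needs. The sketch below shows the new helper restoring them; the function body is copied from the hunk above, and `anyhow::Result` stands in for the crate's `Result` alias:

use anyhow::Result; // assumption: the crate's Result alias behaves like anyhow's
use http::header::{self, HeaderMap, HeaderValue};

// Copied from the hunk above so the example is self-contained.
fn copy_upgrade_headers(
    old_headers: &HeaderMap<HeaderValue>,
    new_headers: &mut HeaderMap<HeaderValue>,
) -> Result<()> {
    if let Some(conn) = old_headers.get(header::CONNECTION) {
        if conn.to_str()?.to_lowercase() == "upgrade" {
            if let Some(upgrade) = old_headers.get(header::UPGRADE) {
                new_headers.insert(header::CONNECTION, "Upgrade".try_into()?);
                new_headers.insert(header::UPGRADE, upgrade.clone());
            }
        }
    }
    Ok(())
}

fn main() -> Result<()> {
    // Incoming handshake headers; the Connection check is case-insensitive.
    let mut incoming = HeaderMap::new();
    incoming.insert(header::CONNECTION, HeaderValue::from_static("upgrade"));
    incoming.insert(header::UPGRADE, HeaderValue::from_static("websocket"));

    // remove_hop_headers() would have produced a map without Connection/Upgrade;
    // copy_upgrade_headers() puts them back so the upgrade is negotiated end to end.
    let mut outgoing = HeaderMap::new();
    copy_upgrade_headers(&incoming, &mut outgoing)?;

    assert_eq!(outgoing.get(header::CONNECTION).unwrap(), "Upgrade");
    assert_eq!(outgoing.get(header::UPGRADE).unwrap(), "websocket");
    Ok(())
}

Note that the incoming value is matched case-insensitively ("upgrade" or "Upgrade"), while the re-inserted Connection value is normalized to "Upgrade".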
@@ -72,12 +82,13 @@ fn create_proxied_request<B>(
         .uri(forward_uri(forward_url, &request)?)
         .version(hyper::Version::HTTP_11);

-    let headers = builder.headers_mut().unwrap();
+    let old_headers = request.headers();
+    let new_headers = builder.headers_mut().unwrap();

-    *headers = remove_hop_headers(request.headers());
+    *new_headers = remove_hop_headers(old_headers);

     // If request does not have host header, add it from original URI authority
-    if let header::Entry::Vacant(entry) = headers.entry(header::HOST) {
+    if let header::Entry::Vacant(entry) = new_headers.entry(header::HOST) {
         if let Some(authority) = request.uri().authority() {
             entry.insert(authority.as_str().parse()?);
         }
@@ -86,7 +97,7 @@ fn create_proxied_request<B>(
     // Concatenate cookie headers into single header
     // (HTTP/2 allows several cookie headers, but we are proxying to HTTP/1.1 that does not)
     let mut cookie_concat = vec![];
-    for cookie in headers.get_all(header::COOKIE) {
+    for cookie in new_headers.get_all(header::COOKIE) {
         if !cookie_concat.is_empty() {
             cookie_concat.extend(b"; ");
         }
@@ -94,12 +105,12 @@ fn create_proxied_request<B>(
     }
     if !cookie_concat.is_empty() {
         // insert clears the old value of COOKIE and inserts the concatenated version instead
-        headers.insert(header::COOKIE, cookie_concat.try_into()?);
+        new_headers.insert(header::COOKIE, cookie_concat.try_into()?);
     }

     // Add forwarding information in the headers
     let x_forwarded_for_header_name = "x-forwarded-for";
-    match headers.entry(x_forwarded_for_header_name) {
+    match new_headers.entry(x_forwarded_for_header_name) {
         header::Entry::Vacant(entry) => {
             entry.insert(client_ip.to_string().parse()?);
         }
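For illustration only, not part of the commit: the cookie handling shown in the two hunks above folds the several `cookie` headers an HTTP/2 client may send into the single header an HTTP/1.1 upstream expects. The line appending each cookie's bytes falls between the hunks and is reconstructed here as an assumption; a self-contained sketch:

use http::header::{self, HeaderMap, HeaderValue};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Two cookie headers, as an HTTP/2 client is allowed to send them.
    let mut new_headers = HeaderMap::new();
    new_headers.append(header::COOKIE, HeaderValue::from_static("a=1"));
    new_headers.append(header::COOKIE, HeaderValue::from_static("b=2"));

    let mut cookie_concat: Vec<u8> = vec![];
    for cookie in new_headers.get_all(header::COOKIE) {
        if !cookie_concat.is_empty() {
            cookie_concat.extend(b"; ");
        }
        cookie_concat.extend(cookie.as_bytes()); // assumed: this line sits between the hunks
    }
    if !cookie_concat.is_empty() {
        // insert() replaces every previous `cookie` value with the joined one
        new_headers.insert(header::COOKIE, cookie_concat.try_into()?);
    }

    assert_eq!(new_headers.get(header::COOKIE).unwrap(), "a=1; b=2");
    assert_eq!(new_headers.get_all(header::COOKIE).iter().count(), 1);
    Ok(())
}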
@@ -110,24 +121,27 @@ fn create_proxied_request<B>(
         }
     }

-    headers.insert(
+    new_headers.insert(
         HeaderName::from_bytes(b"x-forwarded-proto")?,
         "https".try_into()?,
     );

     // Proxy upgrade requests properly
-    if let Some(conn) = request.headers().get(header::CONNECTION) {
-        if conn.to_str()?.to_lowercase() == "upgrade" {
-            if let Some(upgrade) = request.headers().get(header::UPGRADE) {
-                headers.insert(header::CONNECTION, "Upgrade".try_into()?);
-                headers.insert(header::UPGRADE, upgrade.clone());
-            }
-        }
-    }
+    copy_upgrade_headers(old_headers, new_headers)?;

     Ok(builder.body(request.into_body())?)
 }

+fn create_proxied_response<B>(mut response: Response<B>) -> Result<Response<B>> {
+    let old_headers = response.headers();
+    let mut new_headers = remove_hop_headers(old_headers);
+
+    copy_upgrade_headers(old_headers, &mut new_headers)?;
+
+    *response.headers_mut() = new_headers;
+    Ok(response)
+}
+
 pub async fn call(
     client_ip: IpAddr,
     forward_uri: &str,
@@ -146,7 +160,7 @@ pub async fn call(

     trace!("Inner response: {:?}", response);

-    let proxied_response = create_proxied_response(response);
+    let proxied_response = create_proxied_response(response)?;
     Ok(proxied_response)
 }

@@ -173,7 +187,7 @@ pub async fn call_https(

     trace!("Inner response (HTTPS): {:?}", response);

-    let proxied_response = create_proxied_response(response);
+    let proxied_response = create_proxied_response(response)?;
     Ok(proxied_response)
 }
