forked from Deuxfleurs/tricot
Exclude partial content from compression
parent e3a030ceaf
commit 7488d8e907

2 changed files with 39 additions and 29 deletions
@@ -93,7 +93,7 @@ impl CertStore {
         for dom in domains.iter() {
             match t_last_check.get(dom) {
-                Some(t) if Instant::now() - *t < Duration::from_secs(3600) => continue,
+                Some(t) if Instant::now() - *t < Duration::from_secs(60) => continue,
                 _ => t_last_check.insert(dom.to_string(), Instant::now()),
             };
 
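Note: this hunk shortens the per-domain re-check interval from an hour to a minute. The surrounding code is a per-key throttle; a minimal self-contained sketch of that pattern (the loop and map names come from the hunk, everything else is assumed):

use std::collections::HashMap;
use std::time::{Duration, Instant};

fn main() {
    let domains = vec!["example.com".to_string(), "example.org".to_string()];
    let mut t_last_check: HashMap<String, Instant> = HashMap::new();

    for dom in domains.iter() {
        match t_last_check.get(dom) {
            // checked less than 60 s ago: skip this domain for now
            Some(t) if Instant::now() - *t < Duration::from_secs(60) => continue,
            // first sighting or stale entry: record the check time
            _ => t_last_check.insert(dom.to_string(), Instant::now()),
        };
        println!("checking certificate for {dom}");
    }
}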
@@ -179,7 +179,7 @@ impl CertStore {
         }
 
         // ---- Acquire lock ----
-        // the lock is acquired for fifteen minutes,
+        // the lock is acquired for half an hour,
         // so that in case of an error we won't retry before
         // that delay expires
 
@@ -190,8 +190,8 @@ impl CertStore {
             .create_session(&ConsulSessionRequest {
                 name: lock_name.clone(),
                 node: None,
-                lock_delay: Some("15m".into()),
-                ttl: Some("30m".into()),
+                lock_delay: Some("30m".into()),
+                ttl: Some("45m".into()),
                 behavior: Some("delete".into()),
             })
             .await?;
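Note: the two timing changes are linked. The session's lock_delay ("30m") is the no-retry window named in the comment above, and the TTL ("45m") is kept longer, presumably so the session does not expire before that delay would. A hedged sketch of the invariant (the struct below only mirrors the two fields from the hunk; the real ConsulSessionRequest has more):

use std::time::Duration;

struct SessionTiming {
    lock_delay: Duration, // "30m" in the hunk
    ttl: Duration,        // "45m" in the hunk
}

fn main() {
    let timing = SessionTiming {
        lock_delay: Duration::from_secs(30 * 60),
        ttl: Duration::from_secs(45 * 60),
    };
    // the session should outlive the retry delay it is meant to enforce
    assert!(timing.ttl > timing.lock_delay);
}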
src/https.rs (60 lines changed)
@@ -10,6 +10,7 @@ use async_compression::tokio::bufread::*;
 use futures::StreamExt;
 use futures::TryStreamExt;
 use http::header::{HeaderName, HeaderValue};
+use http::method::Method;
 use hyper::server::conn::Http;
 use hyper::service::service_fn;
 use hyper::{header, Body, Request, Response, StatusCode};
@@ -164,7 +165,7 @@ async fn handle(
     info!("{} {} {}", method, response.status().as_u16(), uri);
 
     if https_config.enable_compression {
-        try_compress(response, accept_encoding, &https_config).await
+        try_compress(response, method, accept_encoding, &https_config).await
     } else {
         Ok(response)
     }
@@ -180,9 +181,21 @@ async fn handle(
 
 async fn try_compress(
     response: Response<Body>,
+    method: Method,
     accept_encoding: Vec<(Option<Encoding>, f32)>,
     https_config: &HttpsConfig,
 ) -> Result<Response<Body>> {
+    // Don't bother compressing successful responses for HEAD and PUT (they should have an empty body)
+    // Don't compress partial content, that would be weird
+    // If already compressed, return as is
+    if (response.status().is_success() && (method == Method::HEAD || method == Method::PUT))
+        || response.status() == StatusCode::PARTIAL_CONTENT
+        || response.headers().get(header::CONTENT_ENCODING).is_some()
+    {
+        return Ok(response);
+    }
+
     // Select preferred encoding among those proposed in accept_encoding
     let max_q: f32 = accept_encoding
         .iter()
         .max_by_key(|(_, q)| (q * 10000f32) as i64)
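Note: this guard is the heart of the commit. Pulled out as a pure function for illustration (a hypothetical helper, not code from the commit), it is easy to exercise:

use http::{header, HeaderMap, Method, StatusCode};

// hypothetical standalone version of the early-return test above
fn should_skip_compression(status: StatusCode, method: &Method, headers: &HeaderMap) -> bool {
    // successful HEAD/PUT responses should carry no body worth compressing
    (status.is_success() && (*method == Method::HEAD || *method == Method::PUT))
        // a 206 body is a byte range; re-encoding it would break Range semantics
        || status == StatusCode::PARTIAL_CONTENT
        // already encoded upstream: pass the response through untouched
        || headers.get(header::CONTENT_ENCODING).is_some()
}

fn main() {
    let plain = HeaderMap::new();
    assert!(should_skip_compression(StatusCode::PARTIAL_CONTENT, &Method::GET, &plain));
    assert!(should_skip_compression(StatusCode::OK, &Method::HEAD, &plain));
    assert!(!should_skip_compression(StatusCode::OK, &Method::GET, &plain));
}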
@@ -208,11 +221,6 @@ async fn try_compress(
         Some(enc) => enc,
     };
 
-    // If already compressed, return as is
-    if response.headers().get(header::CONTENT_ENCODING).is_some() {
-        return Ok(response);
-    }
-
     // If content type not in mime types for which to compress, return as is
     match response.headers().get(header::CONTENT_TYPE) {
         Some(ct) => {
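Note: the enc selected here comes from the max_by_key call at the end of the previous hunk. f32 is not Ord, so the q-value is scaled to an integer key first; a minimal sketch (Option<&str> stands in for the repo's Option<Encoding>):

fn main() {
    // (encoding, q-value) pairs as parsed from an Accept-Encoding header
    let accept_encoding: Vec<(Option<&str>, f32)> =
        vec![(Some("gzip"), 0.8), (Some("zstd"), 1.0), (None, 0.5)];

    // f32 has no total order, so compare scaled integer keys instead
    let max_q: f32 = accept_encoding
        .iter()
        .max_by_key(|(_, q)| (q * 10000f32) as i64)
        .map(|(_, q)| *q)
        .unwrap_or(0f32);

    assert_eq!(max_q, 1.0);
}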
@@ -229,7 +237,7 @@ async fn try_compress(
                 return Ok(response);
             }
         }
-        None => return Ok(response),
+        None => return Ok(response), // don't compress if unknown mime type
     };
 
     let (mut head, mut body) = response.into_parts();
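Note: the match above consults the configured list of compressible mime types. Roughly, the check amounts to the following sketch (the field name compress_mime_types and the helper are assumed, not taken from the commit):

// hedged sketch of the content-type allowlist check
fn is_compressible(content_type: &str, compress_mime_types: &[String]) -> bool {
    // drop parameters such as "; charset=utf-8" before comparing
    let media_type = content_type.split(';').next().unwrap_or("").trim();
    compress_mime_types.iter().any(|mt| mt.as_str() == media_type)
}

fn main() {
    let allow = vec!["text/html".to_string(), "application/json".to_string()];
    assert!(is_compressible("text/html; charset=utf-8", &allow));
    assert!(!is_compressible("image/png", &allow));
}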
@@ -257,34 +265,36 @@ async fn try_compress(
     let body_rd =
         StreamReader::new(body.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)));
 
-    debug!(
+    trace!(
         "Compressing response body as {:?} (at least {} bytes)",
-        encoding, sum_lengths
+        encoding,
+        sum_lengths
     );
 
+    // we don't know the compressed content-length so remove that header
     head.headers.remove(header::CONTENT_LENGTH);
 
-    let compressed_body = match encoding {
-        Encoding::Gzip => {
-            head.headers
-                .insert(header::CONTENT_ENCODING, "gzip".parse()?);
-            Body::wrap_stream(ReaderStream::new(GzipEncoder::new(body_rd)))
-        }
+    let (encoding, compressed_body) = match encoding {
+        Encoding::Gzip => (
+            "gzip",
+            Body::wrap_stream(ReaderStream::new(GzipEncoder::new(body_rd))),
+        ),
         // Encoding::Brotli => {
         //     head.headers.insert(header::CONTENT_ENCODING, "br".parse()?);
         //     Body::wrap_stream(ReaderStream::new(BrotliEncoder::new(body_rd)))
         // }
-        Encoding::Deflate => {
-            head.headers
-                .insert(header::CONTENT_ENCODING, "deflate".parse()?);
-            Body::wrap_stream(ReaderStream::new(DeflateEncoder::new(body_rd)))
-        }
-        Encoding::Zstd => {
-            head.headers
-                .insert(header::CONTENT_ENCODING, "zstd".parse()?);
-            Body::wrap_stream(ReaderStream::new(ZstdEncoder::new(body_rd)))
-        }
+        Encoding::Deflate => (
+            "deflate",
+            Body::wrap_stream(ReaderStream::new(DeflateEncoder::new(body_rd))),
+        ),
+        Encoding::Zstd => (
+            "zstd",
+            Body::wrap_stream(ReaderStream::new(ZstdEncoder::new(body_rd))),
+        ),
         _ => unreachable!(),
     };
 
+    head.headers
+        .insert(header::CONTENT_ENCODING, encoding.parse()?);
+
     Ok(Response::from_parts(head, compressed_body))
 }
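Note on the last hunk's shape: each match arm now returns a (name, body) pair, so the Content-Encoding header is written once after the match instead of in every arm. The same refactor in miniature (illustrative values, not repo code):

fn main() {
    let selected = "zstd"; // stand-in for the Encoding variant chosen earlier

    // arms yield data only; no side effects inside the match
    let (name, body) = match selected {
        "gzip" => ("gzip", "gzip stream"),
        "deflate" => ("deflate", "deflate stream"),
        "zstd" => ("zstd", "zstd stream"),
        _ => unreachable!(),
    };

    // a single insertion point replaces three duplicated header writes
    println!("Content-Encoding: {name} ({body})");
}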