Remove content-length when compressed; don't compress small response

Alex 2021-12-09 19:00:50 +01:00
parent 3f2a52dc8e
commit e0912dc5fe
1 changed file with 28 additions and 4 deletions
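
Two fixes in one: a response body that gets re-encoded on the fly no longer matches its original Content-Length, so that header must be dropped before streaming the compressed body (hyper then falls back to chunked transfer encoding); and very small bodies are not worth compressing, so the handler now buffers incoming chunks until it has seen at least 1400 bytes (presumably chosen as roughly one TCP segment's payload) and returns the body as-is if it ends before reaching that threshold. A self-contained sketch of the pattern follows the diff below.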


@@ -7,6 +7,7 @@ use log::*;
 use accept_encoding_fork::Encoding;
 use async_compression::tokio::bufread::*;
+use futures::StreamExt;
 use futures::TryStreamExt;
 use http::header::{HeaderName, HeaderValue};
 use hyper::server::conn::Http;
@@ -163,7 +164,7 @@ async fn handle(
 	info!("{} {} {}", method, response.status().as_u16(), uri);
 	if https_config.enable_compression {
-		try_compress(response, accept_encoding, &https_config)
+		try_compress(response, accept_encoding, &https_config).await
 	} else {
 		Ok(response)
 	}
@@ -177,7 +178,7 @@ async fn handle(
 	}
 }
-fn try_compress(
+async fn try_compress(
 	response: Response<Body>,
 	accept_encoding: Vec<(Option<Encoding>, f32)>,
 	https_config: &HttpsConfig,
@@ -230,11 +231,34 @@ fn try_compress(
 		None => return Ok(response),
 	};
-	debug!("Compressing response body as {:?}", encoding);
-	let (mut head, body) = response.into_parts();
+	let (mut head, mut body) = response.into_parts();
+	// ---- If body is smaller than 1400 bytes, don't compress ----
+	let mut chunks = vec![];
+	let mut sum_lengths = 0;
+	while sum_lengths < 1400 {
+		match body.next().await {
+			Some(chunk) => {
+				let chunk = chunk?;
+				sum_lengths += chunk.len();
+				chunks.push(chunk);
+			}
+			None => {
+				return Ok(Response::from_parts(head, Body::from(chunks.concat())));
+			}
+		}
+	}
+	// put beginning chunks back into body
+	let body = futures::stream::iter(chunks.into_iter().map(|c| Ok(c))).chain(body);
+	// make an async reader from that for compressor
+	let body_rd =
+		StreamReader::new(body.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)));
+	debug!("Compressing response body as {:?} (at least {} bytes)", encoding, sum_lengths);
+	head.headers.remove(header::CONTENT_LENGTH);
 	let compressed_body = match encoding {
 		Encoding::Gzip => {
 			head.headers
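
For reference, here is a minimal standalone sketch of the same buffer-then-compress pattern, assuming hyper 0.14 with its "stream" feature, futures 0.3, tokio-util, and async-compression with the "tokio" and "gzip" features. The function name gzip_if_large is hypothetical and gzip is hard-coded for brevity; the actual code above picks the encoder from the client's Accept-Encoding.

use async_compression::tokio::bufread::GzipEncoder;
use futures::{StreamExt, TryStreamExt};
use http::header::{self, HeaderValue};
use hyper::{body::Bytes, Body, Response};
use tokio_util::io::{ReaderStream, StreamReader};

// Hypothetical helper, not tricot's API: gzip-compress a response,
// unless its body turns out to be smaller than 1400 bytes.
async fn gzip_if_large(response: Response<Body>) -> Result<Response<Body>, hyper::Error> {
    let (mut head, mut body) = response.into_parts();

    // Pull chunks off the body until at least 1400 bytes have been seen.
    let mut chunks: Vec<Bytes> = vec![];
    let mut sum_lengths = 0;
    while sum_lengths < 1400 {
        match body.next().await {
            Some(chunk) => {
                let chunk = chunk?;
                sum_lengths += chunk.len();
                chunks.push(chunk);
            }
            // The body ended below the threshold: send it uncompressed.
            // The original Content-Length header is still accurate here.
            None => return Ok(Response::from_parts(head, Body::from(chunks.concat()))),
        }
    }

    // Re-prepend the buffered chunks so no bytes are lost, then adapt the
    // byte stream into an AsyncBufRead for the compressor.
    let body = futures::stream::iter(chunks.into_iter().map(|c| Ok(c))).chain(body);
    let body_rd =
        StreamReader::new(body.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)));

    // The compressed size is unknown in advance: drop Content-Length and
    // let hyper fall back to chunked transfer encoding.
    head.headers.remove(header::CONTENT_LENGTH);
    head.headers
        .insert(header::CONTENT_ENCODING, HeaderValue::from_static("gzip"));

    let compressed = ReaderStream::new(GzipEncoder::new(body_rd));
    Ok(Response::from_parts(head, Body::wrap_stream(compressed)))
}

Note that re-prepending the buffered chunks to the remaining stream before compression is essential, otherwise the first bytes of the body would be silently dropped; and once Content-Length is gone, hyper emits the response with chunked transfer encoding, which is what makes streaming compression possible without knowing the final size up front.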