forked from Deuxfleurs/tricot

Remove content-length when compressed; don't compress small response

parent 3f2a52dc8e
commit e0912dc5fe

1 changed file with 28 additions and 4 deletions:

 src/https.rs | 32 ++++++++++++++++++++++++++++----
diff --git a/src/https.rs b/src/https.rs
--- a/src/https.rs
+++ b/src/https.rs
@@ -7,6 +7,7 @@ use log::*;
 
 use accept_encoding_fork::Encoding;
 use async_compression::tokio::bufread::*;
+use futures::StreamExt;
 use futures::TryStreamExt;
 use http::header::{HeaderName, HeaderValue};
 use hyper::server::conn::Http;
@@ -163,7 +164,7 @@ async fn handle(
 	info!("{} {} {}", method, response.status().as_u16(), uri);
 
 	if https_config.enable_compression {
-		try_compress(response, accept_encoding, &https_config)
+		try_compress(response, accept_encoding, &https_config).await
 	} else {
 		Ok(response)
 	}
@@ -177,7 +178,7 @@ async fn handle(
 	}
 }
 
-fn try_compress(
+async fn try_compress(
 	response: Response<Body>,
 	accept_encoding: Vec<(Option<Encoding>, f32)>,
 	https_config: &HttpsConfig,
@@ -230,11 +231,34 @@ fn try_compress(
 		None => return Ok(response),
 	};
 
-	debug!("Compressing response body as {:?}", encoding);
+	let (mut head, mut body) = response.into_parts();
 
-	let (mut head, body) = response.into_parts();
+	// ---- If body is smaller than 1400 bytes, don't compress ----
+	let mut chunks = vec![];
+	let mut sum_lengths = 0;
+	while sum_lengths < 1400 {
+		match body.next().await {
+			Some(chunk) => {
+				let chunk = chunk?;
+				sum_lengths += chunk.len();
+				chunks.push(chunk);
+			}
+			None => {
+				return Ok(Response::from_parts(head, Body::from(chunks.concat())));
+			}
+		}
+	}
+
+	// put beginning chunks back into body
+	let body = futures::stream::iter(chunks.into_iter().map(|c| Ok(c))).chain(body);
+
+	// make an async reader from that for compressor
 	let body_rd =
 		StreamReader::new(body.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)));
+
+	debug!("Compressing response body as {:?} (at least {} bytes)", encoding, sum_lengths);
+	head.headers.remove(header::CONTENT_LENGTH);
+
 	let compressed_body = match encoding {
 		Encoding::Gzip => {
 			head.headers
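A note on the first half of the change ("remove content-length when compressed"): once the body is re-streamed through a compressor, the original Content-Length no longer matches what goes on the wire, so the stale header has to be dropped before the compressed stream is attached (hyper then falls back to chunked transfer encoding). Below is a minimal standalone sketch of that header adjustment with the http crate; the function name prepare_compressed_headers is illustrative and not part of the commit, and gzip is hard-coded here whereas the real code picks the encoding from the client's Accept-Encoding:

use http::header::{self, HeaderMap, HeaderValue};

// Drop the now-stale Content-Length and advertise the encoding instead.
fn prepare_compressed_headers(headers: &mut HeaderMap<HeaderValue>) {
    headers.remove(header::CONTENT_LENGTH);
    headers.insert(header::CONTENT_ENCODING, HeaderValue::from_static("gzip"));
}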
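And a standalone sketch of the second half ("don't compress small response"): buffer chunks from the body stream until either the stream ends below the threshold (return the bytes uncompressed) or the threshold is reached (chain the buffered prefix back in front of the remaining stream and hand the whole thing to the compressor). This is a simplified illustration over a plain Vec<u8> stream, not the hyper::Body code from the commit; peek_body and Peeked are made-up names. The 1400-byte cutoff mirrors the commit and is presumably roughly one MTU-sized packet, below which compression buys little.

use futures::stream::{self, BoxStream, Stream, StreamExt};

const THRESHOLD: usize = 1400; // same cutoff as the commit

enum Peeked {
    // Whole body was shorter than THRESHOLD: skip compression, return it as-is.
    Small(Vec<u8>),
    // At least THRESHOLD bytes seen: buffered prefix chained back in front of the rest.
    Large(BoxStream<'static, Vec<u8>>, usize),
}

async fn peek_body<S>(mut body: S) -> Peeked
where
    S: Stream<Item = Vec<u8>> + Send + Unpin + 'static,
{
    let mut chunks = vec![];
    let mut sum_lengths = 0;
    while sum_lengths < THRESHOLD {
        match body.next().await {
            Some(chunk) => {
                sum_lengths += chunk.len();
                chunks.push(chunk);
            }
            // Body ended before reaching the threshold: too small to bother compressing.
            None => return Peeked::Small(chunks.concat()),
        }
    }
    // Put the buffered chunks back in front of what is left of the stream.
    Peeked::Large(stream::iter(chunks).chain(body).boxed(), sum_lengths)
}

fn main() {
    futures::executor::block_on(async {
        // Small body: returned untouched, no compression step.
        match peek_body(stream::iter(vec![b"hello".to_vec()])).await {
            Peeked::Small(bytes) => println!("small: {} bytes, not compressed", bytes.len()),
            Peeked::Large(..) => unreachable!(),
        }
        // Large body: the peeked prefix is replayed, nothing is lost.
        if let Peeked::Large(rebuilt, seen) = peek_body(stream::iter(vec![vec![0u8; 1000]; 2])).await {
            let total = rebuilt.concat().await.len();
            println!("large: {} bytes peeked, {} bytes total", seen, total);
        }
    });
}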