Skip to content

Commit

Permalink
feat(devserver/http): support gzipped responses (#4011)
Browse files Browse the repository at this point in the history
Implements WEB-666.

This PR enables gzip compression for responses by default. This can
be extended to other compression methods (e.g., brotli), but for now it
implements only a minimal default compression to pass the existing test case.

Fixes `test/integration/compression/test/index.test.js`.
  • Loading branch information
kwonoj authored Mar 2, 2023
1 parent c74cf18 commit b6dc9d0
Show file tree
Hide file tree
Showing 4 changed files with 75 additions and 9 deletions.
19 changes: 17 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 5 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -110,3 +110,8 @@ node-file-trace = { path = "crates/node-file-trace", version = "0.1.0" }
# it works. next-swc have various platforms, some doesn't support native (using openssl-sys)
# and some aren't buildable with rustls.
reqwest = { version = "0.11.13", default-features = false }
async-compression = { version = "0.3.13", default-features = false, features = [
"gzip",
"tokio",
] }
tokio-util = { version = "0.7.7", features = ["io"] }
2 changes: 2 additions & 0 deletions crates/turbopack-dev-server/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ bench = false

[dependencies]
anyhow = "1.0.47"
async-compression = { workspace = true }
futures = "0.3.25"
hyper = { version = "0.14", features = ["full"] }
hyper-tungstenite = "0.8.1"
Expand All @@ -25,6 +26,7 @@ serde_json = "1.0.85"
serde_qs = "0.10.1"
tokio = "1.21.2"
tokio-stream = "0.1.9"
tokio-util = { workspace = true }
turbo-tasks = { path = "../turbo-tasks" }
turbo-tasks-fs = { path = "../turbo-tasks-fs" }
turbo-tasks-hash = { path = "../turbo-tasks-hash" }
Expand Down
58 changes: 51 additions & 7 deletions crates/turbopack-dev-server/src/http.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,15 @@
use std::io::{Error, ErrorKind};

use anyhow::Result;
use futures::{StreamExt, TryStreamExt};
use hyper::{header::HeaderName, Request, Response};
use hyper::{
header::{HeaderName, CONTENT_ENCODING, CONTENT_LENGTH},
http::HeaderValue,
Request, Response,
};
use mime::Mime;
use mime_guess::mime;
use tokio_util::io::{ReaderStream, StreamReader};
use turbo_tasks::TransientInstance;
use turbo_tasks_fs::{FileContent, FileContentReadRef};
use turbopack_core::{asset::AssetContent, issue::IssueReporterVc, version::VersionedContent};
Expand Down Expand Up @@ -92,14 +100,32 @@ pub async fn process_request_with_content_source(
);
}

// naively checking if content is `compressible`.
let mut should_compress = false;
let should_compress_predicate =
|mime: &Mime| match (mime.type_(), mime.subtype(), mime.suffix()) {
(_, mime::PLAIN, _)
| (_, mime::JSON, _)
| (mime::TEXT, _, _)
| (mime::APPLICATION, mime::XML, _)
| (mime::APPLICATION, mime::JAVASCRIPT, _)
| (_, _, Some(mime::XML))
| (_, _, Some(mime::JSON))
| (_, _, Some(mime::TEXT)) => true,
_ => false,
};

if let Some(content_type) = file.content_type() {
header_map.append(
"content-type",
hyper::header::HeaderValue::try_from(content_type.to_string())?,
);

should_compress = should_compress_predicate(content_type);
} else if let hyper::header::Entry::Vacant(entry) = header_map.entry("content-type")
{
let guess = mime_guess::from_path(&original_path).first_or_octet_stream();
should_compress = should_compress_predicate(&guess);
// If a text type, application/javascript, or application/json was
// guessed, use a utf-8 charset as we most likely generated it as
// such.
Expand All @@ -117,13 +143,31 @@ pub async fn process_request_with_content_source(
}

let content = file.content();
header_map.insert(
"Content-Length",
hyper::header::HeaderValue::try_from(content.len().to_string())?,
);
let response = if should_compress {
header_map.insert(CONTENT_ENCODING, HeaderValue::from_static("gzip"));

// Grab ropereader stream, coerce anyhow::Error to std::io::Error
let stream_ext = content
.read()
.into_stream()
.map_err(|err| Error::new(ErrorKind::Other, err));

let gzipped_stream =
ReaderStream::new(async_compression::tokio::bufread::GzipEncoder::new(
StreamReader::new(stream_ext),
));

response.body(hyper::Body::wrap_stream(gzipped_stream))?
} else {
header_map.insert(
CONTENT_LENGTH,
hyper::header::HeaderValue::try_from(content.len().to_string())?,
);

response.body(hyper::Body::wrap_stream(content.read()))?
};

let bytes = content.read();
return Ok(response.body(hyper::Body::wrap_stream(bytes))?);
return Ok(response);
}
}
GetFromSourceResult::HttpProxy(proxy_result) => {
Expand Down

0 comments on commit b6dc9d0

Please sign in to comment.