From 7b844e13c653bd6ba6c027bdfd94e5e0d3c804f0 Mon Sep 17 00:00:00 2001
From: OJ Kwon <1210596+kwonoj@users.noreply.github.com>
Date: Wed, 1 Mar 2023 21:39:18 -0800
Subject: [PATCH] feat(devserver/http): support gzipped responses
 (vercel/turbo#4011)

Implements WEB-666.

This PR enables gzip compression for responses by default. This can be
extended to other compression methods (e.g. brotli), but for now only a
minimal default compression is implemented, enough to pass the existing
test case.

Fixes `test/integration/compression/test/index.test.js`.
---
 crates/turbopack-dev-server/Cargo.toml  |  2 +
 crates/turbopack-dev-server/src/http.rs | 58 ++++++++++++++++++++++---
 2 files changed, 53 insertions(+), 7 deletions(-)

diff --git a/crates/turbopack-dev-server/Cargo.toml b/crates/turbopack-dev-server/Cargo.toml
index 28959cea404ea..1899b8e7b4f33 100644
--- a/crates/turbopack-dev-server/Cargo.toml
+++ b/crates/turbopack-dev-server/Cargo.toml
@@ -11,6 +11,7 @@ bench = false
 
 [dependencies]
 anyhow = "1.0.47"
+async-compression = { workspace = true }
 futures = "0.3.25"
 hyper = { version = "0.14", features = ["full"] }
 hyper-tungstenite = "0.8.1"
@@ -25,6 +26,7 @@ serde_json = "1.0.85"
 serde_qs = "0.10.1"
 tokio = "1.21.2"
 tokio-stream = "0.1.9"
+tokio-util = { workspace = true }
 turbo-tasks = { path = "../turbo-tasks" }
 turbo-tasks-fs = { path = "../turbo-tasks-fs" }
 turbo-tasks-hash = { path = "../turbo-tasks-hash" }
diff --git a/crates/turbopack-dev-server/src/http.rs b/crates/turbopack-dev-server/src/http.rs
index 883535bb7c5f5..33c726db06ee7 100644
--- a/crates/turbopack-dev-server/src/http.rs
+++ b/crates/turbopack-dev-server/src/http.rs
@@ -1,7 +1,15 @@
+use std::io::{Error, ErrorKind};
+
 use anyhow::Result;
 use futures::{StreamExt, TryStreamExt};
-use hyper::{header::HeaderName, Request, Response};
+use hyper::{
+    header::{HeaderName, CONTENT_ENCODING, CONTENT_LENGTH},
+    http::HeaderValue,
+    Request, Response,
+};
+use mime::Mime;
 use mime_guess::mime;
+use tokio_util::io::{ReaderStream, StreamReader};
 use turbo_tasks::TransientInstance;
 use turbo_tasks_fs::{FileContent, FileContentReadRef};
 use turbopack_core::{asset::AssetContent, issue::IssueReporterVc, version::VersionedContent};
@@ -92,14 +100,32 @@ pub async fn process_request_with_content_source(
                     );
                 }
 
+                // naively checking if content is `compressible`.
+                let mut should_compress = false;
+                let should_compress_predicate =
+                    |mime: &Mime| match (mime.type_(), mime.subtype(), mime.suffix()) {
+                        (_, mime::PLAIN, _)
+                        | (_, mime::JSON, _)
+                        | (mime::TEXT, _, _)
+                        | (mime::APPLICATION, mime::XML, _)
+                        | (mime::APPLICATION, mime::JAVASCRIPT, _)
+                        | (_, _, Some(mime::XML))
+                        | (_, _, Some(mime::JSON))
+                        | (_, _, Some(mime::TEXT)) => true,
+                        _ => false,
+                    };
+
                 if let Some(content_type) = file.content_type() {
                     header_map.append(
                         "content-type",
                         hyper::header::HeaderValue::try_from(content_type.to_string())?,
                     );
+
+                    should_compress = should_compress_predicate(content_type);
                 } else if let hyper::header::Entry::Vacant(entry) = header_map.entry("content-type") {
                     let guess = mime_guess::from_path(&original_path).first_or_octet_stream();
+                    should_compress = should_compress_predicate(&guess);
 
                     // If a text type, application/javascript, or application/json was
                     // guessed, use a utf-8 charset as we most likely generated it as
                     // such.
@@ -117,13 +143,31 @@ pub async fn process_request_with_content_source(
                 }
 
                 let content = file.content();
-                header_map.insert(
-                    "Content-Length",
-                    hyper::header::HeaderValue::try_from(content.len().to_string())?,
-                );
+                let response = if should_compress {
+                    header_map.insert(CONTENT_ENCODING, HeaderValue::from_static("gzip"));
+
+                    // Grab ropereader stream, coerce anyhow::Error to std::io::Error
+                    let stream_ext = content
+                        .read()
+                        .into_stream()
+                        .map_err(|err| Error::new(ErrorKind::Other, err));
+
+                    let gzipped_stream =
+                        ReaderStream::new(async_compression::tokio::bufread::GzipEncoder::new(
+                            StreamReader::new(stream_ext),
+                        ));
+
+                    response.body(hyper::Body::wrap_stream(gzipped_stream))?
+                } else {
+                    header_map.insert(
+                        CONTENT_LENGTH,
+                        hyper::header::HeaderValue::try_from(content.len().to_string())?,
+                    );
+
+                    response.body(hyper::Body::wrap_stream(content.read()))?
+                };
 
-                let bytes = content.read();
-                return Ok(response.body(hyper::Body::wrap_stream(bytes))?);
+                return Ok(response);
             }
         }
         GetFromSourceResult::HttpProxy(proxy_result) => {
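
Note (not part of the patch): the gzip branch above chains tokio_util's StreamReader and ReaderStream adapters around async-compression's bufread::GzipEncoder so the file content is compressed as it streams out. Below is a minimal, self-contained sketch of that same adapter chain. The chunk data, the `main` wrapper, and the Cargo feature flags named in the leading comment are assumptions for illustration, not code from this PR.

// Standalone sketch, separate from the patch above. Assumed Cargo dependencies and
// (guessed) features: tokio = { features = ["macros", "rt-multi-thread"] },
// tokio-util = { features = ["io"] }, async-compression = { features = ["tokio", "gzip"] },
// plus `bytes` and `futures`.
use std::io;

use async_compression::tokio::bufread::GzipEncoder;
use bytes::Bytes;
use futures::{stream, TryStreamExt};
use tokio_util::io::{ReaderStream, StreamReader};

#[tokio::main]
async fn main() -> io::Result<()> {
    // Stand-in for the chunked file-content stream the dev server reads
    // (`content.read().into_stream()` in the patch); the bytes are made up.
    let chunks: Vec<io::Result<Bytes>> = vec![
        Ok(Bytes::from_static(b"hello ")),
        Ok(Bytes::from_static(b"world")),
    ];
    let body_stream = stream::iter(chunks);

    // Stream of Bytes -> AsyncBufRead -> gzip AsyncRead -> stream of compressed Bytes.
    let mut gzipped = ReaderStream::new(GzipEncoder::new(StreamReader::new(body_stream)));

    // Collect the compressed output; a server would instead hand `gzipped`
    // to hyper::Body::wrap_stream, as the patch does.
    let mut compressed = Vec::new();
    while let Some(chunk) = gzipped.try_next().await? {
        compressed.extend_from_slice(&chunk);
    }

    // Gzip output always starts with the magic bytes 0x1f 0x8b.
    assert_eq!(compressed[..2], [0x1f, 0x8b]);
    Ok(())
}

Because the encoder streams, the compressed size is not known up front, which is presumably why the gzip branch sets Content-Encoding but omits the Content-Length header rather than buffering the body to measure it.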