Skip to content

Commit

Permalink
Fix some issues with automatic gzip decompression
Browse files Browse the repository at this point in the history
This plugs a buffer stream in between the decompressor stream and the
"user" stream. This makes sure that (i) the correct number of bytes is
read from the http stream and thus decoded (fixes #859) and (ii) that we
can read the http stream in chunks instead of byte-by-byte (the previous
code even warned about this usage).

Fixes #859.
  • Loading branch information
fredrikekre committed Jun 20, 2022
1 parent 00aa294 commit 99d3177
Show file tree
Hide file tree
Showing 2 changed files with 39 additions and 8 deletions.
4 changes: 3 additions & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "HTTP"
uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3"
authors = ["Jacob Quinn", "contributors: https://github.com/JuliaWeb/HTTP.jl/graphs/contributors"]
version = "1.0.1"
version = "1.0.2"

[deps]
Base64 = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
Expand All @@ -13,6 +13,7 @@ LoggingExtras = "e6f89c97-d47a-5376-807f-9c37f3926c36"
MbedTLS = "739be429-bea8-5141-9913-cc70e7f3736d"
NetworkOptions = "ca575930-c2e3-43a9-ace4-1e988b2c1908"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
SimpleBufferStream = "777ac1f9-54b0-4bf8-805c-2214025038e7"
Sockets = "6462fe0b-24de-5631-8697-dd941f90decc"
URIs = "5c2747f8-b7ea-4ff2-ba2e-563bfd36b1d4"
UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
Expand All @@ -22,6 +23,7 @@ CodecZlib = "0.7"
IniFile = "0.5"
LoggingExtras = "0.4.9"
MbedTLS = "0.6.8, 0.7, 1"
SimpleBufferStream = "1.1"
URIs = "1.3"
julia = "1.6"

Expand Down
43 changes: 36 additions & 7 deletions src/clientlayers/StreamRequest.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ module StreamRequest

using ..IOExtras, ..Messages, ..Streams, ..ConnectionPool, ..Strings, ..RedirectRequest, ..Exceptions
using LoggingExtras, CodecZlib, URIs
using SimpleBufferStream: BufferStream

export streamlayer

Expand Down Expand Up @@ -103,13 +104,41 @@ writechunk(stream, body::Union{Dict, NamedTuple}) = writebodystream(stream, body
writechunk(stream, body) = write(stream, body)

function readbody(stream::Stream, res::Response, decompress)
readstream = decompress && header(res, "Content-Encoding") == "gzip" ? GzipDecompressorStream(stream) : stream
if isbytes(res.body)
res.body = read(readstream)
elseif !isredirect(stream) && !retryable(stream)
# if the request/response pair are going to be redirected or retried,
# we want to avoid "contaminating" our response body stream
write(res.body, readstream)
# Bail early if we are not going to read anything.
# If the request/response pair are going to be redirected or retried,
# we want to avoid "contaminating" our response body stream.
willread = isbytes(res.body) || (!isredirect(stream) && !retryable(stream))
willread || return

if decompress && header(res, "Content-Encoding") == "gzip"
# Plug in a buffer stream in between so that we can (i) read the http stream in
# chunks instead of byte-by-byte and (ii) make sure to stop reading the http stream
# at eof.
buf = BufferStream()
gzstream = GzipDecompressorStream(buf)
tsk = @async begin
try
write(gzstream, stream)
finally
# Close here to (i) deallocate resources in zlib and (ii) make sure that
# read(buf)/write(..., buf) below don't block forever. Note that this will
# close the stream wrapped by the decompressor (buf) but *not* the http
# stream, which should be left open.
close(gzstream)
end
end
if isbytes(res.body)
res.body = read(buf)
else
write(res.body, buf)
end
wait(tsk)
else
if isbytes(res.body)
res.body = read(stream)
else
write(res.body, stream)
end
end
end

Expand Down

0 comments on commit 99d3177

Please sign in to comment.