Commit

Fix handling of chunked+gzipped response when first chunk does not give uncompressed data (#3477)
socketpair committed Jan 4, 2019
1 parent 127e123 commit 553fa1d
Showing 2 changed files with 45 additions and 8 deletions.
aiohttp/streams.py (24 changes: 16 additions & 8 deletions)
@@ -254,15 +254,23 @@ def end_http_chunk_receiving(self) -> None:
         if self._http_chunk_splits is None:
             raise RuntimeError("Called end_chunk_receiving without calling "
                                "begin_chunk_receiving first")
-        if not self._http_chunk_splits or \
-                self._http_chunk_splits[-1] != self.total_bytes:
-            self._http_chunk_splits.append(self.total_bytes)
-
-            # wake up readchunk when end of http chunk received
-            waiter = self._waiter
-            if waiter is not None:
-                self._waiter = None
-                set_result(waiter, False)
+        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0
+
+        if self.total_bytes == pos:
+            # Note: when chunked + gzip is used, we can receive a chunk
+            # of compressed data, but that data may not be enough for the
+            # gzip FSM to yield any uncompressed data. That's why the
+            # current position may not change after receiving a chunk.
+            return
+
+        self._http_chunk_splits.append(self.total_bytes)
+
+        # wake up readchunk when end of http chunk received
+        waiter = self._waiter
+        if waiter is not None:
+            self._waiter = None
+            set_result(waiter, False)
 
     async def _wait(self, func_name: str) -> None:
         # StreamReader uses a future to link the protocol feed_data() method
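
The note in the added code describes the case where an HTTP chunk carries compressed bytes that the decoder cannot yet turn into output. A minimal standalone sketch of that effect, using only the standard-library zlib module (this is not aiohttp code, and the 4-byte split point is chosen arbitrarily for illustration):

import zlib

# Build a small gzip payload (wbits=31 selects the gzip container).
compressor = zlib.compressobj(wbits=31)
payload = compressor.compress(b"ungzipped data") + compressor.flush()

decompressor = zlib.decompressobj(wbits=31)
first, rest = payload[:4], payload[4:]

# The first slice covers only part of the gzip header, so it cannot
# produce any uncompressed output yet.
print(decompressor.decompress(first))  # b''
print(decompressor.decompress(rest))   # b'ungzipped data'

When the decoder yields nothing for a chunk, no data is fed to the stream and total_bytes stays unchanged, which is exactly the situation the early return above handles instead of appending a duplicate split offset.
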
tests/test_streams.py (29 changes: 29 additions & 0 deletions)
@@ -719,6 +719,35 @@ async def test_readchunk_with_other_read_calls(self) -> None:
         assert b'' == data
         assert not end_of_chunk
 
+    async def test_read_empty_chunks(self) -> None:
+        loop = asyncio.get_event_loop()
+        stream = self._make_one()
+
+        # Simulate an empty first chunk. This is a significant special case.
+        stream.begin_http_chunk_receiving()
+        stream.end_http_chunk_receiving()
+
+        stream.begin_http_chunk_receiving()
+        stream.feed_data(b'ungzipped')
+        stream.end_http_chunk_receiving()
+
+        # An empty chunk in the middle is possible when compression is used.
+        stream.begin_http_chunk_receiving()
+        stream.end_http_chunk_receiving()
+
+        # A second consecutive empty chunk is also possible.
+        stream.begin_http_chunk_receiving()
+        stream.end_http_chunk_receiving()
+
+        stream.begin_http_chunk_receiving()
+        stream.feed_data(b' data')
+        stream.end_http_chunk_receiving()
+
+        stream.feed_eof()
+
+        data = await stream.read()
+        assert data == b'ungzipped data'
+
     async def test_readchunk_separate_http_chunk_tail(self) -> None:
         """Test that stream.readchunk returns (b'', True) when end of
         http chunk received after body
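
For readers who want the bookkeeping rule in isolation, here is a small self-contained sketch (a hypothetical ChunkSplits class, not part of aiohttp) of the behaviour the test exercises: a chunk boundary is recorded only when new decoded bytes have arrived since the last recorded boundary.

from typing import List


class ChunkSplits:
    """Toy model of the split-offset bookkeeping, for illustration only."""

    def __init__(self) -> None:
        self.total_bytes = 0          # decoded bytes seen so far
        self.splits: List[int] = []   # offsets at which HTTP chunks ended

    def feed(self, data: bytes) -> None:
        self.total_bytes += len(data)

    def end_chunk(self) -> None:
        pos = self.splits[-1] if self.splits else 0
        if self.total_bytes == pos:
            # Empty logical chunk (e.g. gzip yielded nothing yet): skip it.
            return
        self.splits.append(self.total_bytes)


splits = ChunkSplits()
splits.end_chunk()            # empty first chunk
splits.feed(b"ungzipped")
splits.end_chunk()
splits.end_chunk()            # another empty chunk
splits.feed(b" data")
splits.end_chunk()
print(splits.splits)          # [9, 14] -- no zero-length entries
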
