From 5ddff951a40d0e699b3037f8482b53bf1d9f3990 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Thu, 5 Dec 2024 22:27:46 +0000
Subject: [PATCH] [PR #10125/d58d2c3d backport][3.11] Disable zero copy writes
 in the ``StreamWriter`` (#10126)

Co-authored-by: J. Nick Koston
---
 CHANGES/10125.bugfix.rst  |  1 +
 aiohttp/http_writer.py    |  2 +-
 tests/test_http_writer.py | 27 +++++++++++++--------------
 3 files changed, 15 insertions(+), 15 deletions(-)
 create mode 100644 CHANGES/10125.bugfix.rst

diff --git a/CHANGES/10125.bugfix.rst b/CHANGES/10125.bugfix.rst
new file mode 100644
index 00000000000..4ece1e68d96
--- /dev/null
+++ b/CHANGES/10125.bugfix.rst
@@ -0,0 +1 @@
+Disabled zero copy writes in the ``StreamWriter`` -- by :user:`bdraco`.
diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py
index c66fda3d8d0..edd19ed65da 100644
--- a/aiohttp/http_writer.py
+++ b/aiohttp/http_writer.py
@@ -90,7 +90,7 @@ def _writelines(self, chunks: Iterable[bytes]) -> None:
         transport = self._protocol.transport
         if transport is None or transport.is_closing():
             raise ClientConnectionResetError("Cannot write to closing transport")
-        transport.writelines(chunks)
+        transport.write(b"".join(chunks))
 
     async def write(
         self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py
index 0ed0e615700..5f316fad2f7 100644
--- a/tests/test_http_writer.py
+++ b/tests/test_http_writer.py
@@ -104,16 +104,15 @@ async def test_write_large_payload_deflate_compression_data_in_eof(
     assert transport.write.called  # type: ignore[attr-defined]
     chunks = [c[1][0] for c in list(transport.write.mock_calls)]  # type: ignore[attr-defined]
     transport.write.reset_mock()  # type: ignore[attr-defined]
-    assert not transport.writelines.called  # type: ignore[attr-defined]
 
     # This payload compresses to 20447 bytes
     payload = b"".join(
         [bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)]
     )
     await msg.write_eof(payload)
-    assert not transport.write.called  # type: ignore[attr-defined]
-    assert transport.writelines.called  # type: ignore[attr-defined]
-    chunks.extend(transport.writelines.mock_calls[0][1][0])  # type: ignore[attr-defined]
+    chunks.extend([c[1][0] for c in list(transport.write.mock_calls)])  # type: ignore[attr-defined]
+
+    assert all(chunks)
     content = b"".join(chunks)
     assert zlib.decompress(content) == (b"data" * 4096) + payload
 
@@ -180,7 +179,7 @@ async def test_write_payload_deflate_compression_chunked(
     await msg.write(b"data")
     await msg.write_eof()
 
-    chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)]  # type: ignore[attr-defined]
+    chunks = [c[1][0] for c in list(transport.write.mock_calls)]  # type: ignore[attr-defined]
     assert all(chunks)
     content = b"".join(chunks)
     assert content == expected
@@ -216,7 +215,7 @@ async def test_write_payload_deflate_compression_chunked_data_in_eof(
     await msg.write(b"data")
     await msg.write_eof(b"end")
 
-    chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)]  # type: ignore[attr-defined]
+    chunks = [c[1][0] for c in list(transport.write.mock_calls)]  # type: ignore[attr-defined]
     assert all(chunks)
     content = b"".join(chunks)
     assert content == expected
@@ -235,16 +234,16 @@ async def test_write_large_payload_deflate_compression_chunked_data_in_eof(
     # This payload compresses to 1111 bytes
     payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)])
     await msg.write_eof(payload)
-    assert not transport.write.called  # type: ignore[attr-defined]
-    chunks = []
-    for write_lines_call in transport.writelines.mock_calls:  # type: ignore[attr-defined]
-        chunked_payload = list(write_lines_call[1][0])[1:]
-        chunked_payload.pop()
-        chunks.extend(chunked_payload)
+    compressed = []
+    chunks = [c[1][0] for c in list(transport.write.mock_calls)]  # type: ignore[attr-defined]
+    chunked_body = b"".join(chunks)
+    split_body = chunked_body.split(b"\r\n")
+    while split_body:
+        if split_body.pop(0):
+            compressed.append(split_body.pop(0))
 
-    assert all(chunks)
-    content = b"".join(chunks)
+    content = b"".join(compressed)
     assert zlib.decompress(content) == (b"data" * 4096) + payload
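
For reference, a minimal sketch of the behavioral change made in ``StreamWriter._writelines`` above (the ``send_chunks`` helper name and the bare ``asyncio.Transport`` parameter are illustrative assumptions, not aiohttp API): instead of handing the chunk list to ``transport.writelines()``, which can take a zero copy path on some event loops, the chunks are joined and sent through a single ``transport.write()`` call.

    import asyncio
    from typing import Iterable

    def send_chunks(transport: asyncio.Transport, chunks: Iterable[bytes]) -> None:
        # Before the patch the chunks were passed straight to
        # transport.writelines(), which may use a zero copy write path.
        # After the patch they are concatenated and written in one call.
        transport.write(b"".join(chunks))

The trade-off is one extra copy when joining the chunks, in exchange for bypassing the ``writelines()`` zero copy path that this patch disables.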