iostream: Add tests for behavior around close with read_until
bdarnell committed Aug 5, 2019
1 parent 9d5510e commit 3892ecb
Showing 1 changed file with 56 additions and 0 deletions.
56 changes: 56 additions & 0 deletions tornado/test/iostream_test.py
@@ -23,6 +23,7 @@
)
from tornado.test.util import skipIfNonUnix, refusing_port, skipPypy3V58
from tornado.web import RequestHandler, Application
import asyncio
import errno
import hashlib
import os
@@ -164,6 +165,27 @@ class TestReadWriteMixin(object):
    def make_iostream_pair(self, **kwargs):
        raise NotImplementedError

    def iostream_pair(self, **kwargs):
        """Like make_iostream_pair, but for use with ``async with``.

        On Python 3.7+ this becomes simpler with
        contextlib.asynccontextmanager (sketched after this method).
        """

        class IOStreamPairContext:
            def __init__(self, test, kwargs):
                self.test = test
                self.kwargs = kwargs

            async def __aenter__(self):
                self.pair = await self.test.make_iostream_pair(**self.kwargs)
                return self.pair

            async def __aexit__(self, typ, value, tb):
                for s in self.pair:
                    s.close()

        return IOStreamPairContext(self, kwargs)
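
As the docstring notes, on Python 3.7+ the same helper can be written more compactly with contextlib.asynccontextmanager. A minimal sketch of that alternative, not part of this commit, assuming the same make_iostream_pair interface and an added ``import contextlib`` at the top of the file:

    @contextlib.asynccontextmanager
    async def iostream_pair(self, **kwargs):
        # Build the pair, hand it to the ``async with`` body, and close
        # both streams when the block exits, even on error.
        pair = await self.make_iostream_pair(**kwargs)
        try:
            yield pair
        finally:
            for s in pair:
                s.close()

Usage is unchanged: ``async with self.iostream_pair() as (rs, ws): ...``.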

    @gen_test
    def test_write_zero_bytes(self):
        # Attempting to write zero bytes should run the callback without
@@ -259,6 +281,40 @@ def test_large_read_until(self):
        ws.close()
        rs.close()

    @gen_test
    async def test_read_until_with_close_after_second_packet(self):
        # This is a regression test for a regression in Tornado 6.0
        # (maybe 6.0.3?) reported in
        # https://github.com/tornadoweb/tornado/issues/2717
        #
        # The data arrives in two chunks; the stream is closed at the
        # same time that the second chunk is received. If the second
        # chunk is larger than the first, the read succeeds, but when
        # this bug existed it would fail if the second chunk were
        # smaller than the first. This is due to the optimization that
        # the read_until condition is only re-checked when the buffer
        # doubles in size (see the sketch after this test).
        async with self.iostream_pair() as (rs, ws):
            rf = asyncio.ensure_future(rs.read_until(b"done"))
            await ws.write(b"x" * 2048)
            ws.write(b"done")
            ws.close()
            await rf
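
The buffer-doubling optimization described in the comment above can be illustrated outside Tornado. The following standalone sketch is hypothetical (it is not Tornado's implementation): a reader that only re-scans for the delimiter once its buffer has doubled in size misses a delimiter that arrives in a small final chunk, matching the failing shape of 2048 bytes followed by b"done".

buf = bytearray()
last_scanned_size = 0

def on_chunk(chunk, delimiter=b"done"):
    # Hypothetical reader: append the chunk, but skip the delimiter scan
    # unless the buffer has at least doubled since the last scan.
    global last_scanned_size
    buf.extend(chunk)
    if last_scanned_size and len(buf) < 2 * last_scanned_size:
        return None
    last_scanned_size = len(buf)
    pos = buf.find(delimiter)
    return None if pos == -1 else pos

print(on_chunk(b"x" * 2048))  # None: no delimiter yet; scan performed at 2048 bytes
print(on_chunk(b"done"))      # None: 2052 < 2 * 2048, so the scan is skipped

Without an additional check when the stream closes, a pending read_until in this situation would never complete, which is what the test above guards against.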

    @gen_test
    async def test_read_until_unsatisfied_after_close(self):
        # If a stream is closed while reading, it raises
        # StreamClosedError instead of UnsatisfiableReadError (the
        # latter should only be raised when byte limits are reached;
        # see the sketch after this test). The particular scenario
        # tested here comes from #2717.
        async with self.iostream_pair() as (rs, ws):
            rf = asyncio.ensure_future(rs.read_until(b"done"))
            await ws.write(b"x" * 2048)
            ws.write(b"foo")
            ws.close()
            with self.assertRaises(StreamClosedError):
                await rf
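
For contrast with the StreamClosedError case above, UnsatisfiableReadError is the expected exception when a byte limit is exceeded rather than when the connection closes. A hedged sketch of such a test, not part of this commit (the method name is hypothetical, and it assumes UnsatisfiableReadError is imported from tornado.iostream):

    @gen_test
    async def test_read_until_over_max_bytes_sketch(self):
        # Sketch: with max_bytes set, receiving more than max_bytes without
        # finding the delimiter raises UnsatisfiableReadError even though
        # neither side has called close() yet.
        async with self.iostream_pair() as (rs, ws):
            rf = asyncio.ensure_future(rs.read_until(b"done", max_bytes=1024))
            await ws.write(b"x" * 2048)  # exceeds max_bytes with no b"done"
            with self.assertRaises(UnsatisfiableReadError):
                await rf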

    @gen_test
    def test_close_callback_with_pending_read(self):
        # Regression test for a bug that was introduced in 2.3
