Backports for release 6.0.4 #2819

Merged: 13 commits, Mar 4, 2020
4 changes: 1 addition & 3 deletions .travis.yml
@@ -6,9 +6,6 @@
dist: trusty
sudo: false

matrix:
fast_finish: true

language: python
# For a list of available versions, run
# aws s3 ls s3://travis-python-archives/binaries/ubuntu/14.04/x86_64/
@@ -26,6 +23,7 @@ language: python
packages:
- libgnutls-dev
jobs:
fast_finish: true
include:
# 3.5.2 is interesting because it's the version in ubuntu 16.04, and due to python's
# "provisional feature" rules there are significant differences between patch
12 changes: 12 additions & 0 deletions appveyor.yml
@@ -50,6 +50,18 @@ environment:
TOX_ENV: "py37"
TOX_ARGS: ""

- PYTHON: "C:\\Python38"
PYTHON_VERSION: "3.8.x"
PYTHON_ARCH: "32"
TOX_ENV: "py38"
TOX_ARGS: "tornado.test.websocket_test"

- PYTHON: "C:\\Python38-x64"
PYTHON_VERSION: "3.8.x"
PYTHON_ARCH: "64"
TOX_ENV: "py38"
TOX_ARGS: ""

install:
# Make sure the right python version is first on the PATH.
- "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
1 change: 1 addition & 0 deletions docs/releases.rst
@@ -4,6 +4,7 @@ Release notes
.. toctree::
:maxdepth: 2

releases/v6.0.4
releases/v6.0.3
releases/v6.0.2
releases/v6.0.1
21 changes: 21 additions & 0 deletions docs/releases/v6.0.4.rst
@@ -0,0 +1,21 @@
What's new in Tornado 6.0.4
===========================

Mar 3, 2020
-----------

General changes
~~~~~~~~~~~~~~~

- Binary wheels are now available for Python 3.8 on Windows. Note that it is
still necessary to use
``asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())`` for
this platform/version.

Bug fixes
~~~~~~~~~

- Fixed an issue in `.IOStream` (introduced in 6.0.0) that resulted in
``StreamClosedError`` being incorrectly raised if a stream is closed mid-read
but there is enough buffered data to satisfy the read.
- `.AnyThreadEventLoopPolicy` now always uses the selector event loop on Windows.
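
For readers applying the note above: the workaround amounts to installing the selector-based event loop policy before any Tornado code runs. A minimal sketch (not taken from this PR; an ordinary Tornado hello-world is assumed):

import asyncio
import sys

import tornado.ioloop
import tornado.web

# On Windows, Python 3.8 defaults to the proactor event loop, which Tornado's
# IOLoop cannot drive; switch to the selector policy before any IOLoop exists.
if sys.platform == "win32" and hasattr(asyncio, "WindowsSelectorEventLoopPolicy"):
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())


class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("Hello, world")


if __name__ == "__main__":
    app = tornado.web.Application([(r"/", MainHandler)])
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()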
1 change: 1 addition & 0 deletions setup.py
@@ -181,6 +181,7 @@ def build_extension(self, ext):
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
4 changes: 2 additions & 2 deletions tornado/__init__.py
@@ -22,5 +22,5 @@
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
version = "6.0.3"
version_info = (6, 0, 3, 0)
version = "6.0.4"
version_info = (6, 0, 4, 0)
2 changes: 1 addition & 1 deletion tornado/httputil.py
@@ -690,7 +690,7 @@ class HTTPFile(ObjectDict):


def _parse_request_range(
range_header: str
range_header: str,
) -> Optional[Tuple[Optional[int], Optional[int]]]:
"""Parses a Range header.

30 changes: 28 additions & 2 deletions tornado/iostream.py
@@ -609,6 +609,15 @@ def close(
if self._read_until_close:
self._read_until_close = False
self._finish_read(self._read_buffer_size, False)
elif self._read_future is not None:
# resolve reads that are pending and ready to complete
try:
pos = self._find_read_pos()
except UnsatisfiableReadError:
pass
else:
if pos is not None:
self._read_from_buffer(pos)
if self._state is not None:
self.io_loop.remove_handler(self.fileno())
self._state = None
@@ -794,8 +803,25 @@ def _handle_read(self) -> None:
self._read_from_buffer(pos)

def _start_read(self) -> Future:
self._check_closed() # Before reading, check that stream is not closed.
assert self._read_future is None, "Already reading"
if self._read_future is not None:
# It is an error to start a read while a prior read is unresolved.
# However, if the prior read is unresolved because the stream was
# closed without satisfying it, it's better to raise
# StreamClosedError instead of AssertionError. In particular, this
# situation occurs in harmless situations in http1connection.py and
# an AssertionError would be logged noisily.
#
# On the other hand, it is legal to start a new read while the
# stream is closed, in case the read can be satisfied from the
# read buffer. So we only want to check the closed status of the
# stream if we need to decide what kind of error to raise for
# "already reading".
#
# These conditions have proven difficult to test; we have no
# unittests that reliably verify this behavior so be careful
# when making changes here. See #2651 and #2719.
self._check_closed()
assert self._read_future is None, "Already reading"
self._read_future = Future()
return self._read_future

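
To make the intent of the close()/_start_read() changes above concrete: a read that is already satisfiable from buffered data should resolve even if the stream closes before it completes, rather than raising StreamClosedError. A rough sketch of that scenario (illustrative only; it uses a local socket pair rather than the test fixtures added below):

import asyncio
import socket

from tornado.iostream import IOStream


async def demo():
    # A connected socket pair stands in for a real client/server connection.
    a, b = socket.socketpair()
    rs, ws = IOStream(a), IOStream(b)

    read_future = asyncio.ensure_future(rs.read_until(b"done"))
    await ws.write(b"x" * 2048)  # first, larger chunk
    ws.write(b"done")            # second, smaller chunk
    ws.close()                   # close arrives together with the final chunk

    # With the fix, the pending read resolves from the buffered data instead
    # of raising StreamClosedError.
    data = await read_future
    assert data.endswith(b"done")


asyncio.run(demo())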
11 changes: 10 additions & 1 deletion tornado/platform/asyncio.py
@@ -21,6 +21,7 @@

import concurrent.futures
import functools
import sys

from threading import get_ident
from tornado.gen import convert_yielded
@@ -307,7 +308,15 @@ def to_asyncio_future(tornado_future: asyncio.Future) -> asyncio.Future:
return convert_yielded(tornado_future)


class AnyThreadEventLoopPolicy(asyncio.DefaultEventLoopPolicy): # type: ignore
if sys.platform == "win32" and hasattr(asyncio, "WindowsSelectorEventLoopPolicy"):
# "Any thread" and "selector" should be orthogonal, but there's not a clean
# interface for composing policies so pick the right base.
_BasePolicy = asyncio.WindowsSelectorEventLoopPolicy # type: ignore
else:
_BasePolicy = asyncio.DefaultEventLoopPolicy


class AnyThreadEventLoopPolicy(_BasePolicy): # type: ignore
"""Event loop policy that allows loop creation on any thread.

The default `asyncio` event loop policy only automatically creates
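
For context (again not part of the diff), the policy is installed globally with asyncio.set_event_loop_policy; the change above only affects which kind of loop it hands out on Windows. A minimal usage sketch:

import asyncio
import threading

from tornado.ioloop import IOLoop
from tornado.platform.asyncio import AnyThreadEventLoopPolicy

# Allow IOLoop.current() to create event loops implicitly on non-main threads;
# with this change, on Windows those loops are selector-based.
asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())


def worker():
    # Without the policy this would fail, because asyncio refuses to create
    # an event loop automatically outside the main thread.
    loop = IOLoop.current()
    loop.run_sync(lambda: None)


t = threading.Thread(target=worker)
t.start()
t.join()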
2 changes: 1 addition & 1 deletion tornado/tcpclient.py
@@ -79,7 +79,7 @@ def __init__(

@staticmethod
def split(
addrinfo: List[Tuple]
addrinfo: List[Tuple],
) -> Tuple[
List[Tuple[socket.AddressFamily, Tuple]],
List[Tuple[socket.AddressFamily, Tuple]],
8 changes: 8 additions & 0 deletions tornado/test/__init__.py
@@ -0,0 +1,8 @@
import asyncio
import sys

# Use the selector event loop on windows. Do this in tornado/test/__init__.py
# instead of runtests.py so it happens no matter how the test is run (such as
# through editor integrations).
if sys.platform == "win32" and hasattr(asyncio, "WindowsSelectorEventLoopPolicy"):
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) # type: ignore
72 changes: 68 additions & 4 deletions tornado/test/iostream_test.py
@@ -23,6 +23,7 @@
)
from tornado.test.util import skipIfNonUnix, refusing_port, skipPypy3V58
from tornado.web import RequestHandler, Application
import asyncio
import errno
import hashlib
import os
@@ -31,6 +32,7 @@
import socket
import ssl
import sys
import typing
from unittest import mock
import unittest

@@ -165,6 +167,27 @@ class TestReadWriteMixin(object):
def make_iostream_pair(self, **kwargs):
raise NotImplementedError

def iostream_pair(self, **kwargs):
"""Like make_iostream_pair, but called by ``async with``.

In py37 this becomes simpler with contextlib.asynccontextmanager.
"""

class IOStreamPairContext:
def __init__(self, test, kwargs):
self.test = test
self.kwargs = kwargs

async def __aenter__(self):
self.pair = await self.test.make_iostream_pair(**self.kwargs)
return self.pair

async def __aexit__(self, typ, value, tb):
for s in self.pair:
s.close()

return IOStreamPairContext(self, kwargs)

@gen_test
def test_write_zero_bytes(self):
# Attempting to write zero bytes should run the callback without
@@ -261,7 +284,41 @@ def test_large_read_until(self):
rs.close()

@gen_test
def test_close_callback_with_pending_read(self):
async def test_read_until_with_close_after_second_packet(self):
# This is a regression test for a regression in Tornado 6.0
# (maybe 6.0.3?) reported in
# https://github.com/tornadoweb/tornado/issues/2717
#
# The data arrives in two chunks; the stream is closed at the
# same time that the second chunk is received. If the second
# chunk is larger than the first, it works, but when this bug
# existed it would fail if the second chunk were smaller than
# the first. This is due to the optimization that the
# read_until condition is only checked when the buffer doubles
# in size
async with self.iostream_pair() as (rs, ws):
rf = asyncio.ensure_future(rs.read_until(b"done"))
await ws.write(b"x" * 2048)
ws.write(b"done")
ws.close()
await rf

@gen_test
async def test_read_until_unsatisfied_after_close(self: typing.Any):
# If a stream is closed while reading, it raises
# StreamClosedError instead of UnsatisfiableReadError (the
# latter should only be raised when byte limits are reached).
# The particular scenario tested here comes from #2717.
async with self.iostream_pair() as (rs, ws):
rf = asyncio.ensure_future(rs.read_until(b"done"))
await ws.write(b"x" * 2048)
ws.write(b"foo")
ws.close()
with self.assertRaises(StreamClosedError):
await rf

@gen_test
def test_close_callback_with_pending_read(self: typing.Any):
# Regression test for a bug that was introduced in 2.3
# where the IOStream._close_callback would never be called
# if there were pending reads.
@@ -980,9 +1037,16 @@ def connect_to_server(self, server_cls):
server = server_cls(ssl_options=_server_ssl_options())
server.add_socket(sock)

client = SSLIOStream(
socket.socket(), ssl_options=dict(cert_reqs=ssl.CERT_NONE)
)
ssl_ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
ssl_ctx.check_hostname = False
ssl_ctx.verify_mode = ssl.CERT_NONE
# These tests fail with ConnectionAbortedErrors with TLS
# 1.3 on windows python 3.7.4 (which includes an upgrade
# to openssl 1.1.c. Other platforms might be affected with
# newer openssl too). Disable it until we figure out
# what's up.
ssl_ctx.options |= getattr(ssl, "OP_NO_TLSv1_3", 0)
client = SSLIOStream(socket.socket(), ssl_options=ssl_ctx)
yield client.connect(("127.0.0.1", port))
self.assertIsNotNone(client.socket.cipher())
finally:
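
As the docstring of iostream_pair above points out, contextlib.asynccontextmanager (Python 3.7+) would allow a shorter equivalent; the hand-written class presumably remains for pre-3.7 support. A sketch, assuming the same make_iostream_pair interface (not part of the commit):

import contextlib


@contextlib.asynccontextmanager
async def iostream_pair(self, **kwargs):
    # Equivalent of the IOStreamPairContext helper above.
    rs, ws = await self.make_iostream_pair(**kwargs)
    try:
        yield rs, ws
    finally:
        rs.close()
        ws.close()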
16 changes: 12 additions & 4 deletions tornado/web.py
@@ -1071,7 +1071,11 @@ def flush(self, include_footers: bool = False) -> "Future[None]":
self._headers_written = True
for transform in self._transforms:
assert chunk is not None
self._status_code, self._headers, chunk = transform.transform_first_chunk(
(
self._status_code,
self._headers,
chunk,
) = transform.transform_first_chunk(
self._status_code, self._headers, chunk, include_footers
)
# Ignore the chunk and only write the headers for HEAD requests
@@ -3527,9 +3531,13 @@ def _decode_signed_value_v2(
clock: Callable[[], float],
) -> Optional[bytes]:
try:
key_version, timestamp_bytes, name_field, value_field, passed_sig = _decode_fields_v2(
value
)
(
key_version,
timestamp_bytes,
name_field,
value_field,
passed_sig,
) = _decode_fields_v2(value)
except ValueError:
return None
signed_string = value[: -len(passed_sig)]