Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 2 additions & 50 deletions tests/simple_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,64 +3,16 @@
# Copyright 2012 - 2017, New York University and the TUF contributors
# SPDX-License-Identifier: MIT OR Apache-2.0

"""
<Program>
simple_server.py
<Author>
Konstantin Andrianov.
<Started>
February 15, 2012.
<Copyright>
See LICENSE-MIT or LICENSE for licensing information.
<Purpose>
This is a basic server that was designed to be used in conjunction with
test_download.py to test download.py module.
<Reference>
SimpleHTTPServer:
http://docs.python.org/library/simplehttpserver.html#module-SimpleHTTPServer
"""
"""Simple HTTP server for python-tuf tests"""

import socketserver
import sys
from http.server import SimpleHTTPRequestHandler
from typing import Type, Union


class QuietHTTPRequestHandler(SimpleHTTPRequestHandler):
    """Request handler that suppresses per-request logging.

    SimpleHTTPRequestHandler normally writes one line to stderr for every
    incoming request; overriding log_request with a no-op silences that
    output entirely.
    """

    def log_request(
        self, code: Union[int, str] = "-", size: Union[int, str] = "-"
    ) -> None:
        """Intentionally do nothing instead of logging the request."""


# NOTE: On Windows/Python2 tests that use this simple_server.py in a
# subprocess hang after a certain number of requests (~68), if a PIPE is
# passed as Popen's stderr argument. This problem doesn't emerge if
# we silence the HTTP messages.
# If you decide to receive the HTTP messages, then this bug
# could reappear.

# pylint: disable=invalid-name
handler: Type[Union[SimpleHTTPRequestHandler, QuietHTTPRequestHandler]]

if len(sys.argv) > 2 and sys.argv[2]:
handler = QuietHTTPRequestHandler
else:
handler = SimpleHTTPRequestHandler

# Allow re-use so you can re-run tests as often as you want even if the
# tests re-use ports. Otherwise TCP TIME-WAIT prevents reuse for ~1 minute
socketserver.TCPServer.allow_reuse_address = True

httpd = socketserver.TCPServer(("localhost", 0), handler)
httpd = socketserver.TCPServer(("localhost", 0), SimpleHTTPRequestHandler)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

👍 Thanks for cleaning up!

As the comment above says, I introduced this workaround for testing on Windows in Python2 (7dbb30a). So this workaround became irrelevant quite a while ago.

port_message = "bind succeeded, server port is: " + str(httpd.server_address[1])
print(port_message)
httpd.serve_forever()
11 changes: 6 additions & 5 deletions tests/test_fetcher_ng.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@
from unittest.mock import Mock, patch

import requests
import urllib3.exceptions

from tests import utils
from tuf import unittest_toolbox
Expand Down Expand Up @@ -125,20 +124,22 @@ def test_http_error(self) -> None:
def test_response_read_timeout(self, mock_session_get: Any) -> None:
mock_response = Mock()
attr = {
"raw.read.side_effect": urllib3.exceptions.ReadTimeoutError(
urllib3.HTTPConnectionPool("localhost"), "", "Read timed out."
"iter_content.side_effect": requests.exceptions.ConnectionError(
"Simulated timeout"
)
}
mock_response.configure_mock(**attr)
mock_session_get.return_value = mock_response

with self.assertRaises(exceptions.SlowRetrievalError):
next(self.fetcher.fetch(self.url))
mock_response.raw.read.assert_called_once()
mock_response.iter_content.assert_called_once()

# Read/connect session timeout error
@patch.object(
requests.Session, "get", side_effect=urllib3.exceptions.TimeoutError
requests.Session,
"get",
side_effect=requests.exceptions.Timeout("Simulated timeout"),
)
def test_session_get_timeout(self, mock_session_get: Any) -> None:
with self.assertRaises(exceptions.SlowRetrievalError):
Expand Down
48 changes: 10 additions & 38 deletions tuf/ngclient/_internal/requests_fetcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

# Imports
import requests
import urllib3.exceptions
import requests.exceptions

import tuf
from tuf.api import exceptions
Expand Down Expand Up @@ -80,7 +80,7 @@ def fetch(self, url: str) -> Iterator[bytes]:
response = session.get(
url, stream=True, timeout=self.socket_timeout
)
except urllib3.exceptions.TimeoutError as e:
except requests.exceptions.Timeout as e:
raise exceptions.SlowRetrievalError from e

# Check response status.
Expand All @@ -99,26 +99,12 @@ def _chunks(self, response: "requests.Response") -> Iterator[bytes]:
download."""

try:
while True:
# We download a fixed chunk of data in every round. This is
# so that we can defend against slow retrieval attacks.
# Furthermore, we do not wish to download an extremely
# large file in one shot.

# NOTE: This may not handle some servers adding a
# Content-Encoding header, which may cause urllib3 to
# misbehave:
# https://github.com/pypa/pip/blob/404838abcca467648180b358598c597b74d568c9/src/pip/_internal/download.py#L547-L582
data = response.raw.read(self.chunk_size)

# We might have no more data to read, we signal
# that the download is complete.
if not data:
break

for data in response.iter_content(self.chunk_size):
yield data

except urllib3.exceptions.ReadTimeoutError as e:
except (
requests.exceptions.ConnectionError,
requests.exceptions.Timeout,
) as e:
raise exceptions.SlowRetrievalError from e

finally:
Expand All @@ -138,31 +124,17 @@ def _get_session(self, url: str) -> requests.Session:
if not parsed_url.scheme or not parsed_url.hostname:
raise exceptions.DownloadError("Failed to parse URL {url}")

session_index = parsed_url.scheme + "+" + parsed_url.hostname
session_index = f"{parsed_url.scheme}+{parsed_url.hostname}"
session = self._sessions.get(session_index)

if not session:
session = requests.Session()
self._sessions[session_index] = session

# Attach some default headers to every Session.
requests_user_agent = session.headers["User-Agent"]
# Follows the RFC: https://tools.ietf.org/html/rfc7231#section-5.5.3
tuf_user_agent = (
"tuf/" + tuf.__version__ + " " + requests_user_agent
)
session.headers.update(
{
# Tell the server not to compress or modify anything.
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding#Directives
"Accept-Encoding": "identity",
# The TUF user agent.
"User-Agent": tuf_user_agent,
}
)
ua = f"tuf/{tuf.__version__} {session.headers['User-Agent']}"
session.headers["User-Agent"] = ua

logger.debug("Made new session %s", session_index)

else:
logger.debug("Reusing session %s", session_index)

Expand Down