Refactor time-based tests to use fake and sharp loop time
Use a new library — https://github.com/nolar/looptime — to provide fake time in event loops. The library does many things, but the most important one is sharp time with predictable steps and **no code overhead included** — the overhead that normally makes timings random.

Everything else comes as a side effect: e.g. fast execution of such tests in near-zero real time for any fake duration of loop time, a convenient `looptime` fixture for assertions, etc.
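For illustration only (not part of this commit): a minimal sketch of a time-based test under this setup, assuming pytest-asyncio and the `--looptime` option added to pytest.ini below; the test name and durations are made up.

```python
import asyncio
import pytest


@pytest.mark.asyncio
async def test_sleep_consumes_fake_time_only(looptime):
    # The sleep advances the event loop's fake clock, not the wall clock,
    # so the test completes in (near-)zero real time.
    await asyncio.sleep(123)

    # The `looptime` fixture compares equal to the loop's current fake time,
    # which advances in sharp, predictable steps.
    assert looptime == 123
```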

Signed-off-by: Sergey Vasilyev <nolar@nolar.info>
nolar committed Dec 26, 2021
1 parent 06d0094 commit e0f78f3
Showing 22 changed files with 379 additions and 482 deletions.
1 change: 1 addition & 0 deletions pytest.ini
@@ -1,3 +1,4 @@
[pytest]
addopts =
--strict-markers
--looptime
1 change: 1 addition & 0 deletions requirements.txt
@@ -12,6 +12,7 @@ coveralls
freezegun
import-linter
isort>=5.5.0
looptime
lxml
# Mypy requires typed-ast, which is broken on PyPy 3.7 (could work in PyPy 3.8).
mypy==0.930; implementation_name == "cpython"
60 changes: 32 additions & 28 deletions tests/apis/test_api_requests.py
@@ -138,23 +138,23 @@ async def test_parsing_in_streams(
(delete, 'delete'),
])
async def test_direct_timeout_in_requests(
resp_mocker, aresponses, hostname, fn, method, settings, logger, timer):
resp_mocker, aresponses, hostname, fn, method, settings, logger, looptime):

async def serve_slowly():
await asyncio.sleep(1.0)
await asyncio.sleep(10)
return aiohttp.web.json_response({})

mock = resp_mocker(side_effect=serve_slowly)
aresponses.add(hostname, '/url', method, mock)

with timer, pytest.raises(asyncio.TimeoutError):
timeout = aiohttp.ClientTimeout(total=0.1)
with pytest.raises(asyncio.TimeoutError):
timeout = aiohttp.ClientTimeout(total=1.23)
# aiohttp raises an asyncio.TimeoutError which is automatically retried.
# To reduce the test duration we disable retries for this test.
settings.networking.error_backoffs = None
await fn('/url', timeout=timeout, settings=settings, logger=logger)

assert 0.1 < timer.seconds < 0.2
assert looptime == 1.23


@pytest.mark.parametrize('fn, method', [
@@ -164,84 +164,85 @@ async def serve_slowly():
(delete, 'delete'),
])
async def test_settings_timeout_in_requests(
resp_mocker, aresponses, hostname, fn, method, settings, logger, timer):
resp_mocker, aresponses, hostname, fn, method, settings, logger, looptime):

async def serve_slowly():
await asyncio.sleep(1.0)
await asyncio.sleep(10)
return aiohttp.web.json_response({})

mock = resp_mocker(side_effect=serve_slowly)
aresponses.add(hostname, '/url', method, mock)

with timer, pytest.raises(asyncio.TimeoutError):
settings.networking.request_timeout = 0.1
with pytest.raises(asyncio.TimeoutError):
settings.networking.request_timeout = 1.23
# aiohttp raises an asyncio.TimeoutError which is automatically retried.
# To reduce the test duration we disable retries for this test.
settings.networking.error_backoffs = None
await fn('/url', settings=settings, logger=logger)

assert 0.1 < timer.seconds < 0.2
assert looptime == 1.23


@pytest.mark.parametrize('method', ['get']) # the only supported method at the moment
async def test_direct_timeout_in_streams(
resp_mocker, aresponses, hostname, method, settings, logger, timer):
resp_mocker, aresponses, hostname, method, settings, logger, looptime):

async def serve_slowly():
await asyncio.sleep(1.0)
await asyncio.sleep(10)
return "{}"

mock = resp_mocker(side_effect=serve_slowly)
aresponses.add(hostname, '/url', method, mock)

with timer, pytest.raises(asyncio.TimeoutError):
timeout = aiohttp.ClientTimeout(total=0.1)
with pytest.raises(asyncio.TimeoutError):
timeout = aiohttp.ClientTimeout(total=1.23)
# aiohttp raises an asyncio.TimeoutError which is automatically retried.
# To reduce the test duration we disable retries for this test.
settings.networking.error_backoffs = None
async for _ in stream('/url', timeout=timeout, settings=settings, logger=logger):
pass

assert 0.1 < timer.seconds < 0.2
assert looptime == 1.23


@pytest.mark.parametrize('method', ['get']) # the only supported method at the moment
async def test_settings_timeout_in_streams(
resp_mocker, aresponses, hostname, method, settings, logger, timer):
resp_mocker, aresponses, hostname, method, settings, logger, looptime):

async def serve_slowly():
await asyncio.sleep(1.0)
await asyncio.sleep(10)
return "{}"

mock = resp_mocker(side_effect=serve_slowly)
aresponses.add(hostname, '/url', method, mock)

with timer, pytest.raises(asyncio.TimeoutError):
settings.networking.request_timeout = 0.1
with pytest.raises(asyncio.TimeoutError):
settings.networking.request_timeout = 1.23
# aiohttp raises an asyncio.TimeoutError which is automatically retried.
# To reduce the test duration we disable retries for this test.
settings.networking.error_backoffs = None
async for _ in stream('/url', settings=settings, logger=logger):
pass

assert 0.1 < timer.seconds < 0.2
assert looptime == 1.23


@pytest.mark.parametrize('delay, expected', [
pytest.param(0.0, [], id='instant-none'),
pytest.param(0.1, [{'fake': 'result1'}], id='fast-single'),
pytest.param(9.9, [{'fake': 'result1'}, {'fake': 'result2'}], id='inf-double'),
@pytest.mark.parametrize('delay, expected_times, expected_items', [
pytest.param(0, [], [], id='instant-none'),
pytest.param(2, [1], [{'fake': 'result1'}], id='fast-single'),
pytest.param(9, [1, 4], [{'fake': 'result1'}, {'fake': 'result2'}], id='inf-double'),
])
@pytest.mark.parametrize('method', ['get']) # the only supported method at the moment
async def test_stopper_in_streams(
resp_mocker, aresponses, hostname, method, delay, expected, settings, logger):
resp_mocker, aresponses, hostname, method, delay, settings, logger, looptime,
expected_items, expected_times):

async def stream_slowly(request: aiohttp.ClientRequest):
response = aiohttp.web.StreamResponse()
await response.prepare(request)
await asyncio.sleep(0.05)
await asyncio.sleep(1)
await response.write(b'{"fake": "result1"}\n')
await asyncio.sleep(0.15)
await asyncio.sleep(3)
await response.write(b'{"fake": "result2"}\n')
await response.write_eof()
return response
@@ -252,7 +253,10 @@ async def stream_slowly(request: aiohttp.ClientRequest):
asyncio.get_running_loop().call_later(delay, stopper.set_result, None)

items = []
times = []
async for item in stream('/url', stopper=stopper, settings=settings, logger=logger):
items.append(item)
times.append(float(looptime))

assert items == expected
assert items == expected_items
assert times == expected_times
70 changes: 6 additions & 64 deletions tests/conftest.py
@@ -6,7 +6,6 @@
import logging
import re
import sys
import time
from unittest.mock import Mock

import aiohttp.web
@@ -552,69 +551,6 @@ def no_certvalidator():
yield from _with_module_absent('certvalidator')


#
# Helpers for the timing checks.
#

@pytest.fixture()
def timer():
return Timer()


class Timer(object):
"""
A helper context manager to measure the time of the code-blocks.
Also, supports direct comparison with time-deltas and the numbers of seconds.
Usage:
with Timer() as timer:
do_something()
print(f"Executing for {timer.seconds}s already.")
do_something_else()
print(f"Executed in {timer.seconds}s.")
assert timer < 5.0
"""

def __init__(self):
super().__init__()
self._ts = None
self._te = None

@property
def seconds(self):
if self._ts is None:
return None
elif self._te is None:
return time.perf_counter() - self._ts
else:
return self._te - self._ts

def __repr__(self):
status = 'new' if self._ts is None else 'running' if self._te is None else 'finished'
return f'<Timer: {self.seconds}s ({status})>'

def __enter__(self):
self._ts = time.perf_counter()
self._te = None
return self

def __exit__(self, exc_type, exc_val, exc_tb):
self._te = time.perf_counter()

async def __aenter__(self):
return self.__enter__()

async def __aexit__(self, exc_type, exc_val, exc_tb):
return self.__exit__(exc_type, exc_val, exc_tb)

def __int__(self):
return int(self.seconds)

def __float__(self):
return float(self.seconds)

#
# Helpers for the logging checks.
#
@@ -725,3 +661,9 @@ def _no_asyncio_pending_tasks(event_loop):
remains = after - before
if remains:
pytest.fail(f"Unattended asyncio tasks detected: {remains!r}")


@pytest.fixture()
def loop(event_loop):
"""Sync aiohttp's server-side timeline with kopf's client-side timeline."""
return event_loop
7 changes: 3 additions & 4 deletions tests/handling/daemons/test_daemon_rematching.py
@@ -5,7 +5,7 @@


async def test_running_daemon_is_stopped_when_mismatches(
resource, dummy, timer, mocker, caplog, assert_logs, k8s_mocked, simulate_cycle):
resource, dummy, looptime, mocker, caplog, assert_logs, k8s_mocked, simulate_cycle):
caplog.set_level(logging.DEBUG)

@kopf.daemon(*resource, id='fn', when=lambda **_: is_matching)
@@ -26,9 +26,8 @@ async def fn(**kwargs):
mocker.resetall()
is_matching = False
await simulate_cycle({})
with timer:
await dummy.wait_for_daemon_done()
await dummy.wait_for_daemon_done()

assert timer.seconds < 0.01 # near-instantly
assert looptime == 0
stopped = dummy.kwargs['stopped']
assert DaemonStoppingReason.FILTERS_MISMATCH in stopped.reason
21 changes: 9 additions & 12 deletions tests/handling/daemons/test_daemon_termination.py
@@ -9,7 +9,7 @@

async def test_daemon_exits_gracefully_and_instantly_on_resource_deletion(
settings, resource, dummy, simulate_cycle,
caplog, assert_logs, k8s_mocked, frozen_time, mocker, timer):
caplog, assert_logs, k8s_mocked, frozen_time, mocker, looptime):
caplog.set_level(logging.DEBUG)

# A daemon-under-test.
@@ -31,18 +31,17 @@ async def fn(**kwargs):
await simulate_cycle(event_object)

# Check that the daemon has exited near-instantly, with no delays.
with timer:
await dummy.wait_for_daemon_done()
await dummy.wait_for_daemon_done()

assert timer.seconds < 0.01 # near-instantly
assert looptime == 0
assert k8s_mocked.sleep.call_count == 0
assert k8s_mocked.patch.call_count == 1
assert k8s_mocked.patch.call_args_list[0][1]['payload']['metadata']['finalizers'] == []


async def test_daemon_exits_gracefully_and_instantly_on_operator_exiting(
settings, resource, dummy, simulate_cycle, background_daemon_killer,
caplog, assert_logs, k8s_mocked, frozen_time, mocker, timer):
caplog, assert_logs, k8s_mocked, frozen_time, mocker, looptime):
caplog.set_level(logging.DEBUG)

# A daemon-under-test.
@@ -63,10 +62,9 @@ async def fn(**kwargs):
background_daemon_killer.cancel()

# Check that the daemon has exited near-instantly, with no delays.
with timer:
await dummy.wait_for_daemon_done()
await dummy.wait_for_daemon_done()

assert timer.seconds < 0.01 # near-instantly
assert looptime == 0
assert k8s_mocked.sleep.call_count == 0
assert k8s_mocked.patch.call_count == 0

@@ -78,7 +76,7 @@ async def fn(**kwargs):
@pytest.mark.usefixtures('background_daemon_killer')
async def test_daemon_exits_gracefully_and_instantly_on_operator_pausing(
settings, memories, resource, dummy, simulate_cycle, conflicts_found,
caplog, assert_logs, k8s_mocked, frozen_time, mocker, timer):
caplog, assert_logs, k8s_mocked, frozen_time, mocker, looptime):
caplog.set_level(logging.DEBUG)

# A daemon-under-test.
@@ -99,9 +97,8 @@ async def fn(**kwargs):
await conflicts_found.turn_to(True)

# Check that the daemon has exited near-instantly, with no delays.
with timer:
await dummy.wait_for_daemon_done()
assert timer.seconds < 0.01 # near-instantly
await dummy.wait_for_daemon_done()
assert looptime == 0

# There is no way to test for re-spawning here: it is done by watch-events,
# which are tested by the paused operators elsewhere (test_daemon_spawning.py).
