Skip to content

Commit

Permalink
test: refactor tests with pytest.mark.parametrize
Browse files Browse the repository at this point in the history
  • Loading branch information
spool committed Jun 14, 2024
1 parent 5db89c8 commit 08573b7
Show file tree
Hide file tree
Showing 4 changed files with 124 additions and 126 deletions.
3 changes: 2 additions & 1 deletion mkdocs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ plugins:
- search:
lang: en
- same-dir
# optional
# - include-markdown
# - markdown-exec
- gen-files:
Expand All @@ -57,9 +58,9 @@ plugins:
- mkdocstrings:
handlers:
python:
paths: [.]
selection:
docstring_style: numpy
paths: [.]
options:
separate_signature: true
show_signature_annotations: true
Expand Down
11 changes: 7 additions & 4 deletions reginald/models/chat_interact.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,11 @@
from reginald.models.base import ResponseModel
from reginald.models.setup_llm import setup_llm

from ..utils import REGINALD_PROMPT

INPUT_PROMPT: Final[str] = ">>> "
REGINALD_PROMPT: Final[str] = "Reginald: "
EXIT_STRS: set[str] = {"exit", "exit()", "quit()", "bye Reginald"}
CLEAR_HISTORY_STRS: set[str] = {"clear_history", r"\clear_history"}

ART: Final[
str
Expand All @@ -29,7 +32,7 @@ def run_chat_interact(streaming: bool = False, **kwargs) -> ResponseModel:

while True:
message = input(INPUT_PROMPT)
if message in ["exit", "exit()", "quit()", "bye Reginald"]:
if message in EXIT_STRS:
return response_model
if message == "":
continue
Expand All @@ -39,9 +42,9 @@ def run_chat_interact(streaming: bool = False, **kwargs) -> ResponseModel:
and response_model.chat_engine.get(user_id) is not None
):
response_model.chat_engine[user_id].reset()
print("\n{REGINALD_PROMPT}History cleared.")
print(f"\n{REGINALD_PROMPT}History cleared.")
else:
print("\n{REGINALD_PROMPT}No history to clear.")
print(f"\n{REGINALD_PROMPT}No history to clear.")
continue

if streaming:
Expand Down
165 changes: 86 additions & 79 deletions reginald/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,85 +5,7 @@

from rich.progress import Progress, SpinnerColumn, TextColumn

REGINAL_PROMPT: Final[str] = "Reginald: "


def stream_iter_progress_wrapper(
    streamer: Iterable | Callable | chain,
    task_str: str = REGINAL_PROMPT,  # NOTE(review): constant name looks like a typo of REGINALD_PROMPT
    progress_bar: bool = True,
    end: str = "",
    *args,
    **kwargs,
) -> Iterable:
    """Stream items from ``streamer``, showing a transient spinner until the first item arrives.

    Parameters
    ----------
    streamer
        An iterable of items, or a callable that is first invoked with
        ``*args``/``**kwargs`` and must return an iterable/generator.
    task_str
        Prompt text shown beside the spinner and echoed via ``print``.
    progress_bar
        If ``True``, display a transient ``rich`` spinner while the
        first item is being produced.
    end
        String passed to ``print`` after ``task_str``.

    Returns
    -------
    Iterable
        The stream of items (with the first item re-chained onto the
        front when the spinner path was taken).

    Examples
    --------
    >>> from time import sleep
    >>> def sleeper(naps: int = 3) -> Generator[str, None, None]:
    ...     for nap in range(naps):
    ...         sleep(1)
    ...         yield f'nap: {nap}'
    >>> tuple(stream_iter_progress_wrapper(streamer=sleeper))
    <BLANKLINE>
    Reginald: ('nap: 0', 'nap: 1', 'nap: 2')
    >>> tuple(stream_iter_progress_wrapper(
    ...     streamer=sleeper, progress_bar=False))
    Reginald: ('nap: 0', 'nap: 1', 'nap: 2')
    """
    # A callable is invoked (with the forwarded args) to obtain the
    # actual iterable/generator to stream.
    if isinstance(streamer, Callable):
        streamer = streamer(*args, **kwargs)
    if progress_bar:
        with Progress(
            TextColumn("{task.description}[progress.description]"),
            SpinnerColumn(),
            transient=True,
        ) as progress:
            # Materialised sequences are wrapped as generators so the
            # first-item peek below stays lazy for the remaining items.
            if isinstance(streamer, list | tuple):
                streamer = (item for item in streamer)
            assert isinstance(streamer, Generator)
            progress.add_task(task_str)
            # Force production of the first item while the spinner is
            # visible, then stitch it back onto the front of the stream.
            first_item = next(streamer)
            streamer = chain((first_item,), streamer)
    print(task_str, end=end)
    return streamer


def stream_progress_wrapper(
    streamer: Callable,
    task_str: str = REGINAL_PROMPT,
    progress_bar: bool = True,
    end: str = "\n",
    *args,
    **kwargs,
) -> Any:
    """Call ``streamer``, optionally behind a transient spinner, and return its result.

    Examples
    --------
    >>> from time import sleep
    >>> def sleeper(seconds: int = 3) -> str:
    ...     sleep(seconds)
    ...     return f'{seconds} seconds nap'
    >>> stream_progress_wrapper(sleeper)
    <BLANKLINE>
    Reginald:
    '3 seconds nap'
    """
    # Fast path: no spinner requested, just echo the prompt and run.
    if not progress_bar:
        print(task_str, end=end)
        return streamer(*args, **kwargs)
    with Progress(
        TextColumn("{task.description}[progress.description]"),
        SpinnerColumn(),
        transient=True,
    ) as progress:
        progress.add_task(task_str)
        outcome: Any = streamer(*args, **kwargs)
        # Echo the prompt while the (transient) spinner context is live,
        # matching the doctest's expected output ordering.
        print(task_str, end=end)
        return outcome
REGINALD_PROMPT: Final[str] = "Reginald: "


def get_env_var(
Expand Down Expand Up @@ -140,3 +62,88 @@ def create_folder(folder: str) -> None:
os.makedirs(folder)
else:
logging.info(f"Folder '{folder}' already exists")


def stream_progress_wrapper(
    streamer: Callable,
    task_str: str = REGINALD_PROMPT,
    use_spinner: bool = True,
    end: str = "\n",
    *args,
    **kwargs,
) -> Any:
    """Call ``streamer``, optionally behind a transient spinner, and return its result.

    Parameters
    ----------
    streamer
        Callable invoked with the forwarded ``*args``/``**kwargs``.
    task_str
        Prompt text shown beside the spinner and echoed via ``print``.
    use_spinner
        If ``True``, display a transient ``rich`` spinner while
        ``streamer`` runs.
    end
        String passed to ``print`` after ``task_str``.

    Returns
    -------
    Any
        Whatever ``streamer(*args, **kwargs)`` returns.

    Examples
    --------
    >>> from time import sleep
    >>> def sleeper(seconds: int = 3) -> str:
    ...     sleep(seconds)
    ...     return f'{seconds} seconds nap'
    >>> stream_progress_wrapper(sleeper)
    <BLANKLINE>
    Reginald:
    '3 seconds nap'
    >>> stream_progress_wrapper(sleeper, use_spinner=False, end='')
    Reginald: '3 seconds nap'
    """
    # Fast path: no spinner requested, just echo the prompt and run.
    if not use_spinner:
        print(task_str, end=end)
        return streamer(*args, **kwargs)
    with Progress(
        TextColumn("{task.description}[progress.description]"),
        SpinnerColumn(),
        transient=True,
    ) as progress:
        progress.add_task(task_str)
        outcome: Any = streamer(*args, **kwargs)
        # Echo the prompt while the (transient) spinner context is live,
        # matching the doctest's expected output ordering.
        print(task_str, end=end)
        return outcome


def stream_iter_progress_wrapper(
    streamer: Iterable | Callable | chain,
    task_str: str = REGINALD_PROMPT,
    use_spinner: bool = True,
    end: str = "",
    *args,
    **kwargs,
) -> Iterable:
    """Stream items from ``streamer``, showing a transient spinner until the first item arrives.

    Parameters
    ----------
    streamer
        An iterable of items, or a callable that is first invoked with
        ``*args``/``**kwargs`` and must return an iterable/generator.
    task_str
        Prompt text shown beside the spinner and echoed via ``print``.
    use_spinner
        If ``True``, display a transient ``rich`` spinner while the
        first item is being produced.
    end
        String passed to ``print`` after ``task_str``.

    Returns
    -------
    Iterable
        The stream of items (with the first item re-chained onto the
        front when the spinner path was taken).

    Examples
    --------
    >>> from time import sleep
    >>> def sleeper(naps: int = 3) -> Generator[str, None, None]:
    ...     for nap in range(naps):
    ...         sleep(1)
    ...         yield f'nap: {nap}'
    >>> tuple(stream_iter_progress_wrapper(streamer=sleeper))
    <BLANKLINE>
    Reginald: ('nap: 0', 'nap: 1', 'nap: 2')
    >>> tuple(stream_iter_progress_wrapper(
    ...     streamer=sleeper, use_spinner=False))
    Reginald: ('nap: 0', 'nap: 1', 'nap: 2')
    """
    # A callable is invoked (with the forwarded args) to obtain the
    # actual iterable/generator to stream.
    if isinstance(streamer, Callable):
        streamer = streamer(*args, **kwargs)
    if use_spinner:
        with Progress(
            TextColumn("{task.description}[progress.description]"),
            SpinnerColumn(),
            transient=True,
        ) as progress:
            # Materialised sequences are wrapped as generators so the
            # first-item peek below stays lazy for the remaining items.
            if isinstance(streamer, list | tuple):
                streamer = (item for item in streamer)
            assert isinstance(streamer, Generator)
            progress.add_task(task_str)
            # Force production of the first item while the spinner is
            # visible, then stitch it back onto the front of the stream.
            first_item = next(streamer)
            streamer = chain((first_item,), streamer)
    print(task_str, end=end)
    return streamer
71 changes: 29 additions & 42 deletions tests/test_chat_interact.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,21 +5,30 @@
from typer.testing import CliRunner

from reginald.cli import cli
from reginald.models.chat_interact import art, run_chat_interact
from reginald.models.chat_interact import (
ART,
CLEAR_HISTORY_STRS,
EXIT_STRS,
INPUT_PROMPT,
REGINALD_PROMPT,
run_chat_interact,
)
from reginald.models.simple.hello import Hello

runner = CliRunner()
art_split = art.splitlines()
art_split = ART.splitlines()


def test_chat_cli():
"""Test sending an input `str` via `cli` and then exiting."""
result = runner.invoke(cli, ["chat"], input="What's up dock?\nexit\n")
term_stdout_lines: list[str] = result.stdout.split("\n")
assert term_stdout_lines[: len(art_split)] == art_split
assert term_stdout_lines[len(art_split) + 1] == ">>> "
assert term_stdout_lines[len(art_split) + 2] == "Reginald: Hello! How are you?"
assert term_stdout_lines[len(art_split) + 3] == ">>> "
assert term_stdout_lines[len(art_split) + 1] == INPUT_PROMPT
assert (
term_stdout_lines[len(art_split) + 2] == f"{REGINALD_PROMPT}Hello! How are you?"
)
assert term_stdout_lines[len(art_split) + 3] == INPUT_PROMPT


def test_chat_cli_no_stream():
Expand All @@ -29,51 +38,29 @@ def test_chat_cli_no_stream():
)
term_stdout_lines: list[str] = result.stdout.split("\n")
assert term_stdout_lines[: len(art_split)] == art_split
assert term_stdout_lines[len(art_split) + 1] == ">>> "
assert term_stdout_lines[len(art_split) + 1] == INPUT_PROMPT
assert (
term_stdout_lines[len(art_split) + 2]
== "Reginald: Let's discuss this in a channel!"
== f"{REGINALD_PROMPT}Let's discuss this in a channel!"
)
assert term_stdout_lines[len(art_split) + 3] == ">>> "


def test_chat_interact_exit():
with mock.patch.object(builtins, "input", lambda _: "exit"):
interaction = run_chat_interact(model="hello")
assert isinstance(interaction, Hello)
assert term_stdout_lines[len(art_split) + 3] == INPUT_PROMPT


def test_chat_interact_exit_with_bracket():
with mock.patch.object(builtins, "input", lambda _: "exit()"):
@pytest.mark.parametrize("input", EXIT_STRS)
def test_chat_interact_exit(input: str):
with mock.patch.object(builtins, "input", lambda _: input):
interaction = run_chat_interact(model="hello")
assert isinstance(interaction, Hello)


def test_chat_interact_quit_with_bracket():
with mock.patch.object(builtins, "input", lambda _: "quit()"):
interaction = run_chat_interact(model="hello")
assert isinstance(interaction, Hello)


def test_chat_interact_bye():
with mock.patch.object(builtins, "input", lambda _: "bye Reginald"):
interaction = run_chat_interact(model="hello")
assert isinstance(interaction, Hello)


def test_chat_interact_clear_history():
result = runner.invoke(cli, ["chat"], input="clear_history\n")
term_stdout_lines: list[str] = result.stdout.split("\n")
assert term_stdout_lines[: len(art_split)] == art_split
assert term_stdout_lines[len(art_split) + 1] == ">>> "
assert term_stdout_lines[len(art_split) + 2] == "Reginald: No history to clear."
assert term_stdout_lines[len(art_split) + 3] == ">>> "


def test_chat_interact_slash_clear_history():
result = runner.invoke(cli, ["chat"], input="\clear_history\n")
@pytest.mark.parametrize("input", CLEAR_HISTORY_STRS)
def test_chat_interact_clear_history(input: str):
result = runner.invoke(cli, ["chat"], input=input)
term_stdout_lines: list[str] = result.stdout.split("\n")
assert term_stdout_lines[: len(art_split)] == art_split
assert term_stdout_lines[len(art_split) + 1] == ">>> "
assert term_stdout_lines[len(art_split) + 2] == "Reginald: No history to clear."
assert term_stdout_lines[len(art_split) + 3] == ">>> "
assert term_stdout_lines[len(art_split) + 1] == INPUT_PROMPT
assert (
term_stdout_lines[len(art_split) + 2]
== f"{REGINALD_PROMPT}No history to clear."
)
assert term_stdout_lines[len(art_split) + 3] == INPUT_PROMPT

0 comments on commit 08573b7

Please sign in to comment.