Skip to content

Commit

Permalink
add more tests
Browse files Browse the repository at this point in the history
  • Loading branch information
wenzhe-log10 committed Apr 25, 2024
1 parent f6d71ec commit 291f607
Show file tree
Hide file tree
Showing 3 changed files with 33 additions and 1 deletion.
4 changes: 3 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -80,8 +80,10 @@ logging-tags:
python examples/logging/tags_openai.py

# Run every magentic logging example once; each script exercises a different
# magentic usage pattern (prompt, streaming, function calls, async variants).
logging-magentic:
	python examples/logging/magentic_prompt.py
	python examples/logging/magentic_prompt_stream.py
	python examples/logging/magentic_function_logging.py
	python examples/logging/magentic_async_stream_logging.py
	python examples/logging/magentic_async_parallel_function_call.py
	python examples/logging/magentic_async_multi_session_tags.py
	python examples/logging/magentic_async_widget.py
Expand Down
14 changes: 14 additions & 0 deletions examples/logging/magentic_prompt.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import openai
from magentic import prompt

from log10.load import log10


# Instrument the openai module so calls made through magentic are logged to
# log10 (USE_ASYNC_=True presumably enables non-blocking log submission —
# confirm against log10.load).
log10(openai, USE_ASYNC_=True)


# magentic turns this stub into an LLM call: the decorator string is the
# prompt, and the `-> str` annotation tells magentic to return the completion
# as a plain string. The `...` body is intentional — magentic supplies it.
@prompt("Tell me a joke")
def llm() -> str: ...


# Invoke the prompt once and print the model's reply.
print(llm())
16 changes: 16 additions & 0 deletions examples/logging/magentic_prompt_stream.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import openai
from magentic import StreamedStr, prompt

from log10.load import log10


# Instrument the openai module so calls made through magentic are logged to
# log10 (USE_ASYNC_=True presumably enables non-blocking log submission —
# confirm against log10.load).
log10(openai, USE_ASYNC_=True)


# magentic turns this stub into an LLM call: the decorator string is the
# prompt, and the `-> StreamedStr` annotation makes the response an iterable
# that yields text incrementally. The `...` body is intentional.
@prompt("Tell me a joke")
def llm() -> StreamedStr: ...


# Consume the stream chunk-by-chunk, printing as tokens arrive; flush so the
# partial output is visible immediately.
response = llm()
for chunk in response:
    print(chunk, end="", flush=True)

0 comments on commit 291f607

Please sign in to comment.