Skip to content

Commit 1da0ef7

Browse files
authored
[Feature] Add core and base modules (#1)
* add core and base modules
* add conftest and test init agent
1 parent d64f959 commit 1da0ef7

File tree

15 files changed

+155
-4
lines changed

15 files changed

+155
-4
lines changed

pyproject.toml

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,5 +29,7 @@ authors = [
2929
]
3030
requires-python = ">=3.10"
3131
dependencies = [
32-
"mcp[cli]>=1.9.4"
32+
"asyncio>=3.4.3",
33+
"mcp[cli]>=1.9.4",
34+
"pydantic>=2.11.7"
3335
]

pytest.ini

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
[pytest]
2+
asyncio_mode=auto

src/llm_agents_from_scratch/base/__init__.py

Whitespace-only changes.

src/llm_agents_from_scratch/base/llm.py

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
"""Base LLM"""
2+
3+
from abc import ABC, abstractmethod
4+
5+
from llm_agents_from_scratch.data_structures import ChatMessage, CompleteResult
6+
7+
8+
class BaseLLM(ABC):
    """Abstract interface for LLM backends used by agents.

    Concrete subclasses must implement both a raw text-completion
    interface and a structured chat interface; both are async.
    """

    @abstractmethod
    async def complete(self, prompt: str) -> CompleteResult:
        """Text complete.

        Args:
            prompt (str): The prompt the LLM should use as input.

        Returns:
            CompleteResult: The completion of the prompt, including the
                full raw response.
        """

    @abstractmethod
    async def chat(self, chat_messages: list[ChatMessage]) -> ChatMessage:
        """Chat interface.

        Args:
            chat_messages (list[ChatMessage]): chat history.

        Returns:
            ChatMessage: The response of the LLM structured as a `ChatMessage`.
        """
src/llm_agents_from_scratch/base/tool.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
"""Base Tool"""
2+
3+
4+
class BaseTool:
    """Base class for tools that an LLM agent can be equipped with.

    Currently a bare marker/base type; concrete tool behavior is expected
    to be added by subclasses.
    """
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
from .agent import LLMAgent
2+
3+
__all__ = ["LLMAgent"]
Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
"""Agent Module."""
2+
3+
from typing_extensions import Self
4+
5+
from llm_agents_from_scratch.base.llm import BaseLLM
6+
from llm_agents_from_scratch.base.tool import BaseTool
7+
8+
9+
class LLMAgent:
    """A simple LLM Agent Class.

    Pairs an LLM backend with a (mutable) set of tools.
    """

    def __init__(
        self,
        llm: BaseLLM,
        tools: list[BaseTool] | None = None,
    ) -> None:
        """Initialize an LLMAgent.

        Args:
            llm (BaseLLM): The LLM backend the agent uses.
            tools (list[BaseTool] | None): Initial tool set. Defaults to an
                empty list. A fresh list is created per instance to avoid
                the shared mutable-default-argument pitfall.
        """
        self.llm = llm
        self.tools = [] if tools is None else tools

    def add_tool(self, tool: BaseTool) -> Self:
        """Add a tool to the agents tool set.

        NOTE: Supports fluent style for convenience.

        Args:
            tool (BaseTool): The tool to equip the LLM agent.

        Returns:
            Self: This agent, to allow call chaining.
        """
        self.tools = self.tools + [tool]
        return self
Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
from .llm import ChatMessage, ChatRole, CompleteResult
2+
3+
__all__ = [
4+
# llm
5+
"ChatRole",
6+
"ChatMessage",
7+
"CompleteResult",
8+
]
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
"""Data Structures for LLMs"""
2+
3+
from enum import Enum
4+
5+
from pydantic import BaseModel, ConfigDict
6+
7+
8+
class ChatRole(str, Enum):
    """Role of the author of a chat message.

    Inherits from `str` so members compare equal to their plain string
    values (e.g. ``ChatRole.USER == "user"``).
    """

    USER = "user"
    ASSISTANT = "assistant"
    SYSTEM = "system"
    TOOL_CALL = "tool_call"
13+
14+
15+
class ChatMessage(BaseModel):
    """A single message in a chat conversation."""

    # NOTE(review): arbitrary_types_allowed looks unnecessary — both fields
    # use standard/pydantic-friendly types — but it is kept to preserve
    # behavior; confirm before removing.
    model_config = ConfigDict(arbitrary_types_allowed=True)
    # Author role of the message.
    role: ChatRole
    # Message text.
    content: str
19+
20+
21+
class CompleteResult(BaseModel):
    """Result of an LLM text completion."""

    # The generated completion text only.
    response: str
    # The full, unprocessed response text — in the test mock this includes
    # the prompt; confirm the intended contract.
    full_response: str

tests/conftest.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
import pytest
2+
3+
from llm_agents_from_scratch.base.llm import BaseLLM
4+
from llm_agents_from_scratch.data_structures import ChatMessage, CompleteResult
5+
6+
7+
class MockBaseLLM(BaseLLM):
    """Minimal `BaseLLM` implementation used as a test double."""

    async def complete(self, prompt: str) -> CompleteResult:
        """Return a canned completion that echoes the prompt."""
        result = "mock complete"
        return CompleteResult(
            response=result, full_response=f"{prompt} {result}"
        )

    async def chat(self, chat_messages: list[ChatMessage]) -> ChatMessage:
        """Return a canned assistant message, ignoring the history."""
        return ChatMessage(role="assistant", content="mock chat response")
16+
17+
18+
@pytest.fixture()
def mock_llm() -> BaseLLM:
    """Provide a fresh mock LLM instance for tests."""
    return MockBaseLLM()

0 commit comments

Comments
 (0)