Fix extra Anthropic import error.
eli64s committed Oct 7, 2024
1 parent 86f0775 commit 105e444
Showing 4 changed files with 56 additions and 23 deletions.
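The heart of the change is a guarded optional import in readmeai/models/anthropic.py (renamed from claude.py): the module records whether the anthropic package is importable instead of logging a warning at import time, and the handler only builds a client when both the library and an API key are present. A condensed sketch of that pattern, assembled from the hunks below:

```python
import os

# Optional-dependency guard introduced by this commit: if 'anthropic' is not
# installed, the module still imports cleanly and the flag records the absence.
try:
    import anthropic

    ANTHROPIC_AVAILABLE = True
except ImportError:
    anthropic = None
    ANTHROPIC_AVAILABLE = False

# Mirrors _model_settings(): only construct a client when the library is
# importable and ANTHROPIC_API_KEY is set; otherwise leave it as None so the
# handler can fall back to a placeholder response.
api_key = os.getenv("ANTHROPIC_API_KEY")
client = (
    anthropic.AsyncAnthropic(api_key=api_key)
    if ANTHROPIC_AVAILABLE and api_key
    else None
)
```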
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "readmeai"
version = "0.5.93"
version = "0.5.94"
description = "Automated README file generator, powered by AI."
authors = ["Eli <egsalamie@gmail.com>"]
license = "MIT"
63 changes: 48 additions & 15 deletions readmeai/models/claude.py → readmeai/models/anthropic.py
@@ -1,7 +1,7 @@
"""Anthropic API service implementation."""

import logging
from typing import Any
import os
from typing import Any, Optional

from tenacity import (
retry,
@@ -15,13 +15,13 @@
from readmeai.models.base import BaseModelHandler
from readmeai.models.tokens import token_handler

_logger = logging.getLogger(__name__)

try:
import anthropic

ANTHROPIC_AVAILABLE = True
except ImportError:
_logger.warning("Anthropic API not found, please install 'anthropic'.")
anthropic = None
ANTHROPIC_AVAILABLE = False


class AnthropicHandler(BaseModelHandler):
@@ -33,14 +33,36 @@ def __init__(
self, config_loader: ConfigLoader, context: RepositoryContext
) -> None:
super().__init__(config_loader, context)
self._model_settings()
self.client: Optional[Any] = None
self.model: str = "claude-3-opus-20240229"
if ANTHROPIC_AVAILABLE:
self._model_settings()
else:
self._logger.warning(
"Anthropic library is not available. Some features will be disabled."
)

def _model_settings(self):
self.client = anthropic.AsyncAnthropic()
self.model = "claude-3-opus-20240229"
if not ANTHROPIC_AVAILABLE:
self._logger.error(
"Attempted to configure Anthropic model without the required library."
)
return

api_key = os.getenv("ANTHROPIC_API_KEY")
if not api_key:
self._logger.error(
"ANTHROPIC_API_KEY environment variable is not set."
)
return

self.client = anthropic.AsyncAnthropic(api_key=api_key)

async def _build_payload(self, prompt: str, tokens: int) -> dict[str, Any]:
"""Build payload for POST request to the Anthropic API."""
if not ANTHROPIC_AVAILABLE:
raise RuntimeError("Anthropic library is not available.")

return {
"model": self.model,
"max_tokens": tokens,
@@ -58,6 +80,8 @@ async def _build_payload(self, prompt: str, tokens: int) -> dict[str, Any]:
anthropic.APIConnectionError,
anthropic.RateLimitError,
)
if ANTHROPIC_AVAILABLE
else tuple()
),
)
async def _make_request(
@@ -68,24 +92,33 @@ async def _make_request(
repo_files: list[tuple[str, str]] | None,
) -> Any:
"""Processes Anthropic API responses and returns generated text."""
if not ANTHROPIC_AVAILABLE:
self._logger.error(
"Cannot make request: Anthropic library is not available."
)
return index, self.placeholder

if self.client is None:
self._logger.error("Anthropic client is not properly initialized.")
return index, self.placeholder

try:
prompt = await token_handler(self.config, index, prompt, tokens)

parameters = await self._build_payload(prompt, tokens)

async with self.rate_limit_semaphore:
response = await self.client.messages.create(**parameters)
data = response.content[0].text
data = (
response.content[0].text
if hasattr(response, "content")
else str(response)
)
self._logger.info(
f"Response from Anthropic for '{index}': {data}"
)
return index, data

except (
anthropic.APIError,
anthropic.APIConnectionError,
anthropic.RateLimitError,
) as e:
except () as e:
self._logger.error(
f"Error processing request for '{index}': {e!r}"
)
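The tenacity decorator on _make_request applies the same availability check: when the library is missing, retry_if_exception_type receives an empty tuple, so no exception type ever triggers a retry. A self-contained sketch of that decision; the stop policy and the coroutine body are stand-ins, since the real decorator settings and request logic are only partially shown in this diff:

```python
import asyncio

from tenacity import retry, retry_if_exception_type, stop_after_attempt

try:
    import anthropic

    ANTHROPIC_AVAILABLE = True
except ImportError:
    anthropic = None
    ANTHROPIC_AVAILABLE = False

# Same conditional tuple as the decorator above: retry only on the Anthropic
# error types, and only when the library is importable.
RETRYABLE_EXCEPTIONS = (
    (anthropic.APIError, anthropic.APIConnectionError, anthropic.RateLimitError)
    if ANTHROPIC_AVAILABLE
    else tuple()
)


@retry(stop=stop_after_attempt(3), retry=retry_if_exception_type(RETRYABLE_EXCEPTIONS))
async def call_model(prompt: str) -> str:
    # Stand-in body; the real method builds a payload and awaits
    # client.messages.create(**parameters) under a rate-limit semaphore.
    return f"echo: {prompt}"


if __name__ == "__main__":
    print(asyncio.run(call_model("hello")))
```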
2 changes: 1 addition & 1 deletion readmeai/models/factory.py
@@ -4,8 +4,8 @@
from readmeai.config.settings import ConfigLoader
from readmeai.errors import UnsupportedServiceError
from readmeai.ingestion.models import RepositoryContext
from readmeai.models.anthropic import AnthropicHandler
from readmeai.models.base import BaseModelHandler
from readmeai.models.claude import AnthropicHandler
from readmeai.models.gemini import GeminiHandler
from readmeai.models.offline import OfflineHandler
from readmeai.models.openai import OpenAIHandler
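Only the import line changes here, tracking the claude.py → anthropic.py rename; the factory logic itself is untouched. Any external code still importing the old module path now fails, for example:

```python
# After this commit the handler lives in readmeai.models.anthropic:
from readmeai.models.anthropic import AnthropicHandler

# The old path was removed by the rename and would now raise
# ModuleNotFoundError:
#   from readmeai.models.claude import AnthropicHandler
```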
12 changes: 6 additions & 6 deletions tests/models/test_claude.py → tests/models/test_anthropic.py
@@ -6,7 +6,7 @@

from readmeai.config.settings import ConfigLoader
from readmeai.ingestion.models import RepositoryContext
from readmeai.models.claude import AnthropicHandler
from readmeai.models.anthropic import AnthropicHandler


@pytest.fixture
@@ -17,14 +17,14 @@ def anthropic_handler(repository_context_fixture: RepositoryContext):


@pytest.mark.asyncio
async def test_model_settings(anthropic_handler):
async def test_model_settings(anthropic_handler: AnthropicHandler):
anthropic_handler._model_settings()
assert isinstance(anthropic_handler.client, anthropic.AsyncAnthropic)
assert anthropic_handler.model == "claude-3-opus-20240229"


@pytest.mark.asyncio
async def test_build_payload(anthropic_handler):
async def test_build_payload(anthropic_handler: AnthropicHandler):
prompt = "Test prompt"
tokens = 100
payload = await anthropic_handler._build_payload(prompt, tokens)
@@ -34,7 +34,7 @@


@pytest.mark.asyncio
@patch("readmeai.models.claude.token_handler", new_callable=AsyncMock)
@patch("readmeai.models.anthropic.token_handler", new_callable=AsyncMock)
@patch("anthropic.AsyncAnthropic", new_callable=AsyncMock)
async def test_make_request_success(
mock_create, mock_token_handler, anthropic_handler: AnthropicHandler
@@ -57,7 +57,7 @@ async def test_make_request_success(


@pytest.mark.asyncio
@patch("readmeai.models.claude.token_handler", new_callable=AsyncMock)
@patch("readmeai.models.anthropic.token_handler", new_callable=AsyncMock)
@patch("anthropic.AsyncAnthropic", new_callable=AsyncMock)
async def test_make_request_api_error(
mock_create, mock_token_handler, anthropic_handler: AnthropicHandler
@@ -78,7 +78,7 @@ async def test_make_request_api_error(


@pytest.mark.asyncio
@patch("readmeai.models.claude.token_handler", new_callable=AsyncMock)
@patch("readmeai.models.anthropic.token_handler", new_callable=AsyncMock)
@patch("anthropic.AsyncAnthropic", new_callable=AsyncMock)
async def test_make_request_unexpected_error(
mock_create, mock_token_handler, anthropic_handler: AnthropicHandler
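The test updates are mechanical: the @patch targets must name the module where token_handler is now looked up (readmeai.models.anthropic) rather than the old readmeai.models.claude path. A further test the suite could add, sketched here as an assumption rather than part of this commit, exercises the degraded path by forcing the availability flag off; the positional argument order (index, prompt, tokens, repo_files) is inferred from the signature fragment shown above:

```python
from unittest.mock import patch

import pytest

from readmeai.models.anthropic import AnthropicHandler


@pytest.mark.asyncio
async def test_make_request_without_anthropic(anthropic_handler: AnthropicHandler):
    # Force the degraded path: _make_request should return the placeholder
    # text without ever touching the API client.
    with patch("readmeai.models.anthropic.ANTHROPIC_AVAILABLE", False):
        index, text = await anthropic_handler._make_request(
            "overview", "test prompt", 100, None
        )
    assert index == "overview"
    assert text == anthropic_handler.placeholder
```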
