Skip to content

Commit

Permalink
Merge pull request #3 from llm-agents-php/feature/error-handling
Browse the repository at this point in the history
Handle LLM errors
  • Loading branch information
butschster authored Sep 5, 2024
2 parents 3ff8f6d + 1c6f619 commit 3eeeb4d
Show file tree
Hide file tree
Showing 9 changed files with 82 additions and 2 deletions.
2 changes: 1 addition & 1 deletion composer.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"require": {
"php": "^8.3",
"openai-php/client": "^0.10.1",
"llm-agents/agents": "^1.0",
"llm-agents/agents": "^1.4",
"guzzlehttp/guzzle": "^7.0"
},
"require-dev": {
Expand Down
10 changes: 10 additions & 0 deletions src/Exception/LimitExceededException.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
<?php

declare(strict_types=1);

namespace LLM\Agents\OpenAI\Client\Exception;

/**
 * Signals that generation stopped because the response hit the token
 * limit: the stream parser raises this when the finish reason is
 * FinishReason::Length, and the LLM client translates it into the
 * framework-level LimitExceededException carrying max_tokens.
 */
final class LimitExceededException extends OpenAiClientException
{
}
10 changes: 10 additions & 0 deletions src/Exception/OpenAiClientException.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
<?php

declare(strict_types=1);

namespace LLM\Agents\OpenAI\Client\Exception;

/**
 * Base exception type for errors raised by this OpenAI client package.
 *
 * Concrete failures (rate limit, timeout, token-limit exceeded) extend
 * this class, so callers can catch this single type to handle any
 * client-side error from the package.
 */
class OpenAiClientException extends \Exception
{
}
10 changes: 10 additions & 0 deletions src/Exception/RateLimitException.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
<?php

declare(strict_types=1);

namespace LLM\Agents\OpenAI\Client\Exception;

/**
 * Signals that the provider reported a rate limit: the stream parser
 * raises this when the finish reason is FinishReason::Limit, and the
 * LLM client translates it into the framework-level RateLimitException.
 */
final class RateLimitException extends OpenAiClientException
{
}
10 changes: 10 additions & 0 deletions src/Exception/TimeoutException.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
<?php

declare(strict_types=1);

namespace LLM\Agents\OpenAI\Client\Exception;

/**
 * Signals that generation timed out: the stream parser raises this when
 * the finish reason is FinishReason::Timeout, and the LLM client
 * translates it into the framework-level TimeoutException.
 */
final class TimeoutException extends OpenAiClientException
{
}
15 changes: 14 additions & 1 deletion src/LLM.php
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@
use LLM\Agents\LLM\Prompt\PromptInterface;
use LLM\Agents\LLM\Prompt\Tool;
use LLM\Agents\LLM\Response\Response;
use LLM\Agents\OpenAI\Client\Exception\LimitExceededException;
use LLM\Agents\OpenAI\Client\Exception\RateLimitException;
use LLM\Agents\OpenAI\Client\Exception\TimeoutException;
use OpenAI\Contracts\ClientContract;

final class LLM implements LLMInterface
Expand Down Expand Up @@ -77,7 +80,17 @@ public function generate(
->chat()
->createStreamed($request);

return $this->streamParser->parse($stream, $callback);
try {
return $this->streamParser->parse($stream, $callback);
} catch (LimitExceededException) {
throw new \LLM\Agents\LLM\Exception\LimitExceededException(
currentLimit: $request['max_tokens'],
);
} catch (RateLimitException) {
throw new \LLM\Agents\LLM\Exception\RateLimitException();
} catch (TimeoutException) {
throw new \LLM\Agents\LLM\Exception\TimeoutException();
}
}

protected function buildOptions(OptionsInterface $options): array
Expand Down
11 changes: 11 additions & 0 deletions src/Parsers/ChatResponseParser.php
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@
namespace LLM\Agents\OpenAI\Client\Parsers;

use LLM\Agents\OpenAI\Client\Event\MessageChunk;
use LLM\Agents\OpenAI\Client\Exception\LimitExceededException;
use LLM\Agents\OpenAI\Client\Exception\RateLimitException;
use LLM\Agents\OpenAI\Client\Exception\TimeoutException;
use LLM\Agents\OpenAI\Client\StreamChunkCallbackInterface;
use LLM\Agents\LLM\Response\FinishReason;
use LLM\Agents\LLM\Response\Response;
Expand All @@ -21,6 +24,11 @@ public function __construct(
private ?EventDispatcherInterface $eventDispatcher = null,
) {}

/**
* @throws LimitExceededException
* @throws RateLimitException
* @throws TimeoutException
*/
public function parse(ResponseStreamContract $stream, ?StreamChunkCallbackInterface $callback = null): Response
{
$callback ??= static fn(?string $chunk, bool $stop, ?string $finishReason = null) => null;
Expand Down Expand Up @@ -112,6 +120,9 @@ public function parse(ResponseStreamContract $stream, ?StreamChunkCallbackInterf
tools: \array_values($toolCalls),
finishReason: $finishReason->value,
),
$finishReason === FinishReason::Length => throw new LimitExceededException(),
$finishReason === FinishReason::Timeout => throw new TimeoutException(),
$finishReason === FinishReason::Limit => throw new RateLimitException(),
};
}
}
8 changes: 8 additions & 0 deletions src/Parsers/ParserInterface.php
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,19 @@

namespace LLM\Agents\OpenAI\Client\Parsers;

use LLM\Agents\OpenAI\Client\Exception\LimitExceededException;
use LLM\Agents\OpenAI\Client\Exception\RateLimitException;
use LLM\Agents\OpenAI\Client\Exception\TimeoutException;
use LLM\Agents\OpenAI\Client\StreamChunkCallbackInterface;
use LLM\Agents\LLM\Response\Response;
use OpenAI\Contracts\ResponseStreamContract;

interface ParserInterface
{
    /**
     * Consumes a streamed OpenAI response and assembles it into a final
     * Response, optionally invoking the callback as chunks arrive.
     *
     * @param ResponseStreamContract $stream Raw response stream from the OpenAI client.
     * @param StreamChunkCallbackInterface|null $callback Invoked per chunk when provided.
     *
     * @throws LimitExceededException when the finish reason indicates the token limit was hit
     * @throws RateLimitException when the finish reason indicates a provider rate limit
     * @throws TimeoutException when the finish reason indicates a timeout
     */
    public function parse(ResponseStreamContract $stream, ?StreamChunkCallbackInterface $callback = null): Response;
}
8 changes: 8 additions & 0 deletions src/StreamResponseParser.php
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,9 @@

namespace LLM\Agents\OpenAI\Client;

use LLM\Agents\OpenAI\Client\Exception\LimitExceededException;
use LLM\Agents\OpenAI\Client\Exception\RateLimitException;
use LLM\Agents\OpenAI\Client\Exception\TimeoutException;
use LLM\Agents\OpenAI\Client\Parsers\ParserInterface;
use LLM\Agents\LLM\Exception\LLMException;
use LLM\Agents\LLM\Response\Response;
Expand All @@ -19,6 +22,11 @@ public function registerParser(string $type, ParserInterface $parser): void
$this->parsers[$type] = $parser;
}

/**
* @throws LimitExceededException
* @throws RateLimitException
* @throws TimeoutException
*/
public function parse(StreamResponse $stream, ?StreamChunkCallbackInterface $callback = null): Response
{
$this->validateStreamResponse($stream);
Expand Down

0 comments on commit 3eeeb4d

Please sign in to comment.