This repository was archived by the owner on Jul 16, 2025. It is now read-only.
Merged
composer.json (2 changes: 1 addition & 1 deletion)

@@ -11,7 +11,7 @@
     ],
     "require": {
         "php": ">=8.2",
-        "php-llm/llm-chain": "^0.6",
+        "php-llm/llm-chain": "^0.7",
         "symfony/config": "^6.4 || ^7.0",
         "symfony/dependency-injection": "^6.4 || ^7.0",
         "symfony/framework-bundle": "^6.4 || ^7.0"
src/DependencyInjection/LlmChainExtension.php (2 changes: 1 addition & 1 deletion)

@@ -102,7 +102,7 @@ private function processPlatformConfig(string $name, array $platform, ContainerB
         $definition
             ->replaceArgument('$baseUrl', $platform['base_url'])
             ->replaceArgument('$deployment', $platform['deployment'])
-            ->replaceArgument('$key', $platform['api_key'])
+            ->replaceArgument('$apiKey', $platform['api_key'])
             ->replaceArgument('$apiVersion', $platform['version']);

         $container->setDefinition('llm_chain.platform.'.$name, $definition);
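
A note on the rename above: Symfony's Definition::replaceArgument() addresses constructor arguments by parameter name when given a '$name' string, so this string has to match the parameter name of the Azure platform class shipped with llm-chain 0.7 exactly. The sketch below illustrates that coupling; the AzurePlatform class and the values are assumptions inferred from this diff, not code copied from the library.

<?php

use Symfony\Component\DependencyInjection\Definition;

// Assumed shape of the Azure platform client in llm-chain 0.7: the promoted
// constructor parameter is now $apiKey (it was $key in 0.6).
final class AzurePlatform
{
    public function __construct(
        private string $baseUrl,
        private string $deployment,
        private string $apiVersion,
        private string $apiKey,
    ) {
    }
}

// Named arguments are keyed by '$' plus the parameter name. With a plain
// Definition like this one, replaceArgument('$key', ...) would now throw an
// OutOfBoundsException, because only '$apiKey' is declared.
$definition = new Definition(AzurePlatform::class);
$definition->setArgument('$baseUrl', 'https://example.openai.azure.com'); // placeholder values
$definition->setArgument('$deployment', 'gpt-4o');
$definition->setArgument('$apiVersion', '2024-02-01');
$definition->setArgument('$apiKey', 'placeholder');
$definition->replaceArgument('$apiKey', '%env(AZURE_OPENAI_API_KEY)%');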
src/Profiler/TraceableLanguageModel.php (6 changes: 3 additions & 3 deletions)

@@ -6,13 +6,13 @@

 use PhpLlm\LlmChain\LanguageModel;
 use PhpLlm\LlmChain\Message\MessageBag;
-use PhpLlm\LlmChain\Response\Response;
+use PhpLlm\LlmChain\Response\ResponseInterface;

 /**
  * @phpstan-type LlmCallData array{
  *     messages: MessageBag,
  *     options: array<string, mixed>,
- *     response: Response,
+ *     response: ResponseInterface,
  * }
  */
 final class TraceableLanguageModel implements LanguageModel
@@ -28,7 +28,7 @@ public function __construct(
     ) {
     }

-    public function call(MessageBag $messages, array $options = []): Response
+    public function call(MessageBag $messages, array $options = []): ResponseInterface
     {
         $response = $this->llm->call($messages, $options);

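
Widening the return type from the concrete Response to ResponseInterface is what keeps this profiler decorator substitutable for the model it wraps: PHP return types are covariant, so the decorator may only declare the interface's return type or a subtype of it, and this change implies the llm-chain 0.7 LanguageModel contract itself now returns ResponseInterface. The following self-contained sketch shows the pattern with deliberately simplified stand-in types; none of them are the library's real classes.

<?php

// Simplified stand-ins, assumed shapes for illustration only.
interface ResponseInterface
{
    public function getContent(): string;
}

final class TextResponse implements ResponseInterface
{
    public function __construct(private string $content)
    {
    }

    public function getContent(): string
    {
        return $this->content;
    }
}

interface LanguageModel
{
    /** @param list<string> $messages */
    public function call(array $messages, array $options = []): ResponseInterface;
}

// The traceable decorator records every call for the profiler and returns
// whatever the inner model returned. Hinting ResponseInterface keeps it valid
// for any concrete response implementation the wrapped model may produce.
final class TraceableModel implements LanguageModel
{
    /** @var list<array{messages: list<string>, options: array, response: ResponseInterface}> */
    public array $calls = [];

    public function __construct(private LanguageModel $inner)
    {
    }

    public function call(array $messages, array $options = []): ResponseInterface
    {
        $response = $this->inner->call($messages, $options);
        $this->calls[] = ['messages' => $messages, 'options' => $options, 'response' => $response];

        return $response;
    }
}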
src/Resources/config/services.php (5 changes: 3 additions & 2 deletions)

@@ -19,6 +19,7 @@
 use PhpLlm\LlmChain\Store\Pinecone\Store as PineconeStore;
 use PhpLlm\LlmChain\StructuredOutput\ChainProcessor as StructureOutputProcessor;
 use PhpLlm\LlmChain\StructuredOutput\ResponseFormatFactory;
+use PhpLlm\LlmChain\StructuredOutput\ResponseFormatFactoryInterface;
 use PhpLlm\LlmChain\StructuredOutput\SchemaFactory;
 use PhpLlm\LlmChain\ToolBox\ChainProcessor as ToolProcessor;
 use PhpLlm\LlmChain\ToolBox\ParameterAnalyzer;
@@ -53,7 +54,7 @@
                 '$baseUrl' => abstract_arg('Base URL for Azure API'),
                 '$deployment' => abstract_arg('Deployment for Azure API'),
                 '$apiVersion' => abstract_arg('API version for Azure API'),
-                '$key' => abstract_arg('API key for Azure API'),
+                '$apiKey' => abstract_arg('API key for Azure API'),
             ])
         ->set(OpenAIPlatform::class)
            ->abstract()
@@ -106,8 +107,8 @@

         // structured output
         ->set(ResponseFormatFactory::class)
+        ->alias(ResponseFormatFactoryInterface::class, ResponseFormatFactory::class)
         ->set(SchemaFactory::class)
-            ->factory([SchemaFactory::class, 'create'])
         ->set(StructureOutputProcessor::class)

         // tools
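
The alias added in the structured-output block is what makes the new interface usable by consumers: services can type-hint the ResponseFormatFactoryInterface introduced in llm-chain 0.7 and the container resolves it to the concrete ResponseFormatFactory registered right above it. A small sketch of the effect follows; the consumer class is hypothetical and not part of the bundle.

<?php

namespace App\Chain;

use PhpLlm\LlmChain\StructuredOutput\ResponseFormatFactoryInterface;

// Hypothetical application service. Because services.php now aliases the
// interface to ResponseFormatFactory, autowiring satisfies this constructor
// without extra configuration, and the class stays decoupled from the
// concrete factory (easy to replace with a stub in tests).
final class StructuredOutputAware
{
    public function __construct(
        private ResponseFormatFactoryInterface $responseFormatFactory,
    ) {
    }
}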