From a304df6cad709e48f2a40d1a39458b67b4938e40 Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Wed, 15 May 2024 14:44:17 +0200 Subject: [PATCH 01/17] 1433: Started on the module --- llm_services.info.yml | 5 +++ src/Annotation/LLModelsProvider.php | 42 +++++++++++++++++++++ src/Exceptions/LLMException.php | 7 ++++ src/Exceptions/LLMNotSupportedException.php | 7 ++++ src/Plugin/LLModelManager.php | 34 +++++++++++++++++ src/Plugin/LLModels/LLModelsInterface.php | 7 ++++ 6 files changed, 102 insertions(+) create mode 100644 llm_services.info.yml create mode 100644 src/Annotation/LLModelsProvider.php create mode 100644 src/Exceptions/LLMException.php create mode 100644 src/Exceptions/LLMNotSupportedException.php create mode 100644 src/Plugin/LLModelManager.php create mode 100644 src/Plugin/LLModels/LLModelsInterface.php diff --git a/llm_services.info.yml b/llm_services.info.yml new file mode 100644 index 0000000..de13cf5 --- /dev/null +++ b/llm_services.info.yml @@ -0,0 +1,5 @@ +name: "Large language model services" +description: 'Large language module services to communicat with the models.' 
+type: module +core_version_requirement: ^10 +configure: llm_services.plugin_settings_local_tasks diff --git a/src/Annotation/LLModelsProvider.php b/src/Annotation/LLModelsProvider.php new file mode 100644 index 0000000..52792e5 --- /dev/null +++ b/src/Annotation/LLModelsProvider.php @@ -0,0 +1,42 @@ +alterInfo('llm_models_info'); + $this->setCacheBackend($cache_backend, 'llmodels_plugins'); + } + +} diff --git a/src/Plugin/LLModels/LLModelsInterface.php b/src/Plugin/LLModels/LLModelsInterface.php new file mode 100644 index 0000000..6863e05 --- /dev/null +++ b/src/Plugin/LLModels/LLModelsInterface.php @@ -0,0 +1,7 @@ + Date: Wed, 15 May 2024 15:07:51 +0200 Subject: [PATCH 02/17] 1433: Added actions --- .github/PULL_REQUEST_TEMPLATE.md | 23 +++ .github/workflows/pr.yml | 154 ++++++++++++++++++ CHANGELOG.md | 13 ++ composer.json | 61 +++++++ package.json | 13 ++ phpcs.xml.dist | 23 +++ phpstan.neon | 12 ++ scripts/code-analysis | 36 ++++ src/Exceptions/LLMException.php | 5 + src/Exceptions/LLMNotSupportedException.php | 7 - src/Exceptions/NotSupportedException.php | 13 ++ ...Manager.php => LLModelProviderManager.php} | 14 +- src/Plugin/LLModels/LLMProviderInterface.php | 18 ++ src/Plugin/LLModels/LLModelsInterface.php | 7 - 14 files changed, 378 insertions(+), 21 deletions(-) create mode 100644 .github/PULL_REQUEST_TEMPLATE.md create mode 100644 .github/workflows/pr.yml create mode 100644 CHANGELOG.md create mode 100644 composer.json create mode 100644 package.json create mode 100644 phpcs.xml.dist create mode 100644 phpstan.neon create mode 100755 scripts/code-analysis delete mode 100644 src/Exceptions/LLMNotSupportedException.php create mode 100644 src/Exceptions/NotSupportedException.php rename src/Plugin/{LLModelManager.php => LLModelProviderManager.php} (64%) create mode 100644 src/Plugin/LLModels/LLMProviderInterface.php delete mode 100644 src/Plugin/LLModels/LLModelsInterface.php diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md 
new file mode 100644 index 0000000..a4b263e --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,23 @@ +#### Link to ticket + +Please add a link to the ticket being addressed by this change. + +#### Description + +Please include a short description of the suggested change and the reasoning behind the approach you have chosen. + +#### Screenshot of the result + +If your change affects the user interface you should include a screenshot of the result with the pull request. + +#### Checklist + +- [ ] My code passes our static analysis suite. +- [ ] My code passes our continuous integration process. + +If your code does not pass all the requirements on the checklist you have to add a comment explaining why this change +should be exempt from the list. + +#### Additional comments or questions + +If you have any further comments or questions for the reviewer please add them here. diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml new file mode 100644 index 0000000..e82f27e --- /dev/null +++ b/.github/workflows/pr.yml @@ -0,0 +1,154 @@ +on: pull_request +name: Review +jobs: + changelog: + runs-on: ubuntu-latest + name: Changelog should be updated + strategy: + fail-fast: false + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + fetch-depth: 2 + + - name: Git fetch + run: git fetch + + - name: Check that changelog has been updated. 
+ run: git diff --exit-code origin/${{ github.base_ref }} -- CHANGELOG.md && exit 1 || exit 0 + + test-composer-files: + name: Validate composer + runs-on: ubuntu-latest + strategy: + matrix: + php-versions: [ '8.3' ] + dependency-version: [ prefer-lowest, prefer-stable ] + steps: + - uses: actions/checkout@master + + - name: Setup PHP, with composer and extensions + uses: shivammathur/setup-php@v2 + with: + php-version: ${{ matrix.php-versions }} + extensions: json + coverage: none + tools: composer:v2 + + # https://github.com/shivammathur/setup-php#cache-composer-dependencies + - name: Get composer cache directory + id: composer-cache + run: echo "::set-output name=dir::$(composer config cache-files-dir)" + + - name: Cache dependencies + uses: actions/cache@v2 + with: + path: ${{ steps.composer-cache.outputs.dir }} + key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }} + restore-keys: ${{ runner.os }}-composer- + + - name: Validate composer files + run: | + composer validate --strict composer.json + # Check that dependencies resolve. 
+ composer update --${{ matrix.dependency-version }} --prefer-dist --no-interaction + + php-coding-standards: + name: PHP coding standards + runs-on: ubuntu-latest + strategy: + matrix: + php-versions: [ '8.3' ] + steps: + - uses: actions/checkout@master + + - name: Setup PHP, with composer and extensions + uses: shivammathur/setup-php@v2 + with: + php-version: ${{ matrix.php-versions }} + extensions: json + coverage: none + tools: composer:v2 + + # https://github.com/shivammathur/setup-php#cache-composer-dependencies + - name: Get composer cache directory + id: composer-cache + run: echo "::set-output name=dir::$(composer config cache-files-dir)" + + - name: Cache dependencies + uses: actions/cache@v2 + with: + path: ${{ steps.composer-cache.outputs.dir }} + key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }} + restore-keys: ${{ runner.os }}-composer- + + - name: Install Dependencies + run: | + composer install --no-interaction --no-progress + + - name: PHPCS + run: | + composer coding-standards-check/phpcs + + php-code-analysis: + name: PHP code analysis + runs-on: ubuntu-latest + strategy: + matrix: + php-versions: [ '8.3' ] + steps: + - uses: actions/checkout@master + + - name: Setup PHP, with composer and extensions + uses: shivammathur/setup-php@v2 + with: + php-version: ${{ matrix.php-versions }} + extensions: json + coverage: none + tools: composer:v2 + # https://github.com/shivammathur/setup-php#cache-composer-dependencies + + - name: Get composer cache directory + id: composer-cache + run: echo "::set-output name=dir::$(composer config cache-files-dir)" + + - name: Cache dependencies + uses: actions/cache@v2 + with: + path: ${{ steps.composer-cache.outputs.dir }} + key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }} + restore-keys: ${{ runner.os }}-composer- + + - name: Code analysis + run: | + ./scripts/code-analysis + + markdownlint: + runs-on: ubuntu-latest + name: markdownlint + steps: + - name: Checkout + uses: 
actions/checkout@v2 + + - name: Get yarn cache directory path + id: yarn-cache-dir-path + run: echo "::set-output name=dir::$(yarn cache dir)" + + - name: Cache yarn packages + uses: actions/cache@v2 + id: yarn-cache + with: + path: ${{ steps.yarn-cache-dir-path.outputs.dir }} + key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} + restore-keys: | + ${{ runner.os }}-yarn- + + - name: Yarn install + uses: actions/setup-node@v2 + with: + node-version: '20' + - run: yarn install + + - name: markdownlint + run: yarn coding-standards-check/markdownlint diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..ad9812a --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,13 @@ + +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +- First version of the module + +[Unreleased]: https://github.com/itk-dev/llm_services/compare/develop...HEAD diff --git a/composer.json b/composer.json new file mode 100644 index 0000000..d1690da --- /dev/null +++ b/composer.json @@ -0,0 +1,61 @@ +{ + "name": "itkdev/llm_services", + "type": "drupal-module", + "description": "Drupal large language module service integration module", + "minimum-stability": "dev", + "prefer-stable": true, + "license": "EUPL-1.2", + "repositories": { + "drupal": { + "type": "composer", + "url": "https://packages.drupal.org/8" + }, + "assets": { + "type": "composer", + "url": "https://asset-packagist.org" + } + }, + "require": { + "php": "^8.3" + }, + "require-dev": { + "drupal/coder": "^8.3", + "mglaman/phpstan-drupal": "^1.2", + "phpstan/extension-installer": "^1.3", + "phpstan/phpstan-deprecation-rules": "^1.2", + "vincentlanglet/twig-cs-fixer": "^2.9" + }, + "extra" : { + "composer-exit-on-patch-failure": false, + "enable-patching" : true, + "patches": { + } + }, + 
"scripts": { + "code-analysis/phpstan": [ + "phpstan analyse" + ], + "code-analysis": [ + "@code-analysis/phpstan" + ], + "coding-standards-check/phpcs": [ + "phpcs --standard=phpcs.xml.dist" + ], + "coding-standards-check": [ + "@coding-standards-check/phpcs" + ], + "coding-standards-apply/phpcs": [ + "phpcbf --standard=phpcs.xml.dist" + ], + "coding-standards-apply": [ + "@coding-standards-apply/phpcs" + ] + }, + "config": { + "sort-packages": true, + "allow-plugins": { + "phpstan/extension-installer": true, + "dealerdirect/phpcodesniffer-composer-installer": true + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..52fcd34 --- /dev/null +++ b/package.json @@ -0,0 +1,13 @@ +{ + "license": "UNLICENSED", + "private": true, + "devDependencies": { + "markdownlint-cli": "^0.32.2" + }, + "scripts": { + "coding-standards-check/markdownlint": "yarn markdownlint --ignore LICENSE.md --ignore vendor --ignore node_modules '*.md' 'modules/os2forms_digital_post/**/*.md'", + "coding-standards-check": "yarn coding-standards-check/markdownlint", + "coding-standards-apply/markdownlint": "yarn markdownlint --ignore LICENSE.md --ignore vendor --ignore node_modules '*.md' 'modules/os2forms_digital_post/**/*.md' --fix", + "coding-standards-apply": "yarn coding-standards-apply/markdownlint" + } +} diff --git a/phpcs.xml.dist b/phpcs.xml.dist new file mode 100644 index 0000000..e6cd9bb --- /dev/null +++ b/phpcs.xml.dist @@ -0,0 +1,23 @@ + + + OS2web Audit PHP Code Sniffer configuration + + . + vendor/ + node_modules/ + + + + + + + + + + + + + + + + diff --git a/phpstan.neon b/phpstan.neon new file mode 100644 index 0000000..d228014 --- /dev/null +++ b/phpstan.neon @@ -0,0 +1,12 @@ +parameters: + level: 6 + paths: + - ./ + excludePaths: + # @see https://github.com/mglaman/drupal-check/issues/261#issuecomment-1030141772/ + - vendor + - '*/node_modules/*' + ignoreErrors: + # This is how drupal works.... 
+ - '#Unsafe usage of new static\(\).#' + diff --git a/scripts/code-analysis b/scripts/code-analysis new file mode 100755 index 0000000..7977367 --- /dev/null +++ b/scripts/code-analysis @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +script_dir=$(pwd) +module_name=$(basename "$script_dir") +drupal_dir=vendor/drupal-module-code-analysis +# Relative to $drupal_dir +module_path=web/modules/contrib/$module_name + +cd "$script_dir" || exit + +drupal_composer() { + composer --working-dir="$drupal_dir" --no-interaction "$@" +} + +# Create new Drupal 9 project +if [ ! -f "$drupal_dir/composer.json" ]; then + composer --no-interaction create-project drupal/recommended-project:^10 "$drupal_dir" +fi +# Copy our code into the modules folder +mkdir -p "$drupal_dir/$module_path" +# https://stackoverflow.com/a/15373763 +rsync --archive --compress . --filter=':- .gitignore' --exclude "$drupal_dir" --exclude .git "$drupal_dir/$module_path" + +drupal_composer config minimum-stability dev + +# Allow ALL plugins +# https://getcomposer.org/doc/06-config.md#allow-plugins +drupal_composer config --no-plugins allow-plugins true + +drupal_composer require wikimedia/composer-merge-plugin +drupal_composer config extra.merge-plugin.include "$module_path/composer.json" +# https://www.drupal.org/project/drupal/issues/3220043#comment-14845434 +drupal_composer require --dev symfony/phpunit-bridge + +# Run PHPStan +(cd "$drupal_dir" && vendor/bin/phpstan --configuration="$module_path/phpstan.neon") diff --git a/src/Exceptions/LLMException.php b/src/Exceptions/LLMException.php index 660db8c..a176d5e 100644 --- a/src/Exceptions/LLMException.php +++ b/src/Exceptions/LLMException.php @@ -2,6 +2,11 @@ namespace Drupal\llm_services\Exceptions; +/** + * Base execution that all other exceptions should extend. + * + * This will enable other modules to use this exception as an catch all. 
+ */ class LLMException extends \Exception { } diff --git a/src/Exceptions/LLMNotSupportedException.php b/src/Exceptions/LLMNotSupportedException.php deleted file mode 100644 index 7cf7bf9..0000000 --- a/src/Exceptions/LLMNotSupportedException.php +++ /dev/null @@ -1,7 +0,0 @@ -alterInfo('llm_models_info'); - $this->setCacheBackend($cache_backend, 'llmodels_plugins'); + $this->alterInfo('llm_provider_info'); + $this->setCacheBackend($cache_backend, 'llm_provider_plugins'); } } diff --git a/src/Plugin/LLModels/LLMProviderInterface.php b/src/Plugin/LLModels/LLMProviderInterface.php new file mode 100644 index 0000000..a1b3d0a --- /dev/null +++ b/src/Plugin/LLModels/LLMProviderInterface.php @@ -0,0 +1,18 @@ + + * List of supported language models. + */ + public function listModels(): array; + +} diff --git a/src/Plugin/LLModels/LLModelsInterface.php b/src/Plugin/LLModels/LLModelsInterface.php deleted file mode 100644 index 6863e05..0000000 --- a/src/Plugin/LLModels/LLModelsInterface.php +++ /dev/null @@ -1,7 +0,0 @@ - Date: Wed, 15 May 2024 15:57:28 +0200 Subject: [PATCH 03/17] 1433: Added first out-lint for ollama plugin --- .gitignore | 1 + composer.json | 6 ++- src/Plugin/LLModels/Ollama.php | 87 ++++++++++++++++++++++++++++++++++ 3 files changed, 92 insertions(+), 2 deletions(-) create mode 100644 .gitignore create mode 100644 src/Plugin/LLModels/Ollama.php diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..22d0d82 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +vendor diff --git a/composer.json b/composer.json index d1690da..6fb669a 100644 --- a/composer.json +++ b/composer.json @@ -16,7 +16,8 @@ } }, "require": { - "php": "^8.3" + "php": "^8.3", + "theodo-group/llphant": "^0.6.10" }, "require-dev": { "drupal/coder": "^8.3", @@ -55,7 +56,8 @@ "sort-packages": true, "allow-plugins": { "phpstan/extension-installer": true, - "dealerdirect/phpcodesniffer-composer-installer": true + "dealerdirect/phpcodesniffer-composer-installer": true, + 
"php-http/discovery": true } } } diff --git a/src/Plugin/LLModels/Ollama.php b/src/Plugin/LLModels/Ollama.php new file mode 100644 index 0000000..cca4658 --- /dev/null +++ b/src/Plugin/LLModels/Ollama.php @@ -0,0 +1,87 @@ +configuration; + } + + /** + * {@inheritdoc} + */ + public function setConfiguration(array $configuration): static { + $this->configuration = $configuration + $this->defaultConfiguration(); + return $this; + } + + /** + * {@inheritdoc} + */ + public function defaultConfiguration(): array { + return [ + 'url' => 'http://ollama', + 'port' => '11434' + ]; + } + + /** + * {@inheritdoc} + */ + public function buildConfigurationForm(array $form, FormStateInterface $form_state): array { + $form['url'] = [ + '#type' => 'textfield', + '#title' => $this->t('The URL to connect to the Ollama API.'), + '#default_value' => $this->configuration['url'], + ]; + + $form['port'] = [ + '#type' => 'textfield', + '#title' => $this->t('The port that Ollama runs on.'), + '#default_value' => $this->configuration['port'], + ]; + + return $form; + } + + /** + * {@inheritdoc} + */ + public function validateConfigurationForm(array &$form, FormStateInterface $form_state) { + // TODO: Implement validateConfigurationForm() method. 
+ } + + /** + * {@inheritdoc} + */ + public function submitConfigurationForm(array &$form, FormStateInterface $form_state): void { + if (!$form_state->getErrors()) { + $values = $form_state->getValues(); + $configuration = [ + 'url' => $values['url'], + 'port' => $values['port'], + ]; + $this->setConfiguration($configuration); + } + } +} From 2a5b841061ff6005991603c853e4b8cd4806599a Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Thu, 16 May 2024 11:13:22 +0200 Subject: [PATCH 04/17] 1433: Added basic plugin handling --- llm_services.links.menu.yml | 5 + llm_services.links.task.yml | 5 + llm_services.routing.yml | 8 ++ llm_services.services.yml | 5 + package.json | 4 +- phpcs.xml.dist | 2 +- src/Controller/LocalTasksController.php | 58 +++++++++ src/Form/PluginSettingsForm.php | 122 ++++++++++++++++++ src/Form/PluginSettingsFormInterface.php | 20 +++ src/Form/SettingsForm.php | 95 ++++++++++++++ src/Plugin/Derivative/LocalTask.php | 64 +++++++++ src/Plugin/LLModelProviderManager.php | 10 +- src/Plugin/LLModels/LLMProviderInterface.php | 18 --- .../LLMProviderInterface.php | 20 +++ .../Ollama.php | 20 ++- 15 files changed, 428 insertions(+), 28 deletions(-) create mode 100644 llm_services.links.menu.yml create mode 100644 llm_services.links.task.yml create mode 100644 llm_services.routing.yml create mode 100644 llm_services.services.yml create mode 100644 src/Controller/LocalTasksController.php create mode 100644 src/Form/PluginSettingsForm.php create mode 100644 src/Form/PluginSettingsFormInterface.php create mode 100644 src/Form/SettingsForm.php create mode 100644 src/Plugin/Derivative/LocalTask.php delete mode 100644 src/Plugin/LLModels/LLMProviderInterface.php create mode 100644 src/Plugin/LLModelsProviders/LLMProviderInterface.php rename src/Plugin/{LLModels => LLModelsProviders}/Ollama.php (78%) diff --git a/llm_services.links.menu.yml b/llm_services.links.menu.yml new file mode 100644 index 0000000..a2f58cc --- /dev/null +++ 
b/llm_services.links.menu.yml @@ -0,0 +1,5 @@ +llm_services.admin_settings: + title: 'LLM services settings' + parent: system.admin_config_system + description: 'Settings for the LLM services' + route_name: llm_services.plugin_settings_local_tasks diff --git a/llm_services.links.task.yml b/llm_services.links.task.yml new file mode 100644 index 0000000..2ebde1c --- /dev/null +++ b/llm_services.links.task.yml @@ -0,0 +1,5 @@ +llm_services.plugin_settings_tasks: + title: 'LLM Services settings' + route_name: llm_services.plugin_settings_local_tasks + base_route: llm_services.plugin_settings_local_tasks + deriver: Drupal\llm_services\Plugin\Derivative\LocalTask diff --git a/llm_services.routing.yml b/llm_services.routing.yml new file mode 100644 index 0000000..2f38d71 --- /dev/null +++ b/llm_services.routing.yml @@ -0,0 +1,8 @@ +llm_services.plugin_settings_local_tasks: + path: '/admin/config/llm_services/settings/{type}' + defaults: + _controller: '\Drupal\llm_services\Controller\LocalTasksController::dynamicTasks' + _title: 'LLM Services settings' + type: '' + requirements: + _permission: 'administer site' diff --git a/llm_services.services.yml b/llm_services.services.yml new file mode 100644 index 0000000..7a68d97 --- /dev/null +++ b/llm_services.services.yml @@ -0,0 +1,5 @@ +services: + plugin.manager.llm_services: + class: Drupal\llm_services\Plugin\LLModelProviderManager + parent: default_plugin_manager + diff --git a/package.json b/package.json index 52fcd34..ec3c847 100644 --- a/package.json +++ b/package.json @@ -5,9 +5,9 @@ "markdownlint-cli": "^0.32.2" }, "scripts": { - "coding-standards-check/markdownlint": "yarn markdownlint --ignore LICENSE.md --ignore vendor --ignore node_modules '*.md' 'modules/os2forms_digital_post/**/*.md'", + "coding-standards-check/markdownlint": "yarn markdownlint --ignore LICENSE.md --ignore vendor --ignore node_modules '*.md' 'modules/llm_services/**/*.md'", "coding-standards-check": "yarn coding-standards-check/markdownlint", - 
"coding-standards-apply/markdownlint": "yarn markdownlint --ignore LICENSE.md --ignore vendor --ignore node_modules '*.md' 'modules/os2forms_digital_post/**/*.md' --fix", + "coding-standards-apply/markdownlint": "yarn markdownlint --ignore LICENSE.md --ignore vendor --ignore node_modules '*.md' 'modules/llm_services/**/*.md' --fix", "coding-standards-apply": "yarn coding-standards-apply/markdownlint" } } diff --git a/phpcs.xml.dist b/phpcs.xml.dist index e6cd9bb..9d4bd68 100644 --- a/phpcs.xml.dist +++ b/phpcs.xml.dist @@ -1,6 +1,6 @@ - OS2web Audit PHP Code Sniffer configuration + LLM services PHP Code Sniffer configuration . vendor/ diff --git a/src/Controller/LocalTasksController.php b/src/Controller/LocalTasksController.php new file mode 100644 index 0000000..43d617e --- /dev/null +++ b/src/Controller/LocalTasksController.php @@ -0,0 +1,58 @@ +formBuilder = $formBuilder; + $this->configFactory = $configFactory; + } + + /** + * {@inheritdoc} + */ + public static function create(ContainerInterface $container): LocalTasksController|static { + return new static( + $container->get('form_builder'), + $container->get('config.factory'), + ); + } + + /** + * Get dynamic tasks. + * + * @param string|null $type + * The type of form to retrieve. Default to NULL. + * + * @return array + * An array containing the form definition. 
+ */ + public function dynamicTasks(string $type = NULL): array { + if (empty($type)) { + return $this->formBuilder->getForm('\Drupal\llm_services\Form\SettingsForm'); + } + + return $this->formBuilder->getForm('\Drupal\llm_services\Form\PluginSettingsForm', $type); + } + +} diff --git a/src/Form/PluginSettingsForm.php b/src/Form/PluginSettingsForm.php new file mode 100644 index 0000000..8300ddd --- /dev/null +++ b/src/Form/PluginSettingsForm.php @@ -0,0 +1,122 @@ +manager = $manager; + } + + /** + * {@inheritdoc} + */ + public static function create(ContainerInterface $container): static { + return new static( + $container->get('config.factory'), + $container->get('plugin.manager.llm_services') + ); + } + + /** + * {@inheritdoc} + */ + public static function getConfigName(): string { + return 'llm_services.plugin_settings'; + } + + /** + * {@inheritdoc} + */ + protected function getEditableConfigNames(): array { + return [$this->getConfigName()]; + } + + /** + * {@inheritdoc} + */ + public function getFormId(): string { + return $this->getConfigName() . 
'_settings_form'; + } + + /** + * {@inheritdoc} + */ + public function buildForm(array $form, FormStateInterface $form_state): array { + $plugin_id = $form_state->getBuildInfo()['args'][0]; + $instance = $this->getPluginInstance($plugin_id); + $form = $instance->buildConfigurationForm($form, $form_state); + + return parent::buildForm($form, $form_state); + } + + /** + * {@inheritdoc} + */ + public function validateForm(array &$form, FormStateInterface $form_state): void { + $plugin_id = $form_state->getBuildInfo()['args'][0]; + $instance = $this->getPluginInstance($plugin_id); + $instance->validateConfigurationForm($form, $form_state); + } + + /** + * {@inheritdoc} + */ + public function submitForm(array &$form, FormStateInterface $form_state): void { + $plugin_id = $form_state->getBuildInfo()['args'][0]; + $instance = $this->getPluginInstance($plugin_id); + $instance->submitConfigurationForm($form, $form_state); + + $config = $this->config($this->getConfigName()); + $config->set($plugin_id, $instance->getConfiguration()); + $config->save(); + + parent::submitForm($form, $form_state); + } + + /** + * Returns plugin instance for a given plugin id. + * + * @param string $plugin_id + * The plugin_id for the plugin instance. + * + * @return object + * Plugin instance. + * + * @throws \Drupal\Component\Plugin\Exception\PluginException + */ + public function getPluginInstance(string $plugin_id): object { + $configuration = $this->config($this->getConfigName())->get($plugin_id); + + return $this->manager->createInstance($plugin_id, $configuration ?? []); + } + +} diff --git a/src/Form/PluginSettingsFormInterface.php b/src/Form/PluginSettingsFormInterface.php new file mode 100644 index 0000000..8383199 --- /dev/null +++ b/src/Form/PluginSettingsFormInterface.php @@ -0,0 +1,20 @@ +get('config.factory'), + $container->get('plugin.manager.llm_services') + ); + } + + /** + * The name of the configuration setting. 
+ * + * @var string + */ + public static string $configName = 'llm_services.settings'; + + /** + * {@inheritdoc} + */ + protected function getEditableConfigNames(): array { + return [self::$configName]; + } + + /** + * {@inheritdoc} + */ + public function getFormId(): string { + return 'llm_services_admin_form'; + } + + /** + * {@inheritdoc} + */ + public function buildForm(array $form, FormStateInterface $form_state): array { + $config = $this->config(self::$configName); + + $plugins = $this->providerManager->getDefinitions(); + ksort($plugins); + $options = array_map(function ($plugin) { + /** @var \Drupal\Core\StringTranslation\TranslatableMarkup $title */ + $title = $plugin['title']; + return $title->render(); + }, $plugins); + + $form['provider'] = [ + '#type' => 'select', + '#title' => $this->t('Log provider'), + '#description' => $this->t('Select the logger provider you which to use'), + '#options' => $options, + '#default_value' => $config->get('provider'), + ]; + + return parent::buildForm($form, $form_state); + } + + /** + * {@inheritdoc} + */ + public function submitForm(array &$form, FormStateInterface $form_state): void { + parent::submitForm($form, $form_state); + + $this->config(self::$configName) + ->set('provider', $form_state->getValue('provider')) + ->save(); + } + +} diff --git a/src/Plugin/Derivative/LocalTask.php b/src/Plugin/Derivative/LocalTask.php new file mode 100644 index 0000000..82d9451 --- /dev/null +++ b/src/Plugin/Derivative/LocalTask.php @@ -0,0 +1,64 @@ +get('plugin.manager.llm_services') + ); + } + + /** + * {@inheritdoc} + * + * @throws \ReflectionException + */ + public function getDerivativeDefinitions($base_plugin_definition): array { + $plugins = $this->providerManager->getDefinitions(); + ksort($plugins); + + // Sadly, it seems that it is not possible to just invalidate the + // deriver/menu cache stuff. To get the local tasks menu links. 
So instead + // of clearing all caches on settings save to only show selected plugins, we + // show em all. + $options = array_map(function ($plugin) { + // Only the plugins that provide configuration options. + $reflector = new \ReflectionClass($plugin['class']); + if ($reflector->implementsInterface('Drupal\Component\Plugin\ConfigurableInterface')) { + /** @var \Drupal\Core\StringTranslation\TranslatableMarkup $title */ + $title = $plugin['title']; + return $title->render(); + } + }, $plugins); + + foreach (['settings' => 'Settings'] + $options as $plugin => $title) { + $this->derivatives[$plugin] = $base_plugin_definition; + $this->derivatives[$plugin]['title'] = $title; + $this->derivatives[$plugin]['route_parameters'] = ['type' => $plugin]; + if ($plugin === 'settings') { + $this->derivatives[$plugin]['route_parameters']['type'] = ''; + } + } + + return $this->derivatives; + } + +} diff --git a/src/Plugin/LLModelProviderManager.php b/src/Plugin/LLModelProviderManager.php index f7a7897..0a08119 100644 --- a/src/Plugin/LLModelProviderManager.php +++ b/src/Plugin/LLModelProviderManager.php @@ -10,7 +10,7 @@ * Provides the LLM plugin manager. 
* * @see \Drupal\llm_services\Annotation\LLModelsProvider - * @see \Drupal\llm_services\Plugin\LLModels\LLMProviderInterface + * @see \Drupal\llm_services\Plugin\LLModelsProviders\LLMProviderInterface * @see plugin_api */ class LLModelProviderManager extends DefaultPluginManager { @@ -20,15 +20,15 @@ class LLModelProviderManager extends DefaultPluginManager { */ public function __construct(\Traversable $namespaces, CacheBackendInterface $cache_backend, ModuleHandlerInterface $module_handler) { parent::__construct( - 'Plugin/LLModels', + 'Plugin/LLModelsProviders', $namespaces, $module_handler, + 'Drupal\llm_services\Plugin\LLModelsProviders\LLMProviderInterface', 'Drupal\llm_services\Annotation\LLModelsProvider', - 'Drupal\llm_services\Plugin\LLModels\LLMProviderInterface', ); - $this->alterInfo('llm_provider_info'); - $this->setCacheBackend($cache_backend, 'llm_provider_plugins'); + $this->alterInfo('llm_services_providers_info'); + $this->setCacheBackend($cache_backend, 'llm_services_providers_plugins'); } } diff --git a/src/Plugin/LLModels/LLMProviderInterface.php b/src/Plugin/LLModels/LLMProviderInterface.php deleted file mode 100644 index a1b3d0a..0000000 --- a/src/Plugin/LLModels/LLMProviderInterface.php +++ /dev/null @@ -1,18 +0,0 @@ - - * List of supported language models. - */ - public function listModels(): array; - -} diff --git a/src/Plugin/LLModelsProviders/LLMProviderInterface.php b/src/Plugin/LLModelsProviders/LLMProviderInterface.php new file mode 100644 index 0000000..d568858 --- /dev/null +++ b/src/Plugin/LLModelsProviders/LLMProviderInterface.php @@ -0,0 +1,20 @@ + + * List of supported language models. 
+ */ + public function listModels(): array; + +} diff --git a/src/Plugin/LLModels/Ollama.php b/src/Plugin/LLModelsProviders/Ollama.php similarity index 78% rename from src/Plugin/LLModels/Ollama.php rename to src/Plugin/LLModelsProviders/Ollama.php index cca4658..a07b6a7 100644 --- a/src/Plugin/LLModels/Ollama.php +++ b/src/Plugin/LLModelsProviders/Ollama.php @@ -1,6 +1,6 @@ setConfiguration($configuration); + } + /** * {@inheritdoc} */ public function listModels(): array { // TODO: Implement listModels() method. - return []; + return ['FAKE LLM' => 'fakeGPT']; } /** From 71fb72c48fcbb6672e89c867ecda00b9fdc0ff48 Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Thu, 16 May 2024 16:20:10 +0200 Subject: [PATCH 05/17] 1433: Started on ollama plugin connection --- .gitignore | 1 + composer.json | 1 + llm_services.services.yml | 3 +- src/Client/Ollama.php | 94 +++++++++++++++++++ src/Drush/Commands/LlmServicesCommands.php | 51 ++++++++++ src/Form/SettingsForm.php | 2 +- src/Plugin/Derivative/LocalTask.php | 2 +- src/Plugin/LLModelProviderManager.php | 33 ++++++- .../LLMProviderInterface.php | 38 +++++++- src/Plugin/LLModelsProviders/Ollama.php | 37 +++++++- 10 files changed, 254 insertions(+), 8 deletions(-) create mode 100644 src/Client/Ollama.php create mode 100644 src/Drush/Commands/LlmServicesCommands.php diff --git a/.gitignore b/.gitignore index 22d0d82..7579f74 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ vendor +composer.lock diff --git a/composer.json b/composer.json index 6fb669a..3d12d81 100644 --- a/composer.json +++ b/composer.json @@ -17,6 +17,7 @@ }, "require": { "php": "^8.3", + "openai-php/client": "^0.8.5", "theodo-group/llphant": "^0.6.10" }, "require-dev": { diff --git a/llm_services.services.yml b/llm_services.services.yml index 7a68d97..ba82f17 100644 --- a/llm_services.services.yml +++ b/llm_services.services.yml @@ -2,4 +2,5 @@ services: plugin.manager.llm_services: class: Drupal\llm_services\Plugin\LLModelProviderManager parent: 
default_plugin_manager - + arguments: + - '@config.factory' diff --git a/src/Client/Ollama.php b/src/Client/Ollama.php new file mode 100644 index 0000000..19a59c9 --- /dev/null +++ b/src/Client/Ollama.php @@ -0,0 +1,94 @@ +> + * Basic information about the models. + * + * @throws \GuzzleHttp\Exception\GuzzleException + * @throws \JsonException + */ + function listLocalModels(): array { + $data = $this->call(method: 'get', uri: '/api/tags'); + + // @todo: change to value objects. + $models = []; + foreach ($data['models'] as $item) { + $models[$item['model']] = [ + 'name' => $item['name'], + 'size' => $item['size'], + 'modified' => $item['modified_at'], + 'digest' => $item['digest'], + ]; + } + + return $models; + } + + /** + * Make request to Ollama. + * + * @param string $method + * The method to use (GET/POST). + * @param string $uri + * The API endpoint to call + * @param array $options + * Extra options and/or payload to post. + * + * @return mixed + * The result of the call. + * + * @todo: what about stream calls? + * + * @throws \GuzzleHttp\Exception\GuzzleException + * @throws \JsonException + */ + private function call(string $method, string $uri, array $options = []): mixed { + $client = \Drupal::httpClient(); + + $request = $client->request($method, $this->getURL($uri), $options); + + if ($request->getStatusCode() !== 200) { + throw new \Exception('Request failed'); + } + $response = $request->getBody()->getContents(); + + return json_decode($response, TRUE, 512, JSON_THROW_ON_ERROR); + } + + /** + * Returns a URL string with the given URI appended to the base URL. + * + * @param string $uri + * The URI to append to the base URL. Default is an empty string. + * + * @return string T + * The complete URL string. + */ + private function getURL(string $uri = ''): string { + return $this->url . ':' . $this->port . ($uri ? '/' . 
ltrim($uri, '/') : ''); + } + +} diff --git a/src/Drush/Commands/LlmServicesCommands.php b/src/Drush/Commands/LlmServicesCommands.php new file mode 100644 index 0000000..b4f4ab2 --- /dev/null +++ b/src/Drush/Commands/LlmServicesCommands.php @@ -0,0 +1,51 @@ +get('plugin.manager.llm_services'), + ); + } + + /** + * Command description here. + * + * @throws \Drupal\Component\Plugin\Exception\PluginException + */ + #[CLI\Command(name: 'llm:list:models', aliases: ['llm-list'])] + #[CLI\Argument(name: 'provider', description: 'Name of the provider (plugin).')] + #[CLI\Usage(name: 'llm:list:models foo', description: 'List moduls available ')] + public function commandName(string $provider): void { + $provider = $this->providerManager->createInstance('ollama'); + $models = $provider->listModels(); + + // @todo output more information. + foreach ($models as $model) { + $this->writeln($model['name'] . ' (' . $model['modified'] . ')'); + } + } + +} diff --git a/src/Form/SettingsForm.php b/src/Form/SettingsForm.php index 8df8c90..fbd02b9 100644 --- a/src/Form/SettingsForm.php +++ b/src/Form/SettingsForm.php @@ -1,6 +1,6 @@ configFactory = $configFactory; + parent::__construct( 'Plugin/LLModelsProviders', $namespaces, @@ -31,4 +48,18 @@ public function __construct(\Traversable $namespaces, CacheBackendInterface $cac $this->setCacheBackend($cache_backend, 'llm_services_providers_plugins'); } + /** + * {@inheritdoc} + */ + public function createInstance($plugin_id, array $configuration = []): LLMProviderInterface { + if (empty($configuration)) { + $configuration = $this->configFactory->get(PluginSettingsForm::getConfigName())->get($plugin_id); + } + + /** @var \Drupal\llm_services\Plugin\LLModelsProviders\LLMProviderInterface $provider */ + $provider = parent::createInstance($plugin_id, $configuration); + + return $provider; + } + } diff --git a/src/Plugin/LLModelsProviders/LLMProviderInterface.php b/src/Plugin/LLModelsProviders/LLMProviderInterface.php index d568858..2b4c1e7 
100644 --- a/src/Plugin/LLModelsProviders/LLMProviderInterface.php +++ b/src/Plugin/LLModelsProviders/LLMProviderInterface.php @@ -5,16 +5,50 @@ use Drupal\Component\Plugin\PluginInspectionInterface; /** - * LLModelsProviders plugin interface that all plugins are required to implement. + * LLModelsProviders plugin interface. */ interface LLMProviderInterface extends PluginInspectionInterface { /** * List model supported by the provider. * - * @return array + * @return array,> * List of supported language models. */ public function listModels(): array; + /** + * Installs a model. + * + * @param string $modelName + * The name of the model to install. + * + * @return mixed + * The result of installing the model. + */ + public function installModel(string $modelName): mixed; + + /** + * Performs a completion process. + * + * @param array $body + * The body of the completion request. It should contain the necessary data + * for completion. + * + * @return mixed + * The result of the completion process. + */ + public function completion(array $body): mixed; + + /** + * Initiates a chat. + * + * @param array $body + * The body of the chat request. + * + * @return mixed + * The result of the chat initiation. + */ + public function chat(array $body): mixed; + } diff --git a/src/Plugin/LLModelsProviders/Ollama.php b/src/Plugin/LLModelsProviders/Ollama.php index a07b6a7..c3d8872 100644 --- a/src/Plugin/LLModelsProviders/Ollama.php +++ b/src/Plugin/LLModelsProviders/Ollama.php @@ -6,6 +6,8 @@ use Drupal\Core\Form\FormStateInterface; use Drupal\Core\Plugin\PluginBase; use Drupal\Core\Plugin\PluginFormInterface; +use Drupal\llm_services\Client\Ollama as ClientOllama; +use Drupal\llm_services\Exceptions\NotSupportedException; /** * Ollama integration provider. @@ -32,8 +34,38 @@ public function __construct(array $configuration, $plugin_id, $plugin_definition * {@inheritdoc} */ public function listModels(): array { - // TODO: Implement listModels() method. 
- return ['FAKE LLM' => 'fakeGPT']; + $config = $this->getConfiguration(); + + $client = new ClientOllama($config['url'], $config['port']); + return $client->listLocalModels(); + } + + /** + * {@inheritdoc} + */ + public function installModel(): mixed { + // TODO: Implement installModel() method. + throw new NotSupportedException(); + } + + /** + * {@inheritdoc} + * + * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion + */ + public function completion(array $body): mixed { + // TODO: Implement completions() method. + throw new NotSupportedException(); + } + + /** + * {@inheritdoc} + * + * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion + */ + public function chat(array $body): mixed { + // TODO: Implement chatCompletions() method. + throw new NotSupportedException(); } /** @@ -100,4 +132,5 @@ public function submitConfigurationForm(array &$form, FormStateInterface $form_s $this->setConfiguration($configuration); } } + } From de55b2698aeadd3d6af354eb9c64e34675d11f53 Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Fri, 17 May 2024 15:03:53 +0200 Subject: [PATCH 06/17] 1433: Added helper command to download models --- src/Client/Ollama.php | 20 +++++- src/Drush/Commands/LlmServicesCommands.php | 28 +++++++-- src/Exceptions/CommunicationException.php | 10 +++ .../LLMProviderInterface.php | 8 +++ src/Plugin/LLModelsProviders/Ollama.php | 63 ++++++++++++++++--- 5 files changed, 115 insertions(+), 14 deletions(-) create mode 100644 src/Exceptions/CommunicationException.php diff --git a/src/Client/Ollama.php b/src/Client/Ollama.php index 19a59c9..379c421 100644 --- a/src/Client/Ollama.php +++ b/src/Client/Ollama.php @@ -2,6 +2,8 @@ namespace Drupal\llm_services\Client; +use GuzzleHttp\RequestOptions; + /** * Client to communicate with Ollama. 
*/ @@ -30,7 +32,7 @@ public function __construct( * @throws \GuzzleHttp\Exception\GuzzleException * @throws \JsonException */ - function listLocalModels(): array { + public function listLocalModels(): array { $data = $this->call(method: 'get', uri: '/api/tags'); // @todo: change to value objects. @@ -47,6 +49,22 @@ function listLocalModels(): array { return $models; } + public function install(string $modelName): string { + $this->call(method: 'post', uri: '/api/pull', options: [ + 'json' => [ + 'name' => $modelName, + 'stream' => false, + ], + 'headers' => [ + 'Content-Type' => 'application/json', + ], + RequestOptions::CONNECT_TIMEOUT => 10, + RequestOptions::TIMEOUT => 300, + ]); + + return 'dfs'; + } + /** * Make request to Ollama. * diff --git a/src/Drush/Commands/LlmServicesCommands.php b/src/Drush/Commands/LlmServicesCommands.php index b4f4ab2..40a462f 100644 --- a/src/Drush/Commands/LlmServicesCommands.php +++ b/src/Drush/Commands/LlmServicesCommands.php @@ -8,7 +8,7 @@ use Symfony\Component\DependencyInjection\ContainerInterface; /** - * A Drush commandfile. + * Drush commands to talk with LLM provider (mostly for testing). */ final class LlmServicesCommands extends DrushCommands { @@ -31,15 +31,16 @@ public static function create(ContainerInterface $container): static { } /** - * Command description here. + * List models from provider. 
* * @throws \Drupal\Component\Plugin\Exception\PluginException + * @throws \Drupal\llm_services\Exceptions\CommunicationException */ #[CLI\Command(name: 'llm:list:models', aliases: ['llm-list'])] #[CLI\Argument(name: 'provider', description: 'Name of the provider (plugin).')] - #[CLI\Usage(name: 'llm:list:models foo', description: 'List moduls available ')] - public function commandName(string $provider): void { - $provider = $this->providerManager->createInstance('ollama'); + #[CLI\Usage(name: 'llm:list:models ollama', description: 'List moduls available ')] + public function listModels(string $provider): void { + $provider = $this->providerManager->createInstance($provider); $models = $provider->listModels(); // @todo output more information. @@ -48,4 +49,21 @@ public function commandName(string $provider): void { } } + /** + * Install model in provider. + * + * @throws \Drupal\Component\Plugin\Exception\PluginException + * @throws \Drupal\llm_services\Exceptions\CommunicationException + */ + #[CLI\Command(name: 'llm:install:model', aliases: ['llm-install'])] + #[CLI\Argument(name: 'provider', description: 'Name of the provider (plugin).')] + #[CLI\Argument(name: 'name', description: 'Name of the model to try and download.')] + #[CLI\Usage(name: 'llm:install:model ollama llama2', description: 'Install LLama2 modul in Ollama')] + public function installModel(string $provider, string $name): void { + $provider = $this->providerManager->createInstance($provider); + $models = $provider->installModel($name); + + + } + } diff --git a/src/Exceptions/CommunicationException.php b/src/Exceptions/CommunicationException.php new file mode 100644 index 0000000..8226609 --- /dev/null +++ b/src/Exceptions/CommunicationException.php @@ -0,0 +1,10 @@ +,> * List of supported language models. 
+ * + * @throws \Drupal\llm_services\Exceptions\CommunicationException */ public function listModels(): array; @@ -25,6 +27,8 @@ public function listModels(): array; * * @return mixed * The result of installing the model. + * + * @throws \Drupal\llm_services\Exceptions\CommunicationException */ public function installModel(string $modelName): mixed; @@ -37,6 +41,8 @@ public function installModel(string $modelName): mixed; * * @return mixed * The result of the completion process. + * + * @throws \Drupal\llm_services\Exceptions\CommunicationException */ public function completion(array $body): mixed; @@ -48,6 +54,8 @@ public function completion(array $body): mixed; * * @return mixed * The result of the chat initiation. + * + * @throws \Drupal\llm_services\Exceptions\CommunicationException */ public function chat(array $body): mixed; diff --git a/src/Plugin/LLModelsProviders/Ollama.php b/src/Plugin/LLModelsProviders/Ollama.php index c3d8872..7af3253 100644 --- a/src/Plugin/LLModelsProviders/Ollama.php +++ b/src/Plugin/LLModelsProviders/Ollama.php @@ -2,12 +2,15 @@ namespace Drupal\llm_services\Plugin\LLModelsProviders; +use _PHPStan_49641e245\Nette\Neon\Exception; use Drupal\Component\Plugin\ConfigurableInterface; use Drupal\Core\Form\FormStateInterface; use Drupal\Core\Plugin\PluginBase; use Drupal\Core\Plugin\PluginFormInterface; use Drupal\llm_services\Client\Ollama as ClientOllama; +use Drupal\llm_services\Exceptions\CommunicationException; use Drupal\llm_services\Exceptions\NotSupportedException; +use GuzzleHttp\Exception\GuzzleException; /** * Ollama integration provider. 
@@ -34,18 +37,30 @@ public function __construct(array $configuration, $plugin_id, $plugin_definition * {@inheritdoc} */ public function listModels(): array { - $config = $this->getConfiguration(); - - $client = new ClientOllama($config['url'], $config['port']); - return $client->listLocalModels(); + try { + return $this->getClient()->listLocalModels(); + } + catch (GuzzleException|\JsonException $exception) { + throw new CommunicationException( + message: 'Error in communicating with LLM services', + previous: $exception, + ); + } } /** * {@inheritdoc} */ - public function installModel(): mixed { - // TODO: Implement installModel() method. - throw new NotSupportedException(); + public function installModel(string $modelName): mixed { + try { + return $this->getClient()->install($modelName); + } + catch (GuzzleException|\JsonException $exception) { + throw new CommunicationException( + message: 'Error in communicating with LLM services', + previous: $exception, + ); + } } /** @@ -116,7 +131,21 @@ public function buildConfigurationForm(array $form, FormStateInterface $form_sta * {@inheritdoc} */ public function validateConfigurationForm(array &$form, FormStateInterface $form_state) { - // TODO: Implement validateConfigurationForm() method. + $values = $form_state->getValues(); + + if (filter_var($values['url'], FILTER_VALIDATE_URL) === FALSE) { + $form_state->setErrorByName('url', $this->t('Invalid URL.')); + } + + $filter_options = [ + 'options' => [ + 'min_range' => 1, + 'max_range' => 65535, + ] + ]; + if (filter_var($values['port'], FILTER_VALIDATE_INT, $filter_options) === FALSE) { + $form_state->setErrorByName('port', $this->t('Invalid port range. Should be between 1 and 65535.')); + } } /** @@ -131,6 +160,24 @@ public function submitConfigurationForm(array &$form, FormStateInterface $form_s ]; $this->setConfiguration($configuration); } + + // Try to connect to Ollama to test the connection. 
+ try { + $this->listModels(); + \Drupal::messenger()->addMessage('Successfully connected to Ollama'); + } catch (\Exception $exception) { + \Drupal::messenger()->addMessage('Error communication with Ollama: ' . $exception->getMessage(), 'error'); + } + } + + /** + * Get a client. + * + * @return \Drupal\llm_services\Client\Ollama + * Client to communicate with Ollama + */ + public function getClient(): ClientOllama { + return new ClientOllama($this->configuration['url'], $this->configuration['port']); } } From 91604ac86842ddca068a41eff66037e154382ee3 Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Wed, 22 May 2024 10:09:57 +0200 Subject: [PATCH 07/17] 1452: Added message classes --- src/Client/Ollama.php | 22 ++++++++++++--- src/Model/ChatMessage.php | 36 +++++++++++++++++++++++++ src/Model/Message.php | 35 ++++++++++++++++++++++++ src/Model/MessageRoles.php | 19 +++++++++++++ src/Plugin/LLModelsProviders/Ollama.php | 14 +++++----- 5 files changed, 114 insertions(+), 12 deletions(-) create mode 100644 src/Model/ChatMessage.php create mode 100644 src/Model/Message.php create mode 100644 src/Model/MessageRoles.php diff --git a/src/Client/Ollama.php b/src/Client/Ollama.php index 379c421..9fc390c 100644 --- a/src/Client/Ollama.php +++ b/src/Client/Ollama.php @@ -7,7 +7,7 @@ /** * Client to communicate with Ollama. */ -class Ollama { +readonly class Ollama { /** * Default constructor. @@ -18,8 +18,8 @@ class Ollama { * The port that Ollama is listing on. */ public function __construct( - private readonly string $url, - private readonly int $port, + private string $url, + private int $port, ) { } @@ -49,6 +49,19 @@ public function listLocalModels(): array { return $models; } + /** + * Install/update model in Ollama. 
+ * + * @param string $modelName + * Name of the model + * + * @return string + * + * @see https://ollama.com/library + * + * @throws \GuzzleHttp\Exception\GuzzleException + * @throws \JsonException + */ public function install(string $modelName): string { $this->call(method: 'post', uri: '/api/pull', options: [ 'json' => [ @@ -62,7 +75,8 @@ public function install(string $modelName): string { RequestOptions::TIMEOUT => 300, ]); - return 'dfs'; + // @todo: change to stream and return status. + return ''; } /** diff --git a/src/Model/ChatMessage.php b/src/Model/ChatMessage.php new file mode 100644 index 0000000..4eeb7f4 --- /dev/null +++ b/src/Model/ChatMessage.php @@ -0,0 +1,36 @@ + + */ + public array $images; + +} diff --git a/src/Model/Message.php b/src/Model/Message.php new file mode 100644 index 0000000..7aa7f26 --- /dev/null +++ b/src/Model/Message.php @@ -0,0 +1,35 @@ + + * + * @see https://github.com/ollama/ollama/blob/main/docs/api.md#parameters-1 + */ + public array $messages; + + /** + * Additional model parameters. 
+ * + * @var array + * + * @see https://github.com/ollama/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values + */ + public array $options; + +} diff --git a/src/Model/MessageRoles.php b/src/Model/MessageRoles.php new file mode 100644 index 0000000..230ef82 --- /dev/null +++ b/src/Model/MessageRoles.php @@ -0,0 +1,19 @@ +getClient()->listLocalModels(); } - catch (GuzzleException|\JsonException $exception) { + catch (GuzzleException | \JsonException $exception) { throw new CommunicationException( message: 'Error in communicating with LLM services', previous: $exception, @@ -55,7 +53,7 @@ public function installModel(string $modelName): mixed { try { return $this->getClient()->install($modelName); } - catch (GuzzleException|\JsonException $exception) { + catch (GuzzleException | \JsonException $exception) { throw new CommunicationException( message: 'Error in communicating with LLM services', previous: $exception, @@ -139,9 +137,9 @@ public function validateConfigurationForm(array &$form, FormStateInterface $form $filter_options = [ 'options' => [ - 'min_range' => 1, - 'max_range' => 65535, - ] + 'min_range' => 1, + 'max_range' => 65535, + ], ]; if (filter_var($values['port'], FILTER_VALIDATE_INT, $filter_options) === FALSE) { $form_state->setErrorByName('port', $this->t('Invalid port range. 
Should be between 1 and 65535.')); From 74fc3052d0418a7d23a24eeff4597cd31c8f7e83 Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Wed, 22 May 2024 20:14:33 +0200 Subject: [PATCH 08/17] 1452: Added stream completion command to talk with model --- src/Client/Ollama.php | 113 ++++++++++++++++-- src/Drush/Commands/LlmServicesCommands.php | 30 +++++ src/Model/ChatMessage.php | 36 ------ src/Model/Message.php | 27 +++-- src/Model/Payload.php | 35 ++++++ .../LLMProviderInterface.php | 10 +- src/Plugin/LLModelsProviders/Ollama.php | 9 +- 7 files changed, 194 insertions(+), 66 deletions(-) delete mode 100644 src/Model/ChatMessage.php create mode 100644 src/Model/Payload.php diff --git a/src/Client/Ollama.php b/src/Client/Ollama.php index 9fc390c..6fc6fc5 100644 --- a/src/Client/Ollama.php +++ b/src/Client/Ollama.php @@ -2,12 +2,15 @@ namespace Drupal\llm_services\Client; +use Drupal\llm_services\Model\Payload; use GuzzleHttp\RequestOptions; /** * Client to communicate with Ollama. */ -readonly class Ollama { +class Ollama { + + private string $parserCache = ''; /** * Default constructor. @@ -18,8 +21,8 @@ * The port that Ollama is listing on. */ public function __construct( - private string $url, - private int $port, + private readonly string $url, + private readonly int $port, ) { } @@ -53,7 +56,7 @@ public function listLocalModels(): array { * Install/update model in Ollama. * * @param string $modelName - * Name of the model + * Name of the model. * * @return string * @@ -79,6 +82,100 @@ public function install(string $modelName): string { return ''; } + /** + * Ask a question to the model. + * + * @TODO make call function that can do the stream, if possible. 
+ * + * @param \Drupal\llm_services\Model\Payload $payload + * + * @return \Generator + * + * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion + * + * @throws \GuzzleHttp\Exception\GuzzleException + */ + public function completion(Payload $payload): \Generator { + + $json = [ + 'model' => $payload->model, + 'prompt' => $payload->messages[0]->content, + 'stream' => true, + ]; + + $client = \Drupal::httpClient(); + $response = $client->request( + 'POST', + $this->getURL('/api/generate'), + [ + 'json' => $json, + RequestOptions::CONNECT_TIMEOUT => 10, + RequestOptions::TIMEOUT => 300, + RequestOptions::STREAM => true, + ] + ); + + $body = $response->getBody(); + while (!$body->eof()) { + $data = $body->read(1024); + yield from $this->parse($data);; + } + + } + + /** + * Parse LLM stream. + * + * As the LLM streams the response, and we read them in chunks and given chunk + * of data may not be complete json object. So this function parses the data + * and joins chunks to make it valid parsable json. But at the same time + * yield back json results as soon as possible to make the stream seam as live + * response. + * + * @param string $data + * The data chunk to parse. + * + * @return \Generator + * Yield back json objects. + * + * @todo: should json be converted to valid LLMResObject? + * + * @throws \JsonException + */ + private function parse(string $data): \Generator { + // Split on new-lines. + $strings = explode("\n", $data); + + foreach ($strings as $str) { + if (json_validate($str)) { + // Valid json string lets decode an yield it. + yield json_decode($str, true, flags: JSON_THROW_ON_ERROR); + } + else { + // Ignore empty strings. + if (!empty($str)) { + // If cached partial json object: append else store. + if (empty($this->parserCache)) { + $this->parserCache = $str; + } + else { + $str = $this->parserCache . $str; + if (!json_validate($str)) { + // Still not json, just append until it becomes json. 
+ $this->parserCache .= $str; + + // Nothing to yield, no complet json string yet. + return; + } + // Valid json string, yield, reset cache. + yield json_decode($str, true, flags: JSON_THROW_ON_ERROR); + $this->parserCache = ''; + } + } + } + } + } + /** * Make request to Ollama. * @@ -100,14 +197,14 @@ public function install(string $modelName): string { private function call(string $method, string $uri, array $options = []): mixed { $client = \Drupal::httpClient(); - $request = $client->request($method, $this->getURL($uri), $options); + $response = $client->request($method, $this->getURL($uri), $options); - if ($request->getStatusCode() !== 200) { + if ($response->getStatusCode() !== 200) { throw new \Exception('Request failed'); } - $response = $request->getBody()->getContents(); + $data = $response->getBody()->getContents(); - return json_decode($response, TRUE, 512, JSON_THROW_ON_ERROR); + return json_decode($data, TRUE, 512, JSON_THROW_ON_ERROR); } /** diff --git a/src/Drush/Commands/LlmServicesCommands.php b/src/Drush/Commands/LlmServicesCommands.php index 40a462f..31613b1 100644 --- a/src/Drush/Commands/LlmServicesCommands.php +++ b/src/Drush/Commands/LlmServicesCommands.php @@ -2,6 +2,8 @@ namespace Drupal\llm_services\Drush\Commands; +use Drupal\llm_services\Model\Message; +use Drupal\llm_services\Model\Payload; use Drupal\llm_services\Plugin\LLModelProviderManager; use Drush\Attributes as CLI; use Drush\Commands\DrushCommands; @@ -63,7 +65,35 @@ public function installModel(string $provider, string $name): void { $provider = $this->providerManager->createInstance($provider); $models = $provider->installModel($name); + // @todo: stream responses. } + /** + * Install model in provider. 
+ * + * @throws \Drupal\Component\Plugin\Exception\PluginException + * @throws \Drupal\llm_services\Exceptions\CommunicationException + */ + #[CLI\Command(name: 'llm:model:completion', aliases: ['llm-completion'])] + #[CLI\Argument(name: 'provider', description: 'Name of the provider (plugin).')] + #[CLI\Argument(name: 'name', description: 'Name of the model to use.')] + #[CLI\Argument(name: 'prompt', description: 'The prompt to generate a response for.')] + #[CLI\Usage(name: 'llm:model:completion ollama llama2 "Why is the sky blue?"', description: 'Install LLama2 modul in Ollama')] + public function completion(string $provider, string $name, string $prompt): void { + $provider = $this->providerManager->createInstance($provider); + + $payLoad = new Payload(); + $payLoad->model = $name; + + $msg = new Message(); + $msg->content = $prompt; + $payLoad->messages[] = $msg; + + foreach ($provider->completion($payLoad) as $res) { + $this->output()->write($res['response']); + }; + $this->output()->write("\n"); + + } } diff --git a/src/Model/ChatMessage.php b/src/Model/ChatMessage.php deleted file mode 100644 index 4eeb7f4..0000000 --- a/src/Model/ChatMessage.php +++ /dev/null @@ -1,36 +0,0 @@ - - */ - public array $images; - -} diff --git a/src/Model/Message.php b/src/Model/Message.php index 7aa7f26..a38918c 100644 --- a/src/Model/Message.php +++ b/src/Model/Message.php @@ -3,33 +3,34 @@ namespace Drupal\llm_services\Model; /** - * Represents a message to be sent using a specific model. + * Represents a chat message. + * + * @see https://github.com/ollama/ollama/blob/main/docs/api.md#parameters-1 */ class Message { /** - * Name of the model to use. + * The role of this message. * - * @var string + * @var \Drupal\llm_services\Model\MessageRoles */ - public string $model; + public MessageRoles $role; /** - * Message(s) to send. - * - * @var array<\Drupal\llm_services\Model\ChatMessage> + * The message content. 
* - * @see https://github.com/ollama/ollama/blob/main/docs/api.md#parameters-1 + * @var string */ - public array $messages; + public string $content; /** - * Additional model parameters. + * Images base64 encoded. * - * @var array + * Used for multimodal models such as llava. Which can describe the content of + * the image. * - * @see https://github.com/ollama/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values + * @var array */ - public array $options; + public array $images; } diff --git a/src/Model/Payload.php b/src/Model/Payload.php new file mode 100644 index 0000000..20b7f30 --- /dev/null +++ b/src/Model/Payload.php @@ -0,0 +1,35 @@ + + * + * @see https://github.com/ollama/ollama/blob/main/docs/api.md#parameters-1 + */ + public array $messages; + + /** + * Additional model parameters. + * + * @var array + * + * @see https://github.com/ollama/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values + */ + public array $options; + +} diff --git a/src/Plugin/LLModelsProviders/LLMProviderInterface.php b/src/Plugin/LLModelsProviders/LLMProviderInterface.php index c534b1d..3ca9905 100644 --- a/src/Plugin/LLModelsProviders/LLMProviderInterface.php +++ b/src/Plugin/LLModelsProviders/LLMProviderInterface.php @@ -3,6 +3,8 @@ namespace Drupal\llm_services\Plugin\LLModelsProviders; use Drupal\Component\Plugin\PluginInspectionInterface; +use Drupal\llm_services\Model\Message; +use Drupal\llm_services\Model\Payload; /** * LLModelsProviders plugin interface. @@ -35,7 +37,7 @@ public function installModel(string $modelName): mixed; /** * Performs a completion process. * - * @param array $body + * @param Payload $payload * The body of the completion request. It should contain the necessary data * for completion. 
* @@ -44,12 +46,12 @@ public function installModel(string $modelName): mixed; * * @throws \Drupal\llm_services\Exceptions\CommunicationException */ - public function completion(array $body): mixed; + public function completion(Payload $payload): mixed; /** * Initiates a chat. * - * @param array $body + * @param Payload $payload * The body of the chat request. * * @return mixed @@ -57,6 +59,6 @@ public function completion(array $body): mixed; * * @throws \Drupal\llm_services\Exceptions\CommunicationException */ - public function chat(array $body): mixed; + public function chat(Payload $payload): mixed; } diff --git a/src/Plugin/LLModelsProviders/Ollama.php b/src/Plugin/LLModelsProviders/Ollama.php index d7d1d62..4ff42d0 100644 --- a/src/Plugin/LLModelsProviders/Ollama.php +++ b/src/Plugin/LLModelsProviders/Ollama.php @@ -2,7 +2,6 @@ namespace Drupal\llm_services\Plugin\LLModelsProviders; -use _PHPStan_49641e245\Nette\Neon\Exception; use Drupal\Component\Plugin\ConfigurableInterface; use Drupal\Core\Form\FormStateInterface; use Drupal\Core\Plugin\PluginBase; @@ -10,6 +9,7 @@ use Drupal\llm_services\Client\Ollama as ClientOllama; use Drupal\llm_services\Exceptions\CommunicationException; use Drupal\llm_services\Exceptions\NotSupportedException; +use Drupal\llm_services\Model\Payload; use GuzzleHttp\Exception\GuzzleException; /** @@ -66,9 +66,8 @@ public function installModel(string $modelName): mixed { * * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion */ - public function completion(array $body): mixed { - // TODO: Implement completions() method. 
- throw new NotSupportedException(); + public function completion(Payload $payload): mixed { + return $this->getClient()->completion($payload); } /** @@ -76,7 +75,7 @@ public function completion(array $body): mixed { * * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion */ - public function chat(array $body): mixed { + public function chat(Payload $payload): mixed { // TODO: Implement chatCompletions() method. throw new NotSupportedException(); } From 65514bf317d1d9ebf535d2d8d1f39d0bd944ef38 Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Thu, 23 May 2024 14:46:48 +0200 Subject: [PATCH 09/17] 1452: Code clean up and switch to streaming --- src/Client/Ollama.php | 96 ++++++++++--------- src/Drush/Commands/LlmServicesCommands.php | 16 +++- .../LLMProviderInterface.php | 5 +- src/Plugin/LLModelsProviders/Ollama.php | 10 +- 4 files changed, 72 insertions(+), 55 deletions(-) diff --git a/src/Client/Ollama.php b/src/Client/Ollama.php index 6fc6fc5..0deb7c7 100644 --- a/src/Client/Ollama.php +++ b/src/Client/Ollama.php @@ -2,14 +2,24 @@ namespace Drupal\llm_services\Client; +use Drupal\llm_services\Exceptions\CommunicationException; use Drupal\llm_services\Model\Payload; +use GuzzleHttp\Exception\GuzzleException; use GuzzleHttp\RequestOptions; +use Psr\Http\Message\ResponseInterface; /** * Client to communicate with Ollama. */ class Ollama { + /** + * Cache for stream parsing. + * + * @see parse() + * + * @var string + */ private string $parserCache = ''; /** @@ -36,7 +46,9 @@ public function __construct( * @throws \JsonException */ public function listLocalModels(): array { - $data = $this->call(method: 'get', uri: '/api/tags'); + $response = $this->call(method: 'get', uri: '/api/tags'); + $data = $response->getBody()->getContents(); + $data = json_decode($data, TRUE); // @todo: change to value objects. 
$models = []; @@ -60,26 +72,30 @@ public function listLocalModels(): array { * * @return string * + * @throws \Drupal\llm_services\Exceptions\CommunicationException + * * @see https://ollama.com/library * - * @throws \GuzzleHttp\Exception\GuzzleException - * @throws \JsonException */ - public function install(string $modelName): string { - $this->call(method: 'post', uri: '/api/pull', options: [ + public function install(string $modelName): \Generator { + $response = $this->call(method: 'post', uri: '/api/pull', options: [ 'json' => [ 'name' => $modelName, - 'stream' => false, + 'stream' => true, ], 'headers' => [ 'Content-Type' => 'application/json', ], RequestOptions::CONNECT_TIMEOUT => 10, RequestOptions::TIMEOUT => 300, + RequestOptions::STREAM => true, ]); - // @todo: change to stream and return status. - return ''; + $body = $response->getBody(); + while (!$body->eof()) { + $data = $body->read(1024); + yield from $this->parse($data);; + } } /** @@ -91,36 +107,30 @@ public function install(string $modelName): string { * * @return \Generator * - * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion + * @throws \Drupal\llm_services\Exceptions\CommunicationException * - * @throws \GuzzleHttp\Exception\GuzzleException + * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion */ public function completion(Payload $payload): \Generator { - - $json = [ - 'model' => $payload->model, - 'prompt' => $payload->messages[0]->content, - 'stream' => true, - ]; - - $client = \Drupal::httpClient(); - $response = $client->request( - 'POST', - $this->getURL('/api/generate'), - [ - 'json' => $json, - RequestOptions::CONNECT_TIMEOUT => 10, - RequestOptions::TIMEOUT => 300, - RequestOptions::STREAM => true, - ] - ); + $response = $this->call(method: 'post', uri: '/api/generate', options: [ + 'json' => [ + 'model' => $payload->model, + 'prompt' => $payload->messages[0]->content, + 'stream' => true, + ], + 'headers' => [ + 
'Content-Type' => 'application/json', + ], + RequestOptions::CONNECT_TIMEOUT => 10, + RequestOptions::TIMEOUT => 300, + RequestOptions::STREAM => true, + ]); $body = $response->getBody(); while (!$body->eof()) { $data = $body->read(1024); yield from $this->parse($data);; } - } /** @@ -138,8 +148,6 @@ public function completion(Payload $payload): \Generator { * @return \Generator * Yield back json objects. * - * @todo: should json be converted to valid LLMResObject? - * * @throws \JsonException */ private function parse(string $data): \Generator { @@ -186,25 +194,25 @@ private function parse(string $data): \Generator { * @param array $options * Extra options and/or payload to post. * - * @return mixed - * The result of the call. - * - * @todo: what about stream calls? + * @return \Psr\Http\Message\ResponseInterface + * The response object. * - * @throws \GuzzleHttp\Exception\GuzzleException - * @throws \JsonException + * @throws \Drupal\llm_services\Exceptions\CommunicationException */ - private function call(string $method, string $uri, array $options = []): mixed { + private function call(string $method, string $uri, array $options = []): ResponseInterface { $client = \Drupal::httpClient(); - $response = $client->request($method, $this->getURL($uri), $options); - - if ($response->getStatusCode() !== 200) { - throw new \Exception('Request failed'); + try { + $response = $client->request($method, $this->getURL($uri), $options); + if ($response->getStatusCode() !== 200) { + throw new CommunicationException('Request failed', $response->getStatusCode()); + } + } + catch (GuzzleException $exception) { + throw new CommunicationException('Request failed', $exception->getCode(), $exception); } - $data = $response->getBody()->getContents(); - return json_decode($data, TRUE, 512, JSON_THROW_ON_ERROR); + return $response; } /** diff --git a/src/Drush/Commands/LlmServicesCommands.php b/src/Drush/Commands/LlmServicesCommands.php index 31613b1..20ca1ce 100644 --- 
a/src/Drush/Commands/LlmServicesCommands.php +++ b/src/Drush/Commands/LlmServicesCommands.php @@ -63,14 +63,23 @@ public function listModels(string $provider): void { #[CLI\Usage(name: 'llm:install:model ollama llama2', description: 'Install LLama2 modul in Ollama')] public function installModel(string $provider, string $name): void { $provider = $this->providerManager->createInstance($provider); - $models = $provider->installModel($name); // @todo: stream responses. + foreach ($provider->installModel($name) as $progress) { + if (isset($progress['total']) && isset($progress['completed'])) { + $percent = ($progress['completed'] / $progress['total']) * 100; + $this->output()->writeln(sprintf('%s (%0.2f%% downloaded)', $progress['status'], $percent)); + } + else { + $this->output()->writeln($progress['status']); + } + } + $this->output()->write("\n"); } /** - * Install model in provider. + * Try out completion with a model. * * @throws \Drupal\Component\Plugin\Exception\PluginException * @throws \Drupal\llm_services\Exceptions\CommunicationException @@ -92,8 +101,7 @@ public function completion(string $provider, string $name, string $prompt): void foreach ($provider->completion($payLoad) as $res) { $this->output()->write($res['response']); - }; + } $this->output()->write("\n"); - } } diff --git a/src/Plugin/LLModelsProviders/LLMProviderInterface.php b/src/Plugin/LLModelsProviders/LLMProviderInterface.php index 3ca9905..beabea9 100644 --- a/src/Plugin/LLModelsProviders/LLMProviderInterface.php +++ b/src/Plugin/LLModelsProviders/LLMProviderInterface.php @@ -3,7 +3,6 @@ namespace Drupal\llm_services\Plugin\LLModelsProviders; use Drupal\Component\Plugin\PluginInspectionInterface; -use Drupal\llm_services\Model\Message; use Drupal\llm_services\Model\Payload; /** @@ -37,7 +36,7 @@ public function installModel(string $modelName): mixed; /** * Performs a completion process. 
* - * @param Payload $payload + * @param \Drupal\llm_services\Model\Payload $payload * The body of the completion request. It should contain the necessary data * for completion. * @@ -51,7 +50,7 @@ public function completion(Payload $payload): mixed; /** * Initiates a chat. * - * @param Payload $payload + * @param \Drupal\llm_services\Model\Payload $payload * The body of the chat request. * * @return mixed diff --git a/src/Plugin/LLModelsProviders/Ollama.php b/src/Plugin/LLModelsProviders/Ollama.php index 4ff42d0..4cd3ede 100644 --- a/src/Plugin/LLModelsProviders/Ollama.php +++ b/src/Plugin/LLModelsProviders/Ollama.php @@ -53,7 +53,7 @@ public function installModel(string $modelName): mixed { try { return $this->getClient()->install($modelName); } - catch (GuzzleException | \JsonException $exception) { + catch (GuzzleException $exception) { throw new CommunicationException( message: 'Error in communicating with LLM services', previous: $exception, @@ -66,8 +66,10 @@ public function installModel(string $modelName): mixed { * * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion */ - public function completion(Payload $payload): mixed { - return $this->getClient()->completion($payload); + public function completion(Payload $payload): \Generator { + foreach ($this->getClient()->completion($payload) as $chunk) { + yield $chunk; + } } /** @@ -101,7 +103,7 @@ public function setConfiguration(array $configuration): static { public function defaultConfiguration(): array { return [ 'url' => 'http://ollama', - 'port' => '11434' + 'port' => '11434', ]; } From 62fb3e40cc85a7ec2c013327f8d432276f30a1cf Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Fri, 24 May 2024 10:56:29 +0200 Subject: [PATCH 10/17] 1452: Added some response value objects --- src/Client/Ollama.php | 44 +++++++------- src/Client/OllamaCompletionResponse.php | 60 +++++++++++++++++++ src/Drush/Commands/LlmServicesCommands.php | 12 ++-- 
src/Model/CompletionResponseInterface.php | 45 ++++++++++++++ .../LLMProviderInterface.php | 8 +-- src/Plugin/LLModelsProviders/Ollama.php | 19 ++++-- 6 files changed, 152 insertions(+), 36 deletions(-) create mode 100644 src/Client/OllamaCompletionResponse.php create mode 100644 src/Model/CompletionResponseInterface.php diff --git a/src/Client/Ollama.php b/src/Client/Ollama.php index 0deb7c7..2fd2d97 100644 --- a/src/Client/Ollama.php +++ b/src/Client/Ollama.php @@ -16,9 +16,9 @@ class Ollama { /** * Cache for stream parsing. * - * @see parse() - * * @var string + * + * @see parse() */ private string $parserCache = ''; @@ -50,7 +50,7 @@ public function listLocalModels(): array { $data = $response->getBody()->getContents(); $data = json_decode($data, TRUE); - // @todo: change to value objects. + // @todo Change to value objects. $models = []; foreach ($data['models'] as $item) { $models[$item['model']] = [ @@ -70,44 +70,46 @@ public function listLocalModels(): array { * @param string $modelName * Name of the model. * - * @return string - * - * @throws \Drupal\llm_services\Exceptions\CommunicationException + * @return \Generator + * The progress of installation. * * @see https://ollama.com/library * + * @throws \Drupal\llm_services\Exceptions\CommunicationException + * @throws \JsonException */ public function install(string $modelName): \Generator { $response = $this->call(method: 'post', uri: '/api/pull', options: [ 'json' => [ 'name' => $modelName, - 'stream' => true, + 'stream' => TRUE, ], 'headers' => [ 'Content-Type' => 'application/json', ], RequestOptions::CONNECT_TIMEOUT => 10, RequestOptions::TIMEOUT => 300, - RequestOptions::STREAM => true, + RequestOptions::STREAM => TRUE, ]); $body = $response->getBody(); while (!$body->eof()) { $data = $body->read(1024); - yield from $this->parse($data);; + yield from $this->parse($data); } } /** * Ask a question to the model. * - * @TODO make call function that can do the stream, if possible. 
- * * @param \Drupal\llm_services\Model\Payload $payload + * The question to ask the module. * * @return \Generator + * The response from the model as it completes. * * @throws \Drupal\llm_services\Exceptions\CommunicationException + * @throws \JsonException * * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion */ @@ -116,20 +118,20 @@ public function completion(Payload $payload): \Generator { 'json' => [ 'model' => $payload->model, 'prompt' => $payload->messages[0]->content, - 'stream' => true, + 'stream' => TRUE, ], 'headers' => [ 'Content-Type' => 'application/json', ], RequestOptions::CONNECT_TIMEOUT => 10, RequestOptions::TIMEOUT => 300, - RequestOptions::STREAM => true, + RequestOptions::STREAM => TRUE, ]); $body = $response->getBody(); while (!$body->eof()) { $data = $body->read(1024); - yield from $this->parse($data);; + yield from $this->parse($data); } } @@ -156,8 +158,8 @@ private function parse(string $data): \Generator { foreach ($strings as $str) { if (json_validate($str)) { - // Valid json string lets decode an yield it. - yield json_decode($str, true, flags: JSON_THROW_ON_ERROR); + // Valid json string lets decode an yield it. + yield json_decode($str, TRUE, flags: JSON_THROW_ON_ERROR); } else { // Ignore empty strings. @@ -176,7 +178,7 @@ private function parse(string $data): \Generator { return; } // Valid json string, yield, reset cache. - yield json_decode($str, true, flags: JSON_THROW_ON_ERROR); + yield json_decode($str, TRUE, flags: JSON_THROW_ON_ERROR); $this->parserCache = ''; } } @@ -190,7 +192,7 @@ private function parse(string $data): \Generator { * @param string $method * The method to use (GET/POST). * @param string $uri - * The API endpoint to call + * The API endpoint to call. * @param array $options * Extra options and/or payload to post. 
* @@ -203,7 +205,7 @@ private function call(string $method, string $uri, array $options = []): Respons $client = \Drupal::httpClient(); try { - $response = $client->request($method, $this->getURL($uri), $options); + $response = $client->request($method, $this->getUrl($uri), $options); if ($response->getStatusCode() !== 200) { throw new CommunicationException('Request failed', $response->getStatusCode()); } @@ -221,10 +223,10 @@ private function call(string $method, string $uri, array $options = []): Respons * @param string $uri * The URI to append to the base URL. Default is an empty string. * - * @return string T + * @return string * The complete URL string. */ - private function getURL(string $uri = ''): string { + private function getUrl(string $uri = ''): string { return $this->url . ':' . $this->port . ($uri ? '/' . ltrim($uri, '/') : ''); } diff --git a/src/Client/OllamaCompletionResponse.php b/src/Client/OllamaCompletionResponse.php new file mode 100644 index 0000000..5fbc683 --- /dev/null +++ b/src/Client/OllamaCompletionResponse.php @@ -0,0 +1,60 @@ +model; + } + + /** + * {@inheritdoc} + */ + public function getResponse(): string { + return $this->response; + } + + /** + * {@inheritdoc} + */ + public function getStatus(): bool { + return $this->done; + } + + /** + * {@inheritdoc} + */ + public function getContext(): string { + return $this->context; + } + +} diff --git a/src/Drush/Commands/LlmServicesCommands.php b/src/Drush/Commands/LlmServicesCommands.php index 20ca1ce..678d9b2 100644 --- a/src/Drush/Commands/LlmServicesCommands.php +++ b/src/Drush/Commands/LlmServicesCommands.php @@ -45,7 +45,7 @@ public function listModels(string $provider): void { $provider = $this->providerManager->createInstance($provider); $models = $provider->listModels(); - // @todo output more information. + // @todo Output more information. foreach ($models as $model) { $this->writeln($model['name'] . ' (' . $model['modified'] . 
')'); } @@ -64,18 +64,17 @@ public function listModels(string $provider): void { public function installModel(string $provider, string $name): void { $provider = $this->providerManager->createInstance($provider); - // @todo: stream responses. + // @todo Stream responses. foreach ($provider->installModel($name) as $progress) { - if (isset($progress['total']) && isset($progress['completed'])) { + if (isset($progress['total']) && isset($progress['completed'])) { $percent = ($progress['completed'] / $progress['total']) * 100; $this->output()->writeln(sprintf('%s (%0.2f%% downloaded)', $progress['status'], $percent)); } - else { + else { $this->output()->writeln($progress['status']); } } $this->output()->write("\n"); - } /** @@ -100,8 +99,9 @@ public function completion(string $provider, string $name, string $prompt): void $payLoad->messages[] = $msg; foreach ($provider->completion($payLoad) as $res) { - $this->output()->write($res['response']); + $this->output()->write($res->getResponse()); } $this->output()->write("\n"); } + } diff --git a/src/Model/CompletionResponseInterface.php b/src/Model/CompletionResponseInterface.php new file mode 100644 index 0000000..d0221f7 --- /dev/null +++ b/src/Model/CompletionResponseInterface.php @@ -0,0 +1,45 @@ + * The result of the completion process. * * @throws \Drupal\llm_services\Exceptions\CommunicationException */ - public function completion(Payload $payload): mixed; + public function completion(Payload $payload): \Generator; /** * Initiates a chat. @@ -53,11 +53,11 @@ public function completion(Payload $payload): mixed; * @param \Drupal\llm_services\Model\Payload $payload * The body of the chat request. * - * @return mixed + * @return \Generator * The result of the chat initiation. 
* * @throws \Drupal\llm_services\Exceptions\CommunicationException */ - public function chat(Payload $payload): mixed; + public function chat(Payload $payload): \Generator; } diff --git a/src/Plugin/LLModelsProviders/Ollama.php b/src/Plugin/LLModelsProviders/Ollama.php index 4cd3ede..0107893 100644 --- a/src/Plugin/LLModelsProviders/Ollama.php +++ b/src/Plugin/LLModelsProviders/Ollama.php @@ -7,6 +7,7 @@ use Drupal\Core\Plugin\PluginBase; use Drupal\Core\Plugin\PluginFormInterface; use Drupal\llm_services\Client\Ollama as ClientOllama; +use Drupal\llm_services\Client\OllamaCompletionResponse; use Drupal\llm_services\Exceptions\CommunicationException; use Drupal\llm_services\Exceptions\NotSupportedException; use Drupal\llm_services\Model\Payload; @@ -49,7 +50,7 @@ public function listModels(): array { /** * {@inheritdoc} */ - public function installModel(string $modelName): mixed { + public function installModel(string $modelName): \Generator|string { try { return $this->getClient()->install($modelName); } @@ -64,11 +65,18 @@ public function installModel(string $modelName): mixed { /** * {@inheritdoc} * + * @throws \Drupal\llm_services\Exceptions\CommunicationException + * * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion */ public function completion(Payload $payload): \Generator { foreach ($this->getClient()->completion($payload) as $chunk) { - yield $chunk; + yield new OllamaCompletionResponse( + $chunk['model'], + $chunk['response'], + $chunk['done'], + $chunk['context'] ?? '', + ); } } @@ -77,8 +85,8 @@ public function completion(Payload $payload): \Generator { * * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion */ - public function chat(Payload $payload): mixed { - // TODO: Implement chatCompletions() method. + public function chat(Payload $payload): \Generator { + // @todo Implement chatCompletions() method. 
throw new NotSupportedException(); } @@ -164,7 +172,8 @@ public function submitConfigurationForm(array &$form, FormStateInterface $form_s try { $this->listModels(); \Drupal::messenger()->addMessage('Successfully connected to Ollama'); - } catch (\Exception $exception) { + } + catch (\Exception $exception) { \Drupal::messenger()->addMessage('Error communication with Ollama: ' . $exception->getMessage(), 'error'); } } From ebcd13d08b4927d1b504a185d7fb6e83edb2952c Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Tue, 28 May 2024 08:41:21 +0200 Subject: [PATCH 11/17] 1452: Code review fixes - mostly typos Co-authored-by: Jeppe Kuhlmann Andersen <78410897+jekuaitk@users.noreply.github.com> --- llm_services.info.yml | 2 +- llm_services.links.task.yml | 2 +- llm_services.routing.yml | 2 +- src/Client/Ollama.php | 2 +- src/Drush/Commands/LlmServicesCommands.php | 2 +- src/Exceptions/LLMException.php | 2 +- src/Form/SettingsForm.php | 4 ++-- src/Model/MessageRoles.php | 2 +- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/llm_services.info.yml b/llm_services.info.yml index de13cf5..84b3da7 100644 --- a/llm_services.info.yml +++ b/llm_services.info.yml @@ -1,5 +1,5 @@ name: "Large language model services" -description: 'Large language module services to communicat with the models.' +description: 'Large language module services to communicate with the models.' 
type: module core_version_requirement: ^10 configure: llm_services.plugin_settings_local_tasks diff --git a/llm_services.links.task.yml b/llm_services.links.task.yml index 2ebde1c..3755a6b 100644 --- a/llm_services.links.task.yml +++ b/llm_services.links.task.yml @@ -1,5 +1,5 @@ llm_services.plugin_settings_tasks: - title: 'LLM Services settings' + title: 'LLM services settings' route_name: llm_services.plugin_settings_local_tasks base_route: llm_services.plugin_settings_local_tasks deriver: Drupal\llm_services\Plugin\Derivative\LocalTask diff --git a/llm_services.routing.yml b/llm_services.routing.yml index 2f38d71..2bc1326 100644 --- a/llm_services.routing.yml +++ b/llm_services.routing.yml @@ -2,7 +2,7 @@ llm_services.plugin_settings_local_tasks: path: '/admin/config/llm_services/settings/{type}' defaults: _controller: '\Drupal\llm_services\Controller\LocalTasksController::dynamicTasks' - _title: 'LLM Services settings' + _title: 'LLM services settings' type: '' requirements: _permission: 'administer site' diff --git a/src/Client/Ollama.php b/src/Client/Ollama.php index 2fd2d97..caa2d58 100644 --- a/src/Client/Ollama.php +++ b/src/Client/Ollama.php @@ -28,7 +28,7 @@ class Ollama { * @param string $url * The URL of the Ollama server. * @param int $port - * The port that Ollama is listing on. + * The port that Ollama is listening on. 
*/ public function __construct( private readonly string $url, diff --git a/src/Drush/Commands/LlmServicesCommands.php b/src/Drush/Commands/LlmServicesCommands.php index 678d9b2..77a8b7d 100644 --- a/src/Drush/Commands/LlmServicesCommands.php +++ b/src/Drush/Commands/LlmServicesCommands.php @@ -87,7 +87,7 @@ public function installModel(string $provider, string $name): void { #[CLI\Argument(name: 'provider', description: 'Name of the provider (plugin).')] #[CLI\Argument(name: 'name', description: 'Name of the model to use.')] #[CLI\Argument(name: 'prompt', description: 'The prompt to generate a response for.')] - #[CLI\Usage(name: 'llm:model:completion ollama llama2 "Why is the sky blue?"', description: 'Install LLama2 modul in Ollama')] + #[CLI\Usage(name: 'llm:model:completion ollama llama2 "Why is the sky blue?"', description: 'Prompt LLama2')] public function completion(string $provider, string $name, string $prompt): void { $provider = $this->providerManager->createInstance($provider); diff --git a/src/Exceptions/LLMException.php b/src/Exceptions/LLMException.php index a176d5e..d999914 100644 --- a/src/Exceptions/LLMException.php +++ b/src/Exceptions/LLMException.php @@ -3,7 +3,7 @@ namespace Drupal\llm_services\Exceptions; /** - * Base execution that all other exceptions should extend. + * Base exception that all other exceptions should extend. * * This will enable other modules to use this exception as an catch all. 
*/ diff --git a/src/Form/SettingsForm.php b/src/Form/SettingsForm.php index fbd02b9..0c673f9 100644 --- a/src/Form/SettingsForm.php +++ b/src/Form/SettingsForm.php @@ -72,8 +72,8 @@ public function buildForm(array $form, FormStateInterface $form_state): array { $form['provider'] = [ '#type' => 'select', - '#title' => $this->t('Log provider'), - '#description' => $this->t('Select the logger provider you which to use'), + '#title' => $this->t('Provider'), + '#description' => $this->t('Select the provider you wish to use'), '#options' => $options, '#default_value' => $config->get('provider'), ]; diff --git a/src/Model/MessageRoles.php b/src/Model/MessageRoles.php index 230ef82..a27e909 100644 --- a/src/Model/MessageRoles.php +++ b/src/Model/MessageRoles.php @@ -14,6 +14,6 @@ */ enum MessageRoles: string { case Assistant = 'assistant'; - case System = "system"; + case System = 'system'; case User = 'user'; } From b6dfe0c47bd8a71cc5b6763e58d775e1b568ee7a Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Tue, 28 May 2024 13:05:45 +0200 Subject: [PATCH 12/17] 1452: Moved into symfony commands --- drush.services.yml | 21 ++++ src/Client/OllamaCompletionResponse.php | 6 +- src/Commands/ModelCompletionCommand.php | 113 +++++++++++++++++++++ src/Commands/ProviderInstallCommand.php | 79 ++++++++++++++ src/Commands/ProviderListCommand.php | 67 ++++++++++++ src/Drush/Commands/LlmServicesCommands.php | 107 ------------------- src/Model/CompletionResponseInterface.php | 4 +- src/Plugin/LLModelsProviders/Ollama.php | 3 +- 8 files changed, 287 insertions(+), 113 deletions(-) create mode 100644 drush.services.yml create mode 100644 src/Commands/ModelCompletionCommand.php create mode 100644 src/Commands/ProviderInstallCommand.php create mode 100644 src/Commands/ProviderListCommand.php delete mode 100644 src/Drush/Commands/LlmServicesCommands.php diff --git a/drush.services.yml b/drush.services.yml new file mode 100644 index 0000000..93be7ad --- /dev/null +++ b/drush.services.yml 
@@ -0,0 +1,21 @@ +services: + llm.service.completion.command: + class: Drupal\llm_services\Commands\ModelCompletionCommand + arguments: + - '@plugin.manager.llm_services' + tags: + - { name: console.command } + + llm.service.install.command: + class: Drupal\llm_services\Commands\ProviderInstallCommand + arguments: + - '@plugin.manager.llm_services' + tags: + - { name: console.command } + + llm.service.list.command: + class: Drupal\llm_services\Commands\ProviderListCommand + arguments: + - '@plugin.manager.llm_services' + tags: + - { name: console.command } diff --git a/src/Client/OllamaCompletionResponse.php b/src/Client/OllamaCompletionResponse.php index 5fbc683..d1f766d 100644 --- a/src/Client/OllamaCompletionResponse.php +++ b/src/Client/OllamaCompletionResponse.php @@ -18,14 +18,14 @@ * The response from the model. * @param bool $done * The module completion state. - * @param string $context + * @param array $context * The generated context when completed. */ public function __construct( private string $model, private string $response, private bool $done, - private string $context, + private array $context, ) { } @@ -53,7 +53,7 @@ public function getStatus(): bool { /** * {@inheritdoc} */ - public function getContext(): string { + public function getContext(): array { return $this->context; } diff --git a/src/Commands/ModelCompletionCommand.php b/src/Commands/ModelCompletionCommand.php new file mode 100644 index 0000000..552a3ca --- /dev/null +++ b/src/Commands/ModelCompletionCommand.php @@ -0,0 +1,113 @@ +setName('llm:model:completion') + ->setDescription('Make a completion request to a model') + ->addUsage('llm:model:completion ollama llama2 "Why is the sky blue?') + ->addArgument( + name: 'provider', + mode: InputArgument::REQUIRED, + description: 'Name of the provider (plugin).' + ) + ->addArgument( + name: 'name', + mode: InputArgument::REQUIRED, + description: 'Name of the model to use.' 
+ ) + ->addArgument( + name: 'prompt', + mode: InputArgument::REQUIRED, + description: 'The prompt to generate a response for.' + ) + ->addOption( + name: 'temperature', + mode: InputOption::VALUE_REQUIRED, + description: 'The temperature of the model. Increasing the temperature will make the model answer more creatively.', + default: '0.8' + ) + ->addOption( + name: 'top-k', + mode: InputOption::VALUE_REQUIRED, + description: 'Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers.', + default: '40' + ) + ->addOption( + name: 'top-p', + mode: InputOption::VALUE_REQUIRED, + description: 'A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text.', + default: '0.9' + ); + } + + /** + * {@inheritDoc} + */ + protected function execute(InputInterface $input, OutputInterface $output): int { + $providerName = $input->getArgument('provider'); + $name = $input->getArgument('name'); + $prompt = $input->getArgument('prompt'); + + $temperature = $input->getOption('temperature'); + $topK = $input->getOption('top-k'); + $topP = $input->getOption('top-p'); + + $provider = $this->providerManager->createInstance($providerName); + + $payLoad = new Payload(); + $payLoad->model = $name; + $payLoad->options = [ + 'temperature' => $temperature, + 'top_k' => $topK, + 'top_p' => $topP, + ]; + $msg = new Message(); + $msg->content = $prompt; + $payLoad->messages[] = $msg; + + foreach ($provider->completion($payLoad) as $res) { + $output->write($res->getResponse()); + } + $output->write("\n"); + + return Command::SUCCESS; + } + +} diff --git a/src/Commands/ProviderInstallCommand.php b/src/Commands/ProviderInstallCommand.php new file mode 100644 index 0000000..17240cf --- /dev/null +++ b/src/Commands/ProviderInstallCommand.php @@ -0,0 +1,79 @@ +setName('llm:provider:install') + ->setDescription('Install model in provider') + ->addUsage('llm:install:model 
ollama llama2') + ->addArgument( + name: 'provider', + mode: InputArgument::REQUIRED, + description: 'Name of the provider (plugin).' + ) + ->addArgument( + name: 'name', + mode: InputArgument::REQUIRED, + description: 'Name of the model to use.' + ); + } + + /** + * {@inheritDoc} + * + * @throws \Drupal\Component\Plugin\Exception\PluginException + * @throws \Drupal\llm_services\Exceptions\CommunicationException + */ + protected function execute(InputInterface $input, OutputInterface $output): int { + $providerName = $input->getArgument('provider'); + $name = $input->getArgument('name'); + + $provider = $this->providerManager->createInstance($providerName); + + foreach ($provider->installModel($name) as $progress) { + if (isset($progress['total']) && isset($progress['completed'])) { + $percent = ($progress['completed'] / $progress['total']) * 100; + $output->writeln(sprintf('%s (%0.2f%% downloaded)', $progress['status'], $percent)); + } + else { + $output->writeln($progress['status']); + } + } + $output->write("\n"); + + return Command::SUCCESS; + } + +} diff --git a/src/Commands/ProviderListCommand.php b/src/Commands/ProviderListCommand.php new file mode 100644 index 0000000..3482d4d --- /dev/null +++ b/src/Commands/ProviderListCommand.php @@ -0,0 +1,67 @@ +setName('llm:provider:list') + ->setDescription('Install model in provider') + ->addUsage('llm:install:model ollama llama2') + ->addArgument( + name: 'provider', + mode: InputArgument::REQUIRED, + description: 'Name of the provider (plugin).' 
+ ); + } + + /** + * {@inheritDoc} + * + * @throws \Drupal\Component\Plugin\Exception\PluginException + * @throws \Drupal\llm_services\Exceptions\CommunicationException + */ + protected function execute(InputInterface $input, OutputInterface $output): int { + $providerName = $input->getArgument('provider'); + + $provider = $this->providerManager->createInstance($providerName); + $models = $provider->listModels(); + + foreach ($models as $model) { + $output->writeln($model['name'] . ' (' . $model['modified'] . ')'); + } + + return Command::SUCCESS; + } + +} diff --git a/src/Drush/Commands/LlmServicesCommands.php b/src/Drush/Commands/LlmServicesCommands.php deleted file mode 100644 index 77a8b7d..0000000 --- a/src/Drush/Commands/LlmServicesCommands.php +++ /dev/null @@ -1,107 +0,0 @@ -get('plugin.manager.llm_services'), - ); - } - - /** - * List models from provider. - * - * @throws \Drupal\Component\Plugin\Exception\PluginException - * @throws \Drupal\llm_services\Exceptions\CommunicationException - */ - #[CLI\Command(name: 'llm:list:models', aliases: ['llm-list'])] - #[CLI\Argument(name: 'provider', description: 'Name of the provider (plugin).')] - #[CLI\Usage(name: 'llm:list:models ollama', description: 'List moduls available ')] - public function listModels(string $provider): void { - $provider = $this->providerManager->createInstance($provider); - $models = $provider->listModels(); - - // @todo Output more information. - foreach ($models as $model) { - $this->writeln($model['name'] . ' (' . $model['modified'] . ')'); - } - } - - /** - * Install model in provider. 
- * - * @throws \Drupal\Component\Plugin\Exception\PluginException - * @throws \Drupal\llm_services\Exceptions\CommunicationException - */ - #[CLI\Command(name: 'llm:install:model', aliases: ['llm-install'])] - #[CLI\Argument(name: 'provider', description: 'Name of the provider (plugin).')] - #[CLI\Argument(name: 'name', description: 'Name of the model to try and download.')] - #[CLI\Usage(name: 'llm:install:model ollama llama2', description: 'Install LLama2 modul in Ollama')] - public function installModel(string $provider, string $name): void { - $provider = $this->providerManager->createInstance($provider); - - // @todo Stream responses. - foreach ($provider->installModel($name) as $progress) { - if (isset($progress['total']) && isset($progress['completed'])) { - $percent = ($progress['completed'] / $progress['total']) * 100; - $this->output()->writeln(sprintf('%s (%0.2f%% downloaded)', $progress['status'], $percent)); - } - else { - $this->output()->writeln($progress['status']); - } - } - $this->output()->write("\n"); - } - - /** - * Try out completion with a model. 
- * - * @throws \Drupal\Component\Plugin\Exception\PluginException - * @throws \Drupal\llm_services\Exceptions\CommunicationException - */ - #[CLI\Command(name: 'llm:model:completion', aliases: ['llm-completion'])] - #[CLI\Argument(name: 'provider', description: 'Name of the provider (plugin).')] - #[CLI\Argument(name: 'name', description: 'Name of the model to use.')] - #[CLI\Argument(name: 'prompt', description: 'The prompt to generate a response for.')] - #[CLI\Usage(name: 'llm:model:completion ollama llama2 "Why is the sky blue?"', description: 'Prompt LLama2')] - public function completion(string $provider, string $name, string $prompt): void { - $provider = $this->providerManager->createInstance($provider); - - $payLoad = new Payload(); - $payLoad->model = $name; - - $msg = new Message(); - $msg->content = $prompt; - $payLoad->messages[] = $msg; - - foreach ($provider->completion($payLoad) as $res) { - $this->output()->write($res->getResponse()); - } - $this->output()->write("\n"); - } - -} diff --git a/src/Model/CompletionResponseInterface.php b/src/Model/CompletionResponseInterface.php index d0221f7..afc3b5f 100644 --- a/src/Model/CompletionResponseInterface.php +++ b/src/Model/CompletionResponseInterface.php @@ -37,9 +37,9 @@ public function getStatus(): bool; * This can be seen at chat history in completion requests to make the model * more context aware. * - * @return string + * @return mixed * Context from the completion. 
*/ - public function getContext(): string; + public function getContext(): mixed; } diff --git a/src/Plugin/LLModelsProviders/Ollama.php b/src/Plugin/LLModelsProviders/Ollama.php index 0107893..6a93c23 100644 --- a/src/Plugin/LLModelsProviders/Ollama.php +++ b/src/Plugin/LLModelsProviders/Ollama.php @@ -66,6 +66,7 @@ public function installModel(string $modelName): \Generator|string { * {@inheritdoc} * * @throws \Drupal\llm_services\Exceptions\CommunicationException + * @throws \JsonException * * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion */ @@ -75,7 +76,7 @@ public function completion(Payload $payload): \Generator { $chunk['model'], $chunk['response'], $chunk['done'], - $chunk['context'] ?? '', + $chunk['context'] ?? [], ); } } From c58cce24d5e6bedd19f6be2d052327f4f450d6df Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Tue, 28 May 2024 15:04:11 +0200 Subject: [PATCH 13/17] 1452: Added chat support with context --- drush.services.yml | 7 + src/Client/Ollama.php | 66 ++++++++- src/Client/OllamaChatResponse.php | 71 ++++++++++ src/Commands/ModelChatCommand.php | 134 ++++++++++++++++++ src/Commands/ModelCompletionCommand.php | 8 +- src/Commands/ProviderInstallCommand.php | 5 +- src/Commands/ProviderListCommand.php | 5 +- src/Model/ChatResponseInterface.php | 55 +++++++ src/Model/CompletionResponseInterface.php | 4 +- .../LLMProviderInterface.php | 4 +- src/Plugin/LLModelsProviders/Ollama.php | 49 +++---- 11 files changed, 358 insertions(+), 50 deletions(-) create mode 100644 src/Client/OllamaChatResponse.php create mode 100644 src/Commands/ModelChatCommand.php create mode 100644 src/Model/ChatResponseInterface.php diff --git a/drush.services.yml b/drush.services.yml index 93be7ad..21e70be 100644 --- a/drush.services.yml +++ b/drush.services.yml @@ -19,3 +19,10 @@ services: - '@plugin.manager.llm_services' tags: - { name: console.command } + + llm.service.char.command: + class: Drupal\llm_services\Commands\ModelChatCommand 
+ arguments: + - '@plugin.manager.llm_services' + tags: + - { name: console.command } diff --git a/src/Client/Ollama.php b/src/Client/Ollama.php index caa2d58..504ad22 100644 --- a/src/Client/Ollama.php +++ b/src/Client/Ollama.php @@ -28,7 +28,7 @@ class Ollama { * @param string $url * The URL of the Ollama server. * @param int $port - * The port that Ollama is listening on. + * The port that Ollama is listening at. */ public function __construct( private readonly string $url, @@ -42,15 +42,13 @@ public function __construct( * @return array> * Basic information about the models. * - * @throws \GuzzleHttp\Exception\GuzzleException - * @throws \JsonException + * @throws \Drupal\llm_services\Exceptions\CommunicationException */ public function listLocalModels(): array { $response = $this->call(method: 'get', uri: '/api/tags'); $data = $response->getBody()->getContents(); $data = json_decode($data, TRUE); - // @todo Change to value objects. $models = []; foreach ($data['models'] as $item) { $models[$item['model']] = [ @@ -135,6 +133,66 @@ public function completion(Payload $payload): \Generator { } } + /** + * Chat with a model. + * + * @param \Drupal\llm_services\Model\Payload $payload + * The question to ask the module and the chat history. + * + * @return \Generator + * The response from the model as it completes it. 
+   *
+   * @throws \Drupal\llm_services\Exceptions\CommunicationException
+   * @throws \JsonException
+   */
+  public function chat(Payload $payload): \Generator {
+    $response = $this->call(method: 'post', uri: '/api/chat', options: [
+      'json' => [
+        'model' => $payload->model,
+        'messages' => $this->chatMessagesAsArray($payload),
+        'stream' => TRUE,
+      ],
+      'headers' => [
+        'Content-Type' => 'application/json',
+      ],
+      RequestOptions::CONNECT_TIMEOUT => 10,
+      RequestOptions::TIMEOUT => 300,
+      RequestOptions::STREAM => TRUE,
+    ]);
+
+    $body = $response->getBody();
+    while (!$body->eof()) {
+      $data = $body->read(1024);
+      yield from $this->parse($data);
+    }
+  }
+
+  /**
+   * Take all payload messages and change them into an array.
+   *
+   * This array of messages is used to give the model some chat context to make
+   * the interaction appear more like real chat with a person.
+   *
+   * @param \Drupal\llm_services\Model\Payload $payload
+   *   The payload sent to the chat function.
+   *
+   * @return array
+   *   Array of messages to send to Ollama.
+   *
+   * @see https://github.com/ollama/ollama/blob/main/docs/api.md#chat-request-with-history
+   */
+  private function chatMessagesAsArray(Payload $payload): array {
+    $messages = [];
+    foreach ($payload->messages as $message) {
+      $messages[] = [
+        'content' => $message->content,
+        'role' => $message->role->value,
+      ];
+    }
+
+    return $messages;
+  }
+
   /**
    * Parse LLM stream.
* diff --git a/src/Client/OllamaChatResponse.php b/src/Client/OllamaChatResponse.php new file mode 100644 index 0000000..d4a688d --- /dev/null +++ b/src/Client/OllamaChatResponse.php @@ -0,0 +1,71 @@ +model; + } + + /** + * {@inheritdoc} + */ + public function getStatus(): bool { + return $this->done; + } + + /** + * {@inheritdoc} + */ + public function getContent(): string { + return $this->content; + } + + /** + * {@inheritdoc} + */ + public function getRole(): MessageRoles { + return $this->role; + } + + /** + * {@inheritdoc} + */ + public function getImages(): array { + return $this->images; + } + +} diff --git a/src/Commands/ModelChatCommand.php b/src/Commands/ModelChatCommand.php new file mode 100644 index 0000000..90a7528 --- /dev/null +++ b/src/Commands/ModelChatCommand.php @@ -0,0 +1,134 @@ +setName('llm:model:chat') + ->setDescription('Chat with model (use ctrl+c to stop chatting)') + ->addUsage('llm:model:chat ollama llama3') + ->addArgument( + name: 'provider', + mode: InputArgument::REQUIRED, + description: 'Name of the provider (plugin).' + ) + ->addArgument( + name: 'name', + mode: InputArgument::REQUIRED, + description: 'Name of the model to use.' + ) + ->addOption( + name: 'system-prompt', + mode: InputOption::VALUE_REQUIRED, + description: 'System message to instruct the llm have to behave.', + default: 'Use the following pieces of context to answer the users question. If you don\'t know the answer, just say that you don\'t know, don\'t try to make up an answer.' + ) + ->addOption( + name: 'temperature', + mode: InputOption::VALUE_REQUIRED, + description: 'The temperature of the model. Increasing the temperature will make the model answer more creatively.', + default: '0.8' + ) + ->addOption( + name: 'top-k', + mode: InputOption::VALUE_REQUIRED, + description: 'Reduces the probability of generating nonsense. A higher value (e.g. 
100) will give more diverse answers.', + default: '40' + ) + ->addOption( + name: 'top-p', + mode: InputOption::VALUE_REQUIRED, + description: 'A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text.', + default: '0.9' + ); + } + + /** + * {@inheritDoc} + */ + protected function execute(InputInterface $input, OutputInterface $output): int { + $providerName = $input->getArgument('provider'); + $name = $input->getArgument('name'); + + $systemPrompt = $input->getOption('system-prompt'); + $temperature = $input->getOption('temperature'); + $topK = $input->getOption('top-k'); + $topP = $input->getOption('top-p'); + + $provider = $this->providerManager->createInstance($providerName); + + // Build configuration. + $payLoad = new Payload(); + $payLoad->model = $name; + $payLoad->options = [ + 'temperature' => $temperature, + 'top_k' => $topK, + 'top_p' => $topP, + ]; + $msg = new Message(); + $msg->role = MessageRoles::System; + $msg->content = $systemPrompt; + $payLoad->messages[] = $msg; + + $helper = $this->getHelper('question'); + $question = new Question('Message: ', ''); + + // Keep cheating with the user. Not optimal, but okay for now. + while (TRUE) { + // Query the next question. + $output->write("\n"); + $msg = new Message(); + $msg->role = MessageRoles::User; + $msg->content = $helper->ask($input, $output, $question); + $payLoad->messages[] = $msg; + $output->write("\n"); + + $answer = ''; + foreach ($provider->chat($payLoad) as $res) { + $output->write($res->getContent()); + $answer .= $res->getContent(); + } + $output->write("\n"); + + // Add answer as context to the next question. 
+      $msg = new Message();
+      $msg->role = MessageRoles::Assistant;
+      $msg->content = $answer;
+      $payLoad->messages[] = $msg;
+    }
+  }
+
+}
diff --git a/src/Commands/ModelCompletionCommand.php b/src/Commands/ModelCompletionCommand.php
index 552a3ca..43d53e3 100644
--- a/src/Commands/ModelCompletionCommand.php
+++ b/src/Commands/ModelCompletionCommand.php
@@ -14,11 +14,7 @@
 use Symfony\Component\Console\Output\OutputInterface;
 
 /**
- * This is a literal copy of the example Symfony Console command
- * from the documentation.
- *
- * See:
- * http://symfony.com/doc/2.7/components/console/introduction.html#creating-a-basic-command
+ * Make a completion request against a provider model.
  */
 class ModelCompletionCommand extends Command {
 
@@ -41,7 +37,7 @@ protected function configure(): void {
     $this
       ->setName('llm:model:completion')
       ->setDescription('Make a completion request to a model')
-      ->addUsage('llm:model:completion ollama llama2 "Why is the sky blue?')
+      ->addUsage('llm:model:completion ollama llama3 "Why is the sky blue?"')
       ->addArgument(
         name: 'provider',
         mode: InputArgument::REQUIRED,
diff --git a/src/Commands/ProviderInstallCommand.php b/src/Commands/ProviderInstallCommand.php
index 17240cf..f0c336a 100644
--- a/src/Commands/ProviderInstallCommand.php
+++ b/src/Commands/ProviderInstallCommand.php
@@ -4,13 +4,10 @@
 
 namespace Drupal\llm_services\Commands;
 
-use Drupal\llm_services\Model\Message;
-use Drupal\llm_services\Model\Payload;
 use Drupal\llm_services\Plugin\LLModelProviderManager;
 use Symfony\Component\Console\Command\Command;
 use Symfony\Component\Console\Input\InputArgument;
 use Symfony\Component\Console\Input\InputInterface;
-use Symfony\Component\Console\Input\InputOption;
 use Symfony\Component\Console\Output\OutputInterface;
 
 /**
@@ -37,7 +34,7 @@ protected function configure(): void {
     $this
       ->setName('llm:provider:install')
      ->setDescription('Install model in provider')
-      ->addUsage('llm:install:model ollama llama2')
+      ->addUsage('llm:provider:install ollama 
llama3') ->addArgument( name: 'provider', mode: InputArgument::REQUIRED, diff --git a/src/Commands/ProviderListCommand.php b/src/Commands/ProviderListCommand.php index 3482d4d..d1f1518 100644 --- a/src/Commands/ProviderListCommand.php +++ b/src/Commands/ProviderListCommand.php @@ -4,13 +4,10 @@ namespace Drupal\llm_services\Commands; -use Drupal\llm_services\Model\Message; -use Drupal\llm_services\Model\Payload; use Drupal\llm_services\Plugin\LLModelProviderManager; use Symfony\Component\Console\Command\Command; use Symfony\Component\Console\Input\InputArgument; use Symfony\Component\Console\Input\InputInterface; -use Symfony\Component\Console\Input\InputOption; use Symfony\Component\Console\Output\OutputInterface; /** @@ -37,7 +34,7 @@ protected function configure(): void { $this ->setName('llm:provider:list') ->setDescription('Install model in provider') - ->addUsage('llm:install:model ollama llama2') + ->addUsage('llm:provider:list ollama') ->addArgument( name: 'provider', mode: InputArgument::REQUIRED, diff --git a/src/Model/ChatResponseInterface.php b/src/Model/ChatResponseInterface.php new file mode 100644 index 0000000..503deb7 --- /dev/null +++ b/src/Model/ChatResponseInterface.php @@ -0,0 +1,55 @@ + + * String of base64 encoded images. + */ + public function getImages(): array; + + /** + * The completion status. + * + * @return bool + * If false, the model has more to say. + */ + public function getStatus(): bool; + +} diff --git a/src/Model/CompletionResponseInterface.php b/src/Model/CompletionResponseInterface.php index afc3b5f..f2aa649 100644 --- a/src/Model/CompletionResponseInterface.php +++ b/src/Model/CompletionResponseInterface.php @@ -16,10 +16,10 @@ interface CompletionResponseInterface { public function getModel(): string; /** - * The response from the module. + * The response from the model. * * @return string - * The text generated by the modul. + * The text generated by the model. 
*/ public function getResponse(): string; diff --git a/src/Plugin/LLModelsProviders/LLMProviderInterface.php b/src/Plugin/LLModelsProviders/LLMProviderInterface.php index 97a186a..12f2d08 100644 --- a/src/Plugin/LLModelsProviders/LLMProviderInterface.php +++ b/src/Plugin/LLModelsProviders/LLMProviderInterface.php @@ -53,8 +53,8 @@ public function completion(Payload $payload): \Generator; * @param \Drupal\llm_services\Model\Payload $payload * The body of the chat request. * - * @return \Generator - * The result of the chat initiation. + * @return \Generator<\Drupal\llm_services\Model\ChatResponseInterface> + * The result of the chat. * * @throws \Drupal\llm_services\Exceptions\CommunicationException */ diff --git a/src/Plugin/LLModelsProviders/Ollama.php b/src/Plugin/LLModelsProviders/Ollama.php index 6a93c23..6ee0c97 100644 --- a/src/Plugin/LLModelsProviders/Ollama.php +++ b/src/Plugin/LLModelsProviders/Ollama.php @@ -7,11 +7,10 @@ use Drupal\Core\Plugin\PluginBase; use Drupal\Core\Plugin\PluginFormInterface; use Drupal\llm_services\Client\Ollama as ClientOllama; +use Drupal\llm_services\Client\OllamaChatResponse; use Drupal\llm_services\Client\OllamaCompletionResponse; -use Drupal\llm_services\Exceptions\CommunicationException; -use Drupal\llm_services\Exceptions\NotSupportedException; +use Drupal\llm_services\Model\MessageRoles; use Drupal\llm_services\Model\Payload; -use GuzzleHttp\Exception\GuzzleException; /** * Ollama integration provider. 
@@ -36,36 +35,21 @@ public function __construct(array $configuration, $plugin_id, $plugin_definition * {@inheritdoc} */ public function listModels(): array { - try { - return $this->getClient()->listLocalModels(); - } - catch (GuzzleException | \JsonException $exception) { - throw new CommunicationException( - message: 'Error in communicating with LLM services', - previous: $exception, - ); - } + return $this->getClient()->listLocalModels(); } /** * {@inheritdoc} + * + * @throws \JsonException */ public function installModel(string $modelName): \Generator|string { - try { - return $this->getClient()->install($modelName); - } - catch (GuzzleException $exception) { - throw new CommunicationException( - message: 'Error in communicating with LLM services', - previous: $exception, - ); - } + return $this->getClient()->install($modelName); } /** * {@inheritdoc} * - * @throws \Drupal\llm_services\Exceptions\CommunicationException * @throws \JsonException * * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion @@ -73,10 +57,10 @@ public function installModel(string $modelName): \Generator|string { public function completion(Payload $payload): \Generator { foreach ($this->getClient()->completion($payload) as $chunk) { yield new OllamaCompletionResponse( - $chunk['model'], - $chunk['response'], - $chunk['done'], - $chunk['context'] ?? [], + model: $chunk['model'], + response: $chunk['response'], + done: $chunk['done'], + context: $chunk['context'] ?? [], ); } } @@ -84,11 +68,20 @@ public function completion(Payload $payload): \Generator { /** * {@inheritdoc} * + * @throws \JsonException + * * @see https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion */ public function chat(Payload $payload): \Generator { - // @todo Implement chatCompletions() method. 
- throw new NotSupportedException(); + foreach ($this->getClient()->chat($payload) as $chunk) { + yield new OllamaChatResponse( + model: $chunk['model'], + content: $chunk['message']['content'] ?? '', + role: $chunk['message']['role'] ? MessageRoles::from($chunk['message']['role']) : MessageRoles::Assistant, + images: $chunk['message']['images'] ?? [], + done: $chunk['done'], + ); + } } /** From 92dcf153bd4a1e3166dc547ecc4526ebfd12b920 Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Tue, 28 May 2024 16:10:46 +0200 Subject: [PATCH 14/17] 1452: Minor clean up in namespaces --- .../{LLModelsProvider.php => LLModelProvider.php} | 4 ++-- src/Plugin/LLModelProviderManager.php | 14 +++++++------- .../LLMProviderInterface.php | 4 ++-- .../Ollama.php | 4 ++-- 4 files changed, 13 insertions(+), 13 deletions(-) rename src/Annotation/{LLModelsProvider.php => LLModelProvider.php} (87%) rename src/Plugin/{LLModelsProviders => LLModelProviders}/LLMProviderInterface.php (94%) rename src/Plugin/{LLModelsProviders => LLModelProviders}/Ollama.php (98%) diff --git a/src/Annotation/LLModelsProvider.php b/src/Annotation/LLModelProvider.php similarity index 87% rename from src/Annotation/LLModelsProvider.php rename to src/Annotation/LLModelProvider.php index 52792e5..2f8ad2b 100644 --- a/src/Annotation/LLModelsProvider.php +++ b/src/Annotation/LLModelProvider.php @@ -6,13 +6,13 @@ use Drupal\Core\Annotation\Translation; /** - * Defines a LLModelsProvider annotation object. + * Defines a LLModelProvider annotation object. * * @see plugin_api * * @Annotation */ -class LLModelsProvider extends Plugin { +class LLModelProvider extends Plugin { /** * The plugin ID. 
diff --git a/src/Plugin/LLModelProviderManager.php b/src/Plugin/LLModelProviderManager.php index 336eb62..a2b4a91 100644 --- a/src/Plugin/LLModelProviderManager.php +++ b/src/Plugin/LLModelProviderManager.php @@ -7,13 +7,13 @@ use Drupal\Core\Extension\ModuleHandlerInterface; use Drupal\Core\Plugin\DefaultPluginManager; use Drupal\llm_services\Form\PluginSettingsForm; -use Drupal\llm_services\Plugin\LLModelsProviders\LLMProviderInterface; +use Drupal\llm_services\Plugin\LLModelProviders\LLMProviderInterface; /** * Provides the LLM plugin manager. * - * @see \Drupal\llm_services\Annotation\LLModelsProvider - * @see \Drupal\llm_services\Plugin\LLModelsProviders\LLMProviderInterface + * @see \Drupal\llm_services\Annotation\LLModelProvider + * @see \Drupal\llm_services\Plugin\LLModelProviders\LLMProviderInterface * @see plugin_api */ class LLModelProviderManager extends DefaultPluginManager { @@ -37,11 +37,11 @@ public function __construct( $this->configFactory = $configFactory; parent::__construct( - 'Plugin/LLModelsProviders', + 'Plugin/LLModelProviders', $namespaces, $module_handler, - 'Drupal\llm_services\Plugin\LLModelsProviders\LLMProviderInterface', - 'Drupal\llm_services\Annotation\LLModelsProvider', + 'Drupal\llm_services\Plugin\LLModelProviders\LLMProviderInterface', + 'Drupal\llm_services\Annotation\LLModelProvider', ); $this->alterInfo('llm_services_providers_info'); @@ -56,7 +56,7 @@ public function createInstance($plugin_id, array $configuration = []): LLMProvid $configuration = $this->configFactory->get(PluginSettingsForm::getConfigName())->get($plugin_id); } - /** @var \Drupal\llm_services\Plugin\LLModelsProviders\LLMProviderInterface $provider */ + /** @var \Drupal\llm_services\Plugin\LLModelProviders\LLMProviderInterface $provider */ $provider = parent::createInstance($plugin_id, $configuration); return $provider; diff --git a/src/Plugin/LLModelsProviders/LLMProviderInterface.php b/src/Plugin/LLModelProviders/LLMProviderInterface.php similarity index 
94% rename from src/Plugin/LLModelsProviders/LLMProviderInterface.php rename to src/Plugin/LLModelProviders/LLMProviderInterface.php index 12f2d08..4a8cf66 100644 --- a/src/Plugin/LLModelsProviders/LLMProviderInterface.php +++ b/src/Plugin/LLModelProviders/LLMProviderInterface.php @@ -1,12 +1,12 @@ Date: Tue, 28 May 2024 16:25:18 +0200 Subject: [PATCH 15/17] 1452: Code analyse fixes --- phpstan.neon | 21 ++++++++++++++++++- src/Client/Ollama.php | 12 ++++++----- src/Client/OllamaChatResponse.php | 2 +- src/Client/OllamaCompletionResponse.php | 5 ++++- src/Commands/ModelChatCommand.php | 1 + src/Plugin/LLModelProviderManager.php | 7 +------ .../LLModelProviders/LLMProviderInterface.php | 2 +- src/Plugin/LLModelProviders/Ollama.php | 16 +++++++++----- 8 files changed, 46 insertions(+), 20 deletions(-) diff --git a/phpstan.neon b/phpstan.neon index d228014..29de114 100644 --- a/phpstan.neon +++ b/phpstan.neon @@ -9,4 +9,23 @@ parameters: ignoreErrors: # This is how drupal works.... - '#Unsafe usage of new static\(\).#' - + - '#\Drupal calls should be avoided in classes, use dependency injection instead#' + - '#getEditableConfigNames\(\) return type has no value type specified in iterable type array#' + - '#buildForm\(\) has parameter \$form with no value type specified in iterable type array.#' + - '#buildForm\(\) return type has no value type specified in iterable type array.#' + - '#validateForm\(\) has parameter \$form with no value type specified in iterable type array.#' + - '#submitForm\(\) has parameter \$form with no value type specified in iterable type array.#' + - '#getDerivativeDefinitions\(\) has parameter \$base_plugin_definition with no value type specified in iterable type array.#' + - '#getDerivativeDefinitions\(\) return type has no value type specified in iterable type array.#' + - '#__construct\(\) has parameter \$configuration with no value type specified in iterable type array.#' + - '#getConfiguration\(\) return type has no value type specified 
in iterable type array.#' + - '#setConfiguration\(\) has parameter \$configuration with no value type specified in iterable type array.#' + - '#buildConfigurationForm\(\) has parameter \$form with no value type specified in iterable type array.#' + - '#buildConfigurationForm\(\) return type has no value type specified in iterable type array.#' + - '#validateConfigurationForm\(\) has parameter \$form with no value type specified in iterable type array.#' + - '#submitConfigurationForm\(\) has parameter \$form with no value type specified in iterable type array.#' + - '#getForm\(\) invoked with 2 parameters, 1 required.#' + - '#While loop condition is always true.#' + - '#has parameter \$configuration with no value type specified in iterable type array.#' + - '#has parameter \$namespaces with no value type specified in iterable type Traversable.#' + - '#Call to an undefined method Symfony\\Component\\Console\\Helper\\HelperInterface::ask\(\).#' diff --git a/src/Client/Ollama.php b/src/Client/Ollama.php index 504ad22..7a00411 100644 --- a/src/Client/Ollama.php +++ b/src/Client/Ollama.php @@ -4,6 +4,7 @@ use Drupal\llm_services\Exceptions\CommunicationException; use Drupal\llm_services\Model\Payload; +use GuzzleHttp\ClientInterface; use GuzzleHttp\Exception\GuzzleException; use GuzzleHttp\RequestOptions; use Psr\Http\Message\ResponseInterface; @@ -29,10 +30,13 @@ class Ollama { * The URL of the Ollama server. * @param int $port * The port that Ollama is listening at. + * @param \GuzzleHttp\ClientInterface $client + * The http client used to interact with ollama. */ public function __construct( private readonly string $url, private readonly int $port, + private readonly ClientInterface $client, ) { } @@ -176,7 +180,7 @@ public function chat(Payload $payload): \Generator { * @param \Drupal\llm_services\Model\Payload $payload * The payload sent to the chat function. 
* - * @return array + * @return array{content: string, role: string}[] * Array of messages to send to Ollama. * * @see https://github.com/ollama/ollama/blob/main/docs/api.md#chat-request-with-history @@ -251,7 +255,7 @@ private function parse(string $data): \Generator { * The method to use (GET/POST). * @param string $uri * The API endpoint to call. - * @param array $options + * @param array $options * Extra options and/or payload to post. * * @return \Psr\Http\Message\ResponseInterface @@ -260,10 +264,8 @@ private function parse(string $data): \Generator { * @throws \Drupal\llm_services\Exceptions\CommunicationException */ private function call(string $method, string $uri, array $options = []): ResponseInterface { - $client = \Drupal::httpClient(); - try { - $response = $client->request($method, $this->getUrl($uri), $options); + $response = $this->client->request($method, $this->getUrl($uri), $options); if ($response->getStatusCode() !== 200) { throw new CommunicationException('Request failed', $response->getStatusCode()); } diff --git a/src/Client/OllamaChatResponse.php b/src/Client/OllamaChatResponse.php index d4a688d..229a80a 100644 --- a/src/Client/OllamaChatResponse.php +++ b/src/Client/OllamaChatResponse.php @@ -19,7 +19,7 @@ * The content of the message from the model. * @param \Drupal\llm_services\Model\MessageRoles $role * The role of the message. - * @param array $images + * @param array $images * Base64 encoded array of images. * @param bool $done * The module completion state. diff --git a/src/Client/OllamaCompletionResponse.php b/src/Client/OllamaCompletionResponse.php index d1f766d..3158ca3 100644 --- a/src/Client/OllamaCompletionResponse.php +++ b/src/Client/OllamaCompletionResponse.php @@ -18,7 +18,7 @@ * The response from the model. * @param bool $done * The module completion state. - * @param array $context + * @param array $context * The generated context when completed. 
*/ public function __construct( @@ -52,6 +52,9 @@ public function getStatus(): bool { /** * {@inheritdoc} + * + * @return array + * The context given from the model. */ public function getContext(): array { return $this->context; diff --git a/src/Commands/ModelChatCommand.php b/src/Commands/ModelChatCommand.php index 90a7528..0ba1fa8 100644 --- a/src/Commands/ModelChatCommand.php +++ b/src/Commands/ModelChatCommand.php @@ -103,6 +103,7 @@ protected function execute(InputInterface $input, OutputInterface $output): int $msg->content = $systemPrompt; $payLoad->messages[] = $msg; + /** @var \Symfony\Component\Console\Helper\HelperInterface $helper */ $helper = $this->getHelper('question'); $question = new Question('Message: ', ''); diff --git a/src/Plugin/LLModelProviderManager.php b/src/Plugin/LLModelProviderManager.php index a2b4a91..84301f8 100644 --- a/src/Plugin/LLModelProviderManager.php +++ b/src/Plugin/LLModelProviderManager.php @@ -28,12 +28,7 @@ class LLModelProviderManager extends DefaultPluginManager { /** * Constructor for LLModelProviderManager. */ - public function __construct( - \Traversable $namespaces, - CacheBackendInterface $cache_backend, - ModuleHandlerInterface $module_handler, - ConfigFactoryInterface $configFactory, - ) { + public function __construct(\Traversable $namespaces, CacheBackendInterface $cache_backend, ModuleHandlerInterface $module_handler, ConfigFactoryInterface $configFactory) { $this->configFactory = $configFactory; parent::__construct( diff --git a/src/Plugin/LLModelProviders/LLMProviderInterface.php b/src/Plugin/LLModelProviders/LLMProviderInterface.php index 4a8cf66..225ffaf 100644 --- a/src/Plugin/LLModelProviders/LLMProviderInterface.php +++ b/src/Plugin/LLModelProviders/LLMProviderInterface.php @@ -13,7 +13,7 @@ interface LLMProviderInterface extends PluginInspectionInterface { /** * List model supported by the provider. * - * @return array,> + * @return array> * List of supported language models. 
* * @throws \Drupal\llm_services\Exceptions\CommunicationException diff --git a/src/Plugin/LLModelProviders/Ollama.php b/src/Plugin/LLModelProviders/Ollama.php index 7059818..ba10782 100644 --- a/src/Plugin/LLModelProviders/Ollama.php +++ b/src/Plugin/LLModelProviders/Ollama.php @@ -33,6 +33,9 @@ public function __construct(array $configuration, $plugin_id, $plugin_definition /** * {@inheritdoc} + * + * @return array + * List of models. */ public function listModels(): array { return $this->getClient()->listLocalModels(); @@ -101,6 +104,9 @@ public function setConfiguration(array $configuration): static { /** * {@inheritdoc} + * + * @return array + * Default configuration array. */ public function defaultConfiguration(): array { return [ @@ -131,7 +137,7 @@ public function buildConfigurationForm(array $form, FormStateInterface $form_sta /** * {@inheritdoc} */ - public function validateConfigurationForm(array &$form, FormStateInterface $form_state) { + public function validateConfigurationForm(array &$form, FormStateInterface $form_state): void { $values = $form_state->getValues(); if (filter_var($values['url'], FILTER_VALIDATE_URL) === FALSE) { @@ -165,10 +171,10 @@ public function submitConfigurationForm(array &$form, FormStateInterface $form_s // Try to connect to Ollama to test the connection. try { $this->listModels(); - \Drupal::messenger()->addMessage('Successfully connected to Ollama'); + $this->messenger->addMessage('Successfully connected to Ollama'); } catch (\Exception $exception) { - \Drupal::messenger()->addMessage('Error communication with Ollama: ' . $exception->getMessage(), 'error'); + $this->messenger->addMessage('Error communication with Ollama: ' . $exception->getMessage(), 'error'); } } @@ -176,10 +182,10 @@ public function submitConfigurationForm(array &$form, FormStateInterface $form_s * Get a client. * * @return \Drupal\llm_services\Client\Ollama - * Client to communicate with Ollama + * Client to communicate with Ollama. 
*/ public function getClient(): ClientOllama { - return new ClientOllama($this->configuration['url'], $this->configuration['port']); + return new ClientOllama($this->configuration['url'], $this->configuration['port'], \Drupal::httpClient()); } } From 10412635cf9bb37ce33a12c2f95dd0c0c7f12382 Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Wed, 29 May 2024 08:50:57 +0200 Subject: [PATCH 16/17] 1452: Added some value validation to commands --- src/Commands/ModelChatCommand.php | 6 ++++++ src/Commands/ModelCompletionCommand.php | 13 +++++++++++-- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/src/Commands/ModelChatCommand.php b/src/Commands/ModelChatCommand.php index 0ba1fa8..7acbd67 100644 --- a/src/Commands/ModelChatCommand.php +++ b/src/Commands/ModelChatCommand.php @@ -88,6 +88,12 @@ protected function execute(InputInterface $input, OutputInterface $output): int $topK = $input->getOption('top-k'); $topP = $input->getOption('top-p'); + if (!is_numeric($temperature) || !is_numeric($topK) || !is_numeric($topP)) { + $output->writeln('Invalid input. Temperature, top-k, and top-p must be numeric values.'); + + return Command::FAILURE; + } + $provider = $this->providerManager->createInstance($providerName); // Build configuration. 
diff --git a/src/Commands/ModelCompletionCommand.php b/src/Commands/ModelCompletionCommand.php index 43d53e3..3352967 100644 --- a/src/Commands/ModelCompletionCommand.php +++ b/src/Commands/ModelCompletionCommand.php @@ -75,18 +75,25 @@ protected function configure(): void { /** * {@inheritDoc} + * + * @throws \Drupal\llm_services\Exceptions\CommunicationException */ protected function execute(InputInterface $input, OutputInterface $output): int { $providerName = $input->getArgument('provider'); $name = $input->getArgument('name'); $prompt = $input->getArgument('prompt'); - $temperature = $input->getOption('temperature'); $topK = $input->getOption('top-k'); $topP = $input->getOption('top-p'); - $provider = $this->providerManager->createInstance($providerName); + if (!is_numeric($temperature) || !is_numeric($topK) || !is_numeric($topP)) { + $output->writeln('Invalid input. Temperature, top-k, and top-p must be numeric values.'); + return Command::FAILURE; + } + + // Build configuration. + $provider = $this->providerManager->createInstance($providerName); $payLoad = new Payload(); $payLoad->model = $name; $payLoad->options = [ @@ -94,6 +101,8 @@ protected function execute(InputInterface $input, OutputInterface $output): int 'top_k' => $topK, 'top_p' => $topP, ]; + + // Create a completion message. 
$msg = new Message(); $msg->content = $prompt; $payLoad->messages[] = $msg; From 19a331bb48a17e29307f5194266cb6cd91a555dc Mon Sep 17 00:00:00 2001 From: Jesper Kristensen Date: Wed, 29 May 2024 08:14:08 +0200 Subject: [PATCH 17/17] 1452: Typos found during review Co-authored-by: Jeppe Kuhlmann Andersen <78410897+jekuaitk@users.noreply.github.com> --- src/Commands/ModelChatCommand.php | 2 +- src/Commands/ProviderListCommand.php | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Commands/ModelChatCommand.php b/src/Commands/ModelChatCommand.php index 7acbd67..0c8b148 100644 --- a/src/Commands/ModelChatCommand.php +++ b/src/Commands/ModelChatCommand.php @@ -113,7 +113,7 @@ protected function execute(InputInterface $input, OutputInterface $output): int $helper = $this->getHelper('question'); $question = new Question('Message: ', ''); - // Keep cheating with the user. Not optimal, but okay for now. + // Keep chatting with the user. Not optimal, but okay for now. while (TRUE) { // Query the next question. $output->write("\n"); diff --git a/src/Commands/ProviderListCommand.php b/src/Commands/ProviderListCommand.php index d1f1518..5050619 100644 --- a/src/Commands/ProviderListCommand.php +++ b/src/Commands/ProviderListCommand.php @@ -33,7 +33,7 @@ public function __construct( protected function configure(): void { $this ->setName('llm:provider:list') - ->setDescription('Install model in provider') + ->setDescription('Lists installed models in provider') ->addUsage('llm:provider:list ollama') ->addArgument( name: 'provider',