diff --git a/CHANGELOG.md b/CHANGELOG.md index c0110fb10b41d..e109603ddedfa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this p ## [Unreleased] +### Added + +- Adds a `gitlens.experimental.OpenAIModel` setting to specify the OpenAI model to use to generate commit messages when using the `GitLens: Generate Commit Message` command — closes [#]() thanks to [PR #]() by sadasant ([@sadasant](https://github.com/sadasant)) + ## [13.5.0] - 2023-04-07 ### Added diff --git a/package.json b/package.json index b0861275afa82..a09311d37d0e9 100644 --- a/package.json +++ b/package.json @@ -3684,6 +3684,14 @@ "scope": "window", "order": 55 }, + "gitlens.experimental.OpenAIModel": { + "type": "string", + "enum": ["gpt-3.5-turbo", "gpt-3.5-turbo-0301", "gpt-4", "gpt-4-0314", "gpt-4-32k", "gpt-4-32k-0314"], + "default": "gpt-3.5-turbo", + "markdownDescription": "Specifies the OpenAI model to use to generate commit messages when using the `GitLens: Generate Commit Message` command", + "scope": "window", + "order": 56 + }, "gitlens.advanced.externalDiffTool": { "type": [ "string", diff --git a/src/ai/aiProviderService.ts b/src/ai/aiProviderService.ts index 78da6b384c2df..0fffd0a2dd7c3 100644 --- a/src/ai/aiProviderService.ts +++ b/src/ai/aiProviderService.ts @@ -8,6 +8,7 @@ import { uncommittedStaged } from '../git/models/constants'; import type { GitRevisionReference } from '../git/models/reference'; import type { Repository } from '../git/models/repository'; import { isRepository } from '../git/models/repository'; +import { configuration } from '../system/configuration'; import type { Storage } from '../system/storage'; import { OpenAIProvider } from './openaiProvider'; @@ -23,7 +24,8 @@ export class AIProviderService implements Disposable { private _provider: AIProvider; constructor(private readonly container: Container) { - this._provider = new OpenAIProvider(container); + const model = configuration.get('experimental.OpenAIModel'); + this._provider = new OpenAIProvider(container, model); } 
 dispose() { diff --git a/src/ai/openaiProvider.ts b/src/ai/openaiProvider.ts index 92c382994f6a7..a9e91e8465086 100644 --- a/src/ai/openaiProvider.ts +++ b/src/ai/openaiProvider.ts @@ -13,10 +13,14 @@ export class OpenAIProvider implements AIProvider { readonly id = 'openai'; readonly name = 'OpenAI'; - constructor(private readonly container: Container) {} + constructor(private readonly container: Container, private model: OpenAIChatCompletionModels = 'gpt-3.5-turbo') {} dispose() {} + public setModel(model: OpenAIChatCompletionModels) { + this.model = model; + } + async generateCommitMessage(diff: string, options?: { context?: string }): Promise<string | undefined> { const openaiApiKey = await getApiKey(this.container.storage); if (openaiApiKey == null) return undefined; @@ -34,7 +38,7 @@ export class OpenAIProvider implements AIProvider { } const data: OpenAIChatCompletionRequest = { - model: 'gpt-3.5-turbo', + model: this.model, messages: [ { role: 'system', @@ -90,7 +94,7 @@ export class OpenAIProvider implements AIProvider { } const data: OpenAIChatCompletionRequest = { - model: 'gpt-3.5-turbo', + model: this.model, messages: [ { role: 'system', @@ -195,8 +199,10 @@ async function getApiKey(storage: Storage): Promise<string | undefined> { return openaiApiKey; } +export type OpenAIChatCompletionModels = 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-32k' | 'gpt-4-32k-0314'; + interface OpenAIChatCompletionRequest { - model: 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301'; + model: OpenAIChatCompletionModels; messages: { role: 'system' | 'user' | 'assistant'; content: string }[]; temperature?: number; top_p?: number; diff --git a/src/config.ts b/src/config.ts index 5c050037d9cd5..aed21cd5abbe0 100644 --- a/src/config.ts +++ b/src/config.ts @@ -49,6 +49,7 @@ export interface Config { detectNestedRepositories: boolean; experimental: { generateCommitMessagePrompt: string; + OpenAIModel: 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-32k' | 'gpt-4-32k-0314'; }; 
fileAnnotations: { command: string | null;