adding autoedits support #5845

Merged · 13 commits · Oct 16, 2024
18 changes: 18 additions & 0 deletions lib/shared/src/configuration.ts
@@ -21,6 +21,23 @@ export interface AuthCredentials {
tokenSource?: TokenSource | undefined
}

export interface AutoEditsTokenLimit {
prefixTokens: number
suffixTokens: number
maxPrefixLinesInArea: number
maxSuffixLinesInArea: number
codeToRewritePrefixLines: number
codeToRewriteSuffixLines: number
contextSpecificTokenLimit: Record<string, number>
}

export interface AutoEditsModelConfig {
provider: string
model: string
apiKey: string
tokenLimit: AutoEditsTokenLimit
}

export interface NetConfiguration {
mode?: string | undefined | null
proxy?: {
@@ -62,6 +79,7 @@ interface RawClientConfiguration {

experimentalTracing: boolean
experimentalSupercompletions: boolean
experimentalAutoedits: AutoEditsModelConfig | undefined
experimentalCommitMessage: boolean
experimentalNoodle: boolean
experimentalMinionAnthropicKey: string | undefined
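
For reference, here is a minimal sketch of a configuration object that satisfies the new interfaces above. The provider, model, and all numeric budgets below are illustrative assumptions, not defaults shipped in this PR:

```ts
// Hypothetical values for illustration only.
const exampleAutoeditsConfig: AutoEditsModelConfig = {
    provider: 'openai',
    model: 'gpt-4o',
    apiKey: '<your-api-key>',
    tokenLimit: {
        prefixTokens: 2500,
        suffixTokens: 2500,
        maxPrefixLinesInArea: 11,
        maxSuffixLinesInArea: 4,
        codeToRewritePrefixLines: 1,
        codeToRewriteSuffixLines: 2,
        // Per-retriever token budgets, keyed by context source name (assumed).
        contextSpecificTokenLimit: { 'recent-edits': 1000 },
    },
}
```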
4 changes: 4 additions & 0 deletions lib/shared/src/prompt/prompt-string.ts
@@ -254,6 +254,10 @@ export class PromptString {
return internal_createPromptString(document.getText(range), [document.uri])
}

public static fromStructuredGitDiff(uri: vscode.Uri, diff: string) {
return internal_createPromptString(diff, [uri])
}

public static fromGitDiff(uri: vscode.Uri, oldContent: string, newContent: string) {
const diff = createGitDiff(displayPath(uri), oldContent, newContent)
return internal_createPromptString(diff, [uri])
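
A hypothetical call site for the new `fromStructuredGitDiff` helper: unlike `fromGitDiff`, which builds the diff itself from old and new file contents, it wraps a diff string the caller has already constructed:

```ts
// Hypothetical usage; the diff text is assumed to be a pre-built unified diff.
const diffPrompt = PromptString.fromStructuredGitDiff(
    vscode.Uri.file('/workspace/src/app.ts'),
    existingUnifiedDiffText
)
```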
15 changes: 15 additions & 0 deletions vscode/package.json
@@ -729,6 +729,21 @@
"args": ["previous"],
"key": "shift+ctrl+up",
"when": "cody.activated && !editorReadonly && cody.hasActionableSupercompletion"
},
{
"command": "cody.supersuggest.accept",
"key": "tab",
"when": "editorTextFocus && cody.activated && cody.supersuggest.active"
},
{
"command": "cody.supersuggest.dismiss",
"key": "escape",
"when": "editorTextFocus && cody.activated && cody.supersuggest.active"
},
{
"command": "cody.experimental.suggest",
"key": "ctrl+shift+enter",
"when": "cody.activated"
}
],
"submenus": [
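The Tab and Escape bindings are gated on the `cody.supersuggest.active` context key. The diff does not show where that key is toggled, but VS Code context keys are conventionally flipped through the built-in `setContext` command; a sketch under that assumption:

```ts
// Assumed pattern: enable the accept/dismiss keybindings while a suggestion
// is rendered, and disable them once it is accepted or dismissed.
await vscode.commands.executeCommand('setContext', 'cody.supersuggest.active', true)
// ...later:
await vscode.commands.executeCommand('setContext', 'cody.supersuggest.active', false)
```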
110 changes: 110 additions & 0 deletions vscode/src/autoedits/autoedits-provider.ts
@@ -0,0 +1,110 @@
import {
type AutoEditsTokenLimit,
type DocumentContext,
logDebug,
tokensToChars,
} from '@sourcegraph/cody-shared'
import { Observable } from 'observable-fns'
import * as vscode from 'vscode'
import { ContextMixer } from '../completions/context/context-mixer'
import { DefaultContextStrategyFactory } from '../completions/context/context-strategy'
import { getCurrentDocContext } from '../completions/get-current-doc-context'
import { getConfiguration } from '../configuration'
import type { PromptProvider } from './prompt-provider'
import { DeepSeekPromptProvider } from './providers/deepseek'
import { OpenAIPromptProvider } from './providers/openai'
import { AutoEditsRenderer } from './renderer'

const AUTOEDITS_CONTEXT_STRATEGY = 'auto-edits'

export interface AutoEditsProviderOptions {
document: vscode.TextDocument
position: vscode.Position
}

export class AutoeditsProvider implements vscode.Disposable {
private disposables: vscode.Disposable[] = []
private contextMixer: ContextMixer = new ContextMixer({
strategyFactory: new DefaultContextStrategyFactory(Observable.of(AUTOEDITS_CONTEXT_STRATEGY)),
dataCollectionEnabled: false,
})
private autoEditsTokenLimit: AutoEditsTokenLimit | undefined
private provider: PromptProvider | undefined
private model: string | undefined
private apiKey: string | undefined
private renderer: AutoEditsRenderer = new AutoEditsRenderer()

constructor() {
const config = getConfiguration().experimentalAutoedits
if (config === undefined) {
logDebug('AutoEdits', 'No configuration found in the settings')
return
}
this.initializePromptProvider(config.provider)
this.autoEditsTokenLimit = config.tokenLimit
this.model = config.model
this.apiKey = config.apiKey
this.disposables.push(
this.contextMixer,
this.renderer,
vscode.commands.registerCommand('cody.experimental.suggest', () => this.getAutoedit())
)
}

private initializePromptProvider(provider: string) {
if (provider === 'openai') {
this.provider = new OpenAIPromptProvider()
} else if (provider === 'deepseek') {
this.provider = new DeepSeekPromptProvider()
} else {
logDebug('AutoEdits', `provider ${provider} not supported`)
}
}

public getAutoedit() {
const editor = vscode.window.activeTextEditor
if (!editor) {
return
}
this.predictAutoeditAtDocAndPosition({
document: editor.document,
position: editor.selection.active,
})
}

public async predictAutoeditAtDocAndPosition(options: AutoEditsProviderOptions) {
if (!this.provider || !this.autoEditsTokenLimit || !this.model || !this.apiKey) {
logDebug('AutoEdits', 'Missing provider, model, API key, or token limit in the settings')
return
}
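// Measure end-to-end latency: gather context around the cursor, build the
// provider-specific prompt, query the model, and render the predicted edit.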
const start = Date.now()
const docContext = this.getDocContext(options.document, options.position)
const { context } = await this.contextMixer.getContext({
document: options.document,
position: options.position,
docContext: docContext,
maxChars: 100000,
})
const { codeToReplace, promptResponse: prompt } = this.provider.getPrompt(
docContext,
options.document,
context,
this.autoEditsTokenLimit
)
const response = await this.provider.getModelResponse(this.model, this.apiKey, prompt)
const timeToResponse = Date.now() - start
logDebug('AutoEdits', `Time taken by LLM query: ${timeToResponse}ms`)
await this.renderer.render(options, codeToReplace, response)
}

private getDocContext(document: vscode.TextDocument, position: vscode.Position): DocumentContext {
return getCurrentDocContext({
document,
position,
maxPrefixLength: tokensToChars(this.autoEditsTokenLimit?.prefixTokens ?? 0),
maxSuffixLength: tokensToChars(this.autoEditsTokenLimit?.suffixTokens ?? 0),
})
}

public dispose() {
for (const disposable of this.disposables) {
disposable.dispose()
}
}
}
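
The activation wiring for this class is outside the diff; a minimal sketch of how it might be registered, assuming the usual disposable pattern in the extension's activate path:

```ts
// Hypothetical wiring; the actual registration site is not part of this PR.
export function registerAutoedits(context: vscode.ExtensionContext): void {
    const provider = new AutoeditsProvider()
    // Disposing the provider also disposes the context mixer, renderer,
    // and the registered command.
    context.subscriptions.push(provider)
}
```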
50 changes: 50 additions & 0 deletions vscode/src/autoedits/prompt-provider.ts
@@ -0,0 +1,50 @@
import type { AutoEditsTokenLimit, PromptString } from '@sourcegraph/cody-shared'
import type * as vscode from 'vscode'
import type {
AutocompleteContextSnippet,
DocumentContext,
} from '../../../lib/shared/src/completions/types'
import type * as utils from './prompt-utils'
export type CompletionsPrompt = PromptString
export type ChatPrompt = {
role: 'system' | 'user' | 'assistant'
content: PromptString
}[]
export type PromptProviderResponse = CompletionsPrompt | ChatPrompt

export interface PromptResponseData {
codeToReplace: utils.CodeToReplaceData
promptResponse: PromptProviderResponse
}

export interface PromptProvider {
getPrompt(
docContext: DocumentContext,
document: vscode.TextDocument,
context: AutocompleteContextSnippet[],
tokenBudget: AutoEditsTokenLimit
): PromptResponseData

postProcessResponse(completion: string | null): string

getModelResponse(model: string, apiKey: string, prompt: PromptProviderResponse): Promise<string>
}

export async function getModelResponse(url: string, body: string, apiKey: string): Promise<any> {
const response = await fetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${apiKey}`,
},
body: body,
})
if (!response.ok) {
const errorText = await response.text()
throw new Error(`HTTP error! status: ${response.status}, message: ${errorText}`)
}
const data = await response.json()
return data
}

// ################################################################################################################
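
For context, here is a hypothetical `getModelResponse` implementation for a chat-style provider, built on the shared helper above. The endpoint URL, payload, and response shape mirror OpenAI-compatible chat APIs and are assumptions, not the PR's actual OpenAI or DeepSeek providers:

```ts
// Hypothetical provider method; URL, payload, and response shape are assumed.
async function chatModelResponse(model: string, apiKey: string, prompt: ChatPrompt): Promise<string> {
    const body = JSON.stringify({
        model,
        messages: prompt.map(message => ({
            role: message.role,
            content: message.content.toString(),
        })),
    })
    const data = await getModelResponse('https://api.example.com/v1/chat/completions', body, apiKey)
    return data.choices[0].message.content
}
```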