Added Ollama integration #22

Merged: 17 commits, Oct 2, 2024
Changes from 4 commits
10 changes: 7 additions & 3 deletions extensions/void/package.json
@@ -40,10 +40,15 @@
 "default": "",
 "description": "Greptile - Github PAT (gives Greptile access to your repo)"
 },
-"void.ollamaSettings": {
+"void.ollamaSettings.endpoint": {
 "type": "string",
 "default": "",
-"description": "Ollama settings (coming soon...)"
+"description": "Ollama Endpoint - Local API server can be started with `OLLAMA_ORIGINS=* ollama serve`"
+},
+"void.ollamaSettings.model": {
+"type": "string",
+"default": "",
+"description": "Ollama model to use"
 }
 }
 },
@@ -126,7 +131,6 @@
 "eslint-plugin-react-hooks": "^4.6.2",
 "globals": "^15.9.0",
 "marked": "^14.1.0",
-"ollama": "^0.5.8",
 "postcss": "^8.4.41",
 "react": "^18.3.1",
 "react-dom": "^18.3.1",
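Note: this change also drops the `ollama` npm package from the dependencies; the new implementation in `sendLLMMessage.ts` (below) talks to the local server directly over HTTP. Since the new endpoint description tells users to start the server with `OLLAMA_ORIGINS=* ollama serve`, a small sanity check against the configured endpoint can help debugging. The sketch below is not part of this PR; the helper name, the default endpoint, and the use of Ollama's `GET /api/tags` model listing are assumptions.

```typescript
// Hypothetical helper (not in this PR): verify the Ollama server is reachable
// and that the configured model has been pulled, via GET /api/tags.
async function checkOllama(endpoint = 'http://localhost:11434', model = 'llama3.1'): Promise<boolean> {
	try {
		const res = await fetch(`${endpoint.replace(/\/$/, '')}/api/tags`);
		if (!res.ok) return false;
		const body = await res.json() as { models?: { name: string }[] };
		// Ollama reports model names with a tag suffix, e.g. "llama3.1:latest".
		return (body.models ?? []).some(m => m.name === model || m.name.startsWith(`${model}:`));
	} catch {
		return false; // server not running or endpoint unreachable
	}
}
```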
5 changes: 3 additions & 2 deletions extensions/void/src/SidebarWebviewProvider.ts
@@ -54,8 +54,9 @@ export class SidebarWebviewProvider implements vscode.WebviewViewProvider {

 const nonce = getNonce(); // only scripts with the nonce are allowed to run, this is a recommended security measure

-
-const allowed_urls = ['https://api.anthropic.com', 'https://api.openai.com', 'https://api.greptile.com']
+// Allow Ollama endpoint
+const ollamaEndpoint = vscode.workspace.getConfiguration('void').get('ollamaSettings.endpoint') || 'http://localhost:11434'
+const allowed_urls = ['https://api.anthropic.com', 'https://api.openai.com', 'https://api.greptile.com', ollamaEndpoint ]
 webview.html = `<!DOCTYPE html>
 <html lang="en">
 <head>
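The `allowed_urls` list now includes the configured Ollama endpoint (falling back to `http://localhost:11434`) so the webview is permitted to call the local server. The HTML template itself is outside this hunk; presumably `allowed_urls` is interpolated into the webview's Content-Security-Policy. A minimal sketch of that pattern, with the directives and helper name assumed rather than taken from this PR:

```typescript
import * as vscode from 'vscode';

// Sketch only: how an allowed-URL list is typically wired into a webview CSP.
// The actual template in SidebarWebviewProvider.ts is not shown in this diff.
function buildCsp(webview: vscode.Webview, nonce: string, allowedUrls: string[]): string {
	return [
		`default-src 'none'`,
		`script-src 'nonce-${nonce}'`,            // only scripts carrying the nonce may run
		`connect-src ${allowedUrls.join(' ')}`,   // fetch/XHR targets, now including the Ollama endpoint
		`style-src ${webview.cspSource} 'unsafe-inline'`,
	].join('; ');
}
```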
138 changes: 78 additions & 60 deletions extensions/void/src/common/sendLLMMessage.ts
@@ -1,8 +1,6 @@
 import Anthropic from '@anthropic-ai/sdk';
 import OpenAI from 'openai';

-// import ollama from 'ollama'
-
 export type ApiConfig = {
 anthropic: {
 apikey: string,
@@ -20,7 +18,8 @@ export type ApiConfig = {
 }
 },
 ollama: {
-// TODO
+endpoint: string,
+model: string
 },
 whichApi: string
 }
@@ -220,66 +219,85 @@ const sendGreptileMsg: SendLLMMessageFnTypeInternal = ({ messages, onText, onFin
 export const sendLLMMessage: SendLLMMessageFnTypeExternal = ({ messages, onText, onFinalMessage, apiConfig }) => {
 if (!apiConfig) return { abort: () => { } }

-const whichApi = apiConfig.whichApi
-
-if (whichApi === 'anthropic') {
-return sendClaudeMsg({ messages, onText, onFinalMessage, apiConfig })
-}
-else if (whichApi === 'openai') {
-return sendOpenAIMsg({ messages, onText, onFinalMessage, apiConfig })
-}
-else if (whichApi === 'greptile') {
-return sendGreptileMsg({ messages, onText, onFinalMessage, apiConfig })
+const whichApi = apiConfig.whichApi;
+
+switch (whichApi) {
+case 'anthropic':
+return sendClaudeMsg({ messages, onText, onFinalMessage, apiConfig });
+case 'openai':
+return sendOpenAIMsg({ messages, onText, onFinalMessage, apiConfig });
+case 'greptile':
+return sendGreptileMsg({ messages, onText, onFinalMessage, apiConfig });
+case 'ollama':
+return sendOllamaMsg({ messages, onText, onFinalMessage, apiConfig });
+default:
+console.error(`Error: whichApi was ${whichApi}, which is not recognized!`);
+return sendClaudeMsg({ messages, onText, onFinalMessage, apiConfig }); // TODO
 }
-else if (whichApi === 'ollama') {
-return sendClaudeMsg({ messages, onText, onFinalMessage, apiConfig }) // TODO
-}
-else {
-console.error(`Error: whichApi was ${whichApi}, which is not recognized!`)
-return sendClaudeMsg({ messages, onText, onFinalMessage, apiConfig }) // TODO
-}

 }


 // Ollama
-// const sendOllamaMsg: sendMsgFnType = ({ messages, onText, onFinalMessage }) => {
-
-// let did_abort = false
-// let fullText = ''
-
-// // if abort is called, onFinalMessage is NOT called, and no later onTexts are called either
-// let abort: () => void = () => {
-// did_abort = true
-// }
-
-// ollama.chat({ model: 'llama3.1', messages: messages, stream: true })
-// .then(async response => {
-
-// abort = () => {
-// // response.abort() // this isn't needed now, to keep consistency with claude will leave it commented for now
-// did_abort = true;
-// }
-
-// // when receive text
-// try {
-// for await (const part of response) {
-// if (did_abort) return
-// let newText = part.message.content
-// fullText += newText
-// onText(newText, fullText)
-// }
-// }
-// // when error/fail
-// catch (e) {
-// onFinalMessage(fullText)
-// return
-// }
-
-// // when we get the final message on this stream
-// onFinalMessage(fullText)
-// })
-
-// return { abort };
-// };
+export const sendOllamaMsg: SendLLMMessageFnTypeInternal = ({ messages, onText, onFinalMessage, apiConfig }) => {
+let didAbort = false;
+let fullText = "";
+
+// if abort is called, onFinalMessage is NOT called, and no later onTexts are called either
+const abort = () => {
+didAbort = true;
+};
+
+const handleError = (error: any) => {
+console.error('Error:', error);
+onFinalMessage(fullText);
+};
+
+if (apiConfig.ollama.endpoint.endsWith('/')) {
+apiConfig.ollama.endpoint = apiConfig.ollama.endpoint.slice(0, -1);
+}
+
+fetch(`${apiConfig.ollama.endpoint}/api/chat`, {
+method: "POST",
+headers: {
+"Content-Type": "application/json",
+},
+body: JSON.stringify({
+model: apiConfig.ollama.model,
+messages: messages,
+stream: true,
+}),
+})
+.then(response => {
+if (didAbort) return;
+const reader = response.body?.getReader();
+if (!reader) {
+onFinalMessage(fullText);
+return;
+}
+return reader;
+})
+.then(reader => {
+if (!reader) return;
+
+const readStream = async () => {
+try {
+let done, value;
+while ({ done, value } = await reader.read(), !done) {
+if (didAbort) return;
+const stringedResponse = new TextDecoder().decode(value);
+const newText = JSON.parse(stringedResponse).message.content;
+fullText += newText;
+onText(newText, fullText);
+}
+onFinalMessage(fullText);
+} catch (error) {
+handleError(error);
+}
+};
+
+readStream();
+})
+.catch(handleError);
+
+return { abort };
+};
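One thing worth flagging for follow-up: Ollama's streaming `/api/chat` response is newline-delimited JSON, and a single `reader.read()` chunk is not guaranteed to hold exactly one complete JSON object, so `JSON.parse(stringedResponse)` can throw on a partial or batched chunk, and the catch above then ends the stream early. A buffered line-splitting read loop is the usual fix; a sketch of what the loop inside `readStream` could look like (not part of this PR):

```typescript
// Sketch only: buffer the stream and parse one JSON object per newline-delimited line.
const decoder = new TextDecoder();
let buffer = '';
let done = false;
while (!done) {
	const chunk = await reader.read();
	done = chunk.done;
	buffer += decoder.decode(chunk.value ?? new Uint8Array(), { stream: !done });
	const lines = buffer.split('\n');
	buffer = lines.pop() ?? ''; // keep a trailing partial line for the next chunk
	for (const line of lines) {
		if (!line.trim()) continue;
		if (didAbort) return;
		const part = JSON.parse(line); // each line is { message: { content }, done, ... }
		const newText: string = part.message?.content ?? '';
		fullText += newText;
		onText(newText, fullText);
	}
}
onFinalMessage(fullText);
```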
3 changes: 2 additions & 1 deletion extensions/void/src/extension.ts
@@ -25,7 +25,8 @@ const getApiConfig = () => {
 }
 },
 ollama: {
-// apikey: vscode.workspace.getConfiguration('void').get('ollamaSettings') ?? '',
+endpoint: vscode.workspace.getConfiguration('void').get('ollamaSettings.endpoint') ?? '',
+model: vscode.workspace.getConfiguration('void').get('ollamaSettings.model') ?? '',
 },
 whichApi: vscode.workspace.getConfiguration('void').get('whichApi') ?? ''
 }
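Putting the pieces together, the settings read here are what `sendLLMMessage` dispatches on. A hedged usage sketch (the `messages` shape and the omitted provider fields are assumptions, not shown in this diff):

```typescript
// Sketch only: selecting Ollama through the dispatcher added in sendLLMMessage.ts.
// The remaining ApiConfig fields (anthropic/openai/greptile) are omitted for brevity.
const apiConfig = {
	ollama: { endpoint: 'http://localhost:11434', model: 'llama3.1' },
	whichApi: 'ollama',
} as ApiConfig;

const { abort } = sendLLMMessage({
	messages: [{ role: 'user', content: 'Summarize this repository.' }], // assumed message shape
	onText: (newText, fullText) => console.log(newText),
	onFinalMessage: (fullText) => console.log('final:', fullText),
	apiConfig,
});
// Calling abort() stops the stream; onFinalMessage will then not fire.
```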