Provider registry #50

Merged (9 commits) on Mar 7, 2025
Changes from 6 commits
2 changes: 1 addition & 1 deletion schema/ai-provider.json → schema/provider-registry.json
@@ -1,6 +1,6 @@
{
"title": "AI provider",
"description": "Provider settings",
"description": "Provider registry settings",
"jupyter.lab.setting-icon": "@jupyterlite/ai:jupyternaut-lite",
"jupyter.lab.setting-icon-label": "JupyterLite AI Chat",
"type": "object",
6 changes: 3 additions & 3 deletions scripts/settings-generator.js
@@ -140,19 +140,19 @@ Object.entries(providers).forEach(([name, desc], index) => {
});

// Build the index.ts file
const indexContent = [];
const indexContent = ["import { IDict } from '../../token';", ''];
Object.keys(providers).forEach(name => {
indexContent.push(`import ${name} from './_generated/${name}.json';`);
});

indexContent.push('', 'const ProviderSettings: { [name: string]: any } = {');
indexContent.push('', 'const ProviderSettings: IDict<any> = {');

Object.keys(providers).forEach((name, index) => {
indexContent.push(
` ${name}` + (index < Object.keys(providers).length - 1 ? ',' : '')
);
});
indexContent.push('};', '', 'export default ProviderSettings;', '');
indexContent.push('};', '', 'export { ProviderSettings };', '');
fs.writeFile(
path.join(schemasDir, 'index.ts'),
indexContent.join('\n'),
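
For context, the file this script emits (src/settings/schemas/index.ts) would now look roughly as follows; a sketch assuming, for illustration, that the script's providers map contains only MistralAI and OpenAI:

// Sketch of the generated src/settings/schemas/index.ts after this change.
// The real file imports every provider listed in the script's `providers` map.
import { IDict } from '../../token';

import MistralAI from './_generated/MistralAI.json';
import OpenAI from './_generated/OpenAI.json';

const ProviderSettings: IDict<any> = {
  MistralAI,
  OpenAI
};

export { ProviderSettings };
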
29 changes: 16 additions & 13 deletions src/chat-handler.ts
@@ -17,9 +17,8 @@ import {
SystemMessage
} from '@langchain/core/messages';
import { UUID } from '@lumino/coreutils';
import { getErrorMessage } from './llm-models';
import { chatSystemPrompt } from './provider';
import { IAIProvider } from './token';
import { IAIProviderRegistry } from './token';
import { jupyternautLiteIcon } from './icons';

/**
@@ -37,17 +36,21 @@ export type ConnectionMessage = {
export class ChatHandler extends ChatModel {
constructor(options: ChatHandler.IOptions) {
super(options);
this._aiProvider = options.aiProvider;
this._prompt = chatSystemPrompt({ provider_name: this._aiProvider.name });
this._providerRegistry = options.providerRegistry;
this._prompt = chatSystemPrompt({
provider_name: this._providerRegistry.currentName
});

this._aiProvider.providerChanged.connect(() => {
this._errorMessage = this._aiProvider.chatError;
this._prompt = chatSystemPrompt({ provider_name: this._aiProvider.name });
this._providerRegistry.providerChanged.connect(() => {
this._errorMessage = this._providerRegistry.chatError;
this._prompt = chatSystemPrompt({
provider_name: this._providerRegistry.currentName
});
});
}

get provider(): BaseChatModel | null {
return this._aiProvider.chatModel;
return this._providerRegistry.currentChatModel;
}

/**
@@ -95,7 +98,7 @@ export class ChatHandler extends ChatModel {
};
this.messageAdded(msg);

if (this._aiProvider.chatModel === null) {
if (this._providerRegistry.currentChatModel === null) {
const errorMsg: IChatMessage = {
id: UUID.uuid4(),
body: `**${this._errorMessage ? this._errorMessage : this._defaultErrorMessage}**`,
@@ -134,7 +137,7 @@ export class ChatHandler extends ChatModel {
let content = '';

try {
for await (const chunk of await this._aiProvider.chatModel.stream(
for await (const chunk of await this._providerRegistry.currentChatModel.stream(
messages
)) {
content += chunk.content ?? chunk;
@@ -144,7 +147,7 @@ export class ChatHandler extends ChatModel {
this._history.messages.push(botMsg);
return true;
} catch (reason) {
const error = getErrorMessage(this._aiProvider.name, reason);
const error = this._providerRegistry.formatErrorMessage(reason);
const errorMsg: IChatMessage = {
id: UUID.uuid4(),
body: `**${error}**`,
@@ -171,7 +174,7 @@ export class ChatHandler extends ChatModel {
super.messageAdded(message);
}

private _aiProvider: IAIProvider;
private _providerRegistry: IAIProviderRegistry;
private _personaName = 'AI';
private _prompt: string;
private _errorMessage: string = '';
@@ -181,6 +184,6 @@

export namespace ChatHandler {
export interface IOptions extends ChatModel.IOptions {
aiProvider: IAIProvider;
providerRegistry: IAIProviderRegistry;
}
}
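
For reference, the registry surface that ChatHandler relies on here looks roughly like the interface below. This is inferred from usage in this file only, not the actual IAIProviderRegistry definition in src/token.ts:

// Inferred sketch of the registry members used by ChatHandler, not the real token.
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ISignal } from '@lumino/signaling';

interface IChatRegistryUsage {
  // Name of the currently selected provider, used to build the system prompt.
  readonly currentName: string;
  // Chat model of the current provider, or null when none is configured.
  readonly currentChatModel: BaseChatModel | null;
  // Error message raised while setting up the chat model, if any.
  readonly chatError: string;
  // Signal emitted whenever the selected provider changes.
  readonly providerChanged: ISignal<unknown, void>;
  // Formats a provider-specific error into a user-facing message.
  formatErrorMessage(reason: unknown): string;
}
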
14 changes: 7 additions & 7 deletions src/completion-provider.ts
@@ -5,7 +5,7 @@ import {
} from '@jupyterlab/completer';

import { IBaseCompleter } from './llm-models';
import { IAIProvider } from './token';
import { IAIProviderRegistry } from './token';

/**
* The generic completion provider to register to the completion provider manager.
@@ -14,10 +14,10 @@ export class CompletionProvider implements IInlineCompletionProvider {
readonly identifier = '@jupyterlite/ai';

constructor(options: CompletionProvider.IOptions) {
this._aiProvider = options.aiProvider;
this._providerRegistry = options.providerRegistry;
this._requestCompletion = options.requestCompletion;

this._aiProvider.providerChanged.connect(() => {
this._providerRegistry.providerChanged.connect(() => {
if (this.completer) {
this.completer.requestCompletion = this._requestCompletion;
}
@@ -28,14 +28,14 @@ export class CompletionProvider implements IInlineCompletionProvider {
* Get the current completer name.
*/
get name(): string {
return this._aiProvider.name;
return this._providerRegistry.currentName;
}

/**
* Get the current completer.
*/
get completer(): IBaseCompleter | null {
return this._aiProvider.completer;
return this._providerRegistry.currentCompleter;
}

async fetch(
@@ -45,13 +45,13 @@
return this.completer?.fetch(request, context);
}

private _aiProvider: IAIProvider;
private _providerRegistry: IAIProviderRegistry;
private _requestCompletion: () => void;
}

export namespace CompletionProvider {
export interface IOptions {
aiProvider: IAIProvider;
providerRegistry: IAIProviderRegistry;
requestCompletion: () => void;
}
}
46 changes: 25 additions & 21 deletions src/index.ts
@@ -21,10 +21,11 @@ import { ReadonlyPartialJSONObject } from '@lumino/coreutils';

import { ChatHandler } from './chat-handler';
import { CompletionProvider } from './completion-provider';
import { AIProvider } from './provider';
import { AIProviders } from './llm-models';
import { AIProviderRegistry } from './provider';
import { aiSettingsRenderer } from './settings/panel';
import { renderSlashCommandOption } from './slash-commands';
import { IAIProvider } from './token';
import { IAIProviderRegistry } from './token';

const autocompletionRegistryPlugin: JupyterFrontEndPlugin<IAutocompletionRegistry> =
{
@@ -57,11 +58,11 @@ const chatPlugin: JupyterFrontEndPlugin<void> = {
id: '@jupyterlite/ai:chat',
description: 'LLM chat extension',
autoStart: true,
requires: [IAIProvider, IRenderMimeRegistry, IAutocompletionRegistry],
requires: [IAIProviderRegistry, IRenderMimeRegistry, IAutocompletionRegistry],
optional: [INotebookTracker, ISettingRegistry, IThemeManager],
activate: async (
app: JupyterFrontEnd,
aiProvider: IAIProvider,
providerRegistry: IAIProviderRegistry,
rmRegistry: IRenderMimeRegistry,
autocompletionRegistry: IAutocompletionRegistry,
notebookTracker: INotebookTracker | null,
@@ -77,8 +78,8 @@ const chatPlugin: JupyterFrontEndPlugin<void> = {
}

const chatHandler = new ChatHandler({
aiProvider: aiProvider,
activeCellManager: activeCellManager
providerRegistry,
activeCellManager
});

let sendWithShiftEnter = false;
@@ -135,47 +136,47 @@ const chatPlugin: JupyterFrontEndPlugin<void> = {
const completerPlugin: JupyterFrontEndPlugin<void> = {
id: '@jupyterlite/ai:completer',
autoStart: true,
requires: [IAIProvider, ICompletionProviderManager],
requires: [IAIProviderRegistry, ICompletionProviderManager],
activate: (
app: JupyterFrontEnd,
aiProvider: IAIProvider,
providerRegistry: IAIProviderRegistry,
manager: ICompletionProviderManager
): void => {
const completer = new CompletionProvider({
aiProvider,
providerRegistry,
requestCompletion: () => app.commands.execute('inline-completer:invoke')
});
manager.registerInlineProvider(completer);
}
};

const aiProviderPlugin: JupyterFrontEndPlugin<IAIProvider> = {
id: '@jupyterlite/ai:ai-provider',
const providerRegistryPlugin: JupyterFrontEndPlugin<IAIProviderRegistry> = {
id: '@jupyterlite/ai:provider-registry',
autoStart: true,
requires: [IFormRendererRegistry, ISettingRegistry],
optional: [IRenderMimeRegistry],
provides: IAIProvider,
provides: IAIProviderRegistry,
activate: (
app: JupyterFrontEnd,
editorRegistry: IFormRendererRegistry,
settingRegistry: ISettingRegistry,
rmRegistry?: IRenderMimeRegistry
): IAIProvider => {
const aiProvider = new AIProvider();
): IAIProviderRegistry => {
const providerRegistry = new AIProviderRegistry();

editorRegistry.addRenderer(
'@jupyterlite/ai:ai-provider.AIprovider',
aiSettingsRenderer({ rmRegistry })
'@jupyterlite/ai:provider-registry.AIprovider',
aiSettingsRenderer({ providerRegistry, rmRegistry })
);
settingRegistry
.load(aiProviderPlugin.id)
.load(providerRegistryPlugin.id)
.then(settings => {
const updateProvider = () => {
// Update the settings to the AI providers.
const providerSettings = (settings.get('AIprovider').composite ?? {
provider: 'None'
}) as ReadonlyPartialJSONObject;
aiProvider.setProvider(
providerRegistry.setProvider(
providerSettings.provider as string,
providerSettings
);
@@ -186,17 +187,20 @@ const aiProviderPlugin: JupyterFrontEndPlugin<IAIProvider> = {
})
.catch(reason => {
console.error(
`Failed to load settings for ${aiProviderPlugin.id}`,
`Failed to load settings for ${providerRegistryPlugin.id}`,
reason
);
});

return aiProvider;
// Initialize the registry with the default providers
AIProviders.forEach(provider => providerRegistry.add(provider));

return providerRegistry;
}
};

export default [
aiProviderPlugin,
providerRegistryPlugin,
autocompletionRegistryPlugin,
chatPlugin,
completerPlugin
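
Because the registry is now provided as a token and populated through add(), another plugin could register an additional provider the same way. A hypothetical sketch (the plugin id, provider name, and the reuse of the OpenAI classes are made up for illustration; only registry.add() and the descriptor fields come from this diff):

// Hypothetical extra plugin registering one more provider, not part of this PR.
import {
  JupyterFrontEnd,
  JupyterFrontEndPlugin
} from '@jupyterlab/application';
import { ChatOpenAI } from '@langchain/openai';

import { OpenAICompleter } from './llm-models/openai-completer';
import { ProviderSettings } from './settings/schemas';
import { IAIProviderRegistry } from './token';

const extraProviderPlugin: JupyterFrontEndPlugin<void> = {
  id: '@jupyterlite/ai:extra-provider',
  autoStart: true,
  requires: [IAIProviderRegistry],
  activate: (app: JupyterFrontEnd, registry: IAIProviderRegistry): void => {
    // Register an OpenAI-compatible provider with the shared registry.
    registry.add({
      name: 'MyOpenAICompatible',
      chatModel: ChatOpenAI,
      completer: OpenAICompleter,
      settingsSchema: ProviderSettings.OpenAI
    });
  }
};
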
51 changes: 49 additions & 2 deletions src/llm-models/index.ts
@@ -1,3 +1,50 @@
import { ChatAnthropic } from '@langchain/anthropic';
import { ChromeAI } from '@langchain/community/experimental/llms/chrome_ai';
import { ChatMistralAI } from '@langchain/mistralai';
import { ChatOpenAI } from '@langchain/openai';

import { AnthropicCompleter } from './anthropic-completer';
import { CodestralCompleter } from './codestral-completer';
import { ChromeCompleter } from './chrome-completer';
import { OpenAICompleter } from './openai-completer';

import { instructions } from '../settings/instructions';
import { ProviderSettings } from '../settings/schemas';

import { IAIProvider } from '../token';

export * from './base-completer';
export * from './codestral-completer';
export * from './utils';

const AIProviders: IAIProvider[] = [
{
name: 'Anthropic',
chatModel: ChatAnthropic,
completer: AnthropicCompleter,
settingsSchema: ProviderSettings.Anthropic,
errorMessage: (error: any) => error.error.error.message
},
{
name: 'ChromeAI',
// TODO: fix
// @ts-expect-error: missing properties
chatModel: ChromeAI,
completer: ChromeCompleter,
instructions: instructions.ChromeAI,
settingsSchema: ProviderSettings.ChromeAI
},
{
name: 'MistralAI',
chatModel: ChatMistralAI,
completer: CodestralCompleter,
instructions: instructions.MistralAI,
settingsSchema: ProviderSettings.MistralAI
},
{
name: 'OpenAI',
chatModel: ChatOpenAI,
completer: OpenAICompleter,
settingsSchema: ProviderSettings.OpenAI
}
];

export { AIProviders };
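
From the entries above, each provider descriptor appears to follow a shape along these lines (an inference from this file; the actual IAIProvider interface is defined in src/token.ts):

// Inferred sketch of the descriptor satisfied by the AIProviders entries above,
// not the actual IAIProvider declaration from src/token.ts.
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { IBaseCompleter } from './base-completer';

interface IProviderDescriptorSketch {
  // Display name used to select the provider in the settings.
  name: string;
  // LangChain chat model class the registry instantiates for the chat.
  chatModel: typeof BaseChatModel;
  // Inline completer class paired with the provider.
  completer: new (...args: any[]) => IBaseCompleter;
  // JSON schema describing the provider's settings form.
  settingsSchema: any;
  // Optional provider-specific instructions (assumed to be a string here).
  instructions?: string;
  // Optional mapper from a provider error to a user-facing message.
  errorMessage?: (error: any) => string;
}
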