2 changes: 1 addition & 1 deletion package.json
@@ -34,7 +34,7 @@
    "@anthropic-ai/sdk": "^0.39.0",
    "@aws-sdk/client-bedrock": "^3.787.0",
    "@aws-sdk/client-bedrock-runtime": "^3.785.0",
-    "@google/genai": "^0.9.0",
+    "@google/genai": "^0.13.0",
    "@modelcontextprotocol/sdk": "^1.7.0",
    "@types/react": "^19.0.12",
    "@types/react-dom": "^19.0.4",
25 changes: 22 additions & 3 deletions src/main/llm/geminiLLM.ts
@@ -135,9 +135,7 @@ export class GeminiLLM implements ILLM {
    }
  }

-  async getModels(): Promise<ILLMModel[]> {
-    // Currently no support for listModels in the Node SDK - may be coming: https://github.com/google-gemini/generative-ai-js/issues/54
-    // For now we're going to make a hardcoded list of current models.
+  async getModelsStatic(): Promise<ILLMModel[]> {
    // This seems like the best source for models and description: https://ai.google.dev/gemini-api/docs/
    const models: ILLMModel[] = [
      {
@@ -201,6 +199,27 @@
    return models;
  }

+  async getModels(): Promise<ILLMModel[]> {
+    const returnModels: ILLMModel[] = [];
+    const models = await this.genAI.models.list();
+
+    // You might want to filter or sort this list in some way. There are some
+    // models that may not make sense, and you might want the "good" ones first (a filtering sketch follows the diff).
+
+    for await (const model of models) {
+      const newModel: ILLMModel = {
+        provider: LLMType.Gemini,
+        // The models/ prefix is stripped here; you may not need to if you'd rather keep the full resource name
+        id: model.name ? model.name.replace(/^models\//, '') : '',
+        name: model.displayName ?? '',
+        description: model.description || '',
+        modelSource: 'Google'
+      };
+      returnModels.push(newModel);
+    }
+    return returnModels;
+  }
+
  async generateResponse(session: ChatSession, messages: ChatMessage[]): Promise<ModelReply> {
    const modelReply: ModelReply = {
      timestamp: Date.now(),
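Note on the "filter or sort" comment in the new getModels(): below is a minimal sketch of one way to curate the returned list, assuming the ILLMModel shape used in this diff. The embedding filter and the PREFERRED_FAMILIES ordering are illustrative assumptions, not part of this change.

// Sketch: post-process the list returned by getModels().
// The filter criteria and the preferred-family ordering are assumptions for illustration.
const PREFERRED_FAMILIES = ['gemini-2.5', 'gemini-2.0', 'gemini-1.5'];

function familyRank(model: ILLMModel): number {
  const rank = PREFERRED_FAMILIES.findIndex(prefix => model.id.startsWith(prefix));
  return rank === -1 ? PREFERRED_FAMILIES.length : rank;
}

function curateModels(models: ILLMModel[]): ILLMModel[] {
  return models
    // Drop entries that are unlikely to be useful for chat, e.g. embedding models.
    .filter(model => !model.id.includes('embedding'))
    // Put the preferred families first; sort() is stable, so API order is preserved within each family.
    .sort((a, b) => familyRank(a) - familyRank(b));
}

// Example usage, e.g. inside getModels() before returning:
//   return curateModels(returnModels);

Whether the models/ prefix is stripped or kept only matters here insofar as the id checks above assume the stripped form.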