From 1378ec4e50bc90b7c17608e6cb5272416e73498f Mon Sep 17 00:00:00 2001
From: Alex Yang
Date: Wed, 5 Jun 2024 22:44:52 -0700
Subject: [PATCH] feat: set default model to `gpt-4o` (#911)

---
 .changeset/giant-otters-deliver.md | 5 +++++
 packages/core/src/llm/openai.ts    | 3 +--
 2 files changed, 6 insertions(+), 2 deletions(-)
 create mode 100644 .changeset/giant-otters-deliver.md

diff --git a/.changeset/giant-otters-deliver.md b/.changeset/giant-otters-deliver.md
new file mode 100644
index 0000000000..b25c199479
--- /dev/null
+++ b/.changeset/giant-otters-deliver.md
@@ -0,0 +1,5 @@
+---
+"llamaindex": patch
+---
+
+feat: set default model to `gpt-4o`
diff --git a/packages/core/src/llm/openai.ts b/packages/core/src/llm/openai.ts
index 53f6cf7c62..1d1bd245b5 100644
--- a/packages/core/src/llm/openai.ts
+++ b/packages/core/src/llm/openai.ts
@@ -110,7 +110,6 @@ export const GPT4_MODELS = {
   "gpt-4-1106-preview": { contextWindow: 128000 },
   "gpt-4-0125-preview": { contextWindow: 128000 },
   "gpt-4-vision-preview": { contextWindow: 128000 },
-  // fixme: wait for openai documentation
   "gpt-4o": { contextWindow: 128000 },
   "gpt-4o-2024-05-13": { contextWindow: 128000 },
 };
@@ -185,7 +184,7 @@ export class OpenAI extends ToolCallLLM {
     },
   ) {
     super();
-    this.model = init?.model ?? "gpt-3.5-turbo";
+    this.model = init?.model ?? "gpt-4o";
     this.temperature = init?.temperature ?? 0.1;
     this.topP = init?.topP ?? 1;
     this.maxTokens = init?.maxTokens ?? undefined;
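
A minimal usage sketch of the behavior change above, assuming the `OpenAI` class from `packages/core/src/llm/openai.ts` is re-exported by the `llamaindex` package named in the changeset; only the `model` fallback from the second hunk is exercised here.

```ts
import { OpenAI } from "llamaindex";

// With no `model` supplied, the constructor now falls back to "gpt-4o"
// (previously "gpt-3.5-turbo"), per `this.model = init?.model ?? "gpt-4o"`.
const llm = new OpenAI();
console.log(llm.model); // "gpt-4o"

// An explicit model still overrides the new default.
const legacy = new OpenAI({ model: "gpt-3.5-turbo" });
console.log(legacy.model); // "gpt-3.5-turbo"
```

Callers that relied on the implicit `gpt-3.5-turbo` default should pass `model` explicitly to keep the previous behavior.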