133 changes: 127 additions & 6 deletions packages/opencode/src/provider/provider.ts
@@ -517,6 +517,8 @@ export namespace Provider {
     source: z.enum(["env", "config", "custom", "api"]),
     env: z.string().array(),
     key: z.string().optional(),
+    npm: z.string().optional(),
+    api: z.string().optional(),
     options: z.record(z.string(), z.any()),
     models: z.record(z.string(), Model),
   })
@@ -597,6 +599,8 @@ export namespace Provider {
       source: "custom",
       name: provider.name,
       env: provider.env ?? [],
+      npm: provider.npm,
+      api: provider.api,
       options: {},
       models: mapValues(provider.models, (model) => fromModelsDevModel(provider, model)),
     }
@@ -662,6 +666,8 @@ export namespace Provider {
       id: providerID,
       name: provider.name ?? existing?.name ?? providerID,
       env: provider.env ?? existing?.env ?? [],
+      npm: provider.npm ?? existing?.npm,
+      api: provider.api ?? existing?.api,
       options: mergeDeep(existing?.options ?? {}, provider.options ?? {}),
       source: "config",
       models: existing?.models ?? {},
@@ -870,8 +876,13 @@ export namespace Provider {
       }

       if (Object.keys(provider.models).length === 0) {
-        delete providers[providerID]
-        continue
+        const npm = provider.npm
+        const base = provider.options["baseURL"] ?? provider.api
+        const keep = npm === "@ai-sdk/openai-compatible" && Boolean(base)
+        if (!keep) {
+          delete providers[providerID]
+          continue
+        }
       }

       log.info("found", { providerID })
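Reviewer note: this is the behavioral pivot of the patch. Previously any provider left with zero models was pruned here; the new branch keeps an empty provider when it is explicitly OpenAI-compatible and has a base URL to discover models from. A minimal sketch of the predicate, with hypothetical values:

    // Hypothetical values, for illustration only (not part of the patch).
    const provider = {
      npm: "@ai-sdk/openai-compatible",
      api: "http://localhost:11434", // e.g. a local server; any reachable base works
      options: {} as Record<string, unknown>,
    }
    const base = provider.options["baseURL"] ?? provider.api
    const keep = provider.npm === "@ai-sdk/openai-compatible" && Boolean(base) // true: survives the prune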
@@ -885,8 +896,118 @@ export namespace Provider {
       }
     })

+  function normalizeModelsURL(base: string): string {
+    const url = new URL(base)
+    const path = url.pathname.replace(/\/+$/, "")
+    if (path.endsWith("/v1")) {
+      url.pathname = path + "/models"
+      return url.toString()
+    }
+    url.pathname = path + "/v1/models"
+    return url.toString()
+  }
+
+  function discoveredModel(provider: Info, modelID: string, baseURL: string): Model {
+    const model: Model = {
+      id: modelID,
+      providerID: provider.id,
+      api: {
+        id: modelID,
+        npm: provider.npm ?? "@ai-sdk/openai-compatible",
+        url: baseURL,
+      },
+      name: modelID,
+      family: "",
+      capabilities: {
+        temperature: false,
+        reasoning: false,
+        attachment: false,
+        toolcall: true,
+        input: {
+          text: true,
+          audio: false,
+          image: false,
+          video: false,
+          pdf: false,
+        },
+        output: {
+          text: true,
+          audio: false,
+          image: false,
+          video: false,
+          pdf: false,
+        },
+        interleaved: false,
+      },
+      cost: {
+        input: 0,
+        output: 0,
+        cache: {
+          read: 0,
+          write: 0,
+        },
+      },
+      limit: {
+        context: 128000,
+        output: 8192,
+      },
+      status: "active",
+      options: {},
+      headers: {},
+      release_date: "",
+      variants: {},
+    }
+
+    model.variants = mapValues(ProviderTransform.variants(model), (v) => v)
+
+    return model
+  }
+
+  async function discoverOpenAICompatibleModels(provider: Info): Promise<Info> {
+    const base = provider.options["baseURL"] ?? provider.api
+    if (!base) return provider
+    if (provider.npm !== "@ai-sdk/openai-compatible") return provider
+
+    try {
+      const url = normalizeModelsURL(String(base))
+      const result = await fetch(url)
+      if (!result.ok) return provider
+
+      const json = (await result.json()) as { data?: { id?: string }[] }
+      const ids = (json.data ?? []).map((item) => item.id).filter(Boolean) as string[]
+
+      const next: Info = {
+        ...provider,
+        models: { ...provider.models },
+      }
+
+      for (const id of ids) {
+        if (next.models[id]) continue
+        next.models[id] = discoveredModel(provider, id, String(base))
+      }
+
+      return next
+    } catch (e) {
+      log.debug("openai-compatible discovery failed", {
+        providerID: provider.id,
+        error: e instanceof Error ? e.message : String(e),
+      })
+      return provider
+    }
+  }
+
   export async function list() {
-    return state().then((state) => state.providers)
+    const s = await state()
+    const providers = { ...s.providers }
+
+    await Promise.all(
+      Object.entries(providers).map(async ([providerID, provider]) => {
+        const next = await discoverOpenAICompatibleModels(provider)
+        providers[providerID] = next
+      }),
+    )
+
+    return providers
   }

   async function getSDK(model: Model) {
@@ -979,10 +1100,10 @@ export namespace Provider {
   }

   export async function getModel(providerID: string, modelID: string) {
-    const s = await state()
-    const provider = s.providers[providerID]
+    const providers = await list()
+    const provider = providers[providerID]
     if (!provider) {
-      const availableProviders = Object.keys(s.providers)
+      const availableProviders = Object.keys(providers)
       const matches = fuzzysort.go(providerID, availableProviders, { limit: 3, threshold: -10000 })
       const suggestions = matches.map((m) => m.target)
       throw new ModelNotFoundError({ providerID, modelID, suggestions })
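Reviewer note: a sketch of how the new pieces compose, assuming a local OpenAI-compatible server. The response shape ({ data: [{ id }] }) and the relative import path match the tests below; the "local-llm" provider ID is hypothetical.

    // normalizeModelsURL, by example:
    //   http://localhost:8080      -> http://localhost:8080/v1/models
    //   http://localhost:8080/v1   -> http://localhost:8080/v1/models
    //   http://localhost:8080/v1// -> http://localhost:8080/v1/models (trailing slashes stripped first)

    import { Provider } from "../../src/provider/provider" // path as used in the tests below

    // list() now runs discovery once per provider before returning:
    const providers = await Provider.list()
    // Any ID returned by GET <base>/v1/models that is not already configured
    // appears as a discovered model with the default limits (128000 context, 8192 output):
    console.log(Object.keys(providers["local-llm"]?.models ?? {}))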
188 changes: 187 additions & 1 deletion packages/opencode/test/provider/provider.test.ts
@@ -1,5 +1,6 @@
 import { test, expect } from "bun:test"
-import path from "path"
+import http from "node:http"
+import path from "node:path"
 import { tmpdir } from "../fixture/fixture"
 import { Instance } from "../../src/project/instance"
 import { Provider } from "../../src/provider/provider"
@@ -204,6 +205,191 @@ test("custom model alias via config", async () => {
   })
 })

test("openai-compatible discovers models from /v1/models", async () => {
let requests: string[] = []
const server = http.createServer((req, res) => {
requests.push(req.url ?? "")
if (req.url === "/v1/models") {
res.writeHead(200, { "content-type": "application/json" })
res.end(JSON.stringify({ data: [{ id: "model-a" }, { id: "model-b" }] }))
return
}
res.writeHead(404)
res.end()
})

await new Promise<void>((resolve) => server.listen(0, "127.0.0.1", () => resolve()))
const address = server.address()
if (!address || typeof address === "string") throw new Error("server address not found")
const baseURL = `http://127.0.0.1:${address.port}`

await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
provider: {
"local-llm": {
name: "Local LLM",
npm: "@ai-sdk/openai-compatible",
api: baseURL,
env: [],
models: {},
options: {
apiKey: "not-needed",
baseURL: baseURL,
},
},
},
}),
)
},
dispose: async () => {
await new Promise<void>((resolve) => server.close(() => resolve()))
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const providers = await Provider.list()
expect(providers["local-llm"]).toBeDefined()
expect(providers["local-llm"].models["model-a"]).toBeDefined()
expect(providers["local-llm"].models["model-b"]).toBeDefined()
expect(providers["local-llm"].models["model-a"].limit.output).toBe(8192)
},
})

expect(requests).toContain("/v1/models")
})

test("openai-compatible normalizes baseURL with /v1", async () => {
let requests: string[] = []
const server = http.createServer((req, res) => {
requests.push(req.url ?? "")
if (req.url === "/v1/models") {
res.writeHead(200, { "content-type": "application/json" })
res.end(JSON.stringify({ data: [{ id: "model-c" }] }))
return
}
res.writeHead(404)
res.end()
})

await new Promise<void>((resolve) => server.listen(0, "127.0.0.1", () => resolve()))
const address = server.address()
if (!address || typeof address === "string") throw new Error("server address not found")
const baseURL = `http://127.0.0.1:${address.port}/v1`

await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
provider: {
"local-llm": {
name: "Local LLM",
npm: "@ai-sdk/openai-compatible",
api: baseURL,
env: [],
models: {},
options: {
apiKey: "not-needed",
baseURL: baseURL,
},
},
},
}),
)
},
dispose: async () => {
await new Promise<void>((resolve) => server.close(() => resolve()))
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const providers = await Provider.list()
expect(providers["local-llm"].models["model-c"]).toBeDefined()
},
})

expect(requests).toContain("/v1/models")
})

test("openai-compatible discovery does not override config models", async () => {
// regression: discovered models must also resolve via Provider.getModel()
// (chat/message path uses getModel, not Provider.list())

const server = http.createServer((req, res) => {
if (req.url === "/v1/models") {
res.writeHead(200, { "content-type": "application/json" })
res.end(JSON.stringify({ data: [{ id: "model-a" }] }))
return
}
res.writeHead(404)
res.end()
})

await new Promise<void>((resolve) => server.listen(0, "127.0.0.1", () => resolve()))
const address = server.address()
if (!address || typeof address === "string") throw new Error("server address not found")
const baseURL = `http://127.0.0.1:${address.port}`

await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(
path.join(dir, "opencode.json"),
JSON.stringify({
$schema: "https://opencode.ai/config.json",
provider: {
"local-llm": {
name: "Local LLM",
npm: "@ai-sdk/openai-compatible",
api: baseURL,
env: [],
models: {
"model-a": {
name: "Configured A",
tool_call: false,
limit: {
context: 32000,
output: 2048,
},
},
},
options: {
apiKey: "not-needed",
baseURL: baseURL,
},
},
},
}),
)
},
dispose: async () => {
await new Promise<void>((resolve) => server.close(() => resolve()))
},
})

await Instance.provide({
directory: tmp.path,
fn: async () => {
const providers = await Provider.list()
const model = providers["local-llm"].models["model-a"]
expect(model.name).toBe("Configured A")
expect(model.capabilities.toolcall).toBe(false)
expect(model.limit.output).toBe(2048)

const resolved = await Provider.getModel("local-llm", "model-a")
expect(resolved.id).toBe("model-a")
},
})
})

test("custom provider with npm package", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
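Reviewer note: the comment in the last test marks the key design decision of this change. The chat/message path resolves models through Provider.getModel, which previously read state().providers directly and would never see discovered models; it now goes through list(), so discovery runs before the lookup. A sketch with hypothetical IDs:

    import { Provider } from "../../src/provider/provider" // path assumed, as in the tests above

    // "model-b" exists only on the server, never in opencode.json:
    const model = await Provider.getModel("local-llm", "model-b")
    console.log(model.id) // "model-b", resolved because getModel calls list(),
                          // which runs discovery before the provider lookup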