diff --git a/packages/opencode/src/auth/index.ts b/packages/opencode/src/auth/index.ts
index b9c8a78caf9c..6642a07429d5 100644
--- a/packages/opencode/src/auth/index.ts
+++ b/packages/opencode/src/auth/index.ts
@@ -3,6 +3,8 @@ import { Global } from "../global"
 import fs from "fs/promises"
 import z from "zod"
 
+export const OAUTH_DUMMY_KEY = "opencode-oauth-dummy-key"
+
 export namespace Auth {
   export const Oauth = z
     .object({
diff --git a/packages/opencode/src/plugin/codex.ts b/packages/opencode/src/plugin/codex.ts
new file mode 100644
index 000000000000..f098a3967171
--- /dev/null
+++ b/packages/opencode/src/plugin/codex.ts
@@ -0,0 +1,417 @@
+import type { Hooks, PluginInput } from "@opencode-ai/plugin"
+import { Log } from "../util/log"
+import { OAUTH_DUMMY_KEY } from "../auth"
+
+const log = Log.create({ service: "plugin.codex" })
+
+const CLIENT_ID = "app_EMoamEEZ73f0CkXaXp7hrann"
+const ISSUER = "https://auth.openai.com"
+const CODEX_API_ENDPOINT = "https://chatgpt.com/backend-api/codex/responses"
+const OAUTH_PORT = 1455
+
+interface PkceCodes {
+  verifier: string
+  challenge: string
+}
+
+async function generatePKCE(): Promise<PkceCodes> {
+  const verifier = generateRandomString(43)
+  const encoder = new TextEncoder()
+  const data = encoder.encode(verifier)
+  const hash = await crypto.subtle.digest("SHA-256", data)
+  const challenge = base64UrlEncode(hash)
+  return { verifier, challenge }
+}
+
+function generateRandomString(length: number): string {
+  const chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
+  const bytes = crypto.getRandomValues(new Uint8Array(length))
+  return Array.from(bytes)
+    .map((b) => chars[b % chars.length])
+    .join("")
+}
+
+function base64UrlEncode(buffer: ArrayBuffer): string {
+  const bytes = new Uint8Array(buffer)
+  const binary = String.fromCharCode(...bytes)
+  return btoa(binary).replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, "")
+}
+
+function generateState(): string {
+  return base64UrlEncode(crypto.getRandomValues(new Uint8Array(32)).buffer)
+}
+
+function buildAuthorizeUrl(redirectUri: string, pkce: PkceCodes, state: string): string {
+  const params = new URLSearchParams({
+    response_type: "code",
+    client_id: CLIENT_ID,
+    redirect_uri: redirectUri,
+    scope: "openid profile email offline_access",
+    code_challenge: pkce.challenge,
+    code_challenge_method: "S256",
+    id_token_add_organizations: "true",
+    codex_cli_simplified_flow: "true",
+    state,
+    originator: "opencode",
+  })
+  return `${ISSUER}/oauth/authorize?${params.toString()}`
+}
+
+interface TokenResponse {
+  id_token: string
+  access_token: string
+  refresh_token: string
+  expires_in?: number
+}
+
+async function exchangeCodeForTokens(code: string, redirectUri: string, pkce: PkceCodes): Promise<TokenResponse> {
+  const response = await fetch(`${ISSUER}/oauth/token`, {
+    method: "POST",
+    headers: { "Content-Type": "application/x-www-form-urlencoded" },
+    body: new URLSearchParams({
+      grant_type: "authorization_code",
+      code,
+      redirect_uri: redirectUri,
+      client_id: CLIENT_ID,
+      code_verifier: pkce.verifier,
+    }).toString(),
+  })
+  if (!response.ok) {
+    throw new Error(`Token exchange failed: ${response.status}`)
+  }
+  return response.json()
+}
+
+async function refreshAccessToken(refreshToken: string): Promise<TokenResponse> {
+  const response = await fetch(`${ISSUER}/oauth/token`, {
+    method: "POST",
+    headers: { "Content-Type": "application/x-www-form-urlencoded" },
+    body: new URLSearchParams({
+      grant_type: "refresh_token",
+      refresh_token: refreshToken,
+      client_id: CLIENT_ID,
+    }).toString(),
+  })
+  if (!response.ok) {
+    throw new Error(`Token refresh failed: ${response.status}`)
+  }
+  return response.json()
+}
+
+const HTML_SUCCESS = `<!DOCTYPE html>
+<html>
+  <head>
+    <title>OpenCode - Codex Authorization Successful</title>
+  </head>
+  <body>
+    <h1>Authorization Successful</h1>
+    <p>You can close this window and return to OpenCode.</p>
+  </body>
+</html>`
+
+const HTML_ERROR = (error: string) => `<!DOCTYPE html>
+<html>
+  <head>
+    <title>OpenCode - Codex Authorization Failed</title>
+  </head>
+  <body>
+    <h1>Authorization Failed</h1>
+    <p>An error occurred during authorization.</p>
+    <pre>${error}</pre>
+  </body>
+</html>`
+
+interface PendingOAuth {
+  pkce: PkceCodes
+  state: string
+  resolve: (tokens: TokenResponse) => void
+  reject: (error: Error) => void
+}
+
+let oauthServer: ReturnType<typeof Bun.serve> | undefined
+let pendingOAuth: PendingOAuth | undefined
+
+async function startOAuthServer(): Promise<{ port: number; redirectUri: string }> {
+  if (oauthServer) {
+    return { port: OAUTH_PORT, redirectUri: `http://localhost:${OAUTH_PORT}/auth/callback` }
+  }
+
+  oauthServer = Bun.serve({
+    port: OAUTH_PORT,
+    fetch(req) {
+      const url = new URL(req.url)
+
+      if (url.pathname === "/auth/callback") {
+        const code = url.searchParams.get("code")
+        const state = url.searchParams.get("state")
+        const error = url.searchParams.get("error")
+        const errorDescription = url.searchParams.get("error_description")
+
+        if (error) {
+          const errorMsg = errorDescription || error
+          pendingOAuth?.reject(new Error(errorMsg))
+          pendingOAuth = undefined
+          return new Response(HTML_ERROR(errorMsg), {
+            headers: { "Content-Type": "text/html" },
+          })
+        }
+
+        if (!code) {
+          const errorMsg = "Missing authorization code"
+          pendingOAuth?.reject(new Error(errorMsg))
+          pendingOAuth = undefined
+          return new Response(HTML_ERROR(errorMsg), {
+            status: 400,
+            headers: { "Content-Type": "text/html" },
+          })
+        }
+
+        if (!pendingOAuth || state !== pendingOAuth.state) {
+          const errorMsg = "Invalid state - potential CSRF attack"
+          pendingOAuth?.reject(new Error(errorMsg))
+          pendingOAuth = undefined
+          return new Response(HTML_ERROR(errorMsg), {
+            status: 400,
+            headers: { "Content-Type": "text/html" },
+          })
+        }
+
+        const current = pendingOAuth
+        pendingOAuth = undefined
+
+        exchangeCodeForTokens(code, `http://localhost:${OAUTH_PORT}/auth/callback`, current.pkce)
+          .then((tokens) => current.resolve(tokens))
+          .catch((err) => current.reject(err))
+
+        return new Response(HTML_SUCCESS, {
+          headers: { "Content-Type": "text/html" },
+        })
+      }
+
+      if (url.pathname === "/cancel") {
+        pendingOAuth?.reject(new Error("Login cancelled"))
+        pendingOAuth = undefined
+        return new Response("Login cancelled", { status: 200 })
+      }
+
+      return new Response("Not found", { status: 404 })
+    },
+  })
+
+  log.info("codex oauth server started", { port: OAUTH_PORT })
+  return { port: OAUTH_PORT, redirectUri: `http://localhost:${OAUTH_PORT}/auth/callback` }
+}
+
+function stopOAuthServer() {
+  if (oauthServer) {
+    oauthServer.stop()
+    oauthServer = undefined
+    log.info("codex oauth server stopped")
+  }
+}
+
+function waitForOAuthCallback(pkce: PkceCodes, state: string): Promise<TokenResponse> {
+  return new Promise<TokenResponse>((resolve, reject) => {
+    const timeout = setTimeout(
+      () => {
+        if (pendingOAuth) {
+          pendingOAuth = undefined
+          reject(new Error("OAuth callback timeout - authorization took too long"))
+        }
+      },
+      5 * 60 * 1000,
+    ) // 5 minute timeout
+
+    pendingOAuth = {
+      pkce,
+      state,
+      resolve: (tokens) => {
+        clearTimeout(timeout)
+        resolve(tokens)
+      },
+      reject: (error) => {
+        clearTimeout(timeout)
+        reject(error)
+      },
+    }
+  })
+}
+
+export async function CodexAuthPlugin(input: PluginInput): Promise<Hooks> {
+  return {
+    auth: {
+      provider: "openai",
+      async loader(getAuth, provider) {
+        const auth = await getAuth()
+        if (auth.type !== "oauth") return {}
+
+        // Filter models to only allowed Codex models for OAuth
+        const allowedModels = new Set(["gpt-5.1-codex-max", "gpt-5.1-codex-mini", "gpt-5.2", "gpt-5.2-codex"])
+        for (const modelId of Object.keys(provider.models)) {
+          if (!allowedModels.has(modelId)) {
+            delete provider.models[modelId]
+          }
+        }
+
+        if (!provider.models["gpt-5.2-codex"]) {
provider.models["gpt-5.2-codex"] = { + id: "gpt-5.2-codex", + providerID: "openai", + api: { + id: "gpt-5.2-codex", + url: "https://chatgpt.com/backend-api/codex", + npm: "@ai-sdk/openai", + }, + name: "GPT-5.2 Codex", + capabilities: { + temperature: false, + reasoning: true, + attachment: true, + toolcall: true, + input: { text: true, audio: false, image: true, video: false, pdf: false }, + output: { text: true, audio: false, image: false, video: false, pdf: false }, + }, + cost: { input: 0, output: 0, cache: { read: 0, write: 0 } }, + limit: { context: 400000, output: 128000 }, + status: "active", + options: {}, + headers: {}, + } + } + + // Zero out costs for Codex (included with ChatGPT subscription) + for (const model of Object.values(provider.models)) { + model.cost = { + input: 0, + output: 0, + cache: { read: 0, write: 0 }, + } + } + + return { + apiKey: OAUTH_DUMMY_KEY, + async fetch(requestInput: RequestInfo | URL, init?: RequestInit) { + // Remove dummy API key authorization header + if (init?.headers) { + if (init.headers instanceof Headers) { + init.headers.delete("authorization") + init.headers.delete("Authorization") + } else if (Array.isArray(init.headers)) { + init.headers = init.headers.filter(([key]) => key.toLowerCase() !== "authorization") + } else { + delete init.headers["authorization"] + delete init.headers["Authorization"] + } + } + + const currentAuth = await getAuth() + if (currentAuth.type !== "oauth") return fetch(requestInput, init) + + // Check if token needs refresh + if (!currentAuth.access || currentAuth.expires < Date.now()) { + log.info("refreshing codex access token") + const tokens = await refreshAccessToken(currentAuth.refresh) + await input.client.auth.set({ + path: { id: "codex" }, + body: { + type: "oauth", + refresh: tokens.refresh_token, + access: tokens.access_token, + expires: Date.now() + (tokens.expires_in ?? 3600) * 1000, + }, + }) + currentAuth.access = tokens.access_token + } + + // Build headers + const headers = new Headers() + if (init?.headers) { + if (init.headers instanceof Headers) { + init.headers.forEach((value, key) => headers.set(key, value)) + } else if (Array.isArray(init.headers)) { + for (const [key, value] of init.headers) { + if (value !== undefined) headers.set(key, String(value)) + } + } else { + for (const [key, value] of Object.entries(init.headers)) { + if (value !== undefined) headers.set(key, String(value)) + } + } + } + + // Set authorization header with access token + headers.set("authorization", `Bearer ${currentAuth.access}`) + + // Rewrite URL to Codex endpoint + let url: URL + if (typeof requestInput === "string") { + url = new URL(requestInput) + } else if (requestInput instanceof URL) { + url = requestInput + } else { + url = new URL(requestInput.url) + } + + // If this is a messages/responses request, redirect to Codex endpoint + if (url.pathname.includes("/v1/responses") || url.pathname.includes("/chat/completions")) { + url = new URL(CODEX_API_ENDPOINT) + } + + return fetch(url, { + ...init, + headers, + }) + }, + } + }, + methods: [ + { + label: "ChatGPT Pro/Plus", + type: "oauth", + authorize: async () => { + const { redirectUri } = await startOAuthServer() + const pkce = await generatePKCE() + const state = generateState() + const authUrl = buildAuthorizeUrl(redirectUri, pkce, state) + + const callbackPromise = waitForOAuthCallback(pkce, state) + + return { + url: authUrl, + instructions: "Complete authorization in your browser. 
This window will close automatically.", + method: "auto" as const, + callback: async () => { + const tokens = await callbackPromise + stopOAuthServer() + return { + type: "success" as const, + refresh: tokens.refresh_token, + access: tokens.access_token, + expires: Date.now() + (tokens.expires_in ?? 3600) * 1000, + } + }, + } + }, + }, + ], + }, + } +} diff --git a/packages/opencode/src/plugin/index.ts b/packages/opencode/src/plugin/index.ts index f2ee91122a7b..4912b8f74ba5 100644 --- a/packages/opencode/src/plugin/index.ts +++ b/packages/opencode/src/plugin/index.ts @@ -7,12 +7,16 @@ import { Server } from "../server/server" import { BunProc } from "../bun" import { Instance } from "../project/instance" import { Flag } from "../flag/flag" +import { CodexAuthPlugin } from "./codex" export namespace Plugin { const log = Log.create({ service: "plugin" }) const BUILTIN = ["opencode-copilot-auth@0.0.11", "opencode-anthropic-auth@0.0.8"] + // Built-in plugins that are directly imported (not installed from npm) + const INTERNAL_PLUGINS: PluginInstance[] = [CodexAuthPlugin] + const state = Instance.state(async () => { const client = createOpencodeClient({ baseUrl: "http://localhost:4096", @@ -20,7 +24,7 @@ export namespace Plugin { fetch: async (...args) => Server.App().fetch(...args), }) const config = await Config.get() - const hooks = [] + const hooks: Hooks[] = [] const input: PluginInput = { client, project: Instance.project, @@ -29,11 +33,23 @@ export namespace Plugin { serverUrl: Server.url(), $: Bun.$, } + + // Load internal plugins first + if (!Flag.OPENCODE_DISABLE_DEFAULT_PLUGINS) { + for (const plugin of INTERNAL_PLUGINS) { + log.info("loading internal plugin", { name: plugin.name }) + const init = await plugin(input) + hooks.push(init) + } + } + const plugins = [...(config.plugin ?? 
     if (!Flag.OPENCODE_DISABLE_DEFAULT_PLUGINS) {
       plugins.push(...BUILTIN)
     }
     for (let plugin of plugins) {
+      // ignore old codex plugin since it is supported first party now
+      if (plugin.includes("opencode-openai-codex-auth")) continue
       log.info("loading plugin", { path: plugin })
       if (!plugin.startsWith("file://")) {
         const lastAtIndex = plugin.lastIndexOf("@")
diff --git a/packages/opencode/src/server/server.ts b/packages/opencode/src/server/server.ts
index c7baec778c62..cdf12e6fa88f 100644
--- a/packages/opencode/src/server/server.ts
+++ b/packages/opencode/src/server/server.ts
@@ -74,6 +74,7 @@ export namespace Server {
   const app = new Hono()
   export const App: () => Hono = lazy(
     () =>
+      // TODO: Break server.ts into smaller route files to fix type inference
       app
         .onError((err, c) => {
           log.error("failed", {
diff --git a/packages/opencode/src/session/llm.ts b/packages/opencode/src/session/llm.ts
index 0db453a22290..c5792c16dea0 100644
--- a/packages/opencode/src/session/llm.ts
+++ b/packages/opencode/src/session/llm.ts
@@ -1,3 +1,5 @@
+import os from "os"
+import { Installation } from "@/installation"
 import { Provider } from "@/provider/provider"
 import { Log } from "@/util/log"
 import {
@@ -19,6 +21,7 @@ import { Plugin } from "@/plugin"
 import { SystemPrompt } from "./system"
 import { Flag } from "@/flag/flag"
 import { PermissionNext } from "@/permission/next"
+import { Auth } from "@/auth"
 
 export namespace LLM {
   const log = Log.create({ service: "llm" })
@@ -82,12 +85,24 @@
     }
 
     const provider = await Provider.getProvider(input.model.providerID)
+    const auth = await Auth.get(input.model.providerID)
+    const isCodex = provider.id === "openai" && auth?.type === "oauth"
+
     const variant =
       !input.small && input.model.variants && input.user.variant ? input.model.variants[input.user.variant] : {}
     const base = input.small
       ? ProviderTransform.smallOptions(input.model)
       : ProviderTransform.options(input.model, input.sessionID, provider.options)
-    const options = pipe(base, mergeDeep(input.model.options), mergeDeep(input.agent.options), mergeDeep(variant))
+    const options: Record<string, any> = pipe(
+      base,
+      mergeDeep(input.model.options),
+      mergeDeep(input.agent.options),
+      mergeDeep(variant),
+    )
+    if (isCodex) {
+      options.instructions = SystemPrompt.instructions()
+      options.store = false
+    }
 
     const params = await Plugin.trigger(
       "chat.params",
@@ -108,16 +123,14 @@
       },
     )
 
-    l.info("params", {
-      params,
-    })
-
-    const maxOutputTokens = ProviderTransform.maxOutputTokens(
-      input.model.api.npm,
-      params.options,
-      input.model.limit.output,
-      OUTPUT_TOKEN_MAX,
-    )
+    const maxOutputTokens = isCodex
+      ? undefined
+      : ProviderTransform.maxOutputTokens(
+          input.model.api.npm,
+          params.options,
+          input.model.limit.output,
+          OUTPUT_TOKEN_MAX,
+        )
 
     const tools = await resolveTools(input)
 
@@ -157,6 +170,13 @@
       maxOutputTokens,
       abortSignal: input.abort,
       headers: {
+        ...(isCodex
+          ? {
+              originator: "opencode",
+              "User-Agent": `opencode/${Installation.VERSION} (${os.platform()} ${os.release()}; ${os.arch()})`,
+              session_id: input.sessionID,
+            }
+          : undefined),
         ...(input.model.providerID.startsWith("opencode")
           ? {
               "x-opencode-project": Instance.project.id,
@@ -169,12 +189,19 @@
       },
       maxRetries: input.retries ?? 0,
       messages: [
-        ...system.map(
-          (x): ModelMessage => ({
-            role: "system",
-            content: x,
-          }),
-        ),
+        ...(isCodex
+          ? [
+              {
+                role: "user",
+                content: system.join("\n\n"),
+              } as ModelMessage,
+            ]
+          : system.map(
+              (x): ModelMessage => ({
+                role: "system",
+                content: x,
+              }),
+            )),
         ...input.messages,
       ],
       model: wrapLanguageModel({
diff --git a/packages/opencode/src/session/prompt/codex_header.txt b/packages/opencode/src/session/prompt/codex_header.txt
new file mode 100644
index 000000000000..70c2c6555ff3
--- /dev/null
+++ b/packages/opencode/src/session/prompt/codex_header.txt
@@ -0,0 +1 @@
+You are a coding agent running in opencode, a terminal-based coding assistant. opencode is an open source project. You are expected to be precise, safe, and helpful.
diff --git a/packages/opencode/src/session/system.ts b/packages/opencode/src/session/system.ts
index fe8c32f0323b..fff90808864b 100644
--- a/packages/opencode/src/session/system.ts
+++ b/packages/opencode/src/session/system.ts
@@ -14,6 +14,7 @@ import PROMPT_GEMINI from "./prompt/gemini.txt"
 import PROMPT_ANTHROPIC_SPOOF from "./prompt/anthropic_spoof.txt"
 import PROMPT_CODEX from "./prompt/codex.txt"
+import PROMPT_CODEX_INSTRUCTIONS from "./prompt/codex_header.txt"
 import type { Provider } from "@/provider/provider"
 import { Flag } from "@/flag/flag"
@@ -23,6 +24,10 @@ export namespace SystemPrompt {
     return []
   }
 
+  export function instructions() {
+    return PROMPT_CODEX_INSTRUCTIONS.trim()
+  }
+
   export function provider(model: Provider.Model) {
     if (model.api.id.includes("gpt-5")) return [PROMPT_CODEX]
     if (model.api.id.includes("gpt-") || model.api.id.includes("o1") || model.api.id.includes("o3"))