diff --git a/messages/en/settings.json b/messages/en/settings.json
index 23541ad95..f77fe5fd0 100644
--- a/messages/en/settings.json
+++ b/messages/en/settings.json
@@ -788,6 +788,28 @@
       "openai": "OpenAI",
       "gemini": "Gemini"
     },
+    "fetchModels": {
+      "fetchModels": "Fetch Models",
+      "fetching": "Fetching...",
+      "fetchSuccess": "Fetch Success",
+      "fetchFailed": "Fetch Failed",
+      "fetchedCount": "Fetched {count}",
+      "modelsFound": "Found {count} available models",
+      "noModelsFound": "No models found",
+      "fillUrlFirst": "Please fill in the provider URL first",
+      "invalidUrl": "Invalid provider URL; only http/https is supported",
+      "fillKeyFirst": "Please fill in the API key first",
+      "errors": {
+        "noPermission": "You don't have permission to fetch models.",
+        "invalidUrl": "Provider URL is invalid{reason}.",
+        "invalidProxy": "Proxy URL is invalid. Allowed formats: http://, https://, socks5://, socks4://",
+        "httpError": "Fetching models failed: HTTP {status}{detail}",
+        "empty": "No models returned by the provider.",
+        "timeout": "Request timed out. Please check the network or provider address.",
+        "jsonCredsNotSupported": "Gemini JSON credentials are not supported for fetching models. Please use an API key.",
+        "unknown": "Failed to fetch models. Please try again."
+      }
+    },
     "modelRedirect": {
       "currentRules": "Current Rules ({count})",
       "addNewRule": "Add New Rule",
diff --git a/messages/zh-CN/settings.json b/messages/zh-CN/settings.json
index 7fd6f8212..3612088de 100644
--- a/messages/zh-CN/settings.json
+++ b/messages/zh-CN/settings.json
@@ -393,6 +393,28 @@
       "openai": "OpenAI",
       "gemini": "Gemini"
     },
+    "fetchModels": {
+      "fetchModels": "获取模型",
+      "fetching": "获取中...",
+      "fetchSuccess": "获取成功",
+      "fetchFailed": "获取失败",
+      "fetchedCount": "已获取 {count} 个",
+      "modelsFound": "找到 {count} 个可用模型",
+      "noModelsFound": "未找到可用模型",
+      "fillUrlFirst": "请先填写供应商 URL",
+      "invalidUrl": "供应商 URL 无效,仅支持 http/https",
+      "fillKeyFirst": "请先填写 API 密钥",
+      "errors": {
+        "noPermission": "当前账号无权限获取模型列表",
+        "invalidUrl": "供应商 URL 无效{reason}",
+        "invalidProxy": "代理地址格式无效,仅支持 http://、https://、socks5://、socks4://",
+        "httpError": "获取模型失败:HTTP {status}{detail}",
+        "empty": "供应商没有返回任何模型",
+        "timeout": "请求超时,请检查网络连接或供应商地址",
+        "jsonCredsNotSupported": "暂不支持使用 Gemini JSON 凭证获取模型,请使用 API Key",
+        "unknown": "获取模型失败,请稍后再试"
+      }
+    },
     "modelRedirect": {
       "currentRules": "当前规则 ({count})",
       "addNewRule": "添加新规则",
diff --git a/src/actions/providers.ts b/src/actions/providers.ts
index 62aae4c8e..ffcec8b08 100644
--- a/src/actions/providers.ts
+++ b/src/actions/providers.ts
@@ -2449,7 +2449,7 @@ export async function testProviderUnified(data: UnifiedTestArgs): Promise
+/**
+ * OpenAI models list response type
+ */
+type OpenAIModelsResponse = {
+  data?: Array<{
+    id: string;
+  }>;
+};
+
+/**
+ * Gemini models list response type
+ */
+type GeminiModelsResponse = {
+  models: Array<{
+    name: string;
+    displayName?: string;
+    description?: string;
+    supportedGenerationMethods?: string[];
+  }>;
+};
+
+/**
+ * Fetch the list of available models from the provider API
+ *
+ * Supports:
+ * - OpenAI Compatible / Codex: GET /v1/models
+ * - Claude / Claude-Auth: GET /v1/models (supported by some relay services)
+ * - Gemini / Gemini-CLI: GET /v1beta/models
+ */
+export async function fetchProviderModels(
+  data: FetchModelsArgs
+): Promise<
+  | { ok: true; data: { models: string[] } }
+  | { ok: false; error: string; errorCode?: string; errorParams?: Record<string, string> }
+> {
+  const session = await getSession();
+  if (!session || session.user.role !== "admin") {
+    return {
+      ok: false,
+      error: "无权限执行此操作",
+      errorCode: "FETCH_MODELS_NO_PERMISSION",
+    };
+  }
+
+  // Validate the provider URL
+  const urlValidation = validateProviderUrlForConnectivity(data.providerUrl);
+  if (!urlValidation.valid) {
+    return {
+      ok: false,
+      error: urlValidation.error.message,
+      errorCode: "FETCH_MODELS_INVALID_URL",
+      errorParams: {
+        reason: urlValidation.error.details?.error ?? "",
+      },
+    };
+  }
+
+  // Validate the proxy URL if one was provided
+  if (data.proxyUrl && !isValidProxyUrl(data.proxyUrl)) {
+    return {
+      ok: false,
+      error: "代理地址格式无效,支持格式: http://, https://, socks5://, socks4://",
+      errorCode: "FETCH_MODELS_INVALID_PROXY",
+    };
+  }
+
+  const isGeminiProvider = data.providerType === "gemini" || data.providerType === "gemini-cli";
+  if (isGeminiProvider && data.apiKey.trim().startsWith("{")) {
+    return {
+      ok: false,
+      error: "Gemini JSON 凭证暂不支持获取模型列表,请使用 API Key",
+      errorCode: "FETCH_MODELS_GEMINI_JSON_CREDS",
+    };
+  }
+
+  const normalizedUrl = urlValidation.normalizedUrl.replace(/\/$/, "");
+
+  try {
+    // Build the request config based on the provider type
+    const { endpoint, headers } = getModelsApiConfig(
+      data.providerType,
+      data.apiKey,
+      normalizedUrl
+    );
+
+    const url = normalizedUrl + endpoint;
+
+    // Create a proxy agent if needed
+    const tempProvider: ProviderProxyConfig = {
+      id: -1,
+      name: "fetch-models",
+      proxyUrl: data.proxyUrl ?? null,
+      proxyFallbackToDirect: data.proxyFallbackToDirect ?? false,
+    };
+
+    const proxyConfig = createProxyAgentForProvider(tempProvider, url);
+
+    interface UndiciFetchOptions extends RequestInit {
+      dispatcher?: unknown;
+    }
+
+    const init: UndiciFetchOptions = {
+      method: "GET",
+      headers: {
+        ...headers,
+        Accept: "application/json",
+      },
+      signal: AbortSignal.timeout(API_TEST_CONFIG.TIMEOUT_MS),
+    };
+
+    if (proxyConfig) {
+      init.dispatcher = proxyConfig.agent;
+    }
+
+    logger.debug("fetchProviderModels: Fetching models", {
+      providerType: data.providerType,
+      endpoint,
+      url: url.replace(/key=([^&]*)/gi, "key=***").replace(/:\/\/[^@]*@/, "://***@"),
+    });
+
+    const response = await fetch(url, init);
+
+    if (!response.ok) {
+      const errorText = await response.text();
+      let errorDetail: string | undefined;
+
+      try {
+        const errorJson = JSON.parse(errorText);
+        errorDetail = extractErrorMessage(errorJson);
+      } catch {
+        errorDetail = errorText.substring(0, 200);
+      }
+
+      logger.warn("fetchProviderModels: API error", {
+        status: response.status,
+        errorDetail,
+      });
+
+      return {
+        ok: false,
+        error: `获取模型列表失败: HTTP ${response.status}${errorDetail ? ` - ${errorDetail}` : ""}`,
+        errorCode: "FETCH_MODELS_HTTP_ERROR",
+        errorParams: {
+          status: String(response.status),
+          detail: errorDetail ?? "",
+        },
+      };
+    }
+
+    const responseData = await response.json();
+
+    // Parse the models based on the provider type
+    const models = parseModelsResponse(data.providerType, responseData);
+
+    if (models.length === 0) {
+      return {
+        ok: false,
+        error: "未找到可用模型",
+        errorCode: "FETCH_MODELS_EMPTY",
+      };
+    }
+
+    logger.info("fetchProviderModels: Success", {
+      providerType: data.providerType,
+      modelCount: models.length,
+    });
+
+    return {
+      ok: true,
+      data: { models },
+    };
+  } catch (error) {
+    if (error instanceof Error && isClientAbortError(error)) {
+      logger.warn("fetchProviderModels timeout", {
+        providerType: data.providerType,
+        error: error.message,
+      });
+      return {
+        ok: false,
+        error: "请求超时,请检查网络连接或供应商地址",
+        errorCode: "FETCH_MODELS_TIMEOUT",
+      };
+    }
+
+    logger.error("fetchProviderModels error", {
+      error: error instanceof Error ? error.message : String(error),
+    });
+
+    return {
+      ok: false,
+      error: error instanceof Error ? error.message : "获取模型列表失败",
+      errorCode: "FETCH_MODELS_UNKNOWN",
+    };
+  }
+}
+
+/**
+ * Get the models API config for a provider type
+ */
+function getModelsApiConfig(
+  providerType: ProviderType,
+  apiKey: string,
+  providerUrl: string
+): { endpoint: string; headers: Record<string, string> } {
+  switch (providerType) {
+    case "gemini":
+    case "gemini-cli": {
+      // Gemini uses the /v1beta/models endpoint
+      return {
+        endpoint: "/v1beta/models",
+        headers: {
+          "x-goog-api-key": apiKey,
+        },
+      };
+    }
+
+    case "claude":
+    case "claude-auth": {
+      // Claude relay services may support /v1/models
+      const hostname = getHostnameFromUrl(providerUrl);
+      const isOfficialAnthropic = hostname
+        ? hostname.endsWith("anthropic.com") || hostname.endsWith("claude.ai")
+        : false;
+
+      const headers: Record<string, string> = {
+        "anthropic-version": "2023-06-01",
+      };
+
+      if (isOfficialAnthropic) {
+        headers["x-api-key"] = apiKey;
+      } else {
+        // For relay services, send both authentication headers
+        headers["x-api-key"] = apiKey;
+        headers["Authorization"] = `Bearer ${apiKey}`;
+      }
+
+      return {
+        endpoint: "/v1/models",
+        headers,
+      };
+    }
+
+    case "openai-compatible":
+    case "codex":
+    default: {
+      // OpenAI-compatible services use /v1/models
+      return {
+        endpoint: "/v1/models",
+        headers: {
+          Authorization: `Bearer ${apiKey}`,
+        },
+      };
+    }
+  }
+}
+
+/**
+ * Parse the models response for a provider type
+ */
+function parseModelsResponse(providerType: ProviderType, responseData: unknown): string[] {
+  if (Array.isArray(responseData)) {
+    return (responseData as unknown[])
+      .map((item) => {
+        if (typeof item === "string") return item;
+        if (typeof item === "object" && item !== null) {
+          const obj = item as Record<string, unknown>;
+          return (obj.id || obj.name || obj.model) as string;
+        }
+        return "";
+      })
+      .filter((name) => typeof name === "string" && name.length > 0)
+      .sort();
+  }
+
+  if (!responseData || typeof responseData !== "object") {
+    return [];
+  }
+
+  const data = responseData as Record<string, unknown>;
+
+  // Gemini format: { models: [{ name: "models/gemini-pro", ... }] }
+  if (providerType === "gemini" || providerType === "gemini-cli") {
+    const geminiResponse = data as GeminiModelsResponse;
+    if (Array.isArray(geminiResponse.models)) {
+      return geminiResponse.models
+        .map((model) => {
+          // Gemini model names look like "models/gemini-pro"; keep only the model name
+          const name = model.name || "";
+          return name.startsWith("models/") ? name.slice(7) : name;
+        })
+        .filter((name) => name.length > 0)
+        .sort();
+    }
+    return [];
+  }
+
+  // OpenAI format: { object: "list", data: [{ id: "gpt-4", ... }] }
+  const openaiResponse = data as OpenAIModelsResponse;
+  if (Array.isArray(openaiResponse.data)) {
+    return openaiResponse.data
+      .map((model) => model.id)
+      .filter((id) => typeof id === "string" && id.length > 0)
+      .sort();
+  }
+
+  // Try to handle other formats
+  // Some providers return { models: ["model1", "model2"] } or { models: [{ id: "model1" }, ...] }
+  if (Array.isArray(data.models)) {
+    return (data.models as unknown[])
+      .map((m) => {
+        if (typeof m === "string") return m;
+        if (m && typeof m === "object" && "id" in m) return (m as { id: string }).id;
+        if (m && typeof m === "object" && "name" in m) return (m as { name: string }).name;
+        return null;
+      })
+      .filter((id): id is string => typeof id === "string" && id.length > 0)
+      .sort();
+  }
+
+  return [];
+}
diff --git a/src/app/[locale]/settings/providers/_components/forms/fetch-models-button.tsx b/src/app/[locale]/settings/providers/_components/forms/fetch-models-button.tsx
new file mode 100644
index 000000000..b4b35f60a
--- /dev/null
+++ b/src/app/[locale]/settings/providers/_components/forms/fetch-models-button.tsx
@@ -0,0 +1,229 @@
+"use client";
+
+import { useEffect, useRef, useState } from "react";
+import { Button } from "@/components/ui/button";
+import { Loader2, Download, CheckCircle2, XCircle } from "lucide-react";
+import { fetchProviderModels, getUnmaskedProviderKey } from "@/actions/providers";
+import { toast } from "sonner";
+import { useTranslations } from "next-intl";
+import { isValidUrl } from "@/lib/utils/validation";
+import type { ProviderType } from "@/types/provider";
+
+interface FetchModelsButtonProps {
+  providerUrl: string;
+  apiKey: string;
+  providerType: ProviderType;
+  proxyUrl?: string | null;
+  proxyFallbackToDirect?: boolean;
+  disabled?: boolean;
+  providerId?: number;
+  onModelsLoaded: (models: string[]) => void;
+}
+
+type FetchStatus = "idle" | "loading" | "success" | "error";
+type FetchModelsResponse = Awaited<ReturnType<typeof fetchProviderModels>>;
+type FetchModelsErrorResponse = Extract<FetchModelsResponse, { ok: false }>;
+
+/**
+ * Button component that fetches the available models from the provider API
+ *
+ * Fetches the model list from the provider's /models endpoint and passes the
+ * result to the parent component through the onModelsLoaded callback.
+ */
+export function FetchModelsButton({
+  providerUrl,
+  apiKey,
+  providerType,
+  proxyUrl,
+  proxyFallbackToDirect = false,
+  disabled = false,
+  providerId,
+  onModelsLoaded,
+}: FetchModelsButtonProps) {
+  const t = useTranslations("settings.providers.form.fetchModels");
+  const [status, setStatus] = useState<FetchStatus>("idle");
+  const [lastFetchCount, setLastFetchCount] = useState(0);
+  const timeoutRefs = useRef<Array<ReturnType<typeof setTimeout>>>([]);
+
+  const scheduleStatusReset = () => {
+    const timer = setTimeout(() => {
+      setStatus("idle");
+      timeoutRefs.current = timeoutRefs.current.filter((item) => item !== timer);
+    }, 3000);
+    timeoutRefs.current.push(timer);
+  };
+
+  useEffect(() => {
+    return () => {
+      timeoutRefs.current.forEach((timer) => clearTimeout(timer));
+      timeoutRefs.current = [];
+    };
+  }, []);
+
+  const getFetchErrorMessage = (result: FetchModelsErrorResponse): string => {
+    if (!result.errorCode) {
+      return result.error || t("fetchFailed");
+    }
+
+    switch (result.errorCode) {
+      case "FETCH_MODELS_NO_PERMISSION":
+        return t("errors.noPermission");
+      case "FETCH_MODELS_INVALID_URL": {
+        const reason = result.errorParams?.reason ? ` (${result.errorParams.reason})` : "";
+        return t("errors.invalidUrl", { reason });
+      }
+      case "FETCH_MODELS_INVALID_PROXY":
+        return t("errors.invalidProxy");
+      case "FETCH_MODELS_HTTP_ERROR": {
+        const status = result.errorParams?.status ?? "unknown";
+        const detail = result.errorParams?.detail ? ` - ${result.errorParams.detail}` : "";
+        return t("errors.httpError", { status, detail });
+      }
+      case "FETCH_MODELS_EMPTY":
+        return t("errors.empty");
+      case "FETCH_MODELS_TIMEOUT":
+        return t("errors.timeout");
+      case "FETCH_MODELS_GEMINI_JSON_CREDS":
+        return t("errors.jsonCredsNotSupported");
+      case "FETCH_MODELS_UNKNOWN":
+        return t("errors.unknown");
+      default:
+        return result.error || t("fetchFailed");
+    }
+  };
+
+  const handleFetch = async () => {
+    // Validate the URL
+    if (!providerUrl.trim()) {
+      toast.error(t("fillUrlFirst"));
+      return;
+    }
+
+    if (!isValidUrl(providerUrl.trim()) || !/^https?:\/\//.test(providerUrl.trim())) {
+      toast.error(t("invalidUrl"));
+      return;
+    }
+
+    setStatus("loading");
+
+    try {
+      // Resolve the API key: prefer the form input, fall back to the database when a providerId is provided
+      let resolvedKey = apiKey.trim();
+
+      if (!resolvedKey && providerId) {
+        const result = await getUnmaskedProviderKey(providerId);
+        if (!result.ok) {
+          toast.error(result.error || t("fillKeyFirst"));
+          setStatus("error");
+          scheduleStatusReset();
+          return;
+        }
+
+        if (!result.data?.key) {
+          toast.error(t("fillKeyFirst"));
+          setStatus("error");
+          scheduleStatusReset();
+          return;
+        }
+
+        resolvedKey = result.data.key;
+      }
+
+      if (!resolvedKey) {
+        toast.error(t("fillKeyFirst"));
+        setStatus("error");
+        scheduleStatusReset();
+        return;
+      }
+
+      // Fetch models from the provider
+      const response = await fetchProviderModels({
+        providerUrl: providerUrl.trim(),
+        apiKey: resolvedKey,
+        providerType,
+        proxyUrl: proxyUrl?.trim() || null,
+        proxyFallbackToDirect,
+      });
+
+      if (!response.ok) {
+        toast.error(getFetchErrorMessage(response));
+        setStatus("error");
+        scheduleStatusReset();
+        return;
+      }
+
+      if (!response.data?.models || response.data.models.length === 0) {
+        toast.warning(t("noModelsFound"));
+        setStatus("success");
+        setLastFetchCount(0);
+        scheduleStatusReset();
+        return;
+      }
+
+      const { models } = response.data;
+      setLastFetchCount(models.length);
+      setStatus("success");
+
+      // Notify the parent component
+      onModelsLoaded(models);
+
+      toast.success(t("fetchSuccess"), {
+        description: t("modelsFound", { count: models.length }),
+      });
+
+      // Reset the status after 3 seconds
+      scheduleStatusReset();
+    } catch (error) {
+      console.error("Fetch models failed:", error);
+      toast.error(t("fetchFailed"));
+      setStatus("error");
+      scheduleStatusReset();
+    }
+  };
+
+  const getButtonContent = () => {
+    switch (status) {
+      case "loading":
+        return (
+          <>
+            <Loader2 className="mr-2 h-4 w-4 animate-spin" />
+            {t("fetching")}
+          </>
+        );
+      case "success":
+        return (
+          <>
+            <CheckCircle2 className="mr-2 h-4 w-4" />
+            {t("fetchedCount", { count: lastFetchCount })}
+          </>
+        );
+      case "error":
+        return (
+          <>
+            <XCircle className="mr-2 h-4 w-4" />
+            {t("fetchFailed")}
+          </>
+        );
+      default:
+        return (
+          <>
+            <Download className="mr-2 h-4 w-4" />
+            {t("fetchModels")}
+          </>
+        );
+    }
+  };
+
+  return (
+    <Button
+      type="button"
+      variant="outline"
+      onClick={handleFetch}
+      disabled={disabled || status === "loading"}
+    >
+      {getButtonContent()}
+    </Button>
+  );
+}
diff --git a/src/app/[locale]/settings/providers/_components/forms/provider-form.tsx b/src/app/[locale]/settings/providers/_components/forms/provider-form.tsx
index d48c8f451..81a4edc75 100644
--- a/src/app/[locale]/settings/providers/_components/forms/provider-form.tsx
+++ b/src/app/[locale]/settings/providers/_components/forms/provider-form.tsx
@@ -39,6 +39,7 @@ import { ModelMultiSelect } from "../model-multi-select";
 import { ModelRedirectEditor } from "../model-redirect-editor";
 import { ProxyTestButton } from "./proxy-test-button";
 import { ApiTestButton } from "./api-test-button";
+import { FetchModelsButton } from "./fetch-models-button";
 import { UrlPreview } from "./url-preview";
 import { ChevronDown } from "lucide-react";
 import { useTranslations } from "next-intl";
@@ -609,7 +610,10 @@ export function ProviderForm({