diff --git a/api/cache/getLogStores.js b/api/cache/getLogStores.js
index b678880b0d9..77949dacd3c 100644
--- a/api/cache/getLogStores.js
+++ b/api/cache/getLogStores.js
@@ -1,7 +1,8 @@
 const Keyv = require('keyv');
 const keyvMongo = require('./keyvMongo');
 const keyvRedis = require('./keyvRedis');
-const { math, isEnabled } = require('../server/utils');
+const { CacheKeys } = require('~/common/enums');
+const { math, isEnabled } = require('~/server/utils');
 const { logFile, violationFile } = require('./keyvFiles');
 const { BAN_DURATION, USE_REDIS } = process.env ?? {};
 
@@ -17,7 +18,12 @@ const pending_req = isEnabled(USE_REDIS)
   ? new Keyv({ store: keyvRedis })
   : new Keyv({ namespace: 'pending_req' });
 
+const config = isEnabled(USE_REDIS)
+  ? new Keyv({ store: keyvRedis })
+  : new Keyv({ namespace: CacheKeys.CONFIG });
+
 const namespaces = {
+  config,
   pending_req,
   ban: new Keyv({ store: keyvMongo, namespace: 'bans', ttl: duration }),
   general: new Keyv({ store: logFile, namespace: 'violations' }),
diff --git a/api/common/enums.js b/api/common/enums.js
new file mode 100644
index 00000000000..dc1c757b2f1
--- /dev/null
+++ b/api/common/enums.js
@@ -0,0 +1,15 @@
+/**
+ * @typedef {Object} CacheKeys
+ * @property {'config'} CONFIG - Key for the config cache.
+ * @property {'modelsConfig'} MODELS_CONFIG - Key for the model config cache.
+ * @property {'defaultConfig'} DEFAULT_CONFIG - Key for the default config cache.
+ * @property {'overrideConfig'} OVERRIDE_CONFIG - Key for the override config cache.
+ */
+const CacheKeys = {
+  CONFIG: 'config',
+  MODELS_CONFIG: 'modelsConfig',
+  DEFAULT_CONFIG: 'defaultConfig',
+  OVERRIDE_CONFIG: 'overrideConfig',
+};
+
+module.exports = { CacheKeys };
diff --git a/api/server/controllers/EndpointController.js b/api/server/controllers/EndpointController.js
index 7ace52510a3..0cc21f96ac3 100644
--- a/api/server/controllers/EndpointController.js
+++ b/api/server/controllers/EndpointController.js
@@ -1,95 +1,17 @@
-const { EModelEndpoint } = require('~/server/routes/endpoints/schemas');
-const { availableTools } = require('~/app/clients/tools');
-const { addOpenAPISpecs } = require('~/app/clients/tools/util/addOpenAPISpecs');
-const {
-  openAIApiKey,
-  azureOpenAIApiKey,
-  useAzurePlugins,
-  userProvidedOpenAI,
-  palmKey,
-  openAI,
-  // assistant,
-  azureOpenAI,
-  bingAI,
-  chatGPTBrowser,
-  anthropic,
-} = require('~/server/services/EndpointService').config;
+const { getLogStores } = require('~/cache');
+const { CacheKeys } = require('~/common/enums');
+const { loadDefaultEndpointsConfig } = require('~/server/services/Config');
 
-let i = 0;
 async function endpointController(req, res) {
-  let key, palmUser;
-  try {
-    key = require('~/data/auth.json');
-  } catch (e) {
-    if (i === 0) {
-      i++;
-    }
+  const cache = getLogStores(CacheKeys.CONFIG);
+  const config = await cache.get(CacheKeys.DEFAULT_CONFIG);
+  if (config) {
+    res.send(config);
+    return;
   }
-
-  if (palmKey === 'user_provided') {
-    palmUser = true;
-    if (i <= 1) {
-      i++;
-    }
-  }
-
-  const tools = await addOpenAPISpecs(availableTools);
-  function transformToolsToMap(tools) {
-    return tools.reduce((map, obj) => {
-      map[obj.pluginKey] = obj.name;
-      return map;
-    }, {});
-  }
-  const plugins = transformToolsToMap(tools);
-
-  const google = key || palmUser ? { userProvide: palmUser } : false;
-
-  const gptPlugins =
-    openAIApiKey || azureOpenAIApiKey
-      ? {
-          plugins,
-          availableAgents: ['classic', 'functions'],
-          userProvide: userProvidedOpenAI,
-          azure: useAzurePlugins,
-        }
-      : false;
-
-  let enabledEndpoints = [
-    EModelEndpoint.openAI,
-    EModelEndpoint.azureOpenAI,
-    EModelEndpoint.google,
-    EModelEndpoint.bingAI,
-    EModelEndpoint.chatGPTBrowser,
-    EModelEndpoint.gptPlugins,
-    EModelEndpoint.anthropic,
-  ];
-
-  const endpointsEnv = process.env.ENDPOINTS || '';
-  if (endpointsEnv) {
-    enabledEndpoints = endpointsEnv
-      .split(',')
-      .filter((endpoint) => endpoint?.trim())
-      .map((endpoint) => endpoint.trim());
-  }
-
-  const endpointConfig = {
-    [EModelEndpoint.openAI]: openAI,
-    [EModelEndpoint.azureOpenAI]: azureOpenAI,
-    [EModelEndpoint.google]: google,
-    [EModelEndpoint.bingAI]: bingAI,
-    [EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
-    [EModelEndpoint.gptPlugins]: gptPlugins,
-    [EModelEndpoint.anthropic]: anthropic,
-  };
-
-  const orderedAndFilteredEndpoints = enabledEndpoints.reduce((config, key, index) => {
-    if (endpointConfig[key]) {
-      config[key] = { ...(endpointConfig[key] ?? {}), order: index };
-    }
-    return config;
-  }, {});
-
-  res.send(JSON.stringify(orderedAndFilteredEndpoints));
+  const defaultConfig = await loadDefaultEndpointsConfig();
+  await cache.set(CacheKeys.DEFAULT_CONFIG, defaultConfig);
+  res.send(JSON.stringify(defaultConfig));
 }
 
 module.exports = endpointController;
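Note on the pattern above: the new `config` cache namespace in getLogStores.js, the `CacheKeys` enum, and the rewritten endpointController all implement the same cache-aside flow, namely look the value up under a `CacheKeys` key and only fall back to a loader on a miss. A minimal sketch of that flow, assuming the `getLogStores` and `CacheKeys` exports shown above; the `getOrLoad` helper name is hypothetical and the patch inlines this logic in each controller:

// Sketch only, not part of the patch: generic cache-aside helper mirroring endpointController.
const { getLogStores } = require('~/cache');
const { CacheKeys } = require('~/common/enums');

async function getOrLoad(cacheKey, loader) {
  const cache = getLogStores(CacheKeys.CONFIG);
  const cached = await cache.get(cacheKey);
  if (cached) {
    return cached;
  }
  const value = await loader();
  await cache.set(cacheKey, value);
  return value;
}

// e.g. inside an Express handler:
// res.send(JSON.stringify(await getOrLoad(CacheKeys.DEFAULT_CONFIG, loadDefaultEndpointsConfig)));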
diff --git a/api/server/controllers/ModelController.js b/api/server/controllers/ModelController.js
index 2b683a6e598..61ca82ecf03 100644
--- a/api/server/controllers/ModelController.js
+++ b/api/server/controllers/ModelController.js
@@ -1,35 +1,17 @@
-const { EModelEndpoint } = require('../routes/endpoints/schemas');
-const {
-  getOpenAIModels,
-  getChatGPTBrowserModels,
-  getAnthropicModels,
-} = require('../services/ModelService');
-
-const { useAzurePlugins } = require('../services/EndpointService').config;
-
-const fitlerAssistantModels = (str) => {
-  return /gpt-4|gpt-3\.5/i.test(str) && !/vision|instruct/i.test(str);
-};
+const { getLogStores } = require('~/cache');
+const { CacheKeys } = require('~/common/enums');
+const { loadDefaultModels } = require('~/server/services/Config');
 
 async function modelController(req, res) {
-  const openAI = await getOpenAIModels();
-  const azureOpenAI = await getOpenAIModels({ azure: true });
-  const gptPlugins = await getOpenAIModels({ azure: useAzurePlugins, plugins: true });
-  const chatGPTBrowser = getChatGPTBrowserModels();
-  const anthropic = getAnthropicModels();
-
-  res.send(
-    JSON.stringify({
-      [EModelEndpoint.openAI]: openAI,
-      [EModelEndpoint.azureOpenAI]: azureOpenAI,
-      [EModelEndpoint.assistant]: openAI.filter(fitlerAssistantModels),
-      [EModelEndpoint.google]: ['chat-bison', 'text-bison', 'codechat-bison'],
-      [EModelEndpoint.bingAI]: ['BingAI', 'Sydney'],
-      [EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
-      [EModelEndpoint.gptPlugins]: gptPlugins,
-      [EModelEndpoint.anthropic]: anthropic,
-    }),
-  );
+  const cache = getLogStores(CacheKeys.CONFIG);
+  let modelConfig = await cache.get(CacheKeys.MODELS_CONFIG);
+  if (modelConfig) {
+    res.send(modelConfig);
+    return;
+  }
+  modelConfig = await loadDefaultModels();
+  await cache.set(CacheKeys.MODELS_CONFIG, modelConfig);
+  res.send(modelConfig);
 }
 
 module.exports = modelController;
diff --git a/api/server/controllers/OverrideController.js b/api/server/controllers/OverrideController.js
new file mode 100644
index 00000000000..0abd27a7a24
--- /dev/null
+++ b/api/server/controllers/OverrideController.js
@@ -0,0 +1,27 @@
+const { getLogStores } = require('~/cache');
+const { CacheKeys } = require('~/common/enums');
+const { loadOverrideConfig } = require('~/server/services/Config');
+
+async function overrideController(req, res) {
+  const cache = getLogStores(CacheKeys.CONFIG);
+  let overrideConfig = await cache.get(CacheKeys.OVERRIDE_CONFIG);
+  if (overrideConfig) {
+    res.send(overrideConfig);
+    return;
+  } else if (overrideConfig === false) {
+    res.send(false);
+    return;
+  }
+  overrideConfig = await loadOverrideConfig();
+  const { endpointsConfig, modelsConfig } = overrideConfig;
+  if (endpointsConfig) {
+    await cache.set(CacheKeys.DEFAULT_CONFIG, endpointsConfig);
+  }
+  if (modelsConfig) {
+    await cache.set(CacheKeys.MODELS_CONFIG, modelsConfig);
+  }
+  await cache.set(CacheKeys.OVERRIDE_CONFIG, overrideConfig);
+  res.send(JSON.stringify(overrideConfig));
+}
+
+module.exports = overrideController;
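For context, overrideController caches whatever loadOverrideConfig returns and seeds the default and models caches when the payload carries `endpointsConfig` or `modelsConfig` keys. In this patch the loader simply returns `false`; the object below is only an assumed illustration of a payload shape that would satisfy the destructuring above, with placeholder endpoint entries and model IDs:

// Assumed example payload; loadOverrideConfig() in this patch returns false.
const exampleOverride = {
  endpointsConfig: {
    openAI: { userProvide: false, order: 0 },
    anthropic: { userProvide: true, order: 1 },
  },
  modelsConfig: {
    openAI: ['gpt-4', 'gpt-3.5-turbo'],
    anthropic: ['claude-2'],
  },
};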
diff --git a/api/server/routes/endpoints.js b/api/server/routes/endpoints.js
index a75c2e2f959..5e4405faa95 100644
--- a/api/server/routes/endpoints.js
+++ b/api/server/routes/endpoints.js
@@ -1,7 +1,9 @@
 const express = require('express');
 const router = express.Router();
-const endpointController = require('../controllers/EndpointController');
+const endpointController = require('~/server/controllers/EndpointController');
+const overrideController = require('~/server/controllers/OverrideController');
 
 router.get('/', endpointController);
+router.get('/config/override', overrideController);
 
 module.exports = router;
diff --git a/api/server/services/EndpointService.js b/api/server/services/Config/EndpointService.js
similarity index 93%
rename from api/server/services/EndpointService.js
rename to api/server/services/Config/EndpointService.js
index ddcc65e9458..a4fb93da4c3 100644
--- a/api/server/services/EndpointService.js
+++ b/api/server/services/Config/EndpointService.js
@@ -1,4 +1,4 @@
-const { EModelEndpoint } = require('../routes/endpoints/schemas');
+const { EModelEndpoint } = require('~/server/routes/endpoints/schemas');
 
 const {
   OPENAI_API_KEY: openAIApiKey,
diff --git a/api/server/services/Config/index.js b/api/server/services/Config/index.js
new file mode 100644
index 00000000000..13cbc09f3b3
--- /dev/null
+++ b/api/server/services/Config/index.js
@@ -0,0 +1,13 @@
+const { config } = require('./EndpointService');
+const loadDefaultModels = require('./loadDefaultModels');
+const loadOverrideConfig = require('./loadOverrideConfig');
+const loadAsyncEndpoints = require('./loadAsyncEndpoints');
+const loadDefaultEndpointsConfig = require('./loadDefaultEConfig');
+
+module.exports = {
+  config,
+  loadDefaultModels,
+  loadOverrideConfig,
+  loadAsyncEndpoints,
+  loadDefaultEndpointsConfig,
+};
diff --git a/api/server/services/Config/loadAsyncEndpoints.js b/api/server/services/Config/loadAsyncEndpoints.js
new file mode 100644
index 00000000000..fc54497498c
--- /dev/null
+++ b/api/server/services/Config/loadAsyncEndpoints.js
@@ -0,0 +1,51 @@
+const { availableTools } = require('~/app/clients/tools');
+const { addOpenAPISpecs } = require('~/app/clients/tools/util/addOpenAPISpecs');
+const { openAIApiKey, azureOpenAIApiKey, useAzurePlugins, userProvidedOpenAI, palmKey } =
+  require('./EndpointService').config;
+
+/**
+ * Load async endpoints and return a configuration object
+ */
+async function loadAsyncEndpoints() {
+  let i = 0;
+  let key, palmUser;
+  try {
+    key = require('~/data/auth.json');
+  } catch (e) {
+    if (i === 0) {
+      i++;
+    }
+  }
+
+  if (palmKey === 'user_provided') {
+    palmUser = true;
+    if (i <= 1) {
+      i++;
+    }
+  }
+
+  const tools = await addOpenAPISpecs(availableTools);
+  function transformToolsToMap(tools) {
+    return tools.reduce((map, obj) => {
+      map[obj.pluginKey] = obj.name;
+      return map;
+    }, {});
+  }
+  const plugins = transformToolsToMap(tools);
+
+  const google = key || palmUser ? { userProvide: palmUser } : false;
+
+  const gptPlugins =
+    openAIApiKey || azureOpenAIApiKey
+      ? {
+          plugins,
+          availableAgents: ['classic', 'functions'],
+          userProvide: userProvidedOpenAI,
+          azure: useAzurePlugins,
+        }
+      : false;
+
+  return { google, gptPlugins };
+}
+
+module.exports = loadAsyncEndpoints;
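A worked example of the `transformToolsToMap` reduction used in loadAsyncEndpoints above; the tool entries are made-up placeholders, the reduce itself is unchanged:

// Placeholder tool entries; real entries come from availableTools / addOpenAPISpecs.
const sampleTools = [
  { pluginKey: 'calculator', name: 'Calculator' },
  { pluginKey: 'web-browser', name: 'Web Browser' },
];

const pluginMap = sampleTools.reduce((map, obj) => {
  map[obj.pluginKey] = obj.name;
  return map;
}, {});

console.log(pluginMap); // { calculator: 'Calculator', 'web-browser': 'Web Browser' }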
diff --git a/api/server/services/Config/loadDefaultEConfig.js b/api/server/services/Config/loadDefaultEConfig.js
new file mode 100644
index 00000000000..833fcf34b5a
--- /dev/null
+++ b/api/server/services/Config/loadDefaultEConfig.js
@@ -0,0 +1,52 @@
+const { EModelEndpoint } = require('~/server/routes/endpoints/schemas');
+const loadAsyncEndpoints = require('./loadAsyncEndpoints');
+const { config } = require('./EndpointService');
+
+/**
+ * Load async endpoints and return a configuration object
+ * @function loadDefaultEndpointsConfig
+ * @returns {Promise<Object.<string, EndpointWithOrder>>} An object whose keys are endpoint names and values are objects that contain the endpoint configuration and an order.
+ */
+async function loadDefaultEndpointsConfig() {
+  const { google, gptPlugins } = await loadAsyncEndpoints();
+  const { openAI, bingAI, anthropic, azureOpenAI, chatGPTBrowser } = config;
+
+  let enabledEndpoints = [
+    EModelEndpoint.openAI,
+    EModelEndpoint.azureOpenAI,
+    EModelEndpoint.google,
+    EModelEndpoint.bingAI,
+    EModelEndpoint.chatGPTBrowser,
+    EModelEndpoint.gptPlugins,
+    EModelEndpoint.anthropic,
+  ];
+
+  const endpointsEnv = process.env.ENDPOINTS || '';
+  if (endpointsEnv) {
+    enabledEndpoints = endpointsEnv
+      .split(',')
+      .filter((endpoint) => endpoint?.trim())
+      .map((endpoint) => endpoint.trim());
+  }
+
+  const endpointConfig = {
+    [EModelEndpoint.openAI]: openAI,
+    [EModelEndpoint.azureOpenAI]: azureOpenAI,
+    [EModelEndpoint.google]: google,
+    [EModelEndpoint.bingAI]: bingAI,
+    [EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
+    [EModelEndpoint.gptPlugins]: gptPlugins,
+    [EModelEndpoint.anthropic]: anthropic,
+  };
+
+  const orderedAndFilteredEndpoints = enabledEndpoints.reduce((config, key, index) => {
+    if (endpointConfig[key]) {
+      config[key] = { ...(endpointConfig[key] ?? {}), order: index };
+    }
+    return config;
+  }, {});
+
+  return orderedAndFilteredEndpoints;
+}
+
+module.exports = loadDefaultEndpointsConfig;
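A worked example of the `ENDPOINTS` ordering and filtering in loadDefaultEndpointsConfig above, reusing the same reduce; the endpoint config values are simplified placeholders:

// ENDPOINTS restricts and orders the enabled endpoints; falsy configs are dropped.
const endpointConfig = {
  openAI: { userProvide: false },
  anthropic: { userProvide: true },
  bingAI: false, // not configured, so it is filtered out even if listed
};

const enabledEndpoints = 'anthropic,openAI,bingAI'
  .split(',')
  .filter((endpoint) => endpoint?.trim())
  .map((endpoint) => endpoint.trim());

const ordered = enabledEndpoints.reduce((config, key, index) => {
  if (endpointConfig[key]) {
    config[key] = { ...(endpointConfig[key] ?? {}), order: index };
  }
  return config;
}, {});

console.log(ordered);
// { anthropic: { userProvide: true, order: 0 }, openAI: { userProvide: false, order: 1 } }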
diff --git a/api/server/services/Config/loadDefaultModels.js b/api/server/services/Config/loadDefaultModels.js
new file mode 100644
index 00000000000..41a1bac68cc
--- /dev/null
+++ b/api/server/services/Config/loadDefaultModels.js
@@ -0,0 +1,32 @@
+const {
+  getOpenAIModels,
+  getChatGPTBrowserModels,
+  getAnthropicModels,
+} = require('~/server/services/ModelService');
+const { EModelEndpoint } = require('~/server/routes/endpoints/schemas');
+const { useAzurePlugins } = require('~/server/services/Config/EndpointService').config;
+
+const fitlerAssistantModels = (str) => {
+  return /gpt-4|gpt-3\.5/i.test(str) && !/vision|instruct/i.test(str);
+};
+
+async function loadDefaultModels() {
+  const openAI = await getOpenAIModels();
+  const anthropic = getAnthropicModels();
+  const chatGPTBrowser = getChatGPTBrowserModels();
+  const azureOpenAI = await getOpenAIModels({ azure: true });
+  const gptPlugins = await getOpenAIModels({ azure: useAzurePlugins, plugins: true });
+
+  return {
+    [EModelEndpoint.openAI]: openAI,
+    [EModelEndpoint.azureOpenAI]: azureOpenAI,
+    [EModelEndpoint.assistant]: openAI.filter(fitlerAssistantModels),
+    [EModelEndpoint.google]: ['chat-bison', 'text-bison', 'codechat-bison'],
+    [EModelEndpoint.bingAI]: ['BingAI', 'Sydney'],
+    [EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
+    [EModelEndpoint.gptPlugins]: gptPlugins,
+    [EModelEndpoint.anthropic]: anthropic,
+  };
+}
+
+module.exports = loadDefaultModels;
diff --git a/api/server/services/Config/loadOverrideConfig.js b/api/server/services/Config/loadOverrideConfig.js
new file mode 100644
index 00000000000..1a90e814f56
--- /dev/null
+++ b/api/server/services/Config/loadOverrideConfig.js
@@ -0,0 +1,6 @@
+// fetch some remote config
+async function loadOverrideConfig() {
+  return false;
+}
+
+module.exports = loadOverrideConfig;
diff --git a/api/server/services/ModelService.js b/api/server/services/ModelService.js
index d103bd1c170..009e9c65937 100644
--- a/api/server/services/ModelService.js
+++ b/api/server/services/ModelService.js
@@ -1,11 +1,11 @@
 const HttpsProxyAgent = require('https-proxy-agent');
 const axios = require('axios');
 const Keyv = require('keyv');
-const { isEnabled } = require('../utils');
-const { extractBaseURL } = require('../../utils');
-const keyvRedis = require('../../cache/keyvRedis');
-// const { getAzureCredentials, genAzureChatCompletion } = require('../../utils/');
-const { openAIApiKey, userProvidedOpenAI } = require('./EndpointService').config;
+const { isEnabled } = require('~/server/utils');
+const { extractBaseURL } = require('~/utils');
+const keyvRedis = require('~/cache/keyvRedis');
+// const { getAzureCredentials, genAzureChatCompletion } = require('~/utils/');
+const { openAIApiKey, userProvidedOpenAI } = require('./Config/EndpointService').config;
 
 const modelsCache = isEnabled(process.env.USE_REDIS)
   ? new Keyv({ store: keyvRedis })
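A quick check of the assistant-model filter from loadDefaultModels above, applied to example model IDs:

// Same regex as fitlerAssistantModels in loadDefaultModels; the model IDs below are examples.
const fitlerAssistantModels = (str) => /gpt-4|gpt-3\.5/i.test(str) && !/vision|instruct/i.test(str);

const sampleModels = ['gpt-4', 'gpt-4-vision-preview', 'gpt-3.5-turbo', 'gpt-3.5-turbo-instruct', 'text-davinci-003'];
console.log(sampleModels.filter(fitlerAssistantModels));
// -> ['gpt-4', 'gpt-3.5-turbo']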
diff --git a/api/typedefs.js b/api/typedefs.js
index f91350752d5..d1796f8051d 100644
--- a/api/typedefs.js
+++ b/api/typedefs.js
@@ -239,3 +239,66 @@
  * @typedef {AgentAction & { toolCallId: string; run_id: string; thread_id: string; }} OpenAIAssistantAction
  * @memberof typedefs
  */
+
+/**
+ * @exports EndpointServiceConfig
+ * @typedef {Object} EndpointServiceConfig
+ * @property {string} openAIApiKey - The API key for OpenAI.
+ * @property {string} azureOpenAIApiKey - The API key for Azure OpenAI.
+ * @property {boolean} useAzurePlugins - Flag to indicate if Azure plugins are used.
+ * @property {boolean} userProvidedOpenAI - Flag to indicate if OpenAI API key is user provided.
+ * @property {string} palmKey - The Palm key.
+ * @property {boolean|{userProvide: boolean}} [openAI] - Flag to indicate if OpenAI endpoint is user provided, or its configuration.
+ * @property {boolean|{userProvide: boolean}} [assistant] - Flag to indicate if Assistant endpoint is user provided, or its configuration.
+ * @property {boolean|{userProvide: boolean}} [azureOpenAI] - Flag to indicate if Azure OpenAI endpoint is user provided, or its configuration.
+ * @property {boolean|{userProvide: boolean}} [chatGPTBrowser] - Flag to indicate if ChatGPT Browser endpoint is user provided, or its configuration.
+ * @property {boolean|{userProvide: boolean}} [anthropic] - Flag to indicate if Anthropic endpoint is user provided, or its configuration.
+ * @property {boolean|{userProvide: boolean}} [bingAI] - Flag to indicate if BingAI endpoint is user provided, or its configuration.
+ * @memberof typedefs
+ */
+
+/**
+ * @exports Plugin
+ * @typedef {Object} Plugin
+ * @property {string} pluginKey - The key of the plugin.
+ * @property {string} name - The name of the plugin.
+ * @memberof typedefs
+ */
+
+/**
+ * @exports GptPlugins
+ * @typedef {Object} GptPlugins
+ * @property {Plugin[]} plugins - An array of plugins available.
+ * @property {string[]} availableAgents - Available agents, 'classic' or 'functions'.
+ * @property {boolean} userProvide - A flag indicating if the user has provided the data.
+ * @property {boolean} azure - A flag indicating if azure plugins are used.
+ * @memberof typedefs
+ */
+
+/**
+ * @exports DefaultConfig
+ * @typedef {Object} DefaultConfig
+ * @property {boolean|{userProvide: boolean}} [openAI] - Flag to indicate if OpenAI endpoint is user provided, or its configuration.
+ * @property {boolean|{userProvide: boolean}} [assistant] - Flag to indicate if Assistant endpoint is user provided, or its configuration.
+ * @property {boolean|{userProvide: boolean}} [azureOpenAI] - Flag to indicate if Azure OpenAI endpoint is user provided, or its configuration.
+ * @property {boolean|{userProvide: boolean}} [chatGPTBrowser] - Flag to indicate if ChatGPT Browser endpoint is user provided, or its configuration.
+ * @property {boolean|{userProvide: boolean}} [anthropic] - Flag to indicate if Anthropic endpoint is user provided, or its configuration.
+ * @property {boolean|{userProvide: boolean}} [bingAI] - Flag to indicate if BingAI endpoint is user provided, or its configuration.
+ * @property {boolean|{userProvide: boolean}} [google] - Flag to indicate if Google endpoint is user provided, or its configuration.
+ * @property {boolean|GptPlugins} [gptPlugins] - Configuration for GPT plugins.
+ * @memberof typedefs
+ */
+
+/**
+ * @exports EndpointConfig
+ * @typedef {boolean|{userProvide: boolean}|GptPlugins} EndpointConfig
+ * @memberof typedefs
+ */
+
+/**
+ * @exports EndpointWithOrder
+ * @typedef {Object} EndpointWithOrder
+ * @property {EndpointConfig} config - The configuration of the endpoint.
+ * @property {number} order - The order of the endpoint.
+ * @memberof typedefs
+ */
diff --git a/client/src/data-provider/index.ts b/client/src/data-provider/index.ts
index d5bc710cf49..684ad1668bb 100644
--- a/client/src/data-provider/index.ts
+++ b/client/src/data-provider/index.ts
@@ -1 +1,2 @@
 export * from './mutations';
+export * from './queries';
diff --git a/client/src/data-provider/queries.ts b/client/src/data-provider/queries.ts
new file mode 100644
index 00000000000..ec10c9245de
--- /dev/null
+++ b/client/src/data-provider/queries.ts
@@ -0,0 +1,17 @@
+import { UseQueryOptions, useQuery, QueryObserverResult } from '@tanstack/react-query';
+import { QueryKeys, dataService } from 'librechat-data-provider';
+
+export const useGetEndpointsConfigOverride = (
+  config?: UseQueryOptions,
+): QueryObserverResult => {
+  return useQuery(
+    [QueryKeys.endpointsConfigOverride],
+    () => dataService.getEndpointsConfigOverride(),
+    {
+      refetchOnWindowFocus: false,
+      refetchOnReconnect: false,
+      refetchOnMount: false,
+      ...config,
+    },
+  );
+};
diff --git a/client/src/hooks/Config/index.ts b/client/src/hooks/Config/index.ts
new file mode 100644
index 00000000000..4665f58d45a
--- /dev/null
+++ b/client/src/hooks/Config/index.ts
@@ -0,0 +1 @@
+export { default as useConfigOverride } from './useConfigOverride';
diff --git a/client/src/hooks/Config/useConfigOverride.ts b/client/src/hooks/Config/useConfigOverride.ts
new file mode 100644
index 00000000000..b593c11d5cb
--- /dev/null
+++ b/client/src/hooks/Config/useConfigOverride.ts
@@ -0,0 +1,47 @@
+import { useSetRecoilState } from 'recoil';
+import { useEffect, useCallback } from 'react';
+import { useQueryClient } from '@tanstack/react-query';
+import type { TEndpointsConfig, TModelsConfig } from 'librechat-data-provider';
+import { useGetEndpointsConfigOverride } from '~/data-provider';
+import { QueryKeys } from 'librechat-data-provider';
+import store from '~/store';
+
+type TempOverrideType = Record & {
+  endpointsConfig: TEndpointsConfig;
+  modelsConfig: TModelsConfig;
+  combinedOptions: unknown[];
+  combined: boolean;
+};
+
+export default function useConfigOverride() {
+  const setModelsConfig = useSetRecoilState(store.modelsConfig);
+  const setEndpointsQueryEnabled = useSetRecoilState(store.endpointsQueryEnabled);
+  const overrideQuery = useGetEndpointsConfigOverride({
+    staleTime: Infinity,
+  });
+
+  const queryClient = useQueryClient();
+
+  const handleOverride = useCallback(
+    async (data: unknown | boolean) => {
+      const { endpointsConfig, modelsConfig } = data as TempOverrideType;
+      if (endpointsConfig) {
+        setEndpointsQueryEnabled(false);
+        await queryClient.cancelQueries([QueryKeys.endpoints]);
+        queryClient.setQueryData([QueryKeys.endpoints], endpointsConfig);
+      }
+      if (modelsConfig) {
+        await queryClient.cancelQueries([QueryKeys.models]);
+        queryClient.setQueryData([QueryKeys.models], modelsConfig);
+        setModelsConfig(modelsConfig);
+      }
+    },
+    [queryClient, setEndpointsQueryEnabled, setModelsConfig],
+  );
+
+  useEffect(() => {
+    if (overrideQuery.data) {
+      handleOverride(overrideQuery.data);
+    }
+  }, [overrideQuery.data, handleOverride]);
+}
diff --git a/client/src/hooks/index.ts b/client/src/hooks/index.ts
index dabf59055e0..649ea9c0bdc 100644
--- a/client/src/hooks/index.ts
+++ b/client/src/hooks/index.ts
@@ -1,4 +1,5 @@
 export * from './Messages';
+export * from './Config';
 export * from './Input';
 export * from './Conversations';
diff --git a/client/src/routes/ChatRoute.tsx b/client/src/routes/ChatRoute.tsx
index 458c31022ab..c9dbe8da988 100644
--- a/client/src/routes/ChatRoute.tsx
+++ b/client/src/routes/ChatRoute.tsx
@@ -1,20 +1,23 @@
+import { useRecoilValue } from 'recoil';
 import { useEffect, useRef } from 'react';
 import { useParams } from 'react-router-dom';
 import { useGetConvoIdQuery, useGetModelsQuery } from 'librechat-data-provider';
+import { useNewConvo, useConfigOverride } from '~/hooks';
 import ChatView from '~/components/Chat/ChatView';
 import useAuthRedirect from './useAuthRedirect';
-import { useNewConvo } from '~/hooks';
 import store from '~/store';
 
 export default function ChatRoute() {
   const index = 0;
+  useConfigOverride();
   const { conversationId } = useParams();
   const { conversation } = store.useCreateConversationAtom(index);
+  const modelsQueryEnabled = useRecoilValue(store.modelsQueryEnabled);
   const { isAuthenticated } = useAuthRedirect();
   const { newConversation } = useNewConvo();
   const hasSetConversation = useRef(false);
-  const modelsQuery = useGetModelsQuery({ enabled: isAuthenticated });
+  const modelsQuery = useGetModelsQuery({ enabled: isAuthenticated && modelsQueryEnabled });
   const initialConvoQuery = useGetConvoIdQuery(conversationId ?? '', {
     enabled: isAuthenticated && conversationId !== 'new',
   });
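On the client, useConfigOverride boils down to seeding the react-query cache before the now-gated endpoint and model queries run. A plain JavaScript sketch of that seeding step, using the same @tanstack/react-query calls as the hook above; the `applyOverride` name is hypothetical:

// Sketch only: seed cached endpoint/model data so the gated queries need not refetch.
const { QueryClient } = require('@tanstack/react-query');
const { QueryKeys } = require('librechat-data-provider');

const queryClient = new QueryClient();

async function applyOverride(data) {
  const { endpointsConfig, modelsConfig } = data ?? {};
  if (endpointsConfig) {
    await queryClient.cancelQueries([QueryKeys.endpoints]);
    queryClient.setQueryData([QueryKeys.endpoints], endpointsConfig);
  }
  if (modelsConfig) {
    await queryClient.cancelQueries([QueryKeys.models]);
    queryClient.setQueryData([QueryKeys.models], modelsConfig);
  }
}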
diff --git a/client/src/routes/Root.tsx b/client/src/routes/Root.tsx
index 7fe5fc40096..992bf507329 100644
--- a/client/src/routes/Root.tsx
+++ b/client/src/routes/Root.tsx
@@ -20,11 +20,12 @@ export default function Root() {
   const submission = useRecoilValue(store.submission);
   useServerStream(submission ?? null);
 
+  const modelsQueryEnabled = useRecoilValue(store.modelsQueryEnabled);
   const setIsSearchEnabled = useSetRecoilState(store.isSearchEnabled);
   const setModelsConfig = useSetRecoilState(store.modelsConfig);
 
   const searchEnabledQuery = useGetSearchEnabledQuery({ enabled: isAuthenticated });
-  const modelsQuery = useGetModelsQuery({ enabled: isAuthenticated });
+  const modelsQuery = useGetModelsQuery({ enabled: isAuthenticated && modelsQueryEnabled });
 
   useEffect(() => {
     localStorage.setItem('navVisible', JSON.stringify(navVisible));
diff --git a/client/src/store/endpoints.ts b/client/src/store/endpoints.ts
index aadaebe8698..15d68e1e2af 100644
--- a/client/src/store/endpoints.ts
+++ b/client/src/store/endpoints.ts
@@ -17,6 +17,11 @@ const endpointsConfig = atom({
   default: defaultConfig,
 });
 
+const endpointsQueryEnabled = atom({
+  key: 'endpointsQueryEnabled',
+  default: true,
+});
+
 const plugins = selector({
   key: 'plugins',
   get: ({ get }) => {
@@ -62,4 +67,5 @@ export default {
   endpointsFilter,
   availableEndpoints,
   defaultConfig,
+  endpointsQueryEnabled,
 };
diff --git a/client/src/store/models.ts b/client/src/store/models.ts
index c9faa0420ba..c05dd0c55b5 100644
--- a/client/src/store/models.ts
+++ b/client/src/store/models.ts
@@ -25,6 +25,12 @@ const modelsConfig = atom({
   },
 });
 
+const modelsQueryEnabled = atom({
+  key: 'modelsQueryEnabled',
+  default: true,
+});
+
 export default {
   modelsConfig,
+  modelsQueryEnabled,
 };
diff --git a/packages/data-provider/src/api-endpoints.ts b/packages/data-provider/src/api-endpoints.ts
index e927e8bdcf9..380a0dafaf4 100644
--- a/packages/data-provider/src/api-endpoints.ts
+++ b/packages/data-provider/src/api-endpoints.ts
@@ -38,6 +38,8 @@ export const deletePreset = () => '/api/presets/delete';
 
 export const aiEndpoints = () => '/api/endpoints';
 
+export const endpointsConfigOverride = () => '/api/endpoints/config/override';
+
 export const models = () => '/api/models';
 
 export const tokenizer = () => '/api/tokenizer';
diff --git a/packages/data-provider/src/data-service.ts b/packages/data-provider/src/data-service.ts
index d49ab599beb..105783d395d 100644
--- a/packages/data-provider/src/data-service.ts
+++ b/packages/data-provider/src/data-service.ts
@@ -105,14 +105,6 @@ export const searchConversations = async (
   return request.get(endpoints.search(q, pageNumber));
 };
 
-export const getAIEndpoints = (): Promise => {
-  return request.get(endpoints.aiEndpoints());
-};
-
-export const getModels = async (): Promise => {
-  return request.get(endpoints.models());
-};
-
 export const updateTokenCount = (text: string) => {
   return request.post(endpoints.tokenizer(), { arg: text });
 };
@@ -154,10 +146,24 @@ export const updateUserPlugins = (payload: t.TUpdateUserPlugins) => {
   return request.post(endpoints.userPlugins(), payload);
 };
 
+/* Config */
+
 export const getStartupConfig = (): Promise => {
   return request.get(endpoints.config());
 };
 
+export const getAIEndpoints = (): Promise => {
+  return request.get(endpoints.aiEndpoints());
+};
+
+export const getModels = async (): Promise => {
+  return request.get(endpoints.models());
+};
+
+export const getEndpointsConfigOverride = (): Promise => {
+  return request.get(endpoints.endpointsConfigOverride());
+};
+
 /* Assistants */
 
 export const createAssistant = (data: a.AssistantCreateParams): Promise => {
diff --git a/packages/data-provider/src/keys.ts b/packages/data-provider/src/keys.ts
index f4c9f3ce588..72faf08c35d 100644
--- a/packages/data-provider/src/keys.ts
+++ b/packages/data-provider/src/keys.ts
@@ -15,6 +15,7 @@ export enum QueryKeys {
   startupConfig = 'startupConfig',
   assistants = 'assistants',
   assistant = 'assistant',
+  endpointsConfigOverride = 'endpointsConfigOverride',
 }
 
 export enum MutationKeys {
diff --git a/packages/data-provider/src/react-query-service.ts b/packages/data-provider/src/react-query-service.ts
index 3f2dd778d03..e73023d151d 100644
--- a/packages/data-provider/src/react-query-service.ts
+++ b/packages/data-provider/src/react-query-service.ts
@@ -257,6 +257,7 @@ export const useGetEndpointsQuery = (
     [QueryKeys.endpoints],
     () => dataService.getAIEndpoints(),
     {
+      staleTime: Infinity,
       refetchOnWindowFocus: false,
      refetchOnReconnect: false,
       refetchOnMount: false,
@@ -269,6 +270,7 @@ export const useGetModelsQuery = (
   config?: UseQueryOptions,
 ): QueryObserverResult => {
   return useQuery([QueryKeys.models], () => dataService.getModels(), {
+    staleTime: Infinity,
     refetchOnWindowFocus: false,
     refetchOnReconnect: false,
     refetchOnMount: false,