@@ -13,6 +13,7 @@ import {
import { Input } from "@hypr/ui/components/ui/input";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@hypr/ui/components/ui/select";
import { cn } from "@hypr/ui/lib/utils";
import { useQuery } from "@tanstack/react-query";
import { SharedCustomEndpointProps } from "./shared";

const openaiModels = [
@@ -143,6 +144,67 @@ export function LLMCustomView({
setSelectedLLMModel("");
};

  // temporary fix for fetching models smoothly
  const othersModels = useQuery({
    queryKey: ["others-direct-models", customForm.watch("api_base"), customForm.watch("api_key")?.slice(0, 8)],
    queryFn: async (): Promise<string[]> => {
      const apiBase = customForm.getValues("api_base");
      const apiKey = customForm.getValues("api_key");

      const url = new URL(apiBase);
      url.pathname = "/v1/models";

      const headers: Record<string, string> = {
        "Content-Type": "application/json",
      };

      if (apiKey && apiKey.trim().length > 0) {
        headers["Authorization"] = `Bearer ${apiKey}`;
      }

      const response = await fetch(url.toString(), {
        method: "GET",
        headers,
      });

      if (!response.ok) {
        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
      }

      const data = await response.json();

      if (!data.data || !Array.isArray(data.data)) {
        throw new Error("Invalid response format");
      }

      const models = data.data
        .map((model: any) => model.id)
        .filter((id: string) => {
          const excludeKeywords = ["dall-e"];
          return !excludeKeywords.some(keyword => id.includes(keyword));
        });

      return models;
    },
    enabled: (() => {
      const apiBase = customForm.watch("api_base");
      const apiKey = customForm.watch("api_key");
      const isLocal = apiBase?.includes("localhost") || apiBase?.includes("127.0.0.1");

      try {
        // Only enable if URL looks complete (ends with common patterns)
        const validEndings = ["/v1", "/v1/", ":11434/v1", ":8080/v1"];
        const looksComplete = validEndings.some(ending => apiBase?.endsWith(ending));

        return Boolean(apiBase && new URL(apiBase) && looksComplete && (isLocal || apiKey));
      } catch {
        return false;
      }
    })(),
    retry: 1,
    refetchInterval: false,
  });
Comment on lines +147 to +206
Contributor
🛠️ Refactor suggestion

Review the query implementation for potential improvements

The othersModels query implementation looks functional but has several areas that could be improved:

  1. Error handling: The query throws errors but doesn't provide user-friendly error states in the UI
  2. Security: The fetch request uses user-provided URLs without additional validation beyond basic URL parsing
  3. Performance: The query key includes a slice of the API key which could cause unnecessary re-fetches

Consider these improvements:

const othersModels = useQuery({
-  queryKey: ["others-direct-models", customForm.watch("api_base"), customForm.watch("api_key")?.slice(0, 8)],
+  queryKey: ["others-direct-models", customForm.watch("api_base"), Boolean(customForm.watch("api_key"))],
  queryFn: async (): Promise<string[]> => {
    const apiBase = customForm.getValues("api_base");
    const apiKey = customForm.getValues("api_key");

-    const url = new URL(apiBase);
+    // Additional URL validation
+    const url = new URL(apiBase);
+    if (!['http:', 'https:'].includes(url.protocol)) {
+      throw new Error('Only HTTP and HTTPS protocols are allowed');
+    }
     url.pathname = "/v1/models";

    // ... rest of implementation
  },
  enabled: (() => {
    // ... existing logic
  })(),
  retry: 1,
  refetchInterval: false,
+  staleTime: 5 * 60 * 1000, // Cache for 5 minutes
});
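
For the first bullet (user-friendly error states), a minimal sketch of what surfacing the failure could look like next to the existing loading branch — the wording and styling are illustrative, and only othersModels, field, and Trans come from the surrounding code:

{othersModels.isError && !field.value && (
  <div className="py-1 text-sm text-red-500">
    {/* othersModels.error holds the Error thrown in queryFn; show it instead of failing silently */}
    <Trans>Could not load models from this endpoint. Enter the model name manually.</Trans>
  </div>
)}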
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
  // temporary fix for fetching models smoothly
  const othersModels = useQuery({
    queryKey: [
      "others-direct-models",
      customForm.watch("api_base"),
      Boolean(customForm.watch("api_key"))
    ],
    queryFn: async (): Promise<string[]> => {
      const apiBase = customForm.getValues("api_base");
      const apiKey = customForm.getValues("api_key");
      // Additional URL validation
      const url = new URL(apiBase);
      if (!['http:', 'https:'].includes(url.protocol)) {
        throw new Error('Only HTTP and HTTPS protocols are allowed');
      }
      url.pathname = "/v1/models";
      const headers: Record<string, string> = {
        "Content-Type": "application/json",
      };
      if (apiKey && apiKey.trim().length > 0) {
        headers["Authorization"] = `Bearer ${apiKey}`;
      }
      const response = await fetch(url.toString(), {
        method: "GET",
        headers,
      });
      if (!response.ok) {
        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
      }
      const data = await response.json();
      if (!data.data || !Array.isArray(data.data)) {
        throw new Error("Invalid response format");
      }
      const models = data.data
        .map((model: any) => model.id)
        .filter((id: string) => {
          const excludeKeywords = ["dall-e"];
          return !excludeKeywords.some(keyword => id.includes(keyword));
        });
      return models;
    },
    enabled: (() => {
      const apiBase = customForm.watch("api_base");
      const apiKey = customForm.watch("api_key");
      const isLocal =
        apiBase?.includes("localhost") || apiBase?.includes("127.0.0.1");
      try {
        const validEndings = ["/v1", "/v1/", ":11434/v1", ":8080/v1"];
        const looksComplete = validEndings.some((ending) =>
          apiBase?.endsWith(ending)
        );
        return Boolean(
          apiBase && new URL(apiBase) && looksComplete && (isLocal || apiKey)
        );
      } catch {
        return false;
      }
    })(),
    retry: 1,
    refetchInterval: false,
    staleTime: 5 * 60 * 1000, // Cache for 5 minutes
  });
🤖 Prompt for AI Agents
In apps/desktop/src/components/settings/components/ai/llm-custom-view.tsx around
lines 147 to 206, improve the othersModels query by removing the API key slice
from the queryKey to prevent unnecessary refetches and enhance security. Add
more robust validation for the apiBase URL to restrict it to allowed domains or
patterns before making the fetch call. Implement user-friendly error handling by
catching errors in the queryFn and setting an error state that can be displayed
in the UI instead of throwing raw errors. These changes will improve security,
performance, and user experience.
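
As a sketch of the "allowed domains or patterns" idea from that prompt, a hypothetical helper (not part of this PR) — the policy below (plain HTTP only for localhost, HTTPS everywhere else) is an assumption and would need to match the project's actual requirements:

// Hypothetical validation helper, shown only as a sketch; adjust the policy as needed.
function isAllowedApiBase(apiBase: string): boolean {
  try {
    const url = new URL(apiBase);
    const isLocal = url.hostname === "localhost" || url.hostname === "127.0.0.1";
    if (url.protocol === "http:") {
      // Allow plain HTTP only for local, self-hosted endpoints.
      return isLocal;
    }
    return url.protocol === "https:";
  } catch {
    return false;
  }
}

A call to such a helper could replace the bare new URL(apiBase) check in the enabled predicate and complement the protocol check added in queryFn.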


  return (
    <div className="space-y-6">
      <div className="flex items-center gap-2">
@@ -546,13 +608,13 @@ export function LLMCustomView({
              </Trans>
            </FormDescription>
            <FormControl>
              {availableLLMModels.isLoading && !field.value
              {othersModels.isLoading && !field.value
                ? (
                  <div className="py-1 text-sm text-neutral-500">
                    <Trans>Loading available models...</Trans>
                  </div>
                )
                : availableLLMModels.data && availableLLMModels.data.length > 0
                : othersModels.data && othersModels.data.length > 0
                ? (
                  <Select
                    value={field.value}
@@ -562,7 +624,7 @@
                      <SelectValue placeholder="Select model" />
                    </SelectTrigger>
                    <SelectContent>
                      {availableLLMModels.data.map((model) => (
                      {othersModels.data.map((model) => (
                        <SelectItem key={model} value={model}>
                          {model}
                        </SelectItem>
@@ -573,7 +635,7 @@
                  : (
                    <Input
                      {...field}
                      placeholder="Enter model name (e.g., gpt-4, llama3.2:3b)"
                      placeholder="Enter model name (endpoint has no discoverable models)"
                    />
                  )}
            </FormControl>
5 changes: 2 additions & 3 deletions apps/desktop/src/components/settings/views/ai.tsx
@@ -321,6 +321,7 @@ export default function LocalAI() {
  const availableLLMModels = useQuery({
    queryKey: ["available-llm-models"],
    queryFn: async () => {
      console.log("available models being loaded");
      return await connectorCommands.listCustomLlmModels();
    },
  });
@@ -566,9 +567,7 @@ export default function LocalAI() {
      setOpenrouterModelMutation.mutate(config.model);
    } else if (config.provider === "others") {
      setOthersApiBaseMutation.mutate(config.api_base);
      if (config.api_key) {
        setOthersApiKeyMutation.mutate(config.api_key);
      }
      setOthersApiKeyMutation.mutate(config.api_key || "");
      setOthersModelMutation.mutate(config.model);
    }

54 changes: 27 additions & 27 deletions apps/desktop/src/locales/en/messages.po
@@ -256,7 +256,7 @@ msgstr "(Beta) Detect meetings automatically"
msgid "(Beta) Upcoming meeting notifications"
msgstr "(Beta) Upcoming meeting notifications"

#: src/components/settings/components/ai/llm-custom-view.tsx:518
#: src/components/settings/components/ai/llm-custom-view.tsx:580
msgid "(Optional for localhost)"
msgstr "(Optional for localhost)"

@@ -320,7 +320,7 @@ msgstr "Access granted"
msgid "Access Granted"
msgstr "Access Granted"

#: src/components/settings/components/ai/llm-custom-view.tsx:383
#: src/components/settings/components/ai/llm-custom-view.tsx:445
msgid "Access multiple AI models through OpenRouter with your API key"
msgstr "Access multiple AI models through OpenRouter with your API key"

@@ -386,15 +386,15 @@ msgstr "and {0} more members"
msgid "Anyone with the link can view this page"
msgstr "Anyone with the link can view this page"

#: src/components/settings/components/ai/llm-custom-view.tsx:493
#: src/components/settings/components/ai/llm-custom-view.tsx:555
msgid "API Base URL"
msgstr "API Base URL"

#: src/components/settings/views/integrations.tsx:197
#: src/components/settings/components/ai/llm-custom-view.tsx:201
#: src/components/settings/components/ai/llm-custom-view.tsx:297
#: src/components/settings/components/ai/llm-custom-view.tsx:403
#: src/components/settings/components/ai/llm-custom-view.tsx:515
#: src/components/settings/components/ai/llm-custom-view.tsx:263
#: src/components/settings/components/ai/llm-custom-view.tsx:359
#: src/components/settings/components/ai/llm-custom-view.tsx:465
#: src/components/settings/components/ai/llm-custom-view.tsx:577
msgid "API Key"
msgstr "API Key"

@@ -423,7 +423,7 @@ msgstr "Audio Permissions"
#~ msgid "Auto (Default)"
#~ msgstr "Auto (Default)"

#: src/components/settings/views/ai.tsx:826
#: src/components/settings/views/ai.tsx:825
msgid "Autonomy Selector"
msgstr "Autonomy Selector"

@@ -532,7 +532,7 @@ msgstr "Company name"
#~ msgid "Connect"
#~ msgstr "Connect"

#: src/components/settings/components/ai/llm-custom-view.tsx:473
#: src/components/settings/components/ai/llm-custom-view.tsx:535
msgid "Connect to a self-hosted or third-party LLM endpoint (OpenAI API compatible)"
msgstr "Connect to a self-hosted or third-party LLM endpoint (OpenAI API compatible)"

@@ -579,7 +579,7 @@ msgstr "Continue"
#~ msgid "Continue Setup"
#~ msgstr "Continue Setup"

#: src/components/settings/views/ai.tsx:829
#: src/components/settings/views/ai.tsx:828
msgid "Control how autonomous the AI enhancement should be"
msgstr "Control how autonomous the AI enhancement should be"

@@ -624,7 +624,7 @@ msgstr "Create your first template to get started"
#~ msgid "Custom Endpoint"
#~ msgstr "Custom Endpoint"

#: src/components/settings/components/ai/llm-custom-view.tsx:150
#: src/components/settings/components/ai/llm-custom-view.tsx:212
msgid "Custom Endpoints"
msgstr "Custom Endpoints"

@@ -718,7 +718,7 @@ msgstr "Enter a section title"
#~ msgid "Enter the API key for your custom LLM endpoint"
#~ msgstr "Enter the API key for your custom LLM endpoint"

#: src/components/settings/components/ai/llm-custom-view.tsx:496
#: src/components/settings/components/ai/llm-custom-view.tsx:558
msgid "Enter the base URL for your custom LLM endpoint"
msgstr "Enter the base URL for your custom LLM endpoint"

@@ -795,7 +795,7 @@ msgstr "Generating title..."
msgid "Get Started"
msgstr "Get Started"

#: src/components/settings/components/ai/llm-custom-view.tsx:273
#: src/components/settings/components/ai/llm-custom-view.tsx:335
msgid "Google Gemini"
msgstr "Google Gemini"

@@ -904,15 +904,15 @@ msgstr "LinkedIn username"
#~ msgid "Live summary of the meeting"
#~ msgstr "Live summary of the meeting"

#: src/components/settings/views/ai.tsx:803
#: src/components/settings/views/ai.tsx:802
msgid "LLM - Custom"
msgstr "LLM - Custom"

#: src/components/settings/views/ai.tsx:800
#: src/components/settings/views/ai.tsx:799
msgid "LLM - Local"
msgstr "LLM - Local"

#: src/components/settings/components/ai/llm-custom-view.tsx:552
#: src/components/settings/components/ai/llm-custom-view.tsx:614
msgid "Loading available models..."
msgstr "Loading available models..."

@@ -962,13 +962,13 @@ msgstr "Members"
msgid "Microphone Access"
msgstr "Microphone Access"

#: src/components/settings/components/ai/llm-custom-view.tsx:221
#: src/components/settings/components/ai/llm-custom-view.tsx:317
#: src/components/settings/components/ai/llm-custom-view.tsx:423
#: src/components/settings/components/ai/llm-custom-view.tsx:283
#: src/components/settings/components/ai/llm-custom-view.tsx:379
#: src/components/settings/components/ai/llm-custom-view.tsx:485
msgid "Model"
msgstr "Model"

#: src/components/settings/components/ai/llm-custom-view.tsx:540
#: src/components/settings/components/ai/llm-custom-view.tsx:602
msgid "Model Name"
msgstr "Model Name"

@@ -1092,11 +1092,11 @@ msgstr "Open in new window"
#~ msgid "Open Note"
#~ msgstr "Open Note"

#: src/components/settings/components/ai/llm-custom-view.tsx:177
#: src/components/settings/components/ai/llm-custom-view.tsx:239
msgid "OpenAI"
msgstr "OpenAI"

#: src/components/settings/components/ai/llm-custom-view.tsx:379
#: src/components/settings/components/ai/llm-custom-view.tsx:441
msgid "OpenRouter"
msgstr "OpenRouter"

@@ -1108,7 +1108,7 @@ msgstr "Optional base folder path within your Obsidian vault."
msgid "Optional for participant suggestions"
msgstr "Optional for participant suggestions"

#: src/components/settings/components/ai/llm-custom-view.tsx:470
#: src/components/settings/components/ai/llm-custom-view.tsx:532
msgid "Others"
msgstr "Others"

@@ -1241,7 +1241,7 @@ msgstr "Search..."
msgid "Sections"
msgstr "Sections"

#: src/components/settings/components/ai/llm-custom-view.tsx:543
#: src/components/settings/components/ai/llm-custom-view.tsx:605
msgid "Select a model from the dropdown (if available) or manually enter the model name required by your endpoint."
msgstr "Select a model from the dropdown (if available) or manually enter the model name required by your endpoint."

@@ -1410,7 +1410,7 @@ msgstr "Toggle widget panel"
msgid "Transcribing"
msgstr "Transcribing"

#: src/components/settings/views/ai.tsx:797
#: src/components/settings/views/ai.tsx:796
msgid "Transcription"
msgstr "Transcription"

@@ -1450,11 +1450,11 @@ msgstr "Upcoming Events"
#~ msgid "Upgrade"
#~ msgstr "Upgrade"

#: src/components/settings/components/ai/llm-custom-view.tsx:277
#: src/components/settings/components/ai/llm-custom-view.tsx:339
msgid "Use Google's Gemini models with your API key"
msgstr "Use Google's Gemini models with your API key"

#: src/components/settings/components/ai/llm-custom-view.tsx:181
#: src/components/settings/components/ai/llm-custom-view.tsx:243
msgid "Use OpenAI's GPT models with your API key"
msgstr "Use OpenAI's GPT models with your API key"
