Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 17 additions & 15 deletions frontend/src/components/settings/EmbeddingFormModal.tsx
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { useState, useEffect } from "react";
import { Box, Button, TextInput, FormControl, Select, Dialog } from "@primer/react";
import type { EmbeddingModelConfig, LLMProvider } from "../../types";
import { isLLMProvider, LLM_PROVIDERS } from "../../types";

interface Props {
isOpen: boolean;
Expand All @@ -9,13 +10,6 @@ interface Props {
initialData?: EmbeddingModelConfig;
}

// Options for the provider <Select>: `value` is the canonical LLMProvider id
// sent to the backend, `label` is the human-readable name shown in the UI.
const PROVIDERS: { value: LLMProvider; label: string }[] = [
{ value: "openai", label: "OpenAI" },
{ value: "anthropic", label: "Anthropic" },
{ value: "gemini", label: "Google Gemini" },
{ value: "ollama", label: "Ollama" },
];

const PROVIDER_DEFAULTS: Record<
LLMProvider,
{ endpoint: string; model: string; dimensions?: number }
Expand Down Expand Up @@ -58,14 +52,18 @@ export default function EmbeddingFormModal({ isOpen, onClose, onSave, initialDat
setApiKey(initialData.api_key || "");
setModelName(initialData.model_name);
setDimensions(initialData.dimensions?.toString() || "");
} else {
// set defaults for new model
const defaults = PROVIDER_DEFAULTS[provider];
} else if (isOpen) {
// set defaults for new model only when opening
const defaultProvider: LLMProvider = "openai";
const defaults = PROVIDER_DEFAULTS[defaultProvider];
setName("");
setProvider(defaultProvider);
setEndpoint(defaults.endpoint);
setModelName(defaults.model);
setDimensions(defaults.dimensions?.toString() || "");
setApiKey("");
}
}, [initialData, provider]);
}, [isOpen, initialData]);

const handleProviderChange = (newProvider: LLMProvider) => {
setProvider(newProvider);
Expand All @@ -92,8 +90,8 @@ export default function EmbeddingFormModal({ isOpen, onClose, onSave, initialDat
if (provider !== "ollama" && !apiKey.trim()) {
newErrors.apiKey = "api key is required for this provider";
}
if (dimensions && isNaN(parseInt(dimensions))) {
newErrors.dimensions = "dimensions must be a number";
if (dimensions && (isNaN(Number(dimensions)) || Number(dimensions) < 1)) {
newErrors.dimensions = "dimensions must be a number greater than 0";
}
Comment on lines +93 to 95
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

Fractional dimensions silently truncated by parseInt.

A value like "1.5" passes validation (1.5 >= 1) but parseInt("1.5") on line 119 yields 1, silently dropping the fractional part. Dimensions should be a positive integer — tighten the check:

🐛 Proposed fix
-    if (dimensions && (isNaN(Number(dimensions)) || Number(dimensions) < 1)) {
-      newErrors.dimensions = "dimensions must be a number greater than 0";
+    if (dimensions && (isNaN(Number(dimensions)) || !Number.isInteger(Number(dimensions)) || Number(dimensions) < 1)) {
+      newErrors.dimensions = "dimensions must be a positive integer";
     }

Also applies to: 119-119

🤖 Prompt for AI Agents
In `@frontend/src/components/settings/EmbeddingFormModal.tsx` around lines 102 -
104, The current validation allows fractional strings like "1.5" because it only
checks Number(dimensions) >= 1 and later parseInt truncates; update the
validation in EmbeddingFormModal to require an integer by checking
Number.isInteger(Number(dimensions)) && Number(dimensions) > 0 and set
newErrors.dimensions accordingly (e.g., "dimensions must be a positive
integer"); ensure any subsequent conversion uses parseInt(dimensions, 10) only
after this integer check so fractional inputs are rejected rather than silently
truncated.


setErrors(newErrors);
Expand All @@ -110,6 +108,7 @@ export default function EmbeddingFormModal({ isOpen, onClose, onSave, initialDat
api_key: apiKey.trim() || null,
model_name: modelName.trim(),
dimensions: dimensions ? parseInt(dimensions) : null,
is_default: initialData?.is_default ?? false,
};

setSaving(true);
Expand Down Expand Up @@ -163,10 +162,13 @@ export default function EmbeddingFormModal({ isOpen, onClose, onSave, initialDat
<FormControl.Label sx={{ color: "fg.default" }}>Provider</FormControl.Label>
<Select
value={provider}
onChange={(e) => handleProviderChange(e.target.value as LLMProvider)}
onChange={(e) => {
const val = e.target.value;
if (isLLMProvider(val)) handleProviderChange(val);
}}
block
>
{PROVIDERS.map((p) => (
{LLM_PROVIDERS.map((p) => (
<Select.Option key={p.value} value={p.value}>
{p.label}
</Select.Option>
Expand Down
42 changes: 22 additions & 20 deletions frontend/src/components/settings/LLMFormModal.tsx
Original file line number Diff line number Diff line change
@@ -1,20 +1,7 @@
import { useState, useEffect } from "react";
import { Box, Button, TextInput, FormControl, Select, Dialog } from "@primer/react";
import type { LLMModelConfig, LLMProvider } from "../../types";

interface Props {
isOpen: boolean;
onClose: () => void;
onSave: (config: LLMModelConfig) => Promise<void>;
initialData?: LLMModelConfig;
}

const PROVIDERS: { value: LLMProvider; label: string }[] = [
{ value: "openai", label: "OpenAI" },
{ value: "anthropic", label: "Anthropic" },
{ value: "gemini", label: "Google Gemini" },
{ value: "ollama", label: "Ollama" },
];
import { isLLMProvider, LLM_PROVIDERS } from "../../types";

const PROVIDER_DEFAULTS: Record<LLMProvider, { endpoint: string; model: string }> = {
openai: {
Expand All @@ -35,6 +22,13 @@ const PROVIDER_DEFAULTS: Record<LLMProvider, { endpoint: string; model: string }
},
};

interface Props {
// Whether the modal dialog is currently shown.
isOpen: boolean;
// Called when the user dismisses the modal without saving.
onClose: () => void;
// Persists the edited configuration; awaited so the modal can reflect an in-progress save.
onSave: (config: LLMModelConfig) => Promise<void>;
// When present, the modal edits this existing model; otherwise it creates a new one.
initialData?: LLMModelConfig;
}

export default function LLMFormModal({ isOpen, onClose, onSave, initialData }: Props) {
const [name, setName] = useState("");
const [provider, setProvider] = useState<LLMProvider>("openai");
Expand All @@ -51,13 +45,17 @@ export default function LLMFormModal({ isOpen, onClose, onSave, initialData }: P
setEndpoint(initialData.endpoint);
setApiKey(initialData.api_key || "");
setModelName(initialData.model_name);
} else {
// set defaults for new model
const defaults = PROVIDER_DEFAULTS[provider];
} else if (isOpen) {
// set defaults for new model only when opening
const defaultProvider: LLMProvider = "openai";
const defaults = PROVIDER_DEFAULTS[defaultProvider];
setName("");
setProvider(defaultProvider);
setEndpoint(defaults.endpoint);
setModelName(defaults.model);
setApiKey("");
}
}, [initialData, provider]);
}, [isOpen, initialData]);

const handleProviderChange = (newProvider: LLMProvider) => {
setProvider(newProvider);
Expand Down Expand Up @@ -97,6 +95,7 @@ export default function LLMFormModal({ isOpen, onClose, onSave, initialData }: P
endpoint: endpoint.trim(),
api_key: apiKey.trim() || null,
model_name: modelName.trim(),
is_default: initialData?.is_default ?? false,
};

setSaving(true);
Expand Down Expand Up @@ -149,10 +148,13 @@ export default function LLMFormModal({ isOpen, onClose, onSave, initialData }: P
<FormControl.Label sx={{ color: "fg.default" }}>Provider</FormControl.Label>
<Select
value={provider}
onChange={(e) => handleProviderChange(e.target.value as LLMProvider)}
onChange={(e) => {
const val = e.target.value;
if (isLLMProvider(val)) handleProviderChange(val);
}}
block
>
{PROVIDERS.map((p) => (
{LLM_PROVIDERS.map((p) => (
<Select.Option key={p.value} value={p.value}>
{p.label}
</Select.Option>
Expand Down
13 changes: 6 additions & 7 deletions frontend/src/pages/Settings.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@ export default function Settings() {
const isMountedRef = useRef(true);

useEffect(() => {
// Reset to true on each mount (important for React StrictMode double-mount)
isMountedRef.current = true;

loadLlmModels();
loadEmbeddingModels();
loadLangfuseStatus();
Expand Down Expand Up @@ -64,14 +67,10 @@ export default function Settings() {

const loadLangfuseStatus = async () => {
try {
const res = await fetch("/api/langfuse/status");
if (!res.ok) {
throw new Error(`http ${res.status}`);
}
const data = await res.json();
const data = await llmConfigApi.getLangfuseStatus();
if (isMountedRef.current) {
setLangfuseEnabled(data.enabled);
setLangfuseHost(data.host);
setLangfuseHost(data.host ?? null);
}
} catch (error) {
const message = error instanceof Error ? error.message : "Unknown error";
Expand Down Expand Up @@ -214,7 +213,7 @@ export default function Settings() {
loadEmbeddingModels();
} catch (error) {
const message = error instanceof Error ? error.message : "Unknown error";
toast.error(`Failed to save LLM model: ${message}`);
toast.error(`Failed to save embedding model: ${message}`);
throw error;
}
};
Expand Down
11 changes: 11 additions & 0 deletions frontend/src/services/llmConfigApi.ts
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,17 @@ class LLMConfigApi {
if (!response.ok) throw new Error(`http ${response.status}`);
return response.json();
}

async getLangfuseStatus(): Promise<{
enabled: boolean;
host?: string;
public_key?: string;
error?: string;
}> {
const response = await fetch(`${API_BASE}/langfuse/status`);
if (!response.ok) throw new Error(`http ${response.status}`);
return response.json();
}
}

export const llmConfigApi = new LLMConfigApi();
10 changes: 10 additions & 0 deletions frontend/src/types/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,16 @@ export interface BlockSchema {

/** Canonical identifiers for the supported LLM providers. */
export type LLMProvider = "openai" | "anthropic" | "gemini" | "ollama";

/** Display metadata for every supported provider, used to populate provider selects. */
export const LLM_PROVIDERS: { value: LLMProvider; label: string }[] = [
  { value: "openai", label: "OpenAI" },
  { value: "anthropic", label: "Anthropic" },
  { value: "gemini", label: "Google Gemini" },
  { value: "ollama", label: "Ollama" },
];

/**
 * Type guard that narrows an arbitrary string to LLMProvider
 * when it matches one of the known provider ids.
 */
export function isLLMProvider(v: string): v is LLMProvider {
  for (const entry of LLM_PROVIDERS) {
    if (entry.value === v) {
      return true;
    }
  }
  return false;
}

export interface LLMModelConfig {
name: string;
provider: LLMProvider;
Expand Down
6 changes: 6 additions & 0 deletions lib/entities/llm_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,12 @@ def validate_str_fields(cls, v: str | None) -> str:
"""convert None to empty string for database compatibility"""
return v if v is not None else ""

@field_validator("dimensions", mode="before")
@classmethod
def validate_dimensions(cls, v: int | None) -> int:
    """Coerce a missing dimensions value to 0.

    Runs before standard validation (mode="before"), so a None coming from
    the API/database is normalized to 0 rather than failing the int field.
    """
    # None -> 0; any other value is passed through unchanged.
    return v if v is not None else 0


class ConnectionTestResult(BaseModel):
success: bool
Expand Down
Loading
Loading