feat: support dynamic model loading

This commit is contained in:
Henry Li
2026-01-19 18:54:04 +08:00
parent 1a7c853811
commit 541586dc66
5 changed files with 49 additions and 23 deletions

View File

@@ -11,6 +11,7 @@ import {
PromptInputTextarea,
type PromptInputMessage,
} from "@/components/ai-elements/prompt-input";
import { useModels } from "@/core/models/hooks";
import type { AgentThreadContext } from "@/core/threads";
import { cn } from "@/lib/utils";
@@ -26,15 +27,6 @@ import {
import { Tooltip } from "./tooltip";
const AVAILABLE_MODELS = [
{ name: "deepseek-v3.2", displayName: "DeepSeek v3.2", provider: "deepseek" },
{
name: "doubao-seed-1.8",
displayName: "Doubao Seed 1.8",
provider: "doubao",
},
];
export function InputBox({
className,
autoFocus,
@@ -54,19 +46,22 @@ export function InputBox({
onStop?: () => void;
}) {
const [modelDialogOpen, setModelDialogOpen] = useState(false);
const { models } = useModels();
// Resolve the full model object for the name stored in the thread context.
// (The diff render left both the old AVAILABLE_MODELS-based memo and the new
// models-based one in place; only the dynamic-list version is kept here.)
const selectedModel = useMemo(
  () => models.find((m) => m.name === context.model_name),
  [context.model_name, models],
);
const handleModelSelect = useCallback(
(model_name: string) => {
const supports_thinking = selectedModel?.supports_thinking ?? false;
onContextChange?.({
...context,
model_name,
thinking_enabled: supports_thinking && context.thinking_enabled,
});
setModelDialogOpen(false);
},
[onContextChange, context],
[selectedModel?.supports_thinking, onContextChange, context],
);
const handleThinkingToggle = useCallback(() => {
onContextChange?.({
@@ -90,7 +85,7 @@ export function InputBox({
return (
<PromptInput
className={cn(
"bg-background/50 rounded-2xl backdrop-blur-sm transition-all duration-300 ease-out *:data-[slot='input-group']:rounded-2xl",
"bg-background/85 rounded-2xl backdrop-blur-sm transition-all duration-300 ease-out *:data-[slot='input-group']:rounded-2xl",
"h-48 translate-y-14 overflow-hidden",
className,
)}
@@ -123,13 +118,15 @@ export function InputBox({
)
}
>
<PromptInputButton onClick={handleThinkingToggle}>
{context.thinking_enabled ? (
<LightbulbIcon className="text-primary size-4" />
) : (
<LightbulbOffIcon className="size-4" />
)}
</PromptInputButton>
{selectedModel?.supports_thinking && (
<PromptInputButton onClick={handleThinkingToggle}>
{context.thinking_enabled ? (
<LightbulbIcon className="text-primary size-4" />
) : (
<LightbulbOffIcon className="size-4" />
)}
</PromptInputButton>
)}
</Tooltip>
</div>
<div className="flex items-center gap-2">
@@ -140,20 +137,20 @@ export function InputBox({
<ModelSelectorTrigger asChild>
<PromptInputButton>
<ModelSelectorName className="text-xs font-normal">
{selectedModel?.displayName}
{selectedModel?.display_name}
</ModelSelectorName>
</PromptInputButton>
</ModelSelectorTrigger>
<ModelSelectorContent>
<ModelSelectorInput placeholder="Search models..." />
<ModelSelectorList>
{AVAILABLE_MODELS.map((m) => (
{models.map((m) => (
<ModelSelectorItem
key={m.name}
value={m.name}
onSelect={() => handleModelSelect(m.name)}
>
<ModelSelectorName>{m.displayName}</ModelSelectorName>
<ModelSelectorName>{m.display_name}</ModelSelectorName>
{m.name === context.model_name ? (
<CheckIcon className="ml-auto size-4" />
) : (

View File

@@ -0,0 +1,9 @@
import { getBackendBaseURL } from "../config";
import type { Model } from "./types";
/**
 * Fetch the list of available models from the backend.
 *
 * @returns the models advertised by `GET /api/models`.
 * @throws if the network request fails or the backend responds with a
 *   non-2xx status (previously a failed response would surface as an
 *   opaque JSON-parse error).
 */
export async function loadModels(): Promise<Model[]> {
  // Await the fetch directly instead of holding an un-awaited promise
  // and double-awaiting it later.
  const res = await fetch(`${getBackendBaseURL()}/api/models`);
  if (!res.ok) {
    throw new Error(`Failed to load models: ${res.status} ${res.statusText}`);
  }
  // NOTE(review): the response is cast, not validated — assumes the backend
  // honors the { models: Model[] } contract; consider a schema check.
  const { models } = (await res.json()) as { models: Model[] };
  return models;
}

View File

@@ -0,0 +1,11 @@
import { useQuery } from "@tanstack/react-query";
import { loadModels } from "./api";
/**
 * React hook exposing the dynamically loaded model list.
 *
 * @returns `models` (empty array while loading or on error), plus the
 *   query's `isLoading` and `error` states.
 */
export function useModels() {
  const query = useQuery({
    queryKey: ["models"],
    queryFn: () => loadModels(),
  });
  return {
    models: query.data ?? [],
    isLoading: query.isLoading,
    error: query.error,
  };
}

View File

@@ -0,0 +1,2 @@
// Barrel module for the models package.
// NOTE(review): ./hooks is intentionally (?) not re-exported here — the
// input box imports "@/core/models/hooks" directly; confirm whether the
// hook should be part of this public surface.
export * from "./api";
export * from "./types";

View File

@@ -0,0 +1,7 @@
/** A model advertised by the backend's `GET /api/models` endpoint. */
export interface Model {
  /** Backend-assigned identifier. */
  id: string;
  /** Stable machine name; used as the selection key in the thread context. */
  name: string;
  /** Human-readable label shown in the model selector UI. */
  display_name: string;
  /** Optional free-form description; backend may send null. */
  description?: string | null;
  /** Whether the model supports "thinking" mode; callers treat a missing
   * value as false (`?? false` in the input box). */
  supports_thinking?: boolean;
}