fix: improve MiniMax code plan integration (#1169)

This PR improves the MiniMax Code Plan integration in DeerFlow by fixing three issues in the current flow: stream errors were not clearly surfaced in the UI, the frontend could not display the actual provider model ID, and MiniMax reasoning output could leak into the final assistant content as inline <think>...</think> tags. The change adds a MiniMax-specific adapter, exposes real model IDs end-to-end, and adds a frontend fallback for historical messages.
Co-authored-by: Willem Jiang <willem.jiang@gmail.com>
This commit is contained in:
Simon Su
2026-03-20 17:18:59 +08:00
committed by GitHub
parent 3b235fd182
commit ceab7fac14
14 changed files with 491 additions and 22 deletions

View File

@@ -2,7 +2,6 @@ import "@/styles/globals.css";
import "katex/dist/katex.min.css";
import { type Metadata } from "next";
import { Geist } from "next/font/google";
import { ThemeProvider } from "@/components/theme-provider";
import { I18nProvider } from "@/core/i18n/context";
@@ -13,22 +12,12 @@ export const metadata: Metadata = {
description: "A LangChain-based framework for building super agents.",
};
const geist = Geist({
subsets: ["latin"],
variable: "--font-geist-sans",
});
export default async function RootLayout({
children,
}: Readonly<{ children: React.ReactNode }>) {
const locale = await detectLocaleServer();
return (
<html
lang={locale}
className={geist.variable}
suppressContentEditableWarning
suppressHydrationWarning
>
<html lang={locale} suppressContentEditableWarning suppressHydrationWarning>
<body>
<ThemeProvider attribute="class" enableSystem disableTransitionOnChange>
<I18nProvider initialLocale={locale}>{children}</I18nProvider>

View File

@@ -4,24 +4,28 @@ export function GET() {
{
id: "doubao-seed-1.8",
name: "doubao-seed-1.8",
model: "doubao-seed-1-8",
display_name: "Doubao Seed 1.8",
supports_thinking: true,
},
{
id: "deepseek-v3.2",
name: "deepseek-v3.2",
model: "deepseek-chat",
display_name: "DeepSeek v3.2",
supports_thinking: true,
},
{
id: "gpt-5",
name: "gpt-5",
model: "gpt-5",
display_name: "GPT-5",
supports_thinking: true,
},
{
id: "gemini-3-pro",
name: "gemini-3-pro",
model: "gemini-3-pro",
display_name: "Gemini 3 Pro",
supports_thinking: true,
},

View File

@@ -154,7 +154,13 @@ export default function AgentChatPage() {
isNewThread={isNewThread}
threadId={threadId}
autoFocus={isNewThread}
status={thread.isLoading ? "streaming" : "ready"}
status={
thread.error
? "error"
: thread.isLoading
? "streaming"
: "ready"
}
context={settings.context}
extraHeader={
isNewThread && (

View File

@@ -122,7 +122,13 @@ export default function ChatPage() {
isNewThread={isNewThread}
threadId={threadId}
autoFocus={isNewThread}
status={thread.isLoading ? "streaming" : "ready"}
status={
thread.error
? "error"
: thread.isLoading
? "streaming"
: "ready"
}
context={settings.context}
extraHeader={
isNewThread && <Welcome mode={settings.context.mode} />

View File

@@ -702,9 +702,16 @@ export function InputBox({
>
<ModelSelectorTrigger asChild>
<PromptInputButton>
<ModelSelectorName className="text-xs font-normal">
{selectedModel?.display_name}
</ModelSelectorName>
<div className="flex min-w-0 flex-col items-start text-left">
<ModelSelectorName className="text-xs font-normal">
{selectedModel?.display_name}
</ModelSelectorName>
{selectedModel?.model && (
<span className="text-muted-foreground w-full truncate text-[10px] leading-none">
{selectedModel.model}
</span>
)}
</div>
</PromptInputButton>
</ModelSelectorTrigger>
<ModelSelectorContent>
@@ -716,7 +723,12 @@ export function InputBox({
value={m.name}
onSelect={() => handleModelSelect(m.name)}
>
<ModelSelectorName>{m.display_name}</ModelSelectorName>
<div className="flex min-w-0 flex-1 flex-col">
<ModelSelectorName>{m.display_name}</ModelSelectorName>
<span className="text-muted-foreground truncate text-[10px]">
{m.model}
</span>
</div>
{m.name === context.model_name ? (
<CheckIcon className="ml-auto size-4" />
) : (

View File

@@ -127,7 +127,7 @@ export function groupMessages<T>(
export function extractTextFromMessage(message: Message) {
if (typeof message.content === "string") {
return message.content.trim();
return splitInlineReasoningFromAIMessage(message)?.content ?? message.content.trim();
}
if (Array.isArray(message.content)) {
return message.content
@@ -138,9 +138,36 @@ export function extractTextFromMessage(message: Message) {
return "";
}
const THINK_TAG_RE = /<think>\s*([\s\S]*?)\s*<\/think>/g;

/**
 * Strips inline <think>...</think> blocks out of a raw message string and
 * collects their trimmed bodies separately.
 *
 * Returns the remaining text (trimmed) as `content`, and the collected
 * reasoning segments joined by blank lines as `reasoning` — or `null` when
 * no non-empty <think> block was present.
 */
function splitInlineReasoning(content: string) {
  const collected: string[] = [];
  const stripped = content.replace(THINK_TAG_RE, (_match, body: string) => {
    const trimmed = body.trim();
    // Ignore empty <think></think> pairs so they don't produce blank
    // reasoning entries.
    if (trimmed.length > 0) {
      collected.push(trimmed);
    }
    return "";
  });
  return {
    content: stripped.trim(),
    reasoning: collected.length === 0 ? null : collected.join("\n\n"),
  };
}
/**
 * Applies splitInlineReasoning only to AI messages whose content is a plain
 * string; every other message shape yields `null` so callers can fall back
 * to their default handling.
 */
function splitInlineReasoningFromAIMessage(message: Message) {
  // Only assistant output can carry leaked inline reasoning.
  if (message.type !== "ai") {
    return null;
  }
  // Array-style (multi-part) content is handled elsewhere.
  if (typeof message.content !== "string") {
    return null;
  }
  return splitInlineReasoning(message.content);
}
export function extractContentFromMessage(message: Message) {
if (typeof message.content === "string") {
return message.content.trim();
return splitInlineReasoningFromAIMessage(message)?.content ?? message.content.trim();
}
if (Array.isArray(message.content)) {
return message.content
@@ -177,6 +204,9 @@ export function extractReasoningContentFromMessage(message: Message) {
return part.thinking as string;
}
}
if (typeof message.content === "string") {
return splitInlineReasoning(message.content).reasoning;
}
return null;
}
@@ -202,7 +232,9 @@ export function extractURLFromImageURLContent(
export function hasContent(message: Message) {
if (typeof message.content === "string") {
return message.content.trim().length > 0;
return (
splitInlineReasoningFromAIMessage(message)?.content ?? message.content.trim()
).length > 0;
}
if (Array.isArray(message.content)) {
return message.content.length > 0;
@@ -222,6 +254,9 @@ export function hasReasoning(message: Message) {
// Compatible with the Anthropic gateway
return (part as unknown as { type: "thinking" })?.type === "thinking";
}
if (typeof message.content === "string") {
return splitInlineReasoning(message.content).reasoning !== null;
}
return false;
}

View File

@@ -1,6 +1,7 @@
export interface Model {
id: string;
name: string;
model: string;
display_name: string;
description?: string | null;
supports_thinking?: boolean;

View File

@@ -31,6 +31,29 @@ export type ThreadStreamOptions = {
onToolEnd?: (event: ToolEndEvent) => void;
};
/**
 * Derives a human-readable message from an arbitrary stream error value.
 *
 * Accepts, in priority order: a non-blank string, an Error with a non-blank
 * message, or a plain object carrying a string `message` or a nested `error`
 * (Error or string). Anything else falls back to a generic message.
 */
function getStreamErrorMessage(error: unknown): string {
  // Shared "non-empty after trimming" check used by every branch.
  const nonBlank = (value: unknown): value is string =>
    typeof value === "string" && value.trim().length > 0;

  if (nonBlank(error)) {
    return error;
  }
  if (error instanceof Error && nonBlank(error.message)) {
    return error.message;
  }
  if (error !== null && typeof error === "object") {
    const direct = Reflect.get(error, "message");
    if (nonBlank(direct)) {
      return direct;
    }
    const nested = Reflect.get(error, "error");
    if (nested instanceof Error && nonBlank(nested.message)) {
      return nested.message;
    }
    if (nonBlank(nested)) {
      return nested;
    }
  }
  return "Request failed.";
}
export function useThreadStream({
threadId,
context,
@@ -148,6 +171,10 @@ export function useThreadStream({
updateSubtask({ id: e.task_id, latestMessage: e.message });
}
},
onError(error) {
setOptimisticMessages([]);
toast.error(getStreamErrorMessage(error));
},
onFinish(state) {
listeners.current.onFinish?.(state.values);
void queryClient.invalidateQueries({ queryKey: ["threads", "search"] });

View File

@@ -72,7 +72,7 @@
@theme {
--font-sans:
var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif,
ui-sans-serif, system-ui, sans-serif,
"Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
--animate-fade-in: fade-in 1.1s;