fix: improve MiniMax code plan integration (#1169)

This PR improves MiniMax Code Plan integration in DeerFlow by fixing three issues in the current flow: stream errors were not clearly surfaced in the UI, the frontend could not display the actual provider model ID, and MiniMax reasoning output could leak into the final assistant content as inline <think>...</think> tags. The change adds a MiniMax-specific adapter, exposes real model IDs end-to-end, and adds a frontend fallback that strips inline reasoning from historical messages.
Co-authored-by: Willem Jiang <willem.jiang@gmail.com>
This commit is contained in:
Simon Su
2026-03-20 17:18:59 +08:00
committed by GitHub
parent 3b235fd182
commit ceab7fac14
14 changed files with 491 additions and 22 deletions

View File

@@ -127,7 +127,7 @@ export function groupMessages<T>(
export function extractTextFromMessage(message: Message) {
if (typeof message.content === "string") {
return message.content.trim();
return splitInlineReasoningFromAIMessage(message)?.content ?? message.content.trim();
}
if (Array.isArray(message.content)) {
return message.content
@@ -138,9 +138,36 @@ export function extractTextFromMessage(message: Message) {
return "";
}
const THINK_TAG_RE = /<think>\s*([\s\S]*?)\s*<\/think>/g;
/**
 * Removes every inline <think>…</think> block from `content` and gathers
 * the enclosed text as reasoning.
 *
 * @param content Raw message text that may contain inline reasoning tags.
 * @returns The cleaned content (trimmed, tags removed) plus the collected
 *   reasoning joined by blank lines, or `null` reasoning when no non-empty
 *   <think> block was found.
 */
function splitInlineReasoning(content: string) {
  const collected: string[] = [];
  // String.replace with a global regex visits every tag pair; the callback
  // harvests each capture group while erasing the tag from the content.
  const stripped = content.replace(THINK_TAG_RE, (_match, captured: string) => {
    const piece = captured.trim();
    if (piece.length > 0) {
      collected.push(piece);
    }
    return "";
  });
  return {
    content: stripped.trim(),
    reasoning: collected.length === 0 ? null : collected.join("\n\n"),
  };
}
/**
 * Applies {@link splitInlineReasoning} to an AI message whose content is a
 * plain string; any other message shape yields `null` so callers can fall
 * back to their default handling.
 */
function splitInlineReasoningFromAIMessage(message: Message) {
  // Guard clauses: only AI messages with string content carry inline
  // <think> tags worth splitting.
  if (message.type !== "ai") {
    return null;
  }
  if (typeof message.content !== "string") {
    return null;
  }
  return splitInlineReasoning(message.content);
}
export function extractContentFromMessage(message: Message) {
if (typeof message.content === "string") {
return message.content.trim();
return splitInlineReasoningFromAIMessage(message)?.content ?? message.content.trim();
}
if (Array.isArray(message.content)) {
return message.content
@@ -177,6 +204,9 @@ export function extractReasoningContentFromMessage(message: Message) {
return part.thinking as string;
}
}
if (typeof message.content === "string") {
return splitInlineReasoning(message.content).reasoning;
}
return null;
}
@@ -202,7 +232,9 @@ export function extractURLFromImageURLContent(
export function hasContent(message: Message) {
if (typeof message.content === "string") {
return message.content.trim().length > 0;
return (
splitInlineReasoningFromAIMessage(message)?.content ?? message.content.trim()
).length > 0;
}
if (Array.isArray(message.content)) {
return message.content.length > 0;
@@ -222,6 +254,9 @@ export function hasReasoning(message: Message) {
// Compatible with the Anthropic gateway
return (part as unknown as { type: "thinking" })?.type === "thinking";
}
if (typeof message.content === "string") {
return splitInlineReasoning(message.content).reasoning !== null;
}
return false;
}

View File

@@ -1,6 +1,7 @@
export interface Model {
id: string;
name: string;
model: string;
display_name: string;
description?: string | null;
supports_thinking?: boolean;

View File

@@ -31,6 +31,29 @@ export type ThreadStreamOptions = {
onToolEnd?: (event: ToolEndEvent) => void;
};
/**
 * Derives a human-readable message from an arbitrary stream error.
 *
 * Checks, in order: a non-blank string, an Error with a non-blank message,
 * an object with a usable `message` property, then an object with a nested
 * `error` (Error instance or string). Falls back to a generic message when
 * nothing usable is found.
 *
 * @param error Whatever value the stream's error callback delivered.
 * @returns A non-empty string suitable for a user-facing toast.
 */
function getStreamErrorMessage(error: unknown): string {
  // A value is "usable" only when it is a string with visible characters.
  const usable = (value: unknown): value is string =>
    typeof value === "string" && value.trim().length > 0;

  if (usable(error)) {
    return error;
  }
  if (error instanceof Error && usable(error.message)) {
    return error.message;
  }
  if (typeof error === "object" && error !== null) {
    const directMessage = Reflect.get(error, "message");
    if (usable(directMessage)) {
      return directMessage;
    }
    const inner = Reflect.get(error, "error");
    if (inner instanceof Error && usable(inner.message)) {
      return inner.message;
    }
    if (usable(inner)) {
      return inner;
    }
  }
  return "Request failed.";
}
export function useThreadStream({
threadId,
context,
@@ -148,6 +171,10 @@ export function useThreadStream({
updateSubtask({ id: e.task_id, latestMessage: e.message });
}
},
onError(error) {
setOptimisticMessages([]);
toast.error(getStreamErrorMessage(error));
},
onFinish(state) {
listeners.current.onFinish?.(state.values);
void queryClient.invalidateQueries({ queryKey: ["threads", "search"] });