feat: add deep think feature (#311)

* feat: implement backend logic

* feat: implement api/config endpoint

* rename the symbol

* feat: re-implement configuration at client-side

* feat: add client-side of deep thinking

* fix backend bug

* feat: add reasoning block

* docs: update readme

* fix: translate into English

* fix: change icon to lightbulb

* feat: ignore more bad cases

* feat: adjust thinking layout, and implement auto scrolling

* docs: add comments

---------

Co-authored-by: Henry Li <henry1943@163.com>
This commit is contained in:
DanielWalnut
2025-06-14 13:12:43 +08:00
committed by GitHub
parent a7315b46df
commit 19fa1e97c3
40 changed files with 2292 additions and 1102 deletions

View File

@@ -22,6 +22,7 @@ export async function* chatStream(
max_step_num: number;
max_search_results?: number;
interrupt_feedback?: string;
enable_deep_thinking?: boolean;
enable_background_investigation: boolean;
report_style?: "academic" | "popular_science" | "news" | "social_media";
mcp_settings?: {

View File

@@ -0,0 +1,25 @@
import { type DeerFlowConfig } from "../config/types";
import { resolveServiceURL } from "./resolve-service-url";
// Augment the global Window type so the DeerFlow config fetched from the
// backend can be cached on `window` and read synchronously by getConfig().
declare global {
  interface Window {
    __deerflowConfig: DeerFlowConfig;
  }
}
export async function loadConfig() {
const res = await fetch(resolveServiceURL("./config"));
const config = await res.json();
return config;
}
/**
 * Returns the DeerFlow config previously cached on `window`.
 *
 * @throws Error("Config not loaded") when called server-side (no `window`)
 *   or before the config has been stored on `window.__deerflowConfig`.
 */
export function getConfig(): DeerFlowConfig {
  const cached =
    typeof window === "undefined" ? undefined : window.__deerflowConfig;
  if (cached === undefined) {
    throw new Error("Config not loaded");
  }
  return cached;
}

View File

@@ -8,7 +8,7 @@ import { env } from "~/env";
import { useReplay } from "../replay";
import { fetchReplayTitle } from "./chat";
import { getRAGConfig } from "./rag";
import { getConfig } from "./config";
export function useReplayMetadata() {
const { isReplay } = useReplay();
@@ -52,15 +52,8 @@ export function useRAGProvider() {
setLoading(false);
return;
}
getRAGConfig()
.then(setProvider)
.catch((e) => {
setProvider(null);
console.error("Failed to get RAG provider", e);
})
.finally(() => {
setLoading(false);
});
setProvider(getConfig().rag.provider);
setLoading(false);
}, []);
return { provider, loading };

View File

@@ -10,15 +10,7 @@ export function queryRAGResources(query: string) {
.then((res) => {
return res.resources as Array<Resource>;
})
.catch((err) => {
.catch(() => {
return [];
});
}
export function getRAGConfig() {
return fetch(resolveServiceURL(`rag/config`), {
method: "GET",
})
.then((res) => res.json())
.then((res) => res.provider);
}

View File

@@ -38,6 +38,7 @@ export interface MessageChunkEvent
"message_chunk",
{
content?: string;
reasoning_content?: string;
}
> {}

View File

@@ -0,0 +1 @@
export * from "./types";

View File

@@ -0,0 +1,13 @@
/** Model names available to the backend, grouped by capability. */
export interface ModelConfig {
  /** Models for ordinary chat turns. */
  basic: string[];
  /** Models that support extended reasoning ("deep thinking"). */
  reasoning: string[];
}

/** Retrieval-augmented-generation (RAG) settings. */
export interface RagConfig {
  /** Identifier of the configured RAG provider. */
  provider: string;
}

/** Top-level DeerFlow configuration shape. */
export interface DeerFlowConfig {
  rag: RagConfig;
  models: ModelConfig;
}

View File

@@ -43,6 +43,11 @@ function mergeTextMessage(message: Message, event: MessageChunkEvent) {
message.content += event.data.content;
message.contentChunks.push(event.data.content);
}
if (event.data.reasoning_content) {
message.reasoningContent = (message.reasoningContent ?? "") + event.data.reasoning_content;
message.reasoningContentChunks = message.reasoningContentChunks ?? [];
message.reasoningContentChunks.push(event.data.reasoning_content);
}
}
function mergeToolCallMessage(

View File

@@ -17,6 +17,8 @@ export interface Message {
isStreaming?: boolean;
content: string;
contentChunks: string[];
reasoningContent?: string;
reasoningContentChunks?: string[];
toolCalls?: ToolCallRuntime[];
options?: Option[];
finishReason?: "stop" | "interrupt" | "tool_calls";

View File

@@ -10,6 +10,7 @@ const SETTINGS_KEY = "deerflow.settings";
const DEFAULT_SETTINGS: SettingsState = {
general: {
autoAcceptedPlan: false,
enableDeepThinking: false,
enableBackgroundInvestigation: false,
maxPlanIterations: 1,
maxStepNum: 3,
@@ -24,6 +25,7 @@ const DEFAULT_SETTINGS: SettingsState = {
export type SettingsState = {
general: {
autoAcceptedPlan: boolean;
enableDeepThinking: boolean;
enableBackgroundInvestigation: boolean;
maxPlanIterations: number;
maxStepNum: number;
@@ -127,7 +129,9 @@ export const getChatStreamSettings = () => {
};
};
export function setReportStyle(value: "academic" | "popular_science" | "news" | "social_media") {
export function setReportStyle(
value: "academic" | "popular_science" | "news" | "social_media",
) {
useSettingsStore.setState((state) => ({
general: {
...state.general,
@@ -137,6 +141,16 @@ export function setReportStyle(value: "academic" | "popular_science" | "news" |
saveSettings();
}
/**
 * Updates the "deep thinking" toggle in the general settings and persists
 * the settings store.
 */
export function setEnableDeepThinking(value: boolean) {
  useSettingsStore.setState(({ general }) => ({
    general: { ...general, enableDeepThinking: value },
  }));
  saveSettings();
}
export function setEnableBackgroundInvestigation(value: boolean) {
useSettingsStore.setState((state) => ({
general: {

View File

@@ -104,6 +104,7 @@ export async function sendMessage(
interrupt_feedback: interruptFeedback,
resources,
auto_accepted_plan: settings.autoAcceptedPlan,
enable_deep_thinking: settings.enableDeepThinking ?? false,
enable_background_investigation:
settings.enableBackgroundInvestigation ?? true,
max_plan_iterations: settings.maxPlanIterations,
@@ -132,6 +133,8 @@ export async function sendMessage(
role: data.role,
content: "",
contentChunks: [],
reasoningContent: "",
reasoningContentChunks: [],
isStreaming: true,
interruptFeedback,
};
@@ -296,6 +299,8 @@ export async function listenToPodcast(researchId: string) {
agent: "podcast",
content: JSON.stringify(podcastObject),
contentChunks: [],
reasoningContent: "",
reasoningContentChunks: [],
isStreaming: true,
};
appendMessage(podcastMessage);

View File

@@ -7,7 +7,10 @@ export function parseJSON<T>(json: string | null | undefined, fallback: T) {
try {
const raw = json
.trim()
.replace(/^```js\s*/, "")
.replace(/^```json\s*/, "")
.replace(/^```ts\s*/, "")
.replace(/^```plaintext\s*/, "")
.replace(/^```\s*/, "")
.replace(/\s*```$/, "");
return parse(raw) as T;