mirror of
https://gitee.com/wanwujie/deer-flow
synced 2026-04-03 06:12:14 +08:00
fix: make SSE buffer size configurable to prevent overflow during multi-round searches (#664) (#665)
* fix: make SSE buffer size configurable to prevent overflow during multi-round searches (Issue #664) - Add NEXT_PUBLIC_MAX_STREAM_BUFFER_SIZE environment variable for frontend SSE stream buffer - Default to 1MB for backward compatibility, users can increase to 5-10MB for large searches - Enhance error message with actual buffer sizes and guidance on configuration - Add validation schema in env.js with positive integer requirement - Document configuration in .env.example with clear examples and use cases * Apply suggestions from code review Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
This commit is contained in:
@@ -41,3 +41,17 @@ NEXT_PUBLIC_API_URL=http://localhost:8000/api
|
||||
# Github OAuth Token (optional)
|
||||
GITHUB_OAUTH_TOKEN=xxxx
|
||||
|
||||
# Stream Buffer Size Configuration (optional)
|
||||
# Controls the maximum buffer size for SSE (Server-Sent Events) streams in bytes.
|
||||
# Default: 1MB (1048576 bytes)
|
||||
# Increase this if you experience buffer overflow errors during large searches.
|
||||
# Use cases for increasing this value:
|
||||
# - Performing multi-round web searches with large content results
|
||||
# - Handling responses with many search results or images
|
||||
# - Systems with sufficient memory to handle larger buffers
|
||||
# Examples:
|
||||
# - 5MB: NEXT_PUBLIC_MAX_STREAM_BUFFER_SIZE=5242880
|
||||
# - 10MB: NEXT_PUBLIC_MAX_STREAM_BUFFER_SIZE=10485760
|
||||
# Note: Very large buffers may increase memory usage. Start with 5-10MB if needed.
|
||||
# NEXT_PUBLIC_MAX_STREAM_BUFFER_SIZE=1048576
|
||||
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
// Copyright (c) 2025 Bytedance Ltd. and/or its affiliates
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
import { env } from "~/env";
|
||||
|
||||
import { type StreamEvent } from "./StreamEvent";
|
||||
|
||||
export async function* fetchStream(
|
||||
@@ -28,7 +30,8 @@ export async function* fetchStream(
|
||||
|
||||
try {
|
||||
let buffer = "";
|
||||
const MAX_BUFFER_SIZE = 1024 * 1024; // 1MB buffer size limit
|
||||
// Use configurable buffer size from environment, default to 1MB (1048576 bytes)
|
||||
const MAX_BUFFER_SIZE = env.NEXT_PUBLIC_MAX_STREAM_BUFFER_SIZE ?? (1024 * 1024);
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
@@ -47,7 +50,11 @@ export async function* fetchStream(
|
||||
|
||||
// Check buffer size to avoid memory overflow
|
||||
if (buffer.length > MAX_BUFFER_SIZE) {
|
||||
throw new Error("Buffer overflow - received too much data without proper event boundaries");
|
||||
throw new Error(
|
||||
`Buffer overflow - received ${(buffer.length / 1024 / 1024).toFixed(2)}MB of data without proper event boundaries. ` +
|
||||
`Max buffer size is ${(MAX_BUFFER_SIZE / 1024 / 1024).toFixed(2)}MB. ` +
|
||||
`You can increase this by setting NEXT_PUBLIC_MAX_STREAM_BUFFER_SIZE environment variable.`
|
||||
);
|
||||
}
|
||||
|
||||
let newlineIndex;
|
||||
|
||||
@@ -23,6 +23,7 @@ export const env = createEnv({
|
||||
client: {
|
||||
NEXT_PUBLIC_API_URL: z.string().optional(),
|
||||
NEXT_PUBLIC_STATIC_WEBSITE_ONLY: z.boolean().optional(),
|
||||
NEXT_PUBLIC_MAX_STREAM_BUFFER_SIZE: z.coerce.number().int().positive().optional(),
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -34,6 +35,7 @@ export const env = createEnv({
|
||||
NEXT_PUBLIC_API_URL: process.env.NEXT_PUBLIC_API_URL,
|
||||
NEXT_PUBLIC_STATIC_WEBSITE_ONLY:
|
||||
process.env.NEXT_PUBLIC_STATIC_WEBSITE_ONLY === "true",
|
||||
NEXT_PUBLIC_MAX_STREAM_BUFFER_SIZE: process.env.NEXT_PUBLIC_MAX_STREAM_BUFFER_SIZE,
|
||||
AMPLITUDE_API_KEY: process.env.AMPLITUDE_API_KEY,
|
||||
GITHUB_OAUTH_TOKEN: process.env.GITHUB_OAUTH_TOKEN,
|
||||
},
|
||||
|
||||
Reference in New Issue
Block a user