mirror of
https://gitee.com/wanwujie/deer-flow
synced 2026-04-03 06:12:14 +08:00
Implement a memory system that stores user context and conversation history in memory.json, uses LLM to summarize conversations, and injects relevant context into system prompts for personalized responses. Key components: - MemoryConfig for configuration management - MemoryUpdateQueue with debounce for batch processing - MemoryUpdater for LLM-based memory extraction - MemoryMiddleware to queue conversations after agent execution - Memory injection into lead agent system prompt Note: Add memory section to config.yaml to enable (see config.example.yaml) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
14 lines · 364 B · Python
"""Public API of the config package.

Re-exports the configuration accessors so callers can write
``from config import get_app_config`` instead of importing the
individual submodules.
"""

from .app_config import get_app_config
from .extensions_config import ExtensionsConfig, get_extensions_config
from .memory_config import MemoryConfig, get_memory_config
from .skills_config import SkillsConfig

# Explicit public API; ordered to mirror the alphabetized imports above.
__all__ = [
    "get_app_config",
    "ExtensionsConfig",
    "get_extensions_config",
    "MemoryConfig",
    "get_memory_config",
    "SkillsConfig",
]