Support langgraph checkpointer (#1005)
* Add checkpointer configuration to config.example.yaml
  - Introduced a new section for checkpointer configuration to enable state persistence for the embedded DeerFlowClient.
  - Documented supported types: memory, sqlite, and postgres, along with examples for each.
  - Clarified that the LangGraph Server manages its own state persistence separately.

* refactor(checkpointer): streamline checkpointer initialization and logging

* fix(uv.lock): update revision and add new wheel URLs for brotlicffi package

* feat: add langchain-anthropic dependency and update related configurations

* Fix checkpointer lifecycle, docstring, and path resolution bugs from PR #1005 review (#4)

  * Initial plan

  * Address all review suggestions from PR #1005

    Co-authored-by: foreleven <4785594+foreleven@users.noreply.github.com>

  * Fix resolve_path to always return real Path; move SQLite special-string handling to callers

    Co-authored-by: foreleven <4785594+foreleven@users.noreply.github.com>

  ---------

  Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
  Co-authored-by: foreleven <4785594+foreleven@users.noreply.github.com>

---------

Co-authored-by: Copilot <198982749+Copilot@users.noreply.github.com>
Co-authored-by: foreleven <4785594+foreleven@users.noreply.github.com>
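For orientation only (not part of the commit), the new `checkpointer` section in config.example.yaml maps onto the `CheckpointerConfig` model and loader added further down in this diff. A minimal Python sketch of the three supported backends; the concrete paths and DSN below are illustrative values taken from the field descriptions, not from the shipped example file:

    # Illustrative sketch; field names come from CheckpointerConfig in this commit.
    from src.config.checkpointer_config import (
        CheckpointerConfig,
        load_checkpointer_config_from_dict,
    )

    # In-process only; state is lost on restart.
    memory_cfg = CheckpointerConfig(type="memory")

    # Persists to a local SQLite file (requires langgraph-checkpoint-sqlite).
    sqlite_cfg = CheckpointerConfig(type="sqlite", connection_string=".deer-flow/checkpoints.db")

    # Persists to PostgreSQL (requires langgraph-checkpoint-postgres).
    postgres_cfg = CheckpointerConfig(type="postgres", connection_string="postgresql://user:pass@localhost:5432/db")

    # The same dict shape is what AppConfig passes through when a
    # `checkpointer:` section is present in config.yaml.
    load_checkpointer_config_from_dict({"type": "sqlite", "connection_string": ".deer-flow/checkpoints.db"})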
@@ -1,4 +1,5 @@
+from .checkpointer import get_checkpointer, make_checkpointer, reset_checkpointer
 from .lead_agent import make_lead_agent
 from .thread_state import SandboxState, ThreadState
 
-__all__ = ["make_lead_agent", "SandboxState", "ThreadState"]
+__all__ = ["make_lead_agent", "SandboxState", "ThreadState", "get_checkpointer", "reset_checkpointer", "make_checkpointer"]
backend/src/agents/checkpointer/__init__.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from .async_provider import make_checkpointer
from .provider import checkpointer_context, get_checkpointer, reset_checkpointer

__all__ = [
    "get_checkpointer",
    "reset_checkpointer",
    "checkpointer_context",
    "make_checkpointer",
]
backend/src/agents/checkpointer/async_provider.py (new file, 107 lines)
@@ -0,0 +1,107 @@
"""Async checkpointer factory.

Provides an **async context manager** for long-running async servers that need
proper resource cleanup.

Supported backends: memory, sqlite, postgres.

Usage (e.g. FastAPI lifespan)::

    from src.agents.checkpointer.async_provider import make_checkpointer

    async with make_checkpointer() as checkpointer:
        app.state.checkpointer = checkpointer  # None if not configured

For sync usage see :mod:`src.agents.checkpointer.provider`.
"""

from __future__ import annotations

import contextlib
import logging
from collections.abc import AsyncIterator

from langgraph.types import Checkpointer

from src.agents.checkpointer.provider import (
    POSTGRES_CONN_REQUIRED,
    POSTGRES_INSTALL,
    SQLITE_INSTALL,
    _resolve_sqlite_conn_str,
)
from src.config.app_config import get_app_config

logger = logging.getLogger(__name__)

# ---------------------------------------------------------------------------
# Async factory
# ---------------------------------------------------------------------------


@contextlib.asynccontextmanager
async def _async_checkpointer(config) -> AsyncIterator[Checkpointer]:
    """Async context manager that constructs and tears down a checkpointer."""
    if config.type == "memory":
        from langgraph.checkpoint.memory import InMemorySaver

        yield InMemorySaver()
        return

    if config.type == "sqlite":
        try:
            from langgraph.checkpoint.sqlite.aio import AsyncSqliteSaver
        except ImportError as exc:
            raise ImportError(SQLITE_INSTALL) from exc

        import pathlib

        conn_str = _resolve_sqlite_conn_str(config.connection_string or "store.db")
        # Only create parent directories for real filesystem paths
        if conn_str != ":memory:" and not conn_str.startswith("file:"):
            pathlib.Path(conn_str).parent.mkdir(parents=True, exist_ok=True)
        async with AsyncSqliteSaver.from_conn_string(conn_str) as saver:
            await saver.setup()
            yield saver
        return

    if config.type == "postgres":
        try:
            from langgraph.checkpoint.postgres.aio import AsyncPostgresSaver
        except ImportError as exc:
            raise ImportError(POSTGRES_INSTALL) from exc

        if not config.connection_string:
            raise ValueError(POSTGRES_CONN_REQUIRED)

        async with AsyncPostgresSaver.from_conn_string(config.connection_string) as saver:
            await saver.setup()
            yield saver
        return

    raise ValueError(f"Unknown checkpointer type: {config.type!r}")


# ---------------------------------------------------------------------------
# Public async context manager
# ---------------------------------------------------------------------------


@contextlib.asynccontextmanager
async def make_checkpointer() -> AsyncIterator[Checkpointer | None]:
    """Async context manager that yields a checkpointer for the caller's lifetime.

    Resources are opened on enter and closed on exit — no global state::

        async with make_checkpointer() as checkpointer:
            app.state.checkpointer = checkpointer

    Yields ``None`` when no checkpointer is configured in *config.yaml*.
    """
    config = get_app_config()

    if config.checkpointer is None:
        yield None
        return

    async with _async_checkpointer(config.checkpointer) as saver:
        yield saver
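As a usage illustration (not part of the commit), the async factory above slots into a FastAPI lifespan roughly as follows. FastAPI and the `app.state.checkpointer` attribute name are assumptions carried over from the docstring example:

    # Illustrative sketch; assumes a FastAPI app wired to DeerFlow's config loading.
    import contextlib

    from fastapi import FastAPI

    from src.agents.checkpointer.async_provider import make_checkpointer


    @contextlib.asynccontextmanager
    async def lifespan(app: FastAPI):
        # Opened once at startup, closed at shutdown; yields None when no
        # checkpointer section is configured in config.yaml.
        async with make_checkpointer() as checkpointer:
            app.state.checkpointer = checkpointer
            yield


    app = FastAPI(lifespan=lifespan)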
backend/src/agents/checkpointer/provider.py (new file, 179 lines)
@@ -0,0 +1,179 @@
"""Sync checkpointer factory.

Provides a **sync singleton** and a **sync context manager** for LangGraph
graph compilation and CLI tools.

Supported backends: memory, sqlite, postgres.

Usage::

    from src.agents.checkpointer.provider import get_checkpointer, checkpointer_context

    # Singleton — reused across calls, closed on process exit
    cp = get_checkpointer()

    # One-shot — fresh connection, closed on block exit
    with checkpointer_context() as cp:
        graph.invoke(input, config={"configurable": {"thread_id": "1"}})
"""

from __future__ import annotations

import contextlib
import logging
from collections.abc import Iterator

from langgraph.types import Checkpointer

from src.config.app_config import get_app_config
from src.config.checkpointer_config import CheckpointerConfig
from src.config.paths import resolve_path

logger = logging.getLogger(__name__)

# ---------------------------------------------------------------------------
# Error message constants — imported by async_provider too
# ---------------------------------------------------------------------------

SQLITE_INSTALL = "langgraph-checkpoint-sqlite is required for the SQLite checkpointer. Install it with: uv add langgraph-checkpoint-sqlite"
POSTGRES_INSTALL = "langgraph-checkpoint-postgres is required for the PostgreSQL checkpointer. Install it with: uv add langgraph-checkpoint-postgres psycopg[binary] psycopg-pool"
POSTGRES_CONN_REQUIRED = "checkpointer.connection_string is required for the postgres backend"

# ---------------------------------------------------------------------------
# Sync factory
# ---------------------------------------------------------------------------


def _resolve_sqlite_conn_str(raw: str) -> str:
    """Return a SQLite connection string ready for use with ``SqliteSaver``.

    SQLite special strings (``":memory:"`` and ``file:`` URIs) are returned
    unchanged. Plain filesystem paths — relative or absolute — are resolved
    to an absolute string via :func:`resolve_path`.
    """
    if raw == ":memory:" or raw.startswith("file:"):
        return raw
    return str(resolve_path(raw))


@contextlib.contextmanager
def _sync_checkpointer_cm(config: CheckpointerConfig) -> Iterator[Checkpointer]:
    """Context manager that creates and tears down a sync checkpointer.

    Yields a configured ``Checkpointer`` instance. Resource cleanup for any
    underlying connections or pools is handled by higher-level helpers in
    this module (such as the singleton factory or context manager); this
    function does not return a separate cleanup callback.
    """
    if config.type == "memory":
        from langgraph.checkpoint.memory import InMemorySaver

        logger.info("Checkpointer: using InMemorySaver (in-process, not persistent)")
        yield InMemorySaver()
        return

    if config.type == "sqlite":
        try:
            from langgraph.checkpoint.sqlite import SqliteSaver
        except ImportError as exc:
            raise ImportError(SQLITE_INSTALL) from exc

        conn_str = _resolve_sqlite_conn_str(config.connection_string or "store.db")
        with SqliteSaver.from_conn_string(conn_str) as saver:
            saver.setup()
            logger.info("Checkpointer: using SqliteSaver (%s)", conn_str)
            yield saver
        return

    if config.type == "postgres":
        try:
            from langgraph.checkpoint.postgres import PostgresSaver
        except ImportError as exc:
            raise ImportError(POSTGRES_INSTALL) from exc

        if not config.connection_string:
            raise ValueError(POSTGRES_CONN_REQUIRED)

        with PostgresSaver.from_conn_string(config.connection_string) as saver:
            saver.setup()
            logger.info("Checkpointer: using PostgresSaver")
            yield saver
        return

    raise ValueError(f"Unknown checkpointer type: {config.type!r}")


# ---------------------------------------------------------------------------
# Sync singleton
# ---------------------------------------------------------------------------

_checkpointer: Checkpointer = None
_checkpointer_ctx = None  # open context manager keeping the connection alive


def get_checkpointer() -> Checkpointer | None:
    """Return the global sync checkpointer singleton, creating it on first call.

    Returns ``None`` when no checkpointer is configured in *config.yaml*.

    Raises:
        ImportError: If the required package for the configured backend is not installed.
        ValueError: If ``connection_string`` is missing for a backend that requires it.
    """
    global _checkpointer, _checkpointer_ctx

    if _checkpointer is not None:
        return _checkpointer

    from src.config.checkpointer_config import get_checkpointer_config

    config = get_checkpointer_config()
    if config is None:
        return None

    _checkpointer_ctx = _sync_checkpointer_cm(config)
    _checkpointer = _checkpointer_ctx.__enter__()

    return _checkpointer


def reset_checkpointer() -> None:
    """Reset the sync singleton, forcing recreation on the next call.

    Closes any open backend connections and clears the cached instance.
    Useful in tests or after a configuration change.
    """
    global _checkpointer, _checkpointer_ctx
    if _checkpointer_ctx is not None:
        try:
            _checkpointer_ctx.__exit__(None, None, None)
        except Exception:
            logger.warning("Error during checkpointer cleanup", exc_info=True)
        _checkpointer_ctx = None
    _checkpointer = None


# ---------------------------------------------------------------------------
# Sync context manager
# ---------------------------------------------------------------------------


@contextlib.contextmanager
def checkpointer_context() -> Iterator[Checkpointer | None]:
    """Sync context manager that yields a checkpointer and cleans up on exit.

    Unlike :func:`get_checkpointer`, this does **not** cache the instance —
    each ``with`` block creates and destroys its own connection. Use it in
    CLI scripts or tests where you want deterministic cleanup::

        with checkpointer_context() as cp:
            graph.invoke(input, config={"configurable": {"thread_id": "1"}})
    """
    config = get_app_config()
    if config.checkpointer is None:
        yield None
        return

    with _sync_checkpointer_cm(config.checkpointer) as saver:
        yield saver
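To make the thread-scoped persistence concrete, here is a hedged sketch of how the sync singleton above pairs with a LangGraph graph and a `thread_id`. Only `get_checkpointer` comes from this commit; `build_graph()` is a placeholder for whatever StateGraph builder the caller uses:

    # Sketch under assumptions: build_graph() is hypothetical; messages-style
    # input assumes a MessagesState-like graph.
    from src.agents.checkpointer import get_checkpointer

    checkpointer = get_checkpointer()  # None when config.yaml has no checkpointer section

    graph = build_graph().compile(checkpointer=checkpointer)

    config = {"configurable": {"thread_id": "demo-thread"}}
    graph.invoke({"messages": [("user", "hello")]}, config=config)

    # A later call with the same thread_id resumes from the persisted state
    # instead of starting from scratch (for the sqlite/postgres backends).
    graph.invoke({"messages": [("user", "what did I just say?")]}, config=config)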
@@ -152,7 +152,10 @@ class DeerFlowClient:
     def _atomic_write_json(path: Path, data: dict) -> None:
         """Write JSON to *path* atomically (temp file + replace)."""
         fd = tempfile.NamedTemporaryFile(
-            mode="w", dir=path.parent, suffix=".tmp", delete=False,
+            mode="w",
+            dir=path.parent,
+            suffix=".tmp",
+            delete=False,
         )
         try:
             json.dump(data, fd, indent=2)
@@ -205,8 +208,13 @@ class DeerFlowClient:
             ),
             "state_schema": ThreadState,
         }
-        if self._checkpointer is not None:
-            kwargs["checkpointer"] = self._checkpointer
+        checkpointer = self._checkpointer
+        if checkpointer is None:
+            from src.agents.checkpointer import get_checkpointer
+
+            checkpointer = get_checkpointer()
+        if checkpointer is not None:
+            kwargs["checkpointer"] = checkpointer
 
         self._agent = create_agent(**kwargs)
         self._agent_config_key = key
@@ -320,10 +328,7 @@ class DeerFlowClient:
                     "type": "ai",
                     "content": "",
                     "id": msg_id,
-                    "tool_calls": [
-                        {"name": tc["name"], "args": tc["args"], "id": tc.get("id")}
-                        for tc in msg.tool_calls
-                    ],
+                    "tool_calls": [{"name": tc["name"], "args": tc["args"], "id": tc.get("id")} for tc in msg.tool_calls],
                 },
             )
 
@@ -494,10 +499,7 @@ class DeerFlowClient:
         """
         config_path = ExtensionsConfig.resolve_config_path()
         if config_path is None:
-            raise FileNotFoundError(
-                "Cannot locate extensions_config.json. "
-                "Set DEER_FLOW_EXTENSIONS_CONFIG_PATH or ensure it exists in the project root."
-            )
+            raise FileNotFoundError("Cannot locate extensions_config.json. Set DEER_FLOW_EXTENSIONS_CONFIG_PATH or ensure it exists in the project root.")
 
         current_config = get_extensions_config()
 
@@ -561,10 +563,7 @@ class DeerFlowClient:
 
         config_path = ExtensionsConfig.resolve_config_path()
         if config_path is None:
-            raise FileNotFoundError(
-                "Cannot locate extensions_config.json. "
-                "Set DEER_FLOW_EXTENSIONS_CONFIG_PATH or ensure it exists in the project root."
-            )
+            raise FileNotFoundError("Cannot locate extensions_config.json. Set DEER_FLOW_EXTENSIONS_CONFIG_PATH or ensure it exists in the project root.")
 
         extensions_config = get_extensions_config()
         extensions_config.skills[name] = SkillStateConfig(enabled=enabled)
@@ -739,7 +738,6 @@ class DeerFlowClient:
         uploaded_files: list[dict] = []
 
         for src_path in resolved_files:
-
             dest = uploads_dir / src_path.name
             shutil.copy2(src_path, dest)
 
@@ -756,6 +754,7 @@ class DeerFlowClient:
             try:
                 asyncio.get_running_loop()
+                import concurrent.futures
 
                 with concurrent.futures.ThreadPoolExecutor() as pool:
                     md_path = pool.submit(lambda: asyncio.run(convert_file_to_markdown(dest))).result()
             except RuntimeError:
@@ -795,15 +794,17 @@ class DeerFlowClient:
         for fp in sorted(uploads_dir.iterdir()):
             if fp.is_file():
                 stat = fp.stat()
-                files.append({
-                    "filename": fp.name,
-                    "size": str(stat.st_size),
-                    "path": str(fp),
-                    "virtual_path": f"/mnt/user-data/uploads/{fp.name}",
-                    "artifact_url": f"/api/threads/{thread_id}/artifacts/mnt/user-data/uploads/{fp.name}",
-                    "extension": fp.suffix,
-                    "modified": stat.st_mtime,
-                })
+                files.append(
+                    {
+                        "filename": fp.name,
+                        "size": str(stat.st_size),
+                        "path": str(fp),
+                        "virtual_path": f"/mnt/user-data/uploads/{fp.name}",
+                        "artifact_url": f"/api/threads/{thread_id}/artifacts/mnt/user-data/uploads/{fp.name}",
+                        "extension": fp.suffix,
+                        "modified": stat.st_mtime,
+                    }
+                )
         return {"files": files, "count": len(files)}
 
     def delete_upload(self, thread_id: str, filename: str) -> dict:
@@ -858,7 +859,7 @@ class DeerFlowClient:
         if not clean_path.startswith(virtual_prefix):
             raise ValueError(f"Path must start with /{virtual_prefix}")
 
-        relative = clean_path[len(virtual_prefix):].lstrip("/")
+        relative = clean_path[len(virtual_prefix) :].lstrip("/")
         base_dir = get_paths().sandbox_user_data_dir(thread_id)
         actual = (base_dir / relative).resolve()
 
@@ -6,6 +6,7 @@ import yaml
 from dotenv import load_dotenv
 from pydantic import BaseModel, ConfigDict, Field
 
+from src.config.checkpointer_config import CheckpointerConfig, load_checkpointer_config_from_dict
 from src.config.extensions_config import ExtensionsConfig
 from src.config.memory_config import load_memory_config_from_dict
 from src.config.model_config import ModelConfig
@@ -29,6 +30,7 @@ class AppConfig(BaseModel):
     skills: SkillsConfig = Field(default_factory=SkillsConfig, description="Skills configuration")
     extensions: ExtensionsConfig = Field(default_factory=ExtensionsConfig, description="Extensions configuration (MCP servers and skills state)")
     model_config = ConfigDict(extra="allow", frozen=False)
+    checkpointer: CheckpointerConfig | None = Field(default=None, description="Checkpointer configuration")
 
     @classmethod
     def resolve_config_path(cls, config_path: str | None = None) -> Path:
@@ -92,6 +94,10 @@ class AppConfig(BaseModel):
         if "subagents" in config_data:
             load_subagents_config_from_dict(config_data["subagents"])
 
+        # Load checkpointer config if present
+        if "checkpointer" in config_data:
+            load_checkpointer_config_from_dict(config_data["checkpointer"])
+
         # Load extensions config separately (it's in a different file)
         extensions_config = ExtensionsConfig.from_file()
         config_data["extensions"] = extensions_config.model_dump()
 
backend/src/config/checkpointer_config.py (new file, 46 lines)
@@ -0,0 +1,46 @@
"""Configuration for LangGraph checkpointer."""

from typing import Literal

from pydantic import BaseModel, Field

CheckpointerType = Literal["memory", "sqlite", "postgres"]


class CheckpointerConfig(BaseModel):
    """Configuration for LangGraph state persistence checkpointer."""

    type: CheckpointerType = Field(
        description="Checkpointer backend type. "
        "'memory' is in-process only (lost on restart). "
        "'sqlite' persists to a local file (requires langgraph-checkpoint-sqlite). "
        "'postgres' persists to PostgreSQL (requires langgraph-checkpoint-postgres)."
    )
    connection_string: str | None = Field(
        default=None,
        description="Connection string for sqlite (file path) or postgres (DSN). "
        "Required for the postgres type; for sqlite it defaults to 'store.db' when omitted. "
        "For sqlite, use a file path like '.deer-flow/checkpoints.db' or ':memory:' for in-memory. "
        "For postgres, use a DSN like 'postgresql://user:pass@localhost:5432/db'.",
    )


# Global configuration instance — None means no checkpointer is configured.
_checkpointer_config: CheckpointerConfig | None = None


def get_checkpointer_config() -> CheckpointerConfig | None:
    """Get the current checkpointer configuration, or None if not configured."""
    return _checkpointer_config


def set_checkpointer_config(config: CheckpointerConfig | None) -> None:
    """Set the checkpointer configuration."""
    global _checkpointer_config
    _checkpointer_config = config


def load_checkpointer_config_from_dict(config_dict: dict) -> None:
    """Load checkpointer configuration from a dictionary."""
    global _checkpointer_config
    _checkpointer_config = CheckpointerConfig(**config_dict)
@@ -176,3 +176,15 @@ def get_paths() -> Paths:
     if _paths is None:
         _paths = Paths()
     return _paths
+
+
+def resolve_path(path: str) -> Path:
+    """Resolve *path* to an absolute ``Path``.
+
+    Relative paths are resolved relative to the application base directory.
+    Absolute paths are returned as-is (after normalisation).
+    """
+    p = Path(path)
+    if not p.is_absolute():
+        p = get_paths().base_dir / path
+    return p.resolve()
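Tying this back to the review fix in the commit message ("resolve_path to always return real Path; move SQLite special-string handling to callers"), the expected behaviour is roughly the following sketch; the paths shown are hypothetical:

    # Illustrative expectations only.
    from pathlib import Path

    from src.agents.checkpointer.provider import _resolve_sqlite_conn_str
    from src.config.paths import resolve_path

    # resolve_path never special-cases SQLite strings: it always returns a Path.
    assert isinstance(resolve_path(".deer-flow/checkpoints.db"), Path)

    # The SQLite-specific handling lives in the caller-side helper instead.
    assert _resolve_sqlite_conn_str(":memory:") == ":memory:"
    assert _resolve_sqlite_conn_str("file:memdb1?mode=memory").startswith("file:")
    assert _resolve_sqlite_conn_str(".deer-flow/checkpoints.db").endswith("checkpoints.db")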