mirror of
https://gitee.com/wanwujie/deer-flow
synced 2026-04-05 07:02:13 +08:00
feat: add LangSmith tracing integration (#878)
* feat: add LangSmith tracing integration Add optional LangSmith tracing support that can be enabled via environment variables (LANGSMITH_TRACING, LANGSMITH_API_KEY, LANGSMITH_PROJECT, LANGSMITH_ENDPOINT). When enabled, a LangChainTracer callback is attached to chat models and run metadata is injected for trace tagging. Co-Authored-By: Claude <noreply@anthropic.com> * Update backend/src/config/tracing_config.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update backend/src/agents/lead_agent/agent.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update backend/src/agents/lead_agent/agent.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Update backend/src/models/factory.py Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Add threading lock to ensure thread-safe access to tracing configuration --------- Co-authored-by: Claude <noreply@anthropic.com> Co-authored-by: Willem Jiang <willem.jiang@gmail.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
This commit is contained in:
@@ -245,6 +245,17 @@ def make_lead_agent(config: RunnableConfig):
|
||||
subagent_enabled = config.get("configurable", {}).get("subagent_enabled", False)
|
||||
max_concurrent_subagents = config.get("configurable", {}).get("max_concurrent_subagents", 3)
|
||||
print(f"thinking_enabled: {thinking_enabled}, model_name: {model_name}, is_plan_mode: {is_plan_mode}, subagent_enabled: {subagent_enabled}, max_concurrent_subagents: {max_concurrent_subagents}")
|
||||
|
||||
# Inject run metadata for LangSmith trace tagging
|
||||
if "metadata" not in config:
|
||||
config["metadata"] = {}
|
||||
config["metadata"].update({
|
||||
"model_name": model_name or "default",
|
||||
"thinking_enabled": thinking_enabled,
|
||||
"is_plan_mode": is_plan_mode,
|
||||
"subagent_enabled": subagent_enabled,
|
||||
})
|
||||
|
||||
return create_agent(
|
||||
model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled),
|
||||
tools=get_available_tools(model_name=model_name, subagent_enabled=subagent_enabled),
|
||||
|
||||
@@ -2,6 +2,7 @@ from .app_config import get_app_config
|
||||
from .extensions_config import ExtensionsConfig, get_extensions_config
|
||||
from .memory_config import MemoryConfig, get_memory_config
|
||||
from .skills_config import SkillsConfig
|
||||
from .tracing_config import get_tracing_config, is_tracing_enabled
|
||||
|
||||
__all__ = [
|
||||
"get_app_config",
|
||||
@@ -10,4 +11,6 @@ __all__ = [
|
||||
"get_extensions_config",
|
||||
"MemoryConfig",
|
||||
"get_memory_config",
|
||||
"get_tracing_config",
|
||||
"is_tracing_enabled",
|
||||
]
|
||||
|
||||
51
backend/src/config/tracing_config.py
Normal file
51
backend/src/config/tracing_config.py
Normal file
@@ -0,0 +1,51 @@
|
||||
import logging
|
||||
import os
|
||||
from pydantic import BaseModel, Field
|
||||
import threading
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
_config_lock = threading.Lock()
|
||||
|
||||
class TracingConfig(BaseModel):
    """Settings controlling LangSmith tracing, sourced from environment variables."""

    # True when the LANGSMITH_TRACING environment variable was set to "true".
    enabled: bool = Field(...)
    # Value of LANGSMITH_API_KEY, or None when the variable is unset.
    api_key: str | None = Field(...)
    # LangSmith project name to tag traces with (from LANGSMITH_PROJECT).
    project: str = Field(...)
    # LangSmith API endpoint URL (from LANGSMITH_ENDPOINT).
    endpoint: str = Field(...)

    @property
    def is_configured(self) -> bool:
        """Return True when tracing is switched on and an API key is present."""
        if not self.enabled:
            return False
        return bool(self.api_key)
|
||||
|
||||
|
||||
_tracing_config: TracingConfig | None = None
|
||||
|
||||
|
||||
def get_tracing_config() -> TracingConfig:
    """Return the cached tracing configuration, building it on first use.

    The LANGSMITH_* environment variables are read exactly once; later calls
    return the cached instance. A lock with a double-checked read keeps
    concurrent first calls from constructing the config twice.

    Returns:
        TracingConfig with current settings.
    """
    global _tracing_config

    # Fast path: already built — skip the lock entirely.
    cached = _tracing_config
    if cached is not None:
        return cached

    with _config_lock:
        # Re-check under the lock; another thread may have built it meanwhile.
        if _tracing_config is None:
            env = os.environ
            _tracing_config = TracingConfig(
                enabled=env.get("LANGSMITH_TRACING", "").lower() == "true",
                api_key=env.get("LANGSMITH_API_KEY"),
                project=env.get("LANGSMITH_PROJECT", "deer-flow"),
                endpoint=env.get("LANGSMITH_ENDPOINT", "https://api.smith.langchain.com"),
            )
        return _tracing_config
|
||||
|
||||
def is_tracing_enabled() -> bool:
    """Report whether LangSmith tracing is both enabled and configured.

    Returns:
        True if tracing is enabled and has an API key.
    """
    config = get_tracing_config()
    return config.is_configured
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import logging
|
||||
from langchain.chat_models import BaseChatModel
|
||||
|
||||
from src.config import get_app_config
|
||||
from src.config import get_app_config, get_tracing_config, is_tracing_enabled
|
||||
from src.reflection import resolve_class
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def create_chat_model(name: str | None = None, thinking_enabled: bool = False, **kwargs) -> BaseChatModel:
|
||||
"""Create a chat model instance from the config.
|
||||
@@ -37,4 +39,20 @@ def create_chat_model(name: str | None = None, thinking_enabled: bool = False, *
|
||||
raise ValueError(f"Model {name} does not support thinking. Set `supports_thinking` to true in the `config.yaml` to enable thinking.") from None
|
||||
model_settings_from_config.update(model_config.when_thinking_enabled)
|
||||
model_instance = model_class(**kwargs, **model_settings_from_config)
|
||||
|
||||
if is_tracing_enabled():
|
||||
try:
|
||||
from langchain_core.tracers.langchain import LangChainTracer
|
||||
|
||||
tracing_config = get_tracing_config()
|
||||
tracer = LangChainTracer(
|
||||
project_name=tracing_config.project,
|
||||
)
|
||||
existing_callbacks = model_instance.callbacks or []
|
||||
model_instance.callbacks = [*existing_callbacks, tracer]
|
||||
logger.debug(
|
||||
f"LangSmith tracing attached to model '{name}' (project='{tracing_config.project}')"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to attach LangSmith tracing to model '{name}': {e}")
|
||||
return model_instance
|
||||
|
||||
Reference in New Issue
Block a user