diff --git a/backend/src/agents/lead_agent/agent.py b/backend/src/agents/lead_agent/agent.py index b30f736..6853940 100644 --- a/backend/src/agents/lead_agent/agent.py +++ b/backend/src/agents/lead_agent/agent.py @@ -245,6 +245,17 @@ def make_lead_agent(config: RunnableConfig): subagent_enabled = config.get("configurable", {}).get("subagent_enabled", False) max_concurrent_subagents = config.get("configurable", {}).get("max_concurrent_subagents", 3) print(f"thinking_enabled: {thinking_enabled}, model_name: {model_name}, is_plan_mode: {is_plan_mode}, subagent_enabled: {subagent_enabled}, max_concurrent_subagents: {max_concurrent_subagents}") + + # Inject run metadata for LangSmith trace tagging + if "metadata" not in config: + config["metadata"] = {} + config["metadata"].update({ + "model_name": model_name or "default", + "thinking_enabled": thinking_enabled, + "is_plan_mode": is_plan_mode, + "subagent_enabled": subagent_enabled, + }) + return create_agent( model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled), tools=get_available_tools(model_name=model_name, subagent_enabled=subagent_enabled), diff --git a/backend/src/config/__init__.py b/backend/src/config/__init__.py index 01fab3f..dfc048a 100644 --- a/backend/src/config/__init__.py +++ b/backend/src/config/__init__.py @@ -2,6 +2,7 @@ from .app_config import get_app_config from .extensions_config import ExtensionsConfig, get_extensions_config from .memory_config import MemoryConfig, get_memory_config from .skills_config import SkillsConfig +from .tracing_config import get_tracing_config, is_tracing_enabled __all__ = [ "get_app_config", @@ -10,4 +11,6 @@ __all__ = [ "get_extensions_config", "MemoryConfig", "get_memory_config", + "get_tracing_config", + "is_tracing_enabled", ] diff --git a/backend/src/config/tracing_config.py b/backend/src/config/tracing_config.py new file mode 100644 index 0000000..d279db4 --- /dev/null +++ b/backend/src/config/tracing_config.py @@ -0,0 +1,51 @@ +import 
logging +import os +from pydantic import BaseModel, Field +import threading + +logger = logging.getLogger(__name__) +_config_lock = threading.Lock() + +class TracingConfig(BaseModel): + """Configuration for LangSmith tracing.""" + + enabled: bool = Field(...) + api_key: str | None = Field(...) + project: str = Field(...) + endpoint: str = Field(...) + + @property + def is_configured(self) -> bool: + """Check if tracing is fully configured (enabled and has API key).""" + return self.enabled and bool(self.api_key) + + +_tracing_config: TracingConfig | None = None + + +def get_tracing_config() -> TracingConfig: + """Get the tracing configuration, read from environment variables on first call and cached thereafter. + Returns: + TracingConfig with the cached settings (environment changes after the first call are not picked up). + """ + global _tracing_config + if _tracing_config is not None: + return _tracing_config + with _config_lock: + if _tracing_config is not None: # Double-check after acquiring lock + return _tracing_config + _tracing_config = TracingConfig( + enabled=os.environ.get("LANGSMITH_TRACING", "").lower() == "true", + api_key=os.environ.get("LANGSMITH_API_KEY"), + project=os.environ.get("LANGSMITH_PROJECT", "deer-flow"), + endpoint=os.environ.get("LANGSMITH_ENDPOINT", "https://api.smith.langchain.com"), + ) + return _tracing_config + +def is_tracing_enabled() -> bool: + """Check if LangSmith tracing is enabled and configured. + Returns: + True if tracing is enabled and has an API key. 
+ """ + return get_tracing_config().is_configured + diff --git a/backend/src/models/factory.py b/backend/src/models/factory.py index c9517a0..da3af3e 100644 --- a/backend/src/models/factory.py +++ b/backend/src/models/factory.py @@ -1,8 +1,10 @@ +import logging from langchain.chat_models import BaseChatModel -from src.config import get_app_config +from src.config import get_app_config, get_tracing_config, is_tracing_enabled from src.reflection import resolve_class +logger = logging.getLogger(__name__) def create_chat_model(name: str | None = None, thinking_enabled: bool = False, **kwargs) -> BaseChatModel: """Create a chat model instance from the config. @@ -37,4 +39,20 @@ def create_chat_model(name: str | None = None, thinking_enabled: bool = False, * raise ValueError(f"Model {name} does not support thinking. Set `supports_thinking` to true in the `config.yaml` to enable thinking.") from None model_settings_from_config.update(model_config.when_thinking_enabled) model_instance = model_class(**kwargs, **model_settings_from_config) + + if is_tracing_enabled(): + try: + from langchain_core.tracers.langchain import LangChainTracer + + tracing_config = get_tracing_config() + tracer = LangChainTracer( + project_name=tracing_config.project, + ) + existing_callbacks = model_instance.callbacks or [] + model_instance.callbacks = [*existing_callbacks, tracer] + logger.debug( + f"LangSmith tracing attached to model '{name}' (project='{tracing_config.project}')" + ) + except Exception as e: + logger.warning(f"Failed to attach LangSmith tracing to model '{name}': {e}") return model_instance