# deer-flow/src/workflow.py
# Copyright (c) 2025 Bytedance Ltd. and/or its affiliates
# SPDX-License-Identifier: MIT
import asyncio
import logging
from src.graph import build_graph
# Configure logging
# basicConfig installs a stream handler on the root logger once per process;
# child loggers (e.g. the "src" package) inherit this INFO threshold until
# explicitly overridden (see enable_debug_logging below).
logging.basicConfig(
level=logging.INFO, # Default level is INFO
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
def enable_debug_logging():
    """Switch the ``src`` package logger to DEBUG for detailed execution traces."""
    src_logger = logging.getLogger("src")
    src_logger.setLevel(logging.DEBUG)
# Module-level logger named after this module's import path.
logger = logging.getLogger(__name__)
# Create the graph
# Built once at import time and shared by every call to
# run_agent_workflow_async (build_graph is provided by src.graph).
graph = build_graph()
async def run_agent_workflow_async(
    user_input: str,
    debug: bool = False,
    max_plan_iterations: int = 1,
    max_step_num: int = 3,
):
    """Run the agent workflow asynchronously with the given user input.

    Args:
        user_input: The user's query or request.
        debug: If True, enables debug level logging.
        max_plan_iterations: Maximum number of plan iterations.
        max_step_num: Maximum number of steps in a plan.

    Returns:
        The final state emitted by the workflow stream, or ``None`` if the
        stream produced no events.

    Raises:
        ValueError: If ``user_input`` is empty or None.
    """
    if not user_input:
        raise ValueError("Input cannot be empty")
    if debug:
        enable_debug_logging()

    # Lazy %-style args: the message is only built if INFO is enabled.
    logger.info("Starting async workflow with user input: %s", user_input)
    initial_state = {
        # Runtime Variables
        "messages": [{"role": "user", "content": user_input}],
        "auto_accepted_plan": True,
    }
    config = {
        "configurable": {
            "thread_id": "default",
            "max_plan_iterations": max_plan_iterations,
            "max_step_num": max_step_num,
            "mcp_settings": {
                "servers": {
                    "mcp-github-trending": {
                        "transport": "stdio",
                        "command": "uvx",
                        "args": ["mcp-github-trending"],
                        "enabled_tools": ["get_github_trending_repositories"],
                        "add_to_agents": ["researcher"],
                    }
                }
            },
        },
        "recursion_limit": 100,
    }
    last_message_cnt = 0
    final_state = None
    async for s in graph.astream(
        input=initial_state, config=config, stream_mode="values"
    ):
        final_state = s  # remember the latest state so we can return it
        try:
            if isinstance(s, dict) and "messages" in s:
                # Skip events that carry no messages we haven't already shown.
                if len(s["messages"]) <= last_message_cnt:
                    continue
                last_message_cnt = len(s["messages"])
                message = s["messages"][-1]
                if isinstance(message, tuple):
                    print(message)
                else:
                    message.pretty_print()
            else:
                # For any other output format
                print(f"Output: {s}")
        except Exception as e:
            # Best-effort display: a rendering failure must not abort the
            # stream. logger.exception records the full traceback.
            logger.exception("Error processing stream output")
            print(f"Error processing output: {str(e)}")

    logger.info("Async workflow completed successfully")
    return final_state
if __name__ == "__main__":
    # Render the compiled workflow graph as a Mermaid diagram for inspection.
    mermaid_source = graph.get_graph(xray=True).draw_mermaid()
    print(mermaid_source)