# Mirror of https://gitee.com/wanwujie/deer-flow
# Synced 2026-04-03 06:12:14 +08:00
import logging
|
|
from src.graph import build_graph
|
|
|
# Module-wide logging configuration: timestamped records at INFO by default.
# Use enable_debug_logging() to raise verbosity for the "src" package.
logging.basicConfig(
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    level=logging.INFO,
)
def enable_debug_logging():
    """Raise the "src" package logger to DEBUG for detailed execution traces."""
    app_logger = logging.getLogger("src")
    app_logger.setLevel(logging.DEBUG)
# Module-scoped logger, named after this module.
logger = logging.getLogger(__name__)

# Compile the agent workflow graph once, at import time.
graph = build_graph()
def run_agent_workflow(
    user_input: str,
    debug: bool = False,
    max_plan_iterations: int = 1,
    max_step_num: int = 3,
):
    """Run the agent workflow with the given user input, printing each message.

    Streams states from the module-level ``graph`` and pretty-prints every
    newly appended message as it arrives. Nothing is returned; output goes
    to stdout and the module logger.

    Args:
        user_input: The user's query or request.
        debug: If True, enables debug level logging via enable_debug_logging().
        max_plan_iterations: Maximum number of plan iterations.
        max_step_num: Maximum number of steps in a plan.

    Raises:
        ValueError: If ``user_input`` is empty.
    """
    if not user_input:
        # Fail fast on empty input (message grammar fixed: was "could not be").
        raise ValueError("Input cannot be empty")

    if debug:
        enable_debug_logging()

    # Lazy %-formatting: the message is only built if INFO is enabled.
    logger.info("Starting workflow with user input: %s", user_input)
    initial_state = {
        # Runtime Variables
        "messages": [{"role": "user", "content": user_input}],
    }
    config = {
        "configurable": {
            "thread_id": "default",
            "max_plan_iterations": max_plan_iterations,
            "max_step_num": max_step_num,
        },
        "recursion_limit": 100,
    }
    # Track how many messages we have already printed so each state only
    # emits its newly appended message.
    last_message_cnt = 0
    for s in graph.stream(input=initial_state, config=config, stream_mode="values"):
        try:
            if isinstance(s, dict) and "messages" in s:
                if len(s["messages"]) <= last_message_cnt:
                    # No new message in this state; skip it.
                    continue
                last_message_cnt = len(s["messages"])
                message = s["messages"][-1]
                if isinstance(message, tuple):
                    print(message)
                else:
                    message.pretty_print()
            else:
                # For any other output format
                print(f"Output: {s}")
        except Exception as e:
            # Best-effort: record the full traceback and keep consuming the
            # stream rather than aborting the whole workflow on one bad state.
            logger.exception("Error processing stream output: %s", e)
            print(f"Error processing output: {str(e)}")

    logger.info("Workflow completed successfully")
if __name__ == "__main__":
    # Render the compiled workflow graph as a Mermaid diagram for inspection.
    mermaid_diagram = graph.get_graph(xray=True).draw_mermaid()
    print(mermaid_diagram)