Files
deer-flow/src/ppt/graph/ppt_composer_node.py
Willem Jiang 170c4eb33c Upgrade langchain version to 1.x (#720)
* fix: revert the part of patch of issue-710 to extract the content from the plan

* Upgrade the ddgs for the new compatible version

* Upgraded langchain to 1.1.0
updated langchain-related packages to the new compatible version

* Update pyproject.toml

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-28 22:09:13 +08:00

34 lines
1.1 KiB
Python

# Copyright (c) 2025 Bytedance Ltd. and/or its affiliates
# SPDX-License-Identifier: MIT
import logging
import os
import uuid
from langchain_core.messages import HumanMessage, SystemMessage
from src.config.agents import AGENT_LLM_MAP
from src.llms.llm import get_llm_by_type
from src.prompts.template import get_prompt_template
from .state import PPTState
logger = logging.getLogger(__name__)
def ppt_composer_node(state: PPTState):
    """Generate PPT markdown content via the configured LLM and persist it.

    Args:
        state: PPT workflow state. Reads ``input`` (the user request) and the
            optional ``locale`` key (defaults to ``"en-US"``).

    Returns:
        dict with ``ppt_content`` (the raw model response message) and
        ``ppt_file_path`` (path to a temp markdown file holding the content),
        merged back into the workflow state.
    """
    logger.info("Generating ppt content...")
    model = get_llm_by_type(AGENT_LLM_MAP["ppt_composer"])
    ppt_content = model.invoke(
        [
            SystemMessage(
                content=get_prompt_template(
                    "ppt/ppt_composer", locale=state.get("locale", "en-US")
                )
            ),
            HumanMessage(content=state["input"]),
        ],
    )
    # Lazy %-formatting: avoids building the (potentially large) log string
    # when INFO records are filtered out.
    logger.info("ppt_content: %s", ppt_content)
    # Persist the generated markdown under a unique name so a downstream node
    # can pick it up by path without collisions between concurrent runs.
    temp_ppt_file_path = os.path.join(os.getcwd(), f"ppt_content_{uuid.uuid4()}.md")
    # Fix: explicit UTF-8 — the node supports non-English locales, and the
    # platform default encoding (e.g. cp1252 on Windows) would raise
    # UnicodeEncodeError on non-ASCII content.
    with open(temp_ppt_file_path, "w", encoding="utf-8") as f:
        f.write(ppt_content.content)
    return {"ppt_content": ppt_content, "ppt_file_path": temp_ppt_file_path}