feat: 1. replace black with ruff for formatting and import sorting (#489)

2. use tavily from `langchain-tavily` rather than the older one from `langchain-community`

Co-authored-by: Willem Jiang <willem.jiang@gmail.com>
This commit is contained in:
zgjja
2025-08-17 22:57:23 +08:00
committed by GitHub
parent 1bfec3ad05
commit 3b4e993531
62 changed files with 251 additions and 234 deletions

View File

@@ -1,7 +1,7 @@
# Copyright (c) 2025 Bytedance Ltd. and/or its affiliates
# SPDX-License-Identifier: MIT
from .builder import build_graph_with_memory, build_graph
from .builder import build_graph, build_graph_with_memory
__all__ = [
"build_graph_with_memory",

View File

@@ -1,21 +1,22 @@
# Copyright (c) 2025 Bytedance Ltd. and/or its affiliates
# SPDX-License-Identifier: MIT
from langgraph.graph import StateGraph, START, END
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph
from src.prompts.planner_model import StepType
from .types import State
from .nodes import (
background_investigation_node,
coder_node,
coordinator_node,
human_feedback_node,
planner_node,
reporter_node,
research_team_node,
researcher_node,
coder_node,
human_feedback_node,
background_investigation_node,
)
from .types import State
def continue_to_running_research_team(state: State):

View File

@@ -9,27 +9,26 @@ from typing import Annotated, Literal
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.runnables import RunnableConfig
from langchain_core.tools import tool
from langgraph.types import Command, interrupt
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.types import Command, interrupt
from src.agents import create_agent
from src.tools.search import LoggedTavilySearch
from src.tools import (
crawl_tool,
get_web_search_tool,
get_retriever_tool,
python_repl_tool,
)
from src.config.agents import AGENT_LLM_MAP
from src.config.configuration import Configuration
from src.llms.llm import get_llm_by_type
from src.prompts.planner_model import Plan
from src.prompts.template import apply_prompt_template
from src.tools import (
crawl_tool,
get_retriever_tool,
get_web_search_tool,
python_repl_tool,
)
from src.tools.search import LoggedTavilySearch
from src.utils.json_utils import repair_json_output
from .types import State
from ..config import SELECTED_SEARCH_ENGINE, SearchEngine
from .types import State
logger = logging.getLogger(__name__)
@@ -106,7 +105,7 @@ def planner_node(
elif AGENT_LLM_MAP["planner"] == "basic":
llm = get_llm_by_type("basic").with_structured_output(
Plan,
method="json_mode",
# method="json_mode",
)
else:
llm = get_llm_by_type(AGENT_LLM_MAP["planner"])