Files
deer-flow/backend/tests/test_patched_openai.py
Willem Jiang a087fe7bcc fix(LLM): fixing Gemini thinking + tool calls via OpenAI gateway (#1180) (#1205)
* fix(LLM): fixing Gemini thinking + tool calls via OpenAI gateway (#1180)

When using Gemini with thinking enabled through an OpenAI-compatible gateway,
the API requires that `thought_signature` fields on thinking content blocks are
preserved and echoed back verbatim in subsequent requests. Standard
`ChatOpenAI` serialization silently drops these signatures when serializing
messages, causing HTTP 400 errors.

Changes:
- Add PatchedChatOpenAI adapter that re-injects signed thinking blocks into
  request payloads, preserving the signature chain across multi-turn
  conversations with tool calls.
- Support two LangChain storage patterns: additional_kwargs.thinking_blocks
  and content list.
- Add 11 unit tests covering signed/unsigned blocks, storage patterns, edge
  cases, and precedence rules.
- Update config.example.yaml with Gemini + thinking gateway example.
- Update CONFIGURATION.md with detailed guidance and error explanation.

Fixes: #1180

* Updated the patched_openai.py with thought_signature of function call

* Apply suggestions from code review

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* docs: fix inaccurate thought_signature description in CONFIGURATION.md (#1220)

* Initial plan

* docs: fix CONFIGURATION.md wording for thought_signature - tool-call objects, not thinking blocks

Co-authored-by: WillemJiang <219644+WillemJiang@users.noreply.github.com>
Agent-Logs-Url: https://github.com/bytedance/deer-flow/sessions/360f5226-4631-48a7-a050-189094af8ffe

---------

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: WillemJiang <219644+WillemJiang@users.noreply.github.com>

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Copilot <198982749+Copilot@users.noreply.github.com>
2026-03-26 15:07:05 +08:00

177 lines
6.2 KiB
Python

"""Tests for deerflow.models.patched_openai.PatchedChatOpenAI.
These tests verify that _restore_tool_call_signatures correctly re-injects
``thought_signature`` onto tool-call objects stored in
``additional_kwargs["tool_calls"]``, covering id-based matching, positional
fallback, camelCase keys, and several edge-cases.
"""
from __future__ import annotations
from langchain_core.messages import AIMessage
from deerflow.models.patched_openai import _restore_tool_call_signatures
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
# Raw tool-call dicts, shaped like the entries LangChain stores in
# ``AIMessage.additional_kwargs["tool_calls"]``.  RAW_TC_SIGNED carries a
# Gemini ``thought_signature``; RAW_TC_UNSIGNED deliberately does not.
RAW_TC_SIGNED = {
    "id": "call_1",
    "type": "function",
    "function": {"name": "web_fetch", "arguments": '{"url":"http://example.com"}'},
    "thought_signature": "SIG_A==",
}
RAW_TC_UNSIGNED = {
    "id": "call_2",
    "type": "function",
    "function": {"name": "bash", "arguments": '{"cmd":"ls"}'},
}
# Outbound request-payload tool calls, matching the raw entries above by id
# but without any signature field (i.e. what serialization would emit).
PAYLOAD_TC_1 = {
    "type": "function",
    "id": "call_1",
    "function": {"name": "web_fetch", "arguments": '{"url":"http://example.com"}'},
}
PAYLOAD_TC_2 = {
    "type": "function",
    "id": "call_2",
    "function": {"name": "bash", "arguments": '{"cmd":"ls"}'},
}
def _ai_msg_with_raw_tool_calls(raw_tool_calls: list[dict]) -> AIMessage:
    """Build an assistant message holding *raw_tool_calls* in additional_kwargs."""
    extras = {"tool_calls": raw_tool_calls}
    return AIMessage(content="", additional_kwargs=extras)
# ---------------------------------------------------------------------------
# Core: signed tool-call restoration
# ---------------------------------------------------------------------------
def test_tool_call_signature_restored_by_id():
    """A raw signature is re-injected onto the payload tool-call with a matching id."""
    payload = {
        "role": "assistant",
        "content": None,
        "tool_calls": [PAYLOAD_TC_1.copy()],
    }
    original = _ai_msg_with_raw_tool_calls([RAW_TC_SIGNED])
    _restore_tool_call_signatures(payload, original)
    restored = payload["tool_calls"][0]
    assert restored["thought_signature"] == "SIG_A=="
def test_tool_call_signature_for_parallel_calls():
    """With parallel calls, only the signed first call gains a signature (Gemini spec)."""
    payload = {
        "role": "assistant",
        "content": None,
        "tool_calls": [PAYLOAD_TC_1.copy(), PAYLOAD_TC_2.copy()],
    }
    original = _ai_msg_with_raw_tool_calls([RAW_TC_SIGNED, RAW_TC_UNSIGNED])
    _restore_tool_call_signatures(payload, original)
    first, second = payload["tool_calls"]
    assert first["thought_signature"] == "SIG_A=="
    assert "thought_signature" not in second
def test_tool_call_signature_camel_case():
    """A camelCase ``thoughtSignature`` key (emitted by some gateways) is also restored."""
    camel_raw = {
        "id": "call_1",
        "type": "function",
        "function": {"name": "web_fetch", "arguments": "{}"},
        "thoughtSignature": "SIG_CAMEL==",
    }
    payload = {
        "role": "assistant",
        "content": None,
        "tool_calls": [PAYLOAD_TC_1.copy()],
    }
    _restore_tool_call_signatures(payload, _ai_msg_with_raw_tool_calls([camel_raw]))
    assert payload["tool_calls"][0]["thought_signature"] == "SIG_CAMEL=="
def test_tool_call_signature_positional_fallback():
    """Without a usable id match, restoration falls back to positional pairing."""
    # Raw entry has no id at all, so id-based matching cannot succeed.
    raw_without_id = {
        "type": "function",
        "function": {"name": "web_fetch", "arguments": "{}"},
        "thought_signature": "SIG_POS==",
    }
    # Payload id deliberately differs from anything in the raw entries.
    payload_entry = {
        "type": "function",
        "id": "call_99",
        "function": {"name": "web_fetch", "arguments": "{}"},
    }
    payload = {"role": "assistant", "content": None, "tool_calls": [payload_entry]}
    original = _ai_msg_with_raw_tool_calls([raw_without_id])
    _restore_tool_call_signatures(payload, original)
    assert payload_entry["thought_signature"] == "SIG_POS=="
# ---------------------------------------------------------------------------
# Edge cases: no-op scenarios for tool-call signatures
# ---------------------------------------------------------------------------
def test_tool_call_no_raw_tool_calls_is_noop():
    """When additional_kwargs carries no tool_calls, the payload is left untouched."""
    payload = {
        "role": "assistant",
        "content": None,
        "tool_calls": [PAYLOAD_TC_1.copy()],
    }
    empty_original = AIMessage(content="", additional_kwargs={})
    _restore_tool_call_signatures(payload, empty_original)
    assert "thought_signature" not in payload["tool_calls"][0]
def test_tool_call_no_payload_tool_calls_is_noop():
    """A payload without a tool_calls key is left untouched — none is invented."""
    payload = {"role": "assistant", "content": "just text"}
    signed_original = _ai_msg_with_raw_tool_calls([RAW_TC_SIGNED])
    _restore_tool_call_signatures(payload, signed_original)
    assert "tool_calls" not in payload
def test_tool_call_unsigned_raw_entries_is_noop():
    """Raw entries lacking thought_signature do not cause any key to be added."""
    payload = {
        "role": "assistant",
        "content": None,
        "tool_calls": [PAYLOAD_TC_2.copy()],
    }
    unsigned_original = _ai_msg_with_raw_tool_calls([RAW_TC_UNSIGNED])
    _restore_tool_call_signatures(payload, unsigned_original)
    assert "thought_signature" not in payload["tool_calls"][0]
def test_tool_call_multiple_sequential_signatures():
    """Each of several signed raw calls keeps its own signature after restoration."""
    raw_entries = [
        {
            "id": "call_a",
            "type": "function",
            "function": {"name": "check_flight", "arguments": "{}"},
            "thought_signature": "SIG_STEP1==",
        },
        {
            "id": "call_b",
            "type": "function",
            "function": {"name": "book_taxi", "arguments": "{}"},
            "thought_signature": "SIG_STEP2==",
        },
    ]
    entry_a = {"type": "function", "id": "call_a", "function": {"name": "check_flight", "arguments": "{}"}}
    entry_b = {"type": "function", "id": "call_b", "function": {"name": "book_taxi", "arguments": "{}"}}
    payload = {"role": "assistant", "content": None, "tool_calls": [entry_a, entry_b]}
    _restore_tool_call_signatures(payload, _ai_msg_with_raw_tool_calls(raw_entries))
    assert entry_a["thought_signature"] == "SIG_STEP1=="
    assert entry_b["thought_signature"] == "SIG_STEP2=="
# Integration behavior for PatchedChatOpenAI is validated indirectly via
# _restore_tool_call_signatures unit coverage above.