feat(codex): support explicit OpenAI Responses API config (#1235)

* feat: support explicit OpenAI Responses API config

Co-authored-by: Codex <noreply@openai.com>

* Update backend/packages/harness/deerflow/config/model_config.py

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

---------

Co-authored-by: Codex <noreply@openai.com>
Co-authored-by: Willem Jiang <willem.jiang@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
This commit is contained in:
mxyhi
2026-03-22 20:39:26 +08:00
committed by GitHub
parent 644501ae07
commit e119dc74ae
8 changed files with 113 additions and 1 deletion

View File

@@ -13,6 +13,14 @@ class ModelConfig(BaseModel):
)
# Required model identifier sent to the provider (e.g. "gpt-4o").
model: str = Field(..., description="Model name")
# Let provider-specific extra keys pass validation instead of being rejected.
model_config = ConfigDict(extra="allow")
# Opt-in routing of OpenAI ChatOpenAI calls to the /v1/responses endpoint;
# None defers to the client library's default — TODO confirm against caller.
use_responses_api: bool | None = Field(
    default=None,
    description="Whether to route OpenAI ChatOpenAI calls through the /v1/responses API",
)
# Structured-output content version for Responses API payloads;
# None defers to the client library's default.
output_version: str | None = Field(
    default=None,
    description="Structured output version for OpenAI responses content, e.g. responses/v1",
)
# Plain immutable defaults: `default=False` replaces the needless
# `default_factory=lambda: False` (factories are for mutable defaults)
# and matches the `default=` style of the fields above.
supports_thinking: bool = Field(default=False, description="Whether the model supports thinking")
supports_reasoning_effort: bool = Field(default=False, description="Whether the model supports reasoning effort")
when_thinking_enabled: dict | None = Field(