Merge pull request #2005 from gaoren002/pr/openai-strip-passthrough-fields

fix(openai): strip unsupported passthrough fields
This commit was authored by Wesley Liddick on 2026-04-29 21:46:19 +08:00 and committed by GitHub.
4 changed files with 87 additions and 18 deletions

View File

@@ -53,6 +53,23 @@ const (
codexSparkImageUnsupportedText = codexSparkImageUnsupportedMarker + "\nThe current model is gpt-5.3-codex-spark, which does not support image generation, image editing, image input, the `image_generation` tool, or Codex `image_gen`/`$imagegen` workflows. If the user asks for image generation or image editing, clearly explain this model limitation and ask them to switch to a non-Spark Codex model such as gpt-5.3-codex or gpt-5.4. Do not claim that the local environment merely lacks image_gen tooling, and do not suggest CLI fallback as the primary fix while the model remains Spark.\n</sub2api-codex-spark-image-unsupported>"
)
// Request-body parameters that must be stripped before forwarding to the
// ChatGPT internal Codex endpoint. Grouped in one var block since the second
// list is built from the first.
var (
	// openAIChatGPTInternalUnsupportedFields lists passthrough parameters the
	// ChatGPT internal Codex endpoint rejects regardless of auth path.
	openAIChatGPTInternalUnsupportedFields = []string{
		"user",
		"metadata",
		// prompt_cache_retention is a newer Responses API parameter (cache TTL);
		// the internal endpoint rejects it with
		// "Unsupported parameter: prompt_cache_retention".
		"prompt_cache_retention",
		"safety_identifier",
		"stream_options",
	}

	// openAICodexOAuthUnsupportedFields extends the internal-endpoint list with
	// sampling/limit parameters that codex models do not accept via the
	// Responses API. The append starts from a fresh literal, so it never
	// mutates openAIChatGPTInternalUnsupportedFields' backing array.
	openAICodexOAuthUnsupportedFields = append([]string{
		"max_output_tokens",
		"max_completion_tokens",
		"temperature",
		"top_p",
		"frequency_penalty",
		"presence_penalty",
	}, openAIChatGPTInternalUnsupportedFields...)
)
func applyCodexOAuthTransform(reqBody map[string]any, isCodexCLI bool, isCompact bool) codexTransformResult {
result := codexTransformResult{}
// 工具续链需求会影响存储策略与 input 过滤逻辑。
@@ -93,23 +110,8 @@ func applyCodexOAuthTransform(reqBody map[string]any, isCodexCLI bool, isCompact
}
}
// Strip parameters unsupported by codex models via the Responses API.
for _, key := range []string{
"max_output_tokens",
"max_completion_tokens",
"temperature",
"top_p",
"frequency_penalty",
"presence_penalty",
// prompt_cache_retention is a newer Responses API parameter (cache TTL).
// The ChatGPT internal Codex endpoint rejects it with
// "Unsupported parameter: prompt_cache_retention". Defense-in-depth
// for any OAuth path that reaches this transform — the Cursor
// Responses-shape short-circuit in ForwardAsChatCompletions strips
// it earlier too, but we keep this line so other OAuth callers are
// equally protected.
"prompt_cache_retention",
} {
// Strip parameters unsupported by ChatGPT internal Codex endpoint.
for _, key := range openAICodexOAuthUnsupportedFields {
if _, ok := reqBody[key]; ok {
delete(reqBody, key)
result.Modified = true