tweak: move the max token exclusions to plugins @rekram1-node (#21225)

pull/8156/head
Aiden Cline 2026-04-06 15:43:58 -07:00 committed by GitHub
parent 40e4cd27a1
commit 48c1b6b338
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 15 additions and 8 deletions

View File

@@ -599,5 +599,10 @@ export async function CodexAuthPlugin(input: PluginInput): Promise<Hooks> {
output.headers["User-Agent"] = `opencode/${Installation.VERSION} (${os.platform()} ${os.release()}; ${os.arch()})` output.headers["User-Agent"] = `opencode/${Installation.VERSION} (${os.platform()} ${os.release()}; ${os.arch()})`
output.headers.session_id = input.sessionID output.headers.session_id = input.sessionID
}, },
"chat.params": async (input, output) => {
if (input.model.providerID !== "openai") return
// Match codex cli
output.maxOutputTokens = undefined
},
} }
} }

View File

@@ -309,6 +309,14 @@ export async function CopilotAuthPlugin(input: PluginInput): Promise<Hooks> {
}, },
], ],
}, },
"chat.params": async (incoming, output) => {
if (!incoming.model.providerID.includes("github-copilot")) return
// Match github copilot cli, omit maxOutputTokens for gpt models
if (incoming.model.api.id.includes("gpt")) {
output.maxOutputTokens = undefined
}
},
"chat.headers": async (incoming, output) => { "chat.headers": async (incoming, output) => {
if (!incoming.model.providerID.includes("github-copilot")) return if (!incoming.model.providerID.includes("github-copilot")) return

View File

@@ -160,11 +160,6 @@ export namespace LLM {
...input.messages, ...input.messages,
] ]
const maxOutputTokens =
isOpenaiOauth || provider.id.includes("github-copilot")
? undefined
: ProviderTransform.maxOutputTokens(input.model)
const params = await Plugin.trigger( const params = await Plugin.trigger(
"chat.params", "chat.params",
{ {
@@ -180,7 +175,7 @@
: undefined, : undefined,
topP: input.agent.topP ?? ProviderTransform.topP(input.model), topP: input.agent.topP ?? ProviderTransform.topP(input.model),
topK: ProviderTransform.topK(input.model), topK: ProviderTransform.topK(input.model),
maxOutputTokens, maxOutputTokens: ProviderTransform.maxOutputTokens(input.model),
options, options,
}, },
) )

View File

@@ -743,8 +743,7 @@ describe("session.llm.stream", () => {
expect((body.reasoning as { effort?: string } | undefined)?.effort).toBe("high") expect((body.reasoning as { effort?: string } | undefined)?.effort).toBe("high")
const maxTokens = body.max_output_tokens as number | undefined const maxTokens = body.max_output_tokens as number | undefined
const expectedMaxTokens = ProviderTransform.maxOutputTokens(resolved) expect(maxTokens).toBe(undefined) // match codex cli behavior
expect(maxTokens).toBe(expectedMaxTokens)
}, },
}) })
}) })