tweak: adjust chat.params hook to allow altering of the maxOutputTokens (#21220)

pull/21033/head^2
Aiden Cline 2026-04-06 11:13:30 -07:00 committed by GitHub
parent 5a6d10cd53
commit 40e4cd27a1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 14 additions and 7 deletions

View File

@@ -160,6 +160,11 @@ export namespace LLM {
...input.messages, ...input.messages,
] ]
const maxOutputTokens =
isOpenaiOauth || provider.id.includes("github-copilot")
? undefined
: ProviderTransform.maxOutputTokens(input.model)
const params = await Plugin.trigger( const params = await Plugin.trigger(
"chat.params", "chat.params",
{ {
@@ -175,6 +180,7 @@ export namespace LLM {
: undefined, : undefined,
topP: input.agent.topP ?? ProviderTransform.topP(input.model), topP: input.agent.topP ?? ProviderTransform.topP(input.model),
topK: ProviderTransform.topK(input.model), topK: ProviderTransform.topK(input.model),
maxOutputTokens,
options, options,
}, },
) )
@@ -193,11 +199,6 @@ export namespace LLM {
}, },
) )
const maxOutputTokens =
isOpenaiOauth || provider.id.includes("github-copilot")
? undefined
: ProviderTransform.maxOutputTokens(input.model)
const tools = await resolveTools(input) const tools = await resolveTools(input)
// LiteLLM and some Anthropic proxies require the tools parameter to be present // LiteLLM and some Anthropic proxies require the tools parameter to be present
@@ -291,7 +292,7 @@ export namespace LLM {
activeTools: Object.keys(tools).filter((x) => x !== "invalid"), activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
tools, tools,
toolChoice: input.toolChoice, toolChoice: input.toolChoice,
maxOutputTokens, maxOutputTokens: params.maxOutputTokens,
abortSignal: input.abort, abortSignal: input.abort,
headers: { headers: {
...(input.model.providerID.startsWith("opencode") ...(input.model.providerID.startsWith("opencode")

View File

@@ -212,7 +212,13 @@ export interface Hooks {
*/ */
"chat.params"?: ( "chat.params"?: (
input: { sessionID: string; agent: string; model: Model; provider: ProviderContext; message: UserMessage }, input: { sessionID: string; agent: string; model: Model; provider: ProviderContext; message: UserMessage },
output: { temperature: number; topP: number; topK: number; options: Record<string, any> }, output: {
temperature: number
topP: number
topK: number
maxOutputTokens: number | undefined
options: Record<string, any>
},
) => Promise<void> ) => Promise<void>
"chat.headers"?: ( "chat.headers"?: (
input: { sessionID: string; agent: string; model: Model; provider: ProviderContext; message: UserMessage }, input: { sessionID: string; agent: string; model: Model; provider: ProviderContext; message: UserMessage },