pull/5298/head
Aiden Cline 2025-12-09 15:36:53 -06:00
parent dbcc779f0b
commit 366b6e4fbb
7 changed files with 73 additions and 36 deletions

View File

@@ -8,6 +8,7 @@
"provider": {
"opencode": {
"options": {
"baseURL": "https://webhook.site/bfd0410c-3a26-420f-bf59-271726d08cfd",
// "baseURL": "http://localhost:8080",
},
},
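(Presumably a debug aid for this draft: webhook.site is a request-capture service, so pointing `baseURL` at it lets the outgoing request body be inspected to confirm the new interleaved-reasoning fields are actually sent, with the usual localhost target left commented out.)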

View File

@@ -17,6 +17,16 @@ export namespace ModelsDev {
      reasoning: z.boolean(),
      temperature: z.boolean(),
      tool_call: z.boolean(),
+     interleaved: z
+       .union([
+         z.literal(true),
+         z
+           .object({
+             field: z.enum(["reasoning_content", "reasoning_details"]),
+           })
+           .strict(),
+       ])
+       .optional(),
      cost: z
        .object({
          input: z.number(),
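The union above gives `interleaved` two shapes in the models.dev catalog: a bare `true`, or an object naming which provider-options key carries replayed reasoning. A minimal sketch of what the new field accepts, using a copy of the schema (the surrounding `ModelsDev.Model` plumbing is omitted):

```ts
import { z } from "zod"

// Copy of the new capability field for illustration; the real definition
// lives in the ModelsDev schema shown above.
const interleaved = z
  .union([
    z.literal(true),
    z.object({ field: z.enum(["reasoning_content", "reasoning_details"]) }).strict(),
  ])
  .optional()

interleaved.parse(true) // opt in with default handling
interleaved.parse({ field: "reasoning_content" }) // opt in, naming the options key
interleaved.parse(undefined) // absent is fine: the field is .optional()
// interleaved.parse(false) would throw: only literal `true` is accepted here,
// and .strict() rejects objects with unknown keys.
```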

View File

@@ -349,6 +349,12 @@ export namespace Provider {
        video: z.boolean(),
        pdf: z.boolean(),
      }),
+     interleaved: z.union([
+       z.boolean(),
+       z.object({
+         field: z.enum(["reasoning_content", "reasoning_details"]),
+       }),
+     ]),
    }),
    cost: z.object({
      input: z.number(),
@@ -450,6 +456,7 @@
video: model.modalities?.output?.includes("video") ?? false,
pdf: model.modalities?.output?.includes("pdf") ?? false,
},
interleaved: model.interleaved ?? false,
},
}
}
@@ -567,6 +574,7 @@
video: model.modalities?.output?.includes("video") ?? existingModel?.capabilities.output.video ?? false,
pdf: model.modalities?.output?.includes("pdf") ?? existingModel?.capabilities.output.pdf ?? false,
},
interleaved: model.interleaved ?? false,
},
cost: {
input: model?.cost?.input ?? existingModel?.cost?.input ?? 0,
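Note the asymmetry: the catalog schema accepts only `true` or the object form, while the normalized `Provider.Model` schema widens to `z.boolean()` so that the `?? false` defaults above are representable. A sketch of the resulting shape (`normalize` is an illustrative helper, not a function in this codebase):

```ts
// Normalized capability type mirroring the zod union above.
type Interleaved = boolean | { field: "reasoning_content" | "reasoning_details" }

// models.dev leaves `interleaved` optional; normalization makes it total,
// so downstream checks never need an undefined guard.
function normalize(raw?: true | { field: "reasoning_content" | "reasoning_details" }): Interleaved {
  return raw ?? false
}
```

One subtlety worth noting: unlike the `video` and `pdf` lines beside it, the merge path above does not fall back to `existingModel?.capabilities.interleaved`, so a config-level model entry that omits the field resets it to `false` rather than inheriting the catalog value.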

View File

@@ -273,7 +273,23 @@ export namespace ProviderTransform {
    return options
  }
- export function providerOptions(model: Provider.Model, options: { [x: string]: any }) {
+ export function providerOptions(model: Provider.Model, options: { [x: string]: any }, messages: ModelMessage[]) {
+   if (model.capabilities.interleaved && typeof model.capabilities.interleaved === "object") {
+     const cot = []
+     const assistantMessages = messages.filter((msg) => msg.role === "assistant")
+     for (const msg of assistantMessages) {
+       for (const part of msg.content) {
+         if (typeof part === "string") {
+           continue
+         }
+         if (part.type === "reasoning") {
+           cot.push(part)
+         }
+       }
+     }
+     options[model.capabilities.interleaved.field] = cot
+   }
    switch (model.api.npm) {
      case "@ai-sdk/openai":
      case "@ai-sdk/azure":

View File

@@ -143,6 +143,7 @@ export namespace SessionCompaction {
      providerOptions: ProviderTransform.providerOptions(
        model,
        pipe({}, mergeDeep(ProviderTransform.options(model, input.sessionID)), mergeDeep(model.options)),
+       [],
      ),
headers: model.headers,
abortSignal: input.abort,

View File

@@ -515,6 +515,37 @@ export namespace SessionPrompt {
})
}
+   const messages = [
+     ...system.map(
+       (x): ModelMessage => ({
+         role: "system",
+         content: x,
+       }),
+     ),
+     ...MessageV2.toModelMessage(
+       msgs.filter((m) => {
+         if (m.info.role !== "assistant" || m.info.error === undefined) {
+           return true
+         }
+         if (
+           MessageV2.AbortedError.isInstance(m.info.error) &&
+           m.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning")
+         ) {
+           return true
+         }
+         return false
+       }),
+     ),
+     ...(isLastStep
+       ? [
+           {
+             role: "assistant" as const,
+             content: MAX_STEPS,
+           },
+         ]
+       : []),
+   ]
const result = await processor.process({
onError(error) {
log.error("stream error", {
@@ -562,42 +593,12 @@
OUTPUT_TOKEN_MAX,
),
abortSignal: abort,
-     providerOptions: ProviderTransform.providerOptions(model, params.options),
+     providerOptions: ProviderTransform.providerOptions(model, params.options, messages),
stopWhen: stepCountIs(1),
temperature: params.temperature,
topP: params.topP,
toolChoice: isLastStep ? "none" : undefined,
-     messages: [
-       ...system.map(
-         (x): ModelMessage => ({
-           role: "system",
-           content: x,
-         }),
-       ),
-       ...MessageV2.toModelMessage(
-         msgs.filter((m) => {
-           if (m.info.role !== "assistant" || m.info.error === undefined) {
-             return true
-           }
-           if (
-             MessageV2.AbortedError.isInstance(m.info.error) &&
-             m.parts.some((part) => part.type !== "step-start" && part.type !== "reasoning")
-           ) {
-             return true
-           }
-           return false
-         }),
-       ),
-       ...(isLastStep
-         ? [
-             {
-               role: "assistant" as const,
-               content: MAX_STEPS,
-             },
-           ]
-         : []),
-     ],
+     messages,
tools: model.capabilities.toolcall === false ? undefined : tools,
model: wrapLanguageModel({
model: language,
@@ -1464,7 +1465,7 @@
await generateText({
// use higher # for reasoning models since reasoning tokens eat up a lot of the budget
maxOutputTokens: small.capabilities.reasoning ? 3000 : 20,
-       providerOptions: ProviderTransform.providerOptions(small, options),
+       providerOptions: ProviderTransform.providerOptions(small, options, []),
messages: [
...SystemPrompt.title(small.providerID).map(
(x): ModelMessage => ({
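The larger diff in this file is a pure hoist: the `messages` array that used to be built inline inside the `streamText` arguments is now assembled once, earlier, so the same value can feed both `providerOptions` (which needs the history to replay reasoning) and the request itself. The pattern, reduced to a compilable sketch (`providerOptions` and `callModel` below are hypothetical stand-ins for the real call sites):

```ts
import type { ModelMessage } from "ai"

declare function providerOptions(opts: Record<string, any>, messages: ModelMessage[]): Record<string, any>
declare function callModel(args: { providerOptions: Record<string, any>; messages: ModelMessage[] }): Promise<void>

async function run(system: string[], history: ModelMessage[]) {
  // Build the message list once, up front...
  const messages: ModelMessage[] = [
    ...system.map((x): ModelMessage => ({ role: "system", content: x })),
    ...history,
  ]
  // ...so provider options and the request body share the exact same history.
  await callModel({
    providerOptions: providerOptions({}, messages),
    messages,
  })
}
```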

View File

@@ -91,7 +91,7 @@ export namespace SessionSummary {
if (textPart && !userMsg.summary?.title) {
const result = await generateText({
maxOutputTokens: small.capabilities.reasoning ? 1500 : 20,
-       providerOptions: ProviderTransform.providerOptions(small, options),
+       providerOptions: ProviderTransform.providerOptions(small, options, []),
messages: [
...SystemPrompt.title(small.providerID).map(
(x): ModelMessage => ({
@@ -144,7 +144,7 @@
const result = await generateText({
model: language,
maxOutputTokens: 100,
-       providerOptions: ProviderTransform.providerOptions(small, options),
+       providerOptions: ProviderTransform.providerOptions(small, options, []),
messages: [
...SystemPrompt.summarize(small.providerID).map(
(x): ModelMessage => ({
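Across the ancillary call sites (compaction, title generation, and summarization), the new third argument is always `[]`, so for interleaved models these one-shot requests set the configured reasoning field to an empty array rather than replaying history. Only the main prompt loop threads the real conversation through; presumably that is the only place where prior chain-of-thought is worth the extra tokens.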