diff --git a/packages/opencode/test/fixture/fixture.ts b/packages/opencode/test/fixture/fixture.ts
index a36a3f9d84..96f5e3f084 100644
--- a/packages/opencode/test/fixture/fixture.ts
+++ b/packages/opencode/test/fixture/fixture.ts
@@ -3,9 +3,12 @@ import * as fs from "fs/promises"
import os from "os"
import path from "path"
import { Effect, FileSystem, ServiceMap } from "effect"
+import type * as PlatformError from "effect/PlatformError"
+import type * as Scope from "effect/Scope"
import { ChildProcess, ChildProcessSpawner } from "effect/unstable/process"
import type { Config } from "../../src/config/config"
import { Instance } from "../../src/project/instance"
+import { TestLLMServer } from "../lib/llm-server"
// Strip null bytes from paths (defensive fix for CI environment issues)
function sanitizePath(p: string): string {
@@ -139,3 +142,20 @@ export function provideTmpdirInstance(
return yield* self(path).pipe(provideInstance(path))
})
}
+
+export function provideTmpdirServer<A, E, R>(
+  self: (input: { dir: string; llm: TestLLMServer["Service"] }) => Effect.Effect<A, E, R>,
+  options?: { git?: boolean; config?: (url: string) => Partial<Config> },
+): Effect.Effect<
+ A,
+ E | PlatformError.PlatformError,
+ R | TestLLMServer | FileSystem.FileSystem | ChildProcessSpawner.ChildProcessSpawner | Scope.Scope
+> {
+ return Effect.gen(function* () {
+ const llm = yield* TestLLMServer
+ return yield* provideTmpdirInstance((dir) => self({ dir, llm }), {
+ git: options?.git,
+ config: options?.config?.(llm.url),
+ })
+ })
+}
diff --git a/packages/opencode/test/lib/llm-server.ts b/packages/opencode/test/lib/llm-server.ts
index c4a8722ee7..fad97bbd5b 100644
--- a/packages/opencode/test/lib/llm-server.ts
+++ b/packages/opencode/test/lib/llm-server.ts
@@ -14,6 +14,13 @@ type Step =
tool: string
input: unknown
}
+ | {
+ type: "fail"
+ message: string
+ }
+ | {
+ type: "hang"
+ }
type Hit = {
url: URL
@@ -105,16 +112,34 @@ function tool(step: Extract<Step, { type: "tool" }>, seq: number) {
])
}
-export class TestLLMServer extends ServiceMap.Service<
- TestLLMServer,
- {
+function fail(step: Extract<Step, { type: "fail" }>) {
+ return HttpServerResponse.text(step.message, { status: 500 })
+}
+
+function hang() {
+ return HttpServerResponse.stream(
+ Stream.fromIterable([
+ 'data: {"id":"chatcmpl-test","object":"chat.completion.chunk","choices":[{"delta":{"role":"assistant"}}]}\n\n',
+ ]).pipe(Stream.encodeText, Stream.concat(Stream.never)),
+ { contentType: "text/event-stream" },
+ )
+}
+
+namespace TestLLMServer {
+ export interface Service {
readonly url: string
     readonly text: (value: string) => Effect.Effect<void>
     readonly tool: (tool: string, input: unknown) => Effect.Effect<void>
+    readonly fail: (message?: string) => Effect.Effect<void>
+    readonly hang: Effect.Effect<void>
     readonly hits: Effect.Effect<Hit[]>
+    readonly calls: Effect.Effect<number>
+    readonly inputs: Effect.Effect<unknown[]>
     readonly pending: Effect.Effect<number>
}
->()("@test/LLMServer") {
+}
+
+export class TestLLMServer extends ServiceMap.Service<TestLLMServer, TestLLMServer.Service>()("@test/LLMServer") {
static readonly layer = Layer.effect(
TestLLMServer,
Effect.gen(function* () {
@@ -153,7 +178,9 @@ export class TestLLMServer extends ServiceMap.Service<
},
]
if (next.step.type === "text") return text(next.step)
- return tool(next.step, next.seq)
+ if (next.step.type === "tool") return tool(next.step, next.seq)
+ if (next.step.type === "fail") return fail(next.step)
+ return hang()
}),
)
@@ -170,7 +197,15 @@ export class TestLLMServer extends ServiceMap.Service<
tool: Effect.fn("TestLLMServer.tool")(function* (tool: string, input: unknown) {
push({ type: "tool", tool, input })
}),
+ fail: Effect.fn("TestLLMServer.fail")(function* (message = "boom") {
+ push({ type: "fail", message })
+ }),
+ hang: Effect.gen(function* () {
+ push({ type: "hang" })
+ }).pipe(Effect.withSpan("TestLLMServer.hang")),
hits: Effect.sync(() => [...hits]),
+ calls: Effect.sync(() => hits.length),
+ inputs: Effect.sync(() => hits.map((hit) => hit.body)),
pending: Effect.sync(() => list.length),
})
}),
diff --git a/packages/opencode/test/session/prompt-effect.test.ts b/packages/opencode/test/session/prompt-effect.test.ts
index 0db9b760cf..847d00f961 100644
--- a/packages/opencode/test/session/prompt-effect.test.ts
+++ b/packages/opencode/test/session/prompt-effect.test.ts
@@ -31,7 +31,7 @@ import { ToolRegistry } from "../../src/tool/registry"
import { Truncate } from "../../src/tool/truncate"
import { Log } from "../../src/util/log"
import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner"
-import { provideTmpdirInstance } from "../fixture/fixture"
+import { provideTmpdirInstance, provideTmpdirServer } from "../fixture/fixture"
import { testEffect } from "../lib/effect"
import { TestLLMServer } from "../lib/llm-server"
@@ -451,36 +451,32 @@ it.live("loop exits immediately when last assistant has stop finish", () =>
)
http.live("loop calls LLM and returns assistant message", () =>
- Effect.gen(function* () {
- const llm = yield* TestLLMServer
- return yield* provideTmpdirInstance(
- () =>
- Effect.gen(function* () {
- const chat = yield* Effect.promise(() =>
- Session.create({
- title: "Pinned",
- permission: [{ permission: "*", pattern: "*", action: "allow" }],
- }),
- )
- yield* Effect.promise(() =>
- SessionPrompt.prompt({
- sessionID: chat.id,
- agent: "build",
- noReply: true,
- parts: [{ type: "text", text: "hello" }],
- }),
- )
- yield* llm.text("world")
-
- const result = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: chat.id }))
- expect(result.info.role).toBe("assistant")
- const parts = result.parts.filter((p) => p.type === "text")
- expect(parts.some((p) => p.type === "text" && p.text === "world")).toBe(true)
- expect(yield* llm.hits).toHaveLength(1)
+ provideTmpdirServer(
+ Effect.fnUntraced(function* ({ llm }: { dir: string; llm: TestLLMServer["Service"] }) {
+ const chat = yield* Effect.promise(() =>
+ Session.create({
+ title: "Pinned",
+ permission: [{ permission: "*", pattern: "*", action: "allow" }],
}),
- { git: true, config: providerCfg(llm.url) },
- )
- }),
+ )
+ yield* Effect.promise(() =>
+ SessionPrompt.prompt({
+ sessionID: chat.id,
+ agent: "build",
+ noReply: true,
+ parts: [{ type: "text", text: "hello" }],
+ }),
+ )
+ yield* llm.text("world")
+
+ const result = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: chat.id }))
+ expect(result.info.role).toBe("assistant")
+ const parts = result.parts.filter((p) => p.type === "text")
+ expect(parts.some((p) => p.type === "text" && p.text === "world")).toBe(true)
+ expect(yield* llm.hits).toHaveLength(1)
+ }),
+ { git: true, config: providerCfg },
+ ),
)
it.live("loop continues when finish is tool-calls", () =>
@@ -1039,74 +1035,82 @@ unix(
30_000,
)
-unix(
+http.live(
"loop waits while shell runs and starts after shell exits",
() =>
- provideTmpdirInstance(
- (dir) =>
- Effect.gen(function* () {
- const { test, prompt, chat } = yield* boot()
- yield* test.reply(...replyStop("after-shell"))
+ provideTmpdirServer(
+ Effect.fnUntraced(function* ({ llm }: { dir: string; llm: TestLLMServer["Service"] }) {
+ const chat = yield* Effect.promise(() =>
+ Session.create({
+ title: "Pinned",
+ permission: [{ permission: "*", pattern: "*", action: "allow" }],
+ }),
+ )
+ yield* llm.text("after-shell")
- const sh = yield* prompt
- .shell({ sessionID: chat.id, agent: "build", command: "sleep 0.2" })
- .pipe(Effect.forkChild)
- yield* waitMs(50)
+ const sh = yield* Effect.promise(() =>
+ SessionPrompt.shell({ sessionID: chat.id, agent: "build", command: "sleep 0.2" }),
+ ).pipe(Effect.forkChild)
+ yield* waitMs(50)
- const run = yield* prompt.loop({ sessionID: chat.id }).pipe(Effect.forkChild)
- yield* waitMs(50)
+ const run = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: chat.id })).pipe(Effect.forkChild)
+ yield* waitMs(50)
- expect(yield* test.calls).toBe(0)
+ expect(yield* llm.calls).toBe(0)
- yield* Fiber.await(sh)
- const exit = yield* Fiber.await(run)
+ yield* Fiber.await(sh)
+ const exit = yield* Fiber.await(run)
- expect(Exit.isSuccess(exit)).toBe(true)
- if (Exit.isSuccess(exit)) {
- expect(exit.value.info.role).toBe("assistant")
- expect(exit.value.parts.some((part) => part.type === "text" && part.text === "after-shell")).toBe(true)
- }
- expect(yield* test.calls).toBe(1)
- }),
- { git: true, config: cfg },
+ expect(Exit.isSuccess(exit)).toBe(true)
+ if (Exit.isSuccess(exit)) {
+ expect(exit.value.info.role).toBe("assistant")
+ expect(exit.value.parts.some((part) => part.type === "text" && part.text === "after-shell")).toBe(true)
+ }
+ expect(yield* llm.calls).toBe(1)
+ }),
+ { git: true, config: providerCfg },
),
- 30_000,
+ 5_000,
)
-unix(
+http.live(
"shell completion resumes queued loop callers",
() =>
- provideTmpdirInstance(
- (dir) =>
- Effect.gen(function* () {
- const { test, prompt, chat } = yield* boot()
- yield* test.reply(...replyStop("done"))
+ provideTmpdirServer(
+ Effect.fnUntraced(function* ({ llm }: { dir: string; llm: TestLLMServer["Service"] }) {
+ const chat = yield* Effect.promise(() =>
+ Session.create({
+ title: "Pinned",
+ permission: [{ permission: "*", pattern: "*", action: "allow" }],
+ }),
+ )
+ yield* llm.text("done")
- const sh = yield* prompt
- .shell({ sessionID: chat.id, agent: "build", command: "sleep 0.2" })
- .pipe(Effect.forkChild)
- yield* waitMs(50)
+ const sh = yield* Effect.promise(() =>
+ SessionPrompt.shell({ sessionID: chat.id, agent: "build", command: "sleep 0.2" }),
+ ).pipe(Effect.forkChild)
+ yield* waitMs(50)
- const a = yield* prompt.loop({ sessionID: chat.id }).pipe(Effect.forkChild)
- const b = yield* prompt.loop({ sessionID: chat.id }).pipe(Effect.forkChild)
- yield* waitMs(50)
+ const a = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: chat.id })).pipe(Effect.forkChild)
+ const b = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: chat.id })).pipe(Effect.forkChild)
+ yield* waitMs(50)
- expect(yield* test.calls).toBe(0)
+ expect(yield* llm.calls).toBe(0)
- yield* Fiber.await(sh)
- const [ea, eb] = yield* Effect.all([Fiber.await(a), Fiber.await(b)])
+ yield* Fiber.await(sh)
+ const [ea, eb] = yield* Effect.all([Fiber.await(a), Fiber.await(b)])
- expect(Exit.isSuccess(ea)).toBe(true)
- expect(Exit.isSuccess(eb)).toBe(true)
- if (Exit.isSuccess(ea) && Exit.isSuccess(eb)) {
- expect(ea.value.info.id).toBe(eb.value.info.id)
- expect(ea.value.info.role).toBe("assistant")
- }
- expect(yield* test.calls).toBe(1)
- }),
- { git: true, config: cfg },
+ expect(Exit.isSuccess(ea)).toBe(true)
+ expect(Exit.isSuccess(eb)).toBe(true)
+ if (Exit.isSuccess(ea) && Exit.isSuccess(eb)) {
+ expect(ea.value.info.id).toBe(eb.value.info.id)
+ expect(ea.value.info.role).toBe("assistant")
+ }
+ expect(yield* llm.calls).toBe(1)
+ }),
+ { git: true, config: providerCfg },
),
- 30_000,
+ 5_000,
)
unix(
diff --git a/packages/opencode/test/session/prompt-provider.test.ts b/packages/opencode/test/session/prompt-provider.test.ts
index 0cf90f2bd5..a7d2fa32f5 100644
--- a/packages/opencode/test/session/prompt-provider.test.ts
+++ b/packages/opencode/test/session/prompt-provider.test.ts
@@ -6,7 +6,7 @@ import { Session } from "../../src/session"
import { SessionPrompt } from "../../src/session/prompt"
import { Log } from "../../src/util/log"
import { testEffect } from "../lib/effect"
-import { provideTmpdirInstance } from "../fixture/fixture"
+import { provideTmpdirServer } from "../fixture/fixture"
import { TestLLMServer } from "../lib/llm-server"
import { Layer } from "effect"
@@ -53,88 +53,80 @@ function makeConfig(url: string) {
describe("session.prompt provider integration", () => {
it.live("loop returns assistant text through local provider", () =>
- Effect.gen(function* () {
- const llm = yield* TestLLMServer
- return yield* provideTmpdirInstance(
- () =>
- Effect.gen(function* () {
- const session = yield* Effect.promise(() =>
- Session.create({
- title: "Prompt provider",
- permission: [{ permission: "*", pattern: "*", action: "allow" }],
- }),
- )
-
- yield* Effect.promise(() =>
- SessionPrompt.prompt({
- sessionID: session.id,
- agent: "build",
- noReply: true,
- parts: [{ type: "text", text: "hello" }],
- }),
- )
-
- yield* llm.text("world")
-
- const result = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: session.id }))
- expect(result.info.role).toBe("assistant")
- expect(result.parts.some((part) => part.type === "text" && part.text === "world")).toBe(true)
- expect(yield* llm.hits).toHaveLength(1)
- expect(yield* llm.pending).toBe(0)
+ provideTmpdirServer(
+ Effect.fnUntraced(function* ({ llm }: { dir: string; llm: TestLLMServer["Service"] }) {
+ const session = yield* Effect.promise(() =>
+ Session.create({
+ title: "Prompt provider",
+ permission: [{ permission: "*", pattern: "*", action: "allow" }],
}),
- { git: true, config: makeConfig(llm.url) },
- )
- }),
+ )
+
+ yield* Effect.promise(() =>
+ SessionPrompt.prompt({
+ sessionID: session.id,
+ agent: "build",
+ noReply: true,
+ parts: [{ type: "text", text: "hello" }],
+ }),
+ )
+
+ yield* llm.text("world")
+
+ const result = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: session.id }))
+ expect(result.info.role).toBe("assistant")
+ expect(result.parts.some((part) => part.type === "text" && part.text === "world")).toBe(true)
+ expect(yield* llm.hits).toHaveLength(1)
+ expect(yield* llm.pending).toBe(0)
+ }),
+ { git: true, config: makeConfig },
+ ),
)
it.live("loop consumes queued replies across turns", () =>
- Effect.gen(function* () {
- const llm = yield* TestLLMServer
- return yield* provideTmpdirInstance(
- () =>
- Effect.gen(function* () {
- const session = yield* Effect.promise(() =>
- Session.create({
- title: "Prompt provider turns",
- permission: [{ permission: "*", pattern: "*", action: "allow" }],
- }),
- )
-
- yield* Effect.promise(() =>
- SessionPrompt.prompt({
- sessionID: session.id,
- agent: "build",
- noReply: true,
- parts: [{ type: "text", text: "hello one" }],
- }),
- )
-
- yield* llm.text("world one")
-
- const first = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: session.id }))
- expect(first.info.role).toBe("assistant")
- expect(first.parts.some((part) => part.type === "text" && part.text === "world one")).toBe(true)
-
- yield* Effect.promise(() =>
- SessionPrompt.prompt({
- sessionID: session.id,
- agent: "build",
- noReply: true,
- parts: [{ type: "text", text: "hello two" }],
- }),
- )
-
- yield* llm.text("world two")
-
- const second = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: session.id }))
- expect(second.info.role).toBe("assistant")
- expect(second.parts.some((part) => part.type === "text" && part.text === "world two")).toBe(true)
-
- expect(yield* llm.hits).toHaveLength(2)
- expect(yield* llm.pending).toBe(0)
+ provideTmpdirServer(
+ Effect.fnUntraced(function* ({ llm }: { dir: string; llm: TestLLMServer["Service"] }) {
+ const session = yield* Effect.promise(() =>
+ Session.create({
+ title: "Prompt provider turns",
+ permission: [{ permission: "*", pattern: "*", action: "allow" }],
}),
- { git: true, config: makeConfig(llm.url) },
- )
- }),
+ )
+
+ yield* Effect.promise(() =>
+ SessionPrompt.prompt({
+ sessionID: session.id,
+ agent: "build",
+ noReply: true,
+ parts: [{ type: "text", text: "hello one" }],
+ }),
+ )
+
+ yield* llm.text("world one")
+
+ const first = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: session.id }))
+ expect(first.info.role).toBe("assistant")
+ expect(first.parts.some((part) => part.type === "text" && part.text === "world one")).toBe(true)
+
+ yield* Effect.promise(() =>
+ SessionPrompt.prompt({
+ sessionID: session.id,
+ agent: "build",
+ noReply: true,
+ parts: [{ type: "text", text: "hello two" }],
+ }),
+ )
+
+ yield* llm.text("world two")
+
+ const second = yield* Effect.promise(() => SessionPrompt.loop({ sessionID: session.id }))
+ expect(second.info.role).toBe("assistant")
+ expect(second.parts.some((part) => part.type === "text" && part.text === "world two")).toBe(true)
+
+ expect(yield* llm.hits).toHaveLength(2)
+ expect(yield* llm.pending).toBe(0)
+ }),
+ { git: true, config: makeConfig },
+ ),
)
})