diff --git a/packages/opencode/specs/effect-migration.md b/packages/opencode/specs/effect-migration.md index 9f862d3b9a..60f4d5802f 100644 --- a/packages/opencode/specs/effect-migration.md +++ b/packages/opencode/specs/effect-migration.md @@ -235,6 +235,22 @@ Once individual tools are effectified, change `Tool.Info` (`tool/tool.ts`) so `i 2. Update `Tool.define()` factory to work with Effects 3. Update `SessionPrompt` to `yield*` tool results instead of `await`ing +### Tool migration details + +Until the tool interface itself returns `Effect`, use this transitional pattern for migrated tools: + +- `Tool.defineEffect(...)` should `yield*` the services the tool depends on and close over them in the returned tool definition. +- Keep the bridge at the Promise boundary only. In the temporary `async execute(...)` implementation, call service methods with `await Effect.runPromise(...)` instead of falling back to static async facades. +- If a tool starts requiring new services, wire them into `ToolRegistry.defaultLayer` so production callers resolve the same dependencies as tests. + +Tool tests should use the existing Effect helpers in `packages/opencode/test/lib/effect.ts`: + +- Use `testEffect(...)` / `it.live(...)` instead of creating fake local wrappers around effectful tools. +- Yield the real tool export, then initialize it: `const info = yield* ReadTool`, `const tool = yield* Effect.promise(() => info.init())`. +- Run tests inside a real instance with `provideTmpdirInstance(...)` or `provideInstance(tmpdirScoped(...))` so instance-scoped services resolve exactly as they do in production. + +This keeps migrated tool tests aligned with the production service graph today, and makes the eventual `Tool.Info` → `Effect` cleanup mostly mechanical later. 
+ Individual tools, ordered by value: - [ ] `apply_patch.ts` — HIGH: multi-step orchestration, error accumulation, Bus events diff --git a/packages/opencode/src/tool/read.ts b/packages/opencode/src/tool/read.ts index 1c61cab286..e9890c91a1 100644 --- a/packages/opencode/src/tool/read.ts +++ b/packages/opencode/src/tool/read.ts @@ -1,10 +1,11 @@ import z from "zod" import { Effect } from "effect" import { createReadStream } from "fs" -import * as fs from "fs/promises" +import { open } from "fs/promises" import * as path from "path" import { createInterface } from "readline" import { Tool } from "./tool" +import { AppFileSystem } from "../filesystem" import { LSP } from "../lsp" import { FileTime } from "../file/time" import DESCRIPTION from "./read.txt" @@ -27,217 +28,228 @@ const parameters = z.object({ export const ReadTool = Tool.defineEffect( "read", - Effect.succeed({ - description: DESCRIPTION, - parameters, - async execute(params: z.infer, ctx) { - if (params.offset !== undefined && params.offset < 1) { - throw new Error("offset must be greater than or equal to 1") - } - let filepath = params.filePath - if (!path.isAbsolute(filepath)) { - filepath = path.resolve(Instance.directory, filepath) - } - if (process.platform === "win32") { - filepath = Filesystem.normalizePath(filepath) - } - const title = path.relative(Instance.worktree, filepath) + Effect.gen(function* () { + const fs = yield* AppFileSystem.Service + const instruction = yield* Instruction.Service + const lsp = yield* LSP.Service + const time = yield* FileTime.Service - const stat = Filesystem.stat(filepath) + return { + description: DESCRIPTION, + parameters, + async execute(params: z.infer, ctx) { + if (params.offset !== undefined && params.offset < 1) { + throw new Error("offset must be greater than or equal to 1") + } + let filepath = params.filePath + if (!path.isAbsolute(filepath)) { + filepath = path.resolve(Instance.directory, filepath) + } + if (process.platform === "win32") { + filepath = 
Filesystem.normalizePath(filepath) + } + const title = path.relative(Instance.worktree, filepath) - await assertExternalDirectory(ctx, filepath, { - bypass: Boolean(ctx.extra?.["bypassCwdCheck"]), - kind: stat?.isDirectory() ? "directory" : "file", - }) + const stat = await Effect.runPromise(fs.stat(filepath).pipe(Effect.catch(() => Effect.succeed(undefined)))) - await ctx.ask({ - permission: "read", - patterns: [filepath], - always: ["*"], - metadata: {}, - }) + await assertExternalDirectory(ctx, filepath, { + bypass: Boolean(ctx.extra?.["bypassCwdCheck"]), + kind: stat?.type === "Directory" ? "directory" : "file", + }) - if (!stat) { - const dir = path.dirname(filepath) - const base = path.basename(filepath) + await ctx.ask({ + permission: "read", + patterns: [filepath], + always: ["*"], + metadata: {}, + }) - const suggestions = await fs - .readdir(dir) - .then((entries) => - entries - .filter( - (entry) => - entry.toLowerCase().includes(base.toLowerCase()) || base.toLowerCase().includes(entry.toLowerCase()), - ) - .map((entry) => path.join(dir, entry)) - .slice(0, 3), + if (!stat) { + const dir = path.dirname(filepath) + const base = path.basename(filepath) + + const suggestions = await Effect.runPromise( + fs.readDirectory(dir).pipe( + Effect.map((entries) => + entries + .filter( + (entry) => + entry.toLowerCase().includes(base.toLowerCase()) || + base.toLowerCase().includes(entry.toLowerCase()), + ) + .map((entry) => path.join(dir, entry)) + .slice(0, 3), + ), + Effect.catch(() => Effect.succeed([] as string[])), + ), ) - .catch(() => []) - if (suggestions.length > 0) { - throw new Error(`File not found: ${filepath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`) + if (suggestions.length > 0) { + throw new Error(`File not found: ${filepath}\n\nDid you mean one of these?\n${suggestions.join("\n")}`) + } + + throw new Error(`File not found: ${filepath}`) } - throw new Error(`File not found: ${filepath}`) - } + if (stat.type === "Directory") { + const 
dirents = await Effect.runPromise(fs.readDirectoryEntries(filepath)) + const entries = await Promise.all( + dirents.map(async (dirent) => { + if (dirent.type === "directory") return dirent.name + "/" + if (dirent.type === "symlink") { + const target = await Effect.runPromise( + fs.stat(path.join(filepath, dirent.name)).pipe(Effect.catch(() => Effect.succeed(undefined))), + ) + if (target?.type === "Directory") return dirent.name + "/" + } + return dirent.name + }), + ) + entries.sort((a, b) => a.localeCompare(b)) - if (stat.isDirectory()) { - const dirents = await fs.readdir(filepath, { withFileTypes: true }) - const entries = await Promise.all( - dirents.map(async (dirent) => { - if (dirent.isDirectory()) return dirent.name + "/" - if (dirent.isSymbolicLink()) { - const target = await fs.stat(path.join(filepath, dirent.name)).catch(() => undefined) - if (target?.isDirectory()) return dirent.name + "/" - } - return dirent.name - }), - ) - entries.sort((a, b) => a.localeCompare(b)) + const limit = params.limit ?? DEFAULT_READ_LIMIT + const offset = params.offset ?? 1 + const start = offset - 1 + const sliced = entries.slice(start, start + limit) + const truncated = start + sliced.length < entries.length + + const output = [ + `${filepath}`, + `directory`, + ``, + sliced.join("\n"), + truncated + ? `\n(Showing ${sliced.length} of ${entries.length} entries. 
Use 'offset' parameter to read beyond entry ${offset + sliced.length})` + : `\n(${entries.length} entries)`, + ``, + ].join("\n") + + return { + title, + output, + metadata: { + preview: sliced.slice(0, 20).join("\n"), + truncated, + loaded: [] as string[], + }, + } + } + + const instructions = await Effect.runPromise(instruction.resolve(ctx.messages, filepath, ctx.messageID)) + + // Exclude SVG (XML-based) and vnd.fastbidsheet (.fbs extension, commonly FlatBuffers schema files) + const mime = Filesystem.mimeType(filepath) + const isImage = mime.startsWith("image/") && mime !== "image/svg+xml" && mime !== "image/vnd.fastbidsheet" + const isPdf = mime === "application/pdf" + if (isImage || isPdf) { + const msg = `${isImage ? "Image" : "PDF"} read successfully` + return { + title, + output: msg, + metadata: { + preview: msg, + truncated: false, + loaded: instructions.map((i) => i.filepath), + }, + attachments: [ + { + type: "file", + mime, + url: `data:${mime};base64,${Buffer.from(await Effect.runPromise(fs.readFile(filepath))).toString("base64")}`, + }, + ], + } + } + + const isBinary = await isBinaryFile(filepath, Number(stat.size)) + if (isBinary) throw new Error(`Cannot read binary file: ${filepath}`) + + const stream = createReadStream(filepath, { encoding: "utf8" }) + const rl = createInterface({ + input: stream, + // Note: we use the crlfDelay option to recognize all instances of CR LF + // ('\r\n') in file as a single line break. + crlfDelay: Infinity, + }) const limit = params.limit ?? DEFAULT_READ_LIMIT const offset = params.offset ?? 
1 const start = offset - 1 - const sliced = entries.slice(start, start + limit) - const truncated = start + sliced.length < entries.length + const raw: string[] = [] + let bytes = 0 + let lines = 0 + let truncatedByBytes = false + let hasMoreLines = false + try { + for await (const text of rl) { + lines += 1 + if (lines <= start) continue - const output = [ - `${filepath}`, - `directory`, - ``, - sliced.join("\n"), - truncated - ? `\n(Showing ${sliced.length} of ${entries.length} entries. Use 'offset' parameter to read beyond entry ${offset + sliced.length})` - : `\n(${entries.length} entries)`, - ``, - ].join("\n") + if (raw.length >= limit) { + hasMoreLines = true + continue + } + + const line = text.length > MAX_LINE_LENGTH ? text.substring(0, MAX_LINE_LENGTH) + MAX_LINE_SUFFIX : text + const size = Buffer.byteLength(line, "utf-8") + (raw.length > 0 ? 1 : 0) + if (bytes + size > MAX_BYTES) { + truncatedByBytes = true + hasMoreLines = true + break + } + + raw.push(line) + bytes += size + } + } finally { + rl.close() + stream.destroy() + } + + if (lines < offset && !(lines === 0 && offset === 1)) { + throw new Error(`Offset ${offset} is out of range for this file (${lines} lines)`) + } + + const content = raw.map((line, index) => { + return `${index + offset}: ${line}` + }) + const preview = raw.slice(0, 20).join("\n") + + let output = [`${filepath}`, `file`, ""].join("\n") + output += content.join("\n") + + const totalLines = lines + const lastReadLine = offset + raw.length - 1 + const nextOffset = lastReadLine + 1 + const truncated = hasMoreLines || truncatedByBytes + + if (truncatedByBytes) { + output += `\n\n(Output capped at ${MAX_BYTES_LABEL}. Showing lines ${offset}-${lastReadLine}. Use offset=${nextOffset} to continue.)` + } else if (hasMoreLines) { + output += `\n\n(Showing lines ${offset}-${lastReadLine} of ${totalLines}. 
Use offset=${nextOffset} to continue.)` + } else { + output += `\n\n(End of file - total ${totalLines} lines)` + } + output += "\n" + + await Effect.runPromise(lsp.touchFile(filepath, false)) + await Effect.runPromise(time.read(ctx.sessionID, filepath)) + + if (instructions.length > 0) { + output += `\n\n\n${instructions.map((i) => i.content).join("\n\n")}\n` + } return { title, output, metadata: { - preview: sliced.slice(0, 20).join("\n"), + preview, truncated, - loaded: [] as string[], - }, - } - } - - const instructions = await Instruction.resolve(ctx.messages, filepath, ctx.messageID) - - // Exclude SVG (XML-based) and vnd.fastbidsheet (.fbs extension, commonly FlatBuffers schema files) - const mime = Filesystem.mimeType(filepath) - const isImage = mime.startsWith("image/") && mime !== "image/svg+xml" && mime !== "image/vnd.fastbidsheet" - const isPdf = mime === "application/pdf" - if (isImage || isPdf) { - const msg = `${isImage ? "Image" : "PDF"} read successfully` - return { - title, - output: msg, - metadata: { - preview: msg, - truncated: false, loaded: instructions.map((i) => i.filepath), }, - attachments: [ - { - type: "file", - mime, - url: `data:${mime};base64,${Buffer.from(await Filesystem.readBytes(filepath)).toString("base64")}`, - }, - ], } - } - - const isBinary = await isBinaryFile(filepath, Number(stat.size)) - if (isBinary) throw new Error(`Cannot read binary file: ${filepath}`) - - const stream = createReadStream(filepath, { encoding: "utf8" }) - const rl = createInterface({ - input: stream, - // Note: we use the crlfDelay option to recognize all instances of CR LF - // ('\r\n') in file as a single line break. - crlfDelay: Infinity, - }) - - const limit = params.limit ?? DEFAULT_READ_LIMIT - const offset = params.offset ?? 
1 - const start = offset - 1 - const raw: string[] = [] - let bytes = 0 - let lines = 0 - let truncatedByBytes = false - let hasMoreLines = false - try { - for await (const text of rl) { - lines += 1 - if (lines <= start) continue - - if (raw.length >= limit) { - hasMoreLines = true - continue - } - - const line = text.length > MAX_LINE_LENGTH ? text.substring(0, MAX_LINE_LENGTH) + MAX_LINE_SUFFIX : text - const size = Buffer.byteLength(line, "utf-8") + (raw.length > 0 ? 1 : 0) - if (bytes + size > MAX_BYTES) { - truncatedByBytes = true - hasMoreLines = true - break - } - - raw.push(line) - bytes += size - } - } finally { - rl.close() - stream.destroy() - } - - if (lines < offset && !(lines === 0 && offset === 1)) { - throw new Error(`Offset ${offset} is out of range for this file (${lines} lines)`) - } - - const content = raw.map((line, index) => { - return `${index + offset}: ${line}` - }) - const preview = raw.slice(0, 20).join("\n") - - let output = [`${filepath}`, `file`, ""].join("\n") - output += content.join("\n") - - const totalLines = lines - const lastReadLine = offset + raw.length - 1 - const nextOffset = lastReadLine + 1 - const truncated = hasMoreLines || truncatedByBytes - - if (truncatedByBytes) { - output += `\n\n(Output capped at ${MAX_BYTES_LABEL}. Showing lines ${offset}-${lastReadLine}. Use offset=${nextOffset} to continue.)` - } else if (hasMoreLines) { - output += `\n\n(Showing lines ${offset}-${lastReadLine} of ${totalLines}. 
Use offset=${nextOffset} to continue.)` - } else { - output += `\n\n(End of file - total ${totalLines} lines)` - } - output += "\n" - - // just warms the lsp client - await LSP.touchFile(filepath, false) - await FileTime.read(ctx.sessionID, filepath) - - if (instructions.length > 0) { - output += `\n\n\n${instructions.map((i) => i.content).join("\n\n")}\n` - } - - return { - title, - output, - metadata: { - preview, - truncated, - loaded: instructions.map((i) => i.filepath), - }, - } - }, + }, + } }), ) @@ -280,7 +292,7 @@ async function isBinaryFile(filepath: string, fileSize: number): Promise()("@opencode/ToolRegistry") {} - export const layer: Layer.Layer = - Layer.effect( - Service, - Effect.gen(function* () { - const config = yield* Config.Service - const plugin = yield* Plugin.Service + export const layer: Layer.Layer< + Service, + never, + | Config.Service + | Plugin.Service + | Question.Service + | Todo.Service + | LSP.Service + | FileTime.Service + | Instruction.Service + | AppFileSystem.Service + > = Layer.effect( + Service, + Effect.gen(function* () { + const config = yield* Config.Service + const plugin = yield* Plugin.Service - const build = (tool: T | Effect.Effect) => - Effect.isEffect(tool) ? tool : Effect.succeed(tool) + const build = (tool: T | Effect.Effect) => + Effect.isEffect(tool) ? 
tool : Effect.succeed(tool) - const state = yield* InstanceState.make( - Effect.fn("ToolRegistry.state")(function* (ctx) { - const custom: Tool.Info[] = [] + const state = yield* InstanceState.make( + Effect.fn("ToolRegistry.state")(function* (ctx) { + const custom: Tool.Info[] = [] - function fromPlugin(id: string, def: ToolDefinition): Tool.Info { - return { - id, - init: async (initCtx) => ({ - parameters: z.object(def.args), - description: def.description, - execute: async (args, toolCtx) => { - const pluginCtx = { - ...toolCtx, - directory: ctx.directory, - worktree: ctx.worktree, - } as unknown as PluginToolContext - const result = await def.execute(args as any, pluginCtx) - const out = await Truncate.output(result, {}, initCtx?.agent) - return { - title: "", - output: out.truncated ? out.content : result, - metadata: { truncated: out.truncated, outputPath: out.truncated ? out.outputPath : undefined }, - } - }, - }), - } + function fromPlugin(id: string, def: ToolDefinition): Tool.Info { + return { + id, + init: async (initCtx) => ({ + parameters: z.object(def.args), + description: def.description, + execute: async (args, toolCtx) => { + const pluginCtx = { + ...toolCtx, + directory: ctx.directory, + worktree: ctx.worktree, + } as unknown as PluginToolContext + const result = await def.execute(args as any, pluginCtx) + const out = await Truncate.output(result, {}, initCtx?.agent) + return { + title: "", + output: out.truncated ? out.content : result, + metadata: { truncated: out.truncated, outputPath: out.truncated ? 
out.outputPath : undefined }, + } + }, + }), } + } - const dirs = yield* config.directories() - const matches = dirs.flatMap((dir) => - Glob.scanSync("{tool,tools}/*.{js,ts}", { cwd: dir, absolute: true, dot: true, symlink: true }), - ) - if (matches.length) yield* config.waitForDependencies() - for (const match of matches) { - const namespace = path.basename(match, path.extname(match)) - const mod = yield* Effect.promise( - () => import(process.platform === "win32" ? match : pathToFileURL(match).href), - ) - for (const [id, def] of Object.entries(mod)) { - custom.push(fromPlugin(id === "default" ? namespace : `${namespace}_${id}`, def)) - } - } - - const plugins = yield* plugin.list() - for (const p of plugins) { - for (const [id, def] of Object.entries(p.tool ?? {})) { - custom.push(fromPlugin(id, def)) - } - } - - return { custom } - }), - ) - - const invalid = yield* build(InvalidTool) - const ask = yield* build(QuestionTool) - const bash = yield* build(BashTool) - const read = yield* build(ReadTool) - const glob = yield* build(GlobTool) - const grep = yield* build(GrepTool) - const edit = yield* build(EditTool) - const write = yield* build(WriteTool) - const task = yield* build(TaskTool) - const fetch = yield* build(WebFetchTool) - const todo = yield* build(TodoWriteTool) - const search = yield* build(WebSearchTool) - const code = yield* build(CodeSearchTool) - const skill = yield* build(SkillTool) - const patch = yield* build(ApplyPatchTool) - const lsp = yield* build(LspTool) - const batch = yield* build(BatchTool) - const plan = yield* build(PlanExitTool) - - const all = Effect.fn("ToolRegistry.all")(function* (custom: Tool.Info[]) { - const cfg = yield* config.get() - const question = - ["app", "cli", "desktop"].includes(Flag.OPENCODE_CLIENT) || Flag.OPENCODE_ENABLE_QUESTION_TOOL - - return [ - invalid, - ...(question ? 
[ask] : []), - bash, - read, - glob, - grep, - edit, - write, - task, - fetch, - todo, - search, - code, - skill, - patch, - ...(Flag.OPENCODE_EXPERIMENTAL_LSP_TOOL ? [lsp] : []), - ...(cfg.experimental?.batch_tool === true ? [batch] : []), - ...(Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE && Flag.OPENCODE_CLIENT === "cli" ? [plan] : []), - ...custom, - ] - }) - - const ids = Effect.fn("ToolRegistry.ids")(function* () { - const s = yield* InstanceState.get(state) - const tools = yield* all(s.custom) - return tools.map((t) => t.id) - }) - - const tools = Effect.fn("ToolRegistry.tools")(function* ( - model: { providerID: ProviderID; modelID: ModelID }, - agent?: Agent.Info, - ) { - const s = yield* InstanceState.get(state) - const allTools = yield* all(s.custom) - const filtered = allTools.filter((tool) => { - if (tool.id === "codesearch" || tool.id === "websearch") { - return model.providerID === ProviderID.opencode || Flag.OPENCODE_ENABLE_EXA - } - - const usePatch = - !!Env.get("OPENCODE_E2E_LLM_URL") || - (model.modelID.includes("gpt-") && !model.modelID.includes("oss") && !model.modelID.includes("gpt-4")) - if (tool.id === "apply_patch") return usePatch - if (tool.id === "edit" || tool.id === "write") return !usePatch - - return true - }) - return yield* Effect.forEach( - filtered, - Effect.fnUntraced(function* (tool: Tool.Info) { - using _ = log.time(tool.id) - const next = yield* Effect.promise(() => tool.init({ agent })) - const output = { - description: next.description, - parameters: next.parameters, - } - yield* plugin.trigger("tool.definition", { toolID: tool.id }, output) - return { - id: tool.id, - description: output.description, - parameters: output.parameters, - execute: next.execute, - formatValidationError: next.formatValidationError, - } - }), - { concurrency: "unbounded" }, + const dirs = yield* config.directories() + const matches = dirs.flatMap((dir) => + Glob.scanSync("{tool,tools}/*.{js,ts}", { cwd: dir, absolute: true, dot: true, symlink: true }), 
) - }) + if (matches.length) yield* config.waitForDependencies() + for (const match of matches) { + const namespace = path.basename(match, path.extname(match)) + const mod = yield* Effect.promise( + () => import(process.platform === "win32" ? match : pathToFileURL(match).href), + ) + for (const [id, def] of Object.entries(mod)) { + custom.push(fromPlugin(id === "default" ? namespace : `${namespace}_${id}`, def)) + } + } - return Service.of({ ids, named: { task, read }, tools }) - }), - ) + const plugins = yield* plugin.list() + for (const p of plugins) { + for (const [id, def] of Object.entries(p.tool ?? {})) { + custom.push(fromPlugin(id, def)) + } + } + + return { custom } + }), + ) + + const invalid = yield* build(InvalidTool) + const ask = yield* build(QuestionTool) + const bash = yield* build(BashTool) + const read = yield* build(ReadTool) + const glob = yield* build(GlobTool) + const grep = yield* build(GrepTool) + const edit = yield* build(EditTool) + const write = yield* build(WriteTool) + const task = yield* build(TaskTool) + const fetch = yield* build(WebFetchTool) + const todo = yield* build(TodoWriteTool) + const search = yield* build(WebSearchTool) + const code = yield* build(CodeSearchTool) + const skill = yield* build(SkillTool) + const patch = yield* build(ApplyPatchTool) + const lsp = yield* build(LspTool) + const batch = yield* build(BatchTool) + const plan = yield* build(PlanExitTool) + + const all = Effect.fn("ToolRegistry.all")(function* (custom: Tool.Info[]) { + const cfg = yield* config.get() + const question = ["app", "cli", "desktop"].includes(Flag.OPENCODE_CLIENT) || Flag.OPENCODE_ENABLE_QUESTION_TOOL + + return [ + invalid, + ...(question ? [ask] : []), + bash, + read, + glob, + grep, + edit, + write, + task, + fetch, + todo, + search, + code, + skill, + patch, + ...(Flag.OPENCODE_EXPERIMENTAL_LSP_TOOL ? [lsp] : []), + ...(cfg.experimental?.batch_tool === true ? 
[batch] : []), + ...(Flag.OPENCODE_EXPERIMENTAL_PLAN_MODE && Flag.OPENCODE_CLIENT === "cli" ? [plan] : []), + ...custom, + ] + }) + + const ids = Effect.fn("ToolRegistry.ids")(function* () { + const s = yield* InstanceState.get(state) + const tools = yield* all(s.custom) + return tools.map((t) => t.id) + }) + + const tools = Effect.fn("ToolRegistry.tools")(function* ( + model: { providerID: ProviderID; modelID: ModelID }, + agent?: Agent.Info, + ) { + const s = yield* InstanceState.get(state) + const allTools = yield* all(s.custom) + const filtered = allTools.filter((tool) => { + if (tool.id === "codesearch" || tool.id === "websearch") { + return model.providerID === ProviderID.opencode || Flag.OPENCODE_ENABLE_EXA + } + + const usePatch = + !!Env.get("OPENCODE_E2E_LLM_URL") || + (model.modelID.includes("gpt-") && !model.modelID.includes("oss") && !model.modelID.includes("gpt-4")) + if (tool.id === "apply_patch") return usePatch + if (tool.id === "edit" || tool.id === "write") return !usePatch + + return true + }) + return yield* Effect.forEach( + filtered, + Effect.fnUntraced(function* (tool: Tool.Info) { + using _ = log.time(tool.id) + const next = yield* Effect.promise(() => tool.init({ agent })) + const output = { + description: next.description, + parameters: next.parameters, + } + yield* plugin.trigger("tool.definition", { toolID: tool.id }, output) + return { + id: tool.id, + description: output.description, + parameters: output.parameters, + execute: next.execute, + formatValidationError: next.formatValidationError, + } + }), + { concurrency: "unbounded" }, + ) + }) + + return Service.of({ ids, named: { task, read }, tools }) + }), + ) export const defaultLayer = Layer.unwrap( Effect.sync(() => @@ -226,6 +239,10 @@ export namespace ToolRegistry { Layer.provide(Plugin.defaultLayer), Layer.provide(Question.defaultLayer), Layer.provide(Todo.defaultLayer), + Layer.provide(LSP.defaultLayer), + Layer.provide(FileTime.defaultLayer), + 
Layer.provide(Instruction.defaultLayer), + Layer.provide(AppFileSystem.defaultLayer), ), ), ) diff --git a/packages/opencode/test/tool/read.test.ts b/packages/opencode/test/tool/read.test.ts index 5bb5b341ac..22762b4e0b 100644 --- a/packages/opencode/test/tool/read.test.ts +++ b/packages/opencode/test/tool/read.test.ts @@ -1,14 +1,20 @@ -import { afterEach, describe, expect, test } from "bun:test" -import { Effect } from "effect" +import { afterEach, describe, expect } from "bun:test" +import { Cause, Effect, Exit, Layer } from "effect" import path from "path" +import { Agent } from "../../src/agent/agent" +import * as CrossSpawnSpawner from "../../src/effect/cross-spawn-spawner" +import { AppFileSystem } from "../../src/filesystem" +import { FileTime } from "../../src/file/time" +import { LSP } from "../../src/lsp" +import { Permission } from "../../src/permission" import { Instance } from "../../src/project/instance" -import { ReadTool as ReadToolFx } from "../../src/tool/read" +import { SessionID, MessageID } from "../../src/session/schema" +import { Instruction } from "../../src/session/instruction" +import { ReadTool } from "../../src/tool/read" import { Tool } from "../../src/tool/tool" import { Filesystem } from "../../src/util/filesystem" -import { tmpdir } from "../fixture/fixture" -import { Permission } from "../../src/permission" -import { Agent } from "../../src/agent/agent" -import { SessionID, MessageID } from "../../src/session/schema" +import { provideInstance, tmpdirScoped } from "../fixture/fixture" +import { testEffect } from "../lib/effect" const FIXTURES_DIR = path.join(import.meta.dir, "fixtures") @@ -27,185 +33,186 @@ const ctx = { ask: async () => {}, } -const ReadTool = { - init: async () => ({ - execute: (args: Tool.InferParameters, ctx: Tool.Context) => - Effect.runPromise( - ReadToolFx.pipe( - Effect.flatMap((tool) => Effect.promise(() => tool.init())), - Effect.flatMap((tool) => Effect.promise(() => tool.execute(args, ctx))), - ), - ), 
- }), -} +const it = testEffect( + Layer.mergeAll( + Agent.defaultLayer, + AppFileSystem.defaultLayer, + CrossSpawnSpawner.defaultLayer, + FileTime.defaultLayer, + Instruction.defaultLayer, + LSP.defaultLayer, + ), +) + +const init = Effect.fn("ReadToolTest.init")(function* () { + const info = yield* ReadTool + return yield* Effect.promise(() => info.init()) +}) + +const exec = Effect.fn("ReadToolTest.exec")(function* ( + dir: string, + args: Tool.InferParameters, + next: Tool.Context = ctx, +) { + return yield* provideInstance(dir)( + Effect.gen(function* () { + const tool = yield* init() + return yield* Effect.promise(() => tool.execute(args, next)) + }), + ) +}) + +const fail = Effect.fn("ReadToolTest.fail")(function* ( + dir: string, + args: Tool.InferParameters, + next: Tool.Context = ctx, +) { + const exit = yield* exec(dir, args, next).pipe(Effect.exit) + if (Exit.isFailure(exit)) { + const err = Cause.squash(exit.cause) + return err instanceof Error ? err : new Error(String(err)) + } + throw new Error("expected read to fail") +}) const full = (p: string) => (process.platform === "win32" ? Filesystem.normalizePath(p) : p) const glob = (p: string) => process.platform === "win32" ? 
Filesystem.normalizePathPattern(p) : p.replaceAll("\\", "/") +const put = Effect.fn("ReadToolTest.put")(function* (p: string, content: string | Buffer | Uint8Array) { + const fs = yield* AppFileSystem.Service + yield* fs.writeWithDirs(p, content) +}) +const load = Effect.fn("ReadToolTest.load")(function* (p: string) { + const fs = yield* AppFileSystem.Service + return yield* fs.readFileString(p) +}) describe("tool.read external_directory permission", () => { - test("allows reading absolute path inside project directory", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - await Bun.write(path.join(dir, "test.txt"), "hello world") - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "test.txt") }, ctx) - expect(result.output).toContain("hello world") - }, - }) - }) + it.live("allows reading absolute path inside project directory", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + yield* put(path.join(dir, "test.txt"), "hello world") - test("allows reading file in subdirectory inside project directory", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - await Bun.write(path.join(dir, "subdir", "test.txt"), "nested content") - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "subdir", "test.txt") }, ctx) - expect(result.output).toContain("nested content") - }, - }) - }) + const result = yield* exec(dir, { filePath: path.join(dir, "test.txt") }) + expect(result.output).toContain("hello world") + }), + ) - test("asks for external_directory permission when reading absolute path outside project", async () => { - await using outerTmp = await tmpdir({ - init: async (dir) => { - await Bun.write(path.join(dir, "secret.txt"), "secret data") - }, 
- }) - await using tmp = await tmpdir({ git: true }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const requests: Array> = [] - const testCtx = { - ...ctx, - ask: async (req: Omit) => { - requests.push(req) - }, - } - await read.execute({ filePath: path.join(outerTmp.path, "secret.txt") }, testCtx) - const extDirReq = requests.find((r) => r.permission === "external_directory") - expect(extDirReq).toBeDefined() - expect(extDirReq!.patterns).toContain(glob(path.join(outerTmp.path, "*"))) - }, - }) - }) + it.live("allows reading file in subdirectory inside project directory", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + yield* put(path.join(dir, "subdir", "test.txt"), "nested content") + + const result = yield* exec(dir, { filePath: path.join(dir, "subdir", "test.txt") }) + expect(result.output).toContain("nested content") + }), + ) + + it.live("asks for external_directory permission when reading absolute path outside project", () => + Effect.gen(function* () { + const outer = yield* tmpdirScoped() + const dir = yield* tmpdirScoped({ git: true }) + yield* put(path.join(outer, "secret.txt"), "secret data") + + const requests: Array> = [] + const next = { + ...ctx, + ask: async (req: Omit) => { + requests.push(req) + }, + } + + yield* exec(dir, { filePath: path.join(outer, "secret.txt") }, next) + const ext = requests.find((item) => item.permission === "external_directory") + expect(ext).toBeDefined() + expect(ext!.patterns).toContain(glob(path.join(outer, "*"))) + }), + ) if (process.platform === "win32") { - test("normalizes read permission paths on Windows", async () => { - await using tmp = await tmpdir({ - git: true, - init: async (dir) => { - await Bun.write(path.join(dir, "test.txt"), "hello world") - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const requests: Array> = [] - const testCtx = { - ...ctx, 
- ask: async (req: Omit) => { - requests.push(req) - }, - } - const target = path.join(tmp.path, "test.txt") - const alt = target - .replace(/^[A-Za-z]:/, "") - .replaceAll("\\", "/") - .toLowerCase() - await read.execute({ filePath: alt }, testCtx) - const readReq = requests.find((r) => r.permission === "read") - expect(readReq).toBeDefined() - expect(readReq!.patterns).toEqual([full(target)]) - }, - }) - }) + it.live("normalizes read permission paths on Windows", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped({ git: true }) + yield* put(path.join(dir, "test.txt"), "hello world") + + const requests: Array> = [] + const next = { + ...ctx, + ask: async (req: Omit) => { + requests.push(req) + }, + } + const target = path.join(dir, "test.txt") + const alt = target + .replace(/^[A-Za-z]:/, "") + .replaceAll("\\", "/") + .toLowerCase() + + yield* exec(dir, { filePath: alt }, next) + const read = requests.find((item) => item.permission === "read") + expect(read).toBeDefined() + expect(read!.patterns).toEqual([full(target)]) + }), + ) } - test("asks for directory-scoped external_directory permission when reading external directory", async () => { - await using outerTmp = await tmpdir({ - init: async (dir) => { - await Bun.write(path.join(dir, "external", "a.txt"), "a") - }, - }) - await using tmp = await tmpdir({ git: true }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const requests: Array> = [] - const testCtx = { - ...ctx, - ask: async (req: Omit) => { - requests.push(req) - }, - } - await read.execute({ filePath: path.join(outerTmp.path, "external") }, testCtx) - const extDirReq = requests.find((r) => r.permission === "external_directory") - expect(extDirReq).toBeDefined() - expect(extDirReq!.patterns).toContain(glob(path.join(outerTmp.path, "external", "*"))) - }, - }) - }) + it.live("asks for directory-scoped external_directory permission when reading external directory", () => + 
Effect.gen(function* () { + const outer = yield* tmpdirScoped() + const dir = yield* tmpdirScoped({ git: true }) + yield* put(path.join(outer, "external", "a.txt"), "a") - test("asks for external_directory permission when reading relative path outside project", async () => { - await using tmp = await tmpdir({ git: true }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const requests: Array> = [] - const testCtx = { - ...ctx, - ask: async (req: Omit) => { - requests.push(req) - }, - } - // This will fail because file doesn't exist, but we can check if permission was asked - await read.execute({ filePath: "../outside.txt" }, testCtx).catch(() => {}) - const extDirReq = requests.find((r) => r.permission === "external_directory") - expect(extDirReq).toBeDefined() - }, - }) - }) + const requests: Array> = [] + const next = { + ...ctx, + ask: async (req: Omit) => { + requests.push(req) + }, + } - test("does not ask for external_directory permission when reading inside project", async () => { - await using tmp = await tmpdir({ - git: true, - init: async (dir) => { - await Bun.write(path.join(dir, "internal.txt"), "internal content") - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const requests: Array> = [] - const testCtx = { - ...ctx, - ask: async (req: Omit) => { - requests.push(req) - }, - } - await read.execute({ filePath: path.join(tmp.path, "internal.txt") }, testCtx) - const extDirReq = requests.find((r) => r.permission === "external_directory") - expect(extDirReq).toBeUndefined() - }, - }) - }) + yield* exec(dir, { filePath: path.join(outer, "external") }, next) + const ext = requests.find((item) => item.permission === "external_directory") + expect(ext).toBeDefined() + expect(ext!.patterns).toContain(glob(path.join(outer, "external", "*"))) + }), + ) + + it.live("asks for external_directory permission when reading relative path 
outside project", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped({ git: true })
+
+      const requests: Array> = []
+      const next = {
+        ...ctx,
+        ask: async (req: Omit) => {
+          requests.push(req)
+        },
+      }
+
+      yield* fail(dir, { filePath: "../outside.txt" }, next)
+      const ext = requests.find((item) => item.permission === "external_directory")
+      expect(ext).toBeDefined()
+    }),
+  )
+
+  it.live("does not ask for external_directory permission when reading inside project", () =>
+    Effect.gen(function* () {
+      const dir = yield* tmpdirScoped({ git: true })
+      yield* put(path.join(dir, "internal.txt"), "internal content")
+
+      const requests: Array> = []
+      const next = {
+        ...ctx,
+        ask: async (req: Omit) => {
+          requests.push(req)
+        },
+      }
+
+      yield* exec(dir, { filePath: path.join(dir, "internal.txt") }, next)
+      const ext = requests.find((item) => item.permission === "external_directory")
+      expect(ext).toBeUndefined()
+    }),
+  )
 })
 
 describe("tool.read env file permissions", () => {
@@ -219,261 +226,202 @@ describe("tool.read env file permissions", () => {
     ["environment.ts", false],
   ]
 
-  describe.each(["build", "plan"])("agent=%s", (agentName) => {
-    test.each(cases)("%s asks=%s", async (filename, shouldAsk) => {
-      await using tmp = await tmpdir({
-        init: (dir) => Bun.write(path.join(dir, filename), "content"),
-      })
-      await Instance.provide({
-        directory: tmp.path,
-        fn: async () => {
-          const agent = await Agent.get(agentName)
-          let askedForEnv = false
-          const ctxWithPermissions = {
-            ...ctx,
-            ask: async (req: Omit) => {
-              for (const pattern of req.patterns) {
-                const rule = Permission.evaluate(req.permission, pattern, agent.permission)
-                if (rule.action === "ask" && req.permission === "read") {
-                  askedForEnv = true
+  for (const agentName of ["build", "plan"] as const) {
+    describe(`agent=${agentName}`, () => {
+      for (const [filename, shouldAsk] of cases) {
+        it.live(`${filename} asks=${shouldAsk}`, () =>
+          Effect.gen(function* () {
+            const dir = yield*
tmpdirScoped() + yield* put(path.join(dir, filename), "content") + + const info = yield* provideInstance(dir)( + Effect.gen(function* () { + const agent = yield* Agent.Service + return yield* agent.get(agentName) + }), + ) + let asked = false + const next = { + ...ctx, + ask: async (req: Omit) => { + for (const pattern of req.patterns) { + const rule = Permission.evaluate(req.permission, pattern, info.permission) + if (rule.action === "ask" && req.permission === "read") { + asked = true + } + if (rule.action === "deny") { + throw new Permission.DeniedError({ ruleset: info.permission }) + } } - if (rule.action === "deny") { - throw new Permission.DeniedError({ ruleset: agent.permission }) - } - } - }, - } - const read = await ReadTool.init() - await read.execute({ filePath: path.join(tmp.path, filename) }, ctxWithPermissions) - expect(askedForEnv).toBe(shouldAsk) - }, - }) + }, + } + + yield* exec(dir, { filePath: path.join(dir, filename) }, next) + expect(asked).toBe(shouldAsk) + }), + ) + } }) - }) + } }) describe("tool.read truncation", () => { - test("truncates large file by bytes and sets truncated metadata", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - const base = await Filesystem.readText(path.join(FIXTURES_DIR, "models-api.json")) - const target = 60 * 1024 - const content = base.length >= target ? 
base : base.repeat(Math.ceil(target / base.length)) - await Filesystem.write(path.join(dir, "large.json"), content) - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "large.json") }, ctx) - expect(result.metadata.truncated).toBe(true) - expect(result.output).toContain("Output capped at") - expect(result.output).toContain("Use offset=") - }, - }) - }) + it.live("truncates large file by bytes and sets truncated metadata", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + const base = yield* load(path.join(FIXTURES_DIR, "models-api.json")) + const target = 60 * 1024 + const content = base.length >= target ? base : base.repeat(Math.ceil(target / base.length)) + yield* put(path.join(dir, "large.json"), content) - test("truncates by line count when limit is specified", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - const lines = Array.from({ length: 100 }, (_, i) => `line${i}`).join("\n") - await Bun.write(path.join(dir, "many-lines.txt"), lines) - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "many-lines.txt"), limit: 10 }, ctx) - expect(result.metadata.truncated).toBe(true) - expect(result.output).toContain("Showing lines 1-10 of 100") - expect(result.output).toContain("Use offset=11") - expect(result.output).toContain("line0") - expect(result.output).toContain("line9") - expect(result.output).not.toContain("line10") - }, - }) - }) + const result = yield* exec(dir, { filePath: path.join(dir, "large.json") }) + expect(result.metadata.truncated).toBe(true) + expect(result.output).toContain("Output capped at") + expect(result.output).toContain("Use offset=") + }), + ) - test("does not truncate small file", async () => { - await using tmp = await tmpdir({ 
- init: async (dir) => { - await Bun.write(path.join(dir, "small.txt"), "hello world") - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "small.txt") }, ctx) - expect(result.metadata.truncated).toBe(false) - expect(result.output).toContain("End of file") - }, - }) - }) + it.live("truncates by line count when limit is specified", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + const lines = Array.from({ length: 100 }, (_, i) => `line${i}`).join("\n") + yield* put(path.join(dir, "many-lines.txt"), lines) - test("respects offset parameter", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - const lines = Array.from({ length: 20 }, (_, i) => `line${i + 1}`).join("\n") - await Bun.write(path.join(dir, "offset.txt"), lines) - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "offset.txt"), offset: 10, limit: 5 }, ctx) - expect(result.output).toContain("10: line10") - expect(result.output).toContain("14: line14") - expect(result.output).not.toContain("9: line10") - expect(result.output).not.toContain("15: line15") - expect(result.output).toContain("line10") - expect(result.output).toContain("line14") - expect(result.output).not.toContain("line0") - expect(result.output).not.toContain("line15") - }, - }) - }) + const result = yield* exec(dir, { filePath: path.join(dir, "many-lines.txt"), limit: 10 }) + expect(result.metadata.truncated).toBe(true) + expect(result.output).toContain("Showing lines 1-10 of 100") + expect(result.output).toContain("Use offset=11") + expect(result.output).toContain("line0") + expect(result.output).toContain("line9") + expect(result.output).not.toContain("line10") + }), + ) - test("throws when offset is beyond end of file", async () => 
{ - await using tmp = await tmpdir({ - init: async (dir) => { - const lines = Array.from({ length: 3 }, (_, i) => `line${i + 1}`).join("\n") - await Bun.write(path.join(dir, "short.txt"), lines) - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - await expect( - read.execute({ filePath: path.join(tmp.path, "short.txt"), offset: 4, limit: 5 }, ctx), - ).rejects.toThrow("Offset 4 is out of range for this file (3 lines)") - }, - }) - }) + it.live("does not truncate small file", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + yield* put(path.join(dir, "small.txt"), "hello world") - test("allows reading empty file at default offset", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - await Bun.write(path.join(dir, "empty.txt"), "") - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "empty.txt") }, ctx) - expect(result.metadata.truncated).toBe(false) - expect(result.output).toContain("End of file - total 0 lines") - }, - }) - }) + const result = yield* exec(dir, { filePath: path.join(dir, "small.txt") }) + expect(result.metadata.truncated).toBe(false) + expect(result.output).toContain("End of file") + }), + ) - test("throws when offset > 1 for empty file", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - await Bun.write(path.join(dir, "empty.txt"), "") - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - await expect(read.execute({ filePath: path.join(tmp.path, "empty.txt"), offset: 2 }, ctx)).rejects.toThrow( - "Offset 2 is out of range for this file (0 lines)", - ) - }, - }) - }) + it.live("respects offset parameter", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + const lines = Array.from({ length: 20 }, (_, 
i) => `line${i + 1}`).join("\n") + yield* put(path.join(dir, "offset.txt"), lines) - test("does not mark final directory page as truncated", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - await Promise.all( - Array.from({ length: 10 }, (_, i) => Bun.write(path.join(dir, "dir", `file-${i + 1}.txt`), `line${i}`)), - ) - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "dir"), offset: 6, limit: 5 }, ctx) - expect(result.metadata.truncated).toBe(false) - expect(result.output).not.toContain("Showing 5 of 10 entries") - }, - }) - }) + const result = yield* exec(dir, { filePath: path.join(dir, "offset.txt"), offset: 10, limit: 5 }) + expect(result.output).toContain("10: line10") + expect(result.output).toContain("14: line14") + expect(result.output).not.toContain("9: line10") + expect(result.output).not.toContain("15: line15") + expect(result.output).toContain("line10") + expect(result.output).toContain("line14") + expect(result.output).not.toContain("line0") + expect(result.output).not.toContain("line15") + }), + ) - test("truncates long lines", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - const longLine = "x".repeat(3000) - await Bun.write(path.join(dir, "long-line.txt"), longLine) - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "long-line.txt") }, ctx) - expect(result.output).toContain("(line truncated to 2000 chars)") - expect(result.output.length).toBeLessThan(3000) - }, - }) - }) + it.live("throws when offset is beyond end of file", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + const lines = Array.from({ length: 3 }, (_, i) => `line${i + 1}`).join("\n") + yield* put(path.join(dir, "short.txt"), lines) - test("image 
files set truncated to false", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - // 1x1 red PNG - const png = Buffer.from( - "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8DwHwAFBQIAX8jx0gAAAABJRU5ErkJggg==", - "base64", - ) - await Bun.write(path.join(dir, "image.png"), png) - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "image.png") }, ctx) - expect(result.metadata.truncated).toBe(false) - expect(result.attachments).toBeDefined() - expect(result.attachments?.length).toBe(1) - expect(result.attachments?.[0]).not.toHaveProperty("id") - expect(result.attachments?.[0]).not.toHaveProperty("sessionID") - expect(result.attachments?.[0]).not.toHaveProperty("messageID") - }, - }) - }) + const err = yield* fail(dir, { filePath: path.join(dir, "short.txt"), offset: 4, limit: 5 }) + expect(err.message).toContain("Offset 4 is out of range for this file (3 lines)") + }), + ) - test("large image files are properly attached without error", async () => { - await Instance.provide({ - directory: FIXTURES_DIR, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(FIXTURES_DIR, "large-image.png") }, ctx) - expect(result.metadata.truncated).toBe(false) - expect(result.attachments).toBeDefined() - expect(result.attachments?.length).toBe(1) - expect(result.attachments?.[0].type).toBe("file") - expect(result.attachments?.[0]).not.toHaveProperty("id") - expect(result.attachments?.[0]).not.toHaveProperty("sessionID") - expect(result.attachments?.[0]).not.toHaveProperty("messageID") - }, - }) - }) + it.live("allows reading empty file at default offset", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + yield* put(path.join(dir, "empty.txt"), "") - test(".fbs files (FlatBuffers schema) are read as text, not images", async () => { - 
await using tmp = await tmpdir({ - init: async (dir) => { - // FlatBuffers schema content - const fbsContent = `namespace MyGame; + const result = yield* exec(dir, { filePath: path.join(dir, "empty.txt") }) + expect(result.metadata.truncated).toBe(false) + expect(result.output).toContain("End of file - total 0 lines") + }), + ) + + it.live("throws when offset > 1 for empty file", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + yield* put(path.join(dir, "empty.txt"), "") + + const err = yield* fail(dir, { filePath: path.join(dir, "empty.txt"), offset: 2 }) + expect(err.message).toContain("Offset 2 is out of range for this file (0 lines)") + }), + ) + + it.live("does not mark final directory page as truncated", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + yield* Effect.forEach( + Array.from({ length: 10 }, (_, i) => i), + (i) => put(path.join(dir, "dir", `file-${i + 1}.txt`), `line${i}`), + { + concurrency: "unbounded", + }, + ) + + const result = yield* exec(dir, { filePath: path.join(dir, "dir"), offset: 6, limit: 5 }) + expect(result.metadata.truncated).toBe(false) + expect(result.output).not.toContain("Showing 5 of 10 entries") + }), + ) + + it.live("truncates long lines", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + yield* put(path.join(dir, "long-line.txt"), "x".repeat(3000)) + + const result = yield* exec(dir, { filePath: path.join(dir, "long-line.txt") }) + expect(result.output).toContain("(line truncated to 2000 chars)") + expect(result.output.length).toBeLessThan(3000) + }), + ) + + it.live("image files set truncated to false", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + const png = Buffer.from( + "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8DwHwAFBQIAX8jx0gAAAABJRU5ErkJggg==", + "base64", + ) + yield* put(path.join(dir, "image.png"), png) + + const result = yield* exec(dir, { filePath: path.join(dir, "image.png") }) + 
expect(result.metadata.truncated).toBe(false) + expect(result.attachments).toBeDefined() + expect(result.attachments?.length).toBe(1) + expect(result.attachments?.[0]).not.toHaveProperty("id") + expect(result.attachments?.[0]).not.toHaveProperty("sessionID") + expect(result.attachments?.[0]).not.toHaveProperty("messageID") + }), + ) + + it.live("large image files are properly attached without error", () => + Effect.gen(function* () { + const result = yield* exec(FIXTURES_DIR, { filePath: path.join(FIXTURES_DIR, "large-image.png") }) + expect(result.metadata.truncated).toBe(false) + expect(result.attachments).toBeDefined() + expect(result.attachments?.length).toBe(1) + expect(result.attachments?.[0].type).toBe("file") + expect(result.attachments?.[0]).not.toHaveProperty("id") + expect(result.attachments?.[0]).not.toHaveProperty("sessionID") + expect(result.attachments?.[0]).not.toHaveProperty("messageID") + }), + ) + + it.live(".fbs files (FlatBuffers schema) are read as text, not images", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + const fbs = `namespace MyGame; table Monster { pos:Vec3; @@ -482,79 +430,52 @@ table Monster { } root_type Monster;` - await Bun.write(path.join(dir, "schema.fbs"), fbsContent) - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "schema.fbs") }, ctx) - // Should be read as text, not as image - expect(result.attachments).toBeUndefined() - expect(result.output).toContain("namespace MyGame") - expect(result.output).toContain("table Monster") - }, - }) - }) + yield* put(path.join(dir, "schema.fbs"), fbs) + + const result = yield* exec(dir, { filePath: path.join(dir, "schema.fbs") }) + expect(result.attachments).toBeUndefined() + expect(result.output).toContain("namespace MyGame") + expect(result.output).toContain("table Monster") + }), + ) }) describe("tool.read loaded instructions", 
() => { - test("loads AGENTS.md from parent directory and includes in metadata", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - await Bun.write(path.join(dir, "subdir", "AGENTS.md"), "# Test Instructions\nDo something special.") - await Bun.write(path.join(dir, "subdir", "nested", "test.txt"), "test content") - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - const result = await read.execute({ filePath: path.join(tmp.path, "subdir", "nested", "test.txt") }, ctx) - expect(result.output).toContain("test content") - expect(result.output).toContain("system-reminder") - expect(result.output).toContain("Test Instructions") - expect(result.metadata.loaded).toBeDefined() - expect(result.metadata.loaded).toContain(path.join(tmp.path, "subdir", "AGENTS.md")) - }, - }) - }) + it.live("loads AGENTS.md from parent directory and includes in metadata", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + yield* put(path.join(dir, "subdir", "AGENTS.md"), "# Test Instructions\nDo something special.") + yield* put(path.join(dir, "subdir", "nested", "test.txt"), "test content") + + const result = yield* exec(dir, { filePath: path.join(dir, "subdir", "nested", "test.txt") }) + expect(result.output).toContain("test content") + expect(result.output).toContain("system-reminder") + expect(result.output).toContain("Test Instructions") + expect(result.metadata.loaded).toBeDefined() + expect(result.metadata.loaded).toContain(path.join(dir, "subdir", "AGENTS.md")) + }), + ) }) describe("tool.read binary detection", () => { - test("rejects text extension files with null bytes", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - const bytes = Buffer.from([0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x00, 0x77, 0x6f, 0x72, 0x6c, 0x64]) - await Bun.write(path.join(dir, "null-byte.txt"), bytes) - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => 
{ - const read = await ReadTool.init() - await expect(read.execute({ filePath: path.join(tmp.path, "null-byte.txt") }, ctx)).rejects.toThrow( - "Cannot read binary file", - ) - }, - }) - }) + it.live("rejects text extension files with null bytes", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + const bytes = Buffer.from([0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x00, 0x77, 0x6f, 0x72, 0x6c, 0x64]) + yield* put(path.join(dir, "null-byte.txt"), bytes) - test("rejects known binary extensions", async () => { - await using tmp = await tmpdir({ - init: async (dir) => { - await Bun.write(path.join(dir, "module.wasm"), "not really wasm") - }, - }) - await Instance.provide({ - directory: tmp.path, - fn: async () => { - const read = await ReadTool.init() - await expect(read.execute({ filePath: path.join(tmp.path, "module.wasm") }, ctx)).rejects.toThrow( - "Cannot read binary file", - ) - }, - }) - }) + const err = yield* fail(dir, { filePath: path.join(dir, "null-byte.txt") }) + expect(err.message).toContain("Cannot read binary file") + }), + ) + + it.live("rejects known binary extensions", () => + Effect.gen(function* () { + const dir = yield* tmpdirScoped() + yield* put(path.join(dir, "module.wasm"), "not really wasm") + + const err = yield* fail(dir, { filePath: path.join(dir, "module.wasm") }) + expect(err.message).toContain("Cannot read binary file") + }), + ) })