feat: add experimental hashline edit mode

pull/13854/merge
Shoubhit Dash 2026-02-22 19:40:34 +05:30
parent b16f7b426c
commit 56decd79db
14 changed files with 1694 additions and 98 deletions

View File

@ -6,6 +6,10 @@
},
},
"mcp": {},
"experimental": {
"hashline_edit": true,
"hashline_autocorrect": true,
},
"tools": {
"github-triage": false,
"github-pr-search": false,

View File

@ -2019,7 +2019,9 @@ function Edit(props: ToolProps<typeof EditTool>) {
</Match>
<Match when={true}>
<InlineTool icon="←" pending="Preparing edit..." complete={props.input.filePath} part={props.part}>
Edit {normalizePath(props.input.filePath!)} {input({ replaceAll: props.input.replaceAll })}
Edit{" "}
{normalizePath(props.input.filePath!)}{" "}
{input({ replaceAll: "replaceAll" in props.input ? props.input.replaceAll : undefined })}
</InlineTool>
</Match>
</Switch>

View File

@ -1186,6 +1186,11 @@ export namespace Config {
.object({
disable_paste_summary: z.boolean().optional(),
batch_tool: z.boolean().optional().describe("Enable the batch tool"),
hashline_edit: z.boolean().optional().describe("Enable hashline-backed edit/read tool behavior"),
hashline_autocorrect: z
.boolean()
.optional()
.describe("Enable hashline autocorrect cleanup for copied prefixes and formatting artifacts"),
openTelemetry: z
.boolean()
.optional()

View File

@ -5,6 +5,7 @@
import z from "zod"
import * as path from "path"
import * as fs from "fs/promises"
import { Tool } from "./tool"
import { LSP } from "../lsp"
import { createTwoFilesPatch, diffLines } from "diff"
@ -17,72 +18,158 @@ import { Filesystem } from "../util/filesystem"
import { Instance } from "../project/instance"
import { Snapshot } from "@/snapshot"
import { assertExternalDirectory } from "./external-directory"
import {
HashlineEdit,
applyHashlineEdits,
hashlineOnlyCreates,
parseHashlineContent,
serializeHashlineContent,
} from "./hashline"
import { Config } from "../config/config"
// Cap on LSP errors echoed back into the tool output for a single file.
const MAX_DIAGNOSTICS_PER_FILE = 20
// Edit-mode labels recorded in tool result metadata.
const LEGACY_EDIT_MODE = "legacy"
const HASHLINE_EDIT_MODE = "hashline"
// Legacy payload: exact string replacement (oldString -> newString).
const LegacyEditParams = z.object({
  filePath: z.string().describe("The absolute path to the file to modify"),
  oldString: z.string().describe("The text to replace"),
  newString: z.string().describe("The text to replace it with (must be different from oldString)"),
  replaceAll: z.boolean().optional().describe("Replace all occurrences of oldString (default false)"),
})
// Hashline payload: anchor-based edits plus optional delete/rename of the file.
const HashlineEditParams = z.object({
  filePath: z.string().describe("The absolute path to the file to modify"),
  edits: z.array(HashlineEdit).default([]),
  delete: z.boolean().optional(),
  rename: z.string().optional(),
})
// Combined wire schema: accepts either field set; superRefine rejects mixed or
// empty payloads so callers get an explicit error instead of silent behavior.
const EditParams = z
  .object({
    filePath: z.string().describe("The absolute path to the file to modify"),
    oldString: z.string().optional().describe("The text to replace"),
    newString: z.string().optional().describe("The text to replace it with (must be different from oldString)"),
    replaceAll: z.boolean().optional().describe("Replace all occurrences of oldString (default false)"),
    edits: z.array(HashlineEdit).optional(),
    delete: z.boolean().optional(),
    rename: z.string().optional(),
  })
  .strict()
  .superRefine((value, ctx) => {
    // Presence of any field from a set selects that payload style.
    const legacy = value.oldString !== undefined || value.newString !== undefined || value.replaceAll !== undefined
    const hashline = value.edits !== undefined || value.delete !== undefined || value.rename !== undefined
    if (legacy && hashline) {
      ctx.addIssue({
        code: "custom",
        message: "Do not mix legacy (oldString/newString) and hashline (edits/delete/rename) fields.",
      })
      return
    }
    if (!legacy && !hashline) {
      ctx.addIssue({
        code: "custom",
        message: "Provide either legacy fields (oldString/newString) or hashline fields (edits/delete/rename).",
      })
      return
    }
    if (legacy) {
      if (value.oldString === undefined || value.newString === undefined) {
        ctx.addIssue({
          code: "custom",
          message: "Legacy payload requires both oldString and newString.",
        })
      }
      return
    }
    // Hashline path: edits is required even when empty (delete-only calls pass []).
    if (value.edits === undefined) {
      ctx.addIssue({
        code: "custom",
        message: "Hashline payload requires edits (use [] when only delete is intended).",
      })
    }
  })
type LegacyEditParams = z.infer<typeof LegacyEditParams>
type HashlineEditParams = z.infer<typeof HashlineEditParams>
type EditParams = z.infer<typeof EditParams>
/** Convert CRLF sequences to LF so diffs compare content, not line-ending style. */
function normalizeLineEndings(text: string): string {
  return text.split("\r\n").join("\n")
}
export const EditTool = Tool.define("edit", {
description: DESCRIPTION,
parameters: z.object({
filePath: z.string().describe("The absolute path to the file to modify"),
oldString: z.string().describe("The text to replace"),
newString: z.string().describe("The text to replace it with (must be different from oldString)"),
replaceAll: z.boolean().optional().describe("Replace all occurrences of oldString (default false)"),
}),
async execute(params, ctx) {
if (!params.filePath) {
throw new Error("filePath is required")
/**
 * Detect a legacy-style payload: any of the oldString/newString/replaceAll
 * fields being present selects the exact-string-replacement code path.
 */
function isLegacyParams(params: EditParams): params is LegacyEditParams {
  const legacyFields = [params.oldString, params.newString, params.replaceAll]
  return legacyFields.some((field) => field !== undefined)
}
/**
 * Run fn while holding FileTime locks on every distinct path. Paths are
 * deduplicated and acquired in sorted order so two concurrent calls can never
 * deadlock by acquiring the same locks in opposite orders.
 */
async function withLocks(paths: string[], fn: () => Promise<void>) {
  const ordered = [...new Set(paths)].sort((a, b) => a.localeCompare(b))
  // Fold from the innermost (last) lock outward: the first path ends up outermost.
  let chain = fn
  for (const p of [...ordered].reverse()) {
    const inner = chain
    chain = () => FileTime.withLock(p, inner)
  }
  await chain()
}
/** Build a Snapshot.FileDiff with added/removed line totals computed via diffLines. */
function createFileDiff(file: string, before: string, after: string): Snapshot.FileDiff {
  let additions = 0
  let deletions = 0
  for (const chunk of diffLines(before, after)) {
    const count = chunk.count || 0
    if (chunk.added) additions += count
    if (chunk.removed) deletions += count
  }
  return { file, before, after, additions, deletions }
}
/**
 * Touch the file in the LSP and, when error-severity diagnostics are present,
 * append them to the tool output so the model is prompted to fix them.
 * Returns the (possibly augmented) output plus the full diagnostics map.
 *
 * Fix: removes diff-paste residue that had leaked into this function — a stray
 * `if (params.oldString === params.newString)` block and trailing
 * `filePath`/`assertExternalDirectory` lines referencing names (`params`,
 * `ctx`) that do not exist in this scope and would not compile.
 */
async function diagnosticsOutput(filePath: string, output: string) {
  await LSP.touchFile(filePath, true)
  const diagnostics = await LSP.diagnostics()
  const normalizedFilePath = Filesystem.normalizePath(filePath)
  const issues = diagnostics[normalizedFilePath] ?? []
  const errors = issues.filter((item) => item.severity === 1)
  if (errors.length === 0) {
    return {
      output,
      diagnostics,
    }
  }
  // Cap the list so one noisy file cannot flood the transcript.
  const limited = errors.slice(0, MAX_DIAGNOSTICS_PER_FILE)
  const suffix =
    errors.length > MAX_DIAGNOSTICS_PER_FILE ? `\n... and ${errors.length - MAX_DIAGNOSTICS_PER_FILE} more` : ""
  return {
    output:
      output +
      `\n\nLSP errors detected in this file, please fix:\n<diagnostics file="${filePath}">\n${limited.map(LSP.Diagnostic.pretty).join("\n")}${suffix}\n</diagnostics>`,
    diagnostics,
  }
}
async function executeLegacy(params: LegacyEditParams, ctx: Tool.Context) {
if (params.oldString === params.newString) {
throw new Error("No changes to apply: oldString and newString are identical.")
}
let diff = ""
let contentOld = ""
let contentNew = ""
await FileTime.withLock(filePath, async () => {
if (params.oldString === "") {
const existed = await Filesystem.exists(filePath)
contentNew = params.newString
diff = trimDiff(createTwoFilesPatch(filePath, filePath, contentOld, contentNew))
await ctx.ask({
permission: "edit",
patterns: [path.relative(Instance.worktree, filePath)],
always: ["*"],
metadata: {
filepath: filePath,
diff,
},
})
await Filesystem.write(filePath, params.newString)
await Bus.publish(File.Event.Edited, {
file: filePath,
})
await Bus.publish(FileWatcher.Event.Updated, {
file: filePath,
event: existed ? "change" : "add",
})
FileTime.read(ctx.sessionID, filePath)
return
}
const filePath = path.isAbsolute(params.filePath) ? params.filePath : path.join(Instance.directory, params.filePath)
await assertExternalDirectory(ctx, filePath)
const stats = Filesystem.stat(filePath)
if (!stats) throw new Error(`File ${filePath} not found`)
if (stats.isDirectory()) throw new Error(`Path is a directory, not a file: ${filePath}`)
await FileTime.assert(ctx.sessionID, filePath)
contentOld = await Filesystem.readText(filePath)
contentNew = replace(contentOld, params.oldString, params.newString, params.replaceAll)
diff = trimDiff(
createTwoFilesPatch(filePath, filePath, normalizeLineEndings(contentOld), normalizeLineEndings(contentNew)),
)
let diff = ""
let contentOld = ""
let contentNew = ""
await FileTime.withLock(filePath, async () => {
if (params.oldString === "") {
const existed = await Filesystem.exists(filePath)
contentNew = params.newString
diff = trimDiff(createTwoFilesPatch(filePath, filePath, contentOld, contentNew))
await ctx.ask({
permission: "edit",
patterns: [path.relative(Instance.worktree, filePath)],
@ -92,64 +179,312 @@ export const EditTool = Tool.define("edit", {
diff,
},
})
await Filesystem.write(filePath, contentNew)
await Filesystem.write(filePath, params.newString)
await Bus.publish(File.Event.Edited, {
file: filePath,
})
await Bus.publish(FileWatcher.Event.Updated, {
file: filePath,
event: "change",
event: existed ? "change" : "add",
})
contentNew = await Filesystem.readText(filePath)
diff = trimDiff(
createTwoFilesPatch(filePath, filePath, normalizeLineEndings(contentOld), normalizeLineEndings(contentNew)),
)
FileTime.read(ctx.sessionID, filePath)
})
const filediff: Snapshot.FileDiff = {
file: filePath,
before: contentOld,
after: contentNew,
additions: 0,
deletions: 0,
}
for (const change of diffLines(contentOld, contentNew)) {
if (change.added) filediff.additions += change.count || 0
if (change.removed) filediff.deletions += change.count || 0
return
}
ctx.metadata({
const stats = Filesystem.stat(filePath)
if (!stats) throw new Error(`File ${filePath} not found`)
if (stats.isDirectory()) throw new Error(`Path is a directory, not a file: ${filePath}`)
await FileTime.assert(ctx.sessionID, filePath)
contentOld = await Filesystem.readText(filePath)
contentNew = replace(contentOld, params.oldString, params.newString, params.replaceAll)
diff = trimDiff(
createTwoFilesPatch(filePath, filePath, normalizeLineEndings(contentOld), normalizeLineEndings(contentNew)),
)
await ctx.ask({
permission: "edit",
patterns: [path.relative(Instance.worktree, filePath)],
always: ["*"],
metadata: {
filepath: filePath,
diff,
filediff,
diagnostics: {},
},
})
let output = "Edit applied successfully."
await LSP.touchFile(filePath, true)
const diagnostics = await LSP.diagnostics()
const normalizedFilePath = Filesystem.normalizePath(filePath)
const issues = diagnostics[normalizedFilePath] ?? []
const errors = issues.filter((item) => item.severity === 1)
if (errors.length > 0) {
const limited = errors.slice(0, MAX_DIAGNOSTICS_PER_FILE)
const suffix =
errors.length > MAX_DIAGNOSTICS_PER_FILE ? `\n... and ${errors.length - MAX_DIAGNOSTICS_PER_FILE} more` : ""
output += `\n\nLSP errors detected in this file, please fix:\n<diagnostics file="${filePath}">\n${limited.map(LSP.Diagnostic.pretty).join("\n")}${suffix}\n</diagnostics>`
await Filesystem.write(filePath, contentNew)
await Bus.publish(File.Event.Edited, {
file: filePath,
})
await Bus.publish(FileWatcher.Event.Updated, {
file: filePath,
event: "change",
})
contentNew = await Filesystem.readText(filePath)
diff = trimDiff(
createTwoFilesPatch(filePath, filePath, normalizeLineEndings(contentOld), normalizeLineEndings(contentNew)),
)
FileTime.read(ctx.sessionID, filePath)
})
const filediff = createFileDiff(filePath, contentOld, contentNew)
ctx.metadata({
metadata: {
diff,
filediff,
diagnostics: {},
edit_mode: LEGACY_EDIT_MODE,
},
})
const result = await diagnosticsOutput(filePath, "Edit applied successfully.")
return {
metadata: {
diagnostics: result.diagnostics,
diff,
filediff,
edit_mode: LEGACY_EDIT_MODE,
},
title: `${path.relative(Instance.worktree, filePath)}`,
output: result.output,
}
}
async function executeHashline(params: HashlineEditParams, ctx: Tool.Context, autocorrect: boolean) {
const sourcePath = path.isAbsolute(params.filePath) ? params.filePath : path.join(Instance.directory, params.filePath)
const targetPath = params.rename
? path.isAbsolute(params.rename)
? params.rename
: path.join(Instance.directory, params.rename)
: sourcePath
await assertExternalDirectory(ctx, sourcePath)
if (params.rename) {
await assertExternalDirectory(ctx, targetPath)
}
if (params.delete && params.edits.length > 0) {
throw new Error("delete=true cannot be combined with edits")
}
if (params.delete && params.rename) {
throw new Error("delete=true cannot be combined with rename")
}
let diff = ""
let before = ""
let after = ""
let noop = 0
let deleted = false
let changed = false
let diagnostics: Awaited<ReturnType<typeof LSP.diagnostics>> = {}
const paths = [sourcePath, targetPath]
await withLocks(paths, async () => {
const sourceStat = Filesystem.stat(sourcePath)
if (sourceStat?.isDirectory()) throw new Error(`Path is a directory, not a file: ${sourcePath}`)
const exists = Boolean(sourceStat)
if (params.rename && !exists) {
throw new Error("rename requires an existing source file")
}
if (params.delete) {
if (!exists) {
noop = 1
return
}
await FileTime.assert(ctx.sessionID, sourcePath)
before = await Filesystem.readText(sourcePath)
after = ""
diff = trimDiff(
createTwoFilesPatch(sourcePath, sourcePath, normalizeLineEndings(before), normalizeLineEndings(after)),
)
await ctx.ask({
permission: "edit",
patterns: [path.relative(Instance.worktree, sourcePath)],
always: ["*"],
metadata: {
filepath: sourcePath,
diff,
},
})
await fs.rm(sourcePath, { force: true })
await Bus.publish(File.Event.Edited, {
file: sourcePath,
})
await Bus.publish(FileWatcher.Event.Updated, {
file: sourcePath,
event: "unlink",
})
deleted = true
changed = true
return
}
if (!exists && !hashlineOnlyCreates(params.edits)) {
throw new Error("Missing file can only be created with append/prepend hashline edits")
}
if (exists) {
await FileTime.assert(ctx.sessionID, sourcePath)
}
const parsed = exists
? parseHashlineContent(await Filesystem.readBytes(sourcePath))
: {
bom: false,
eol: "\n",
trailing: false,
lines: [] as string[],
text: "",
raw: "",
}
before = parsed.raw
const next = applyHashlineEdits({
lines: parsed.lines,
trailing: parsed.trailing,
edits: params.edits,
autocorrect,
})
const output = serializeHashlineContent({
lines: next.lines,
trailing: next.trailing,
eol: parsed.eol,
bom: parsed.bom,
})
after = output.text
const noContentChange = before === after && sourcePath === targetPath
if (noContentChange) {
noop = 1
diff = trimDiff(
createTwoFilesPatch(sourcePath, sourcePath, normalizeLineEndings(before), normalizeLineEndings(after)),
)
return
}
diff = trimDiff(
createTwoFilesPatch(sourcePath, targetPath, normalizeLineEndings(before), normalizeLineEndings(after)),
)
const patterns = [path.relative(Instance.worktree, sourcePath)]
if (sourcePath !== targetPath) patterns.push(path.relative(Instance.worktree, targetPath))
await ctx.ask({
permission: "edit",
patterns: Array.from(new Set(patterns)),
always: ["*"],
metadata: {
filepath: sourcePath,
diff,
},
})
if (sourcePath === targetPath) {
await Filesystem.write(sourcePath, output.bytes)
await Bus.publish(File.Event.Edited, {
file: sourcePath,
})
await Bus.publish(FileWatcher.Event.Updated, {
file: sourcePath,
event: exists ? "change" : "add",
})
FileTime.read(ctx.sessionID, sourcePath)
changed = true
return
}
const targetExists = await Filesystem.exists(targetPath)
await Filesystem.write(targetPath, output.bytes)
await fs.rm(sourcePath, { force: true })
await Bus.publish(File.Event.Edited, {
file: sourcePath,
})
await Bus.publish(File.Event.Edited, {
file: targetPath,
})
await Bus.publish(FileWatcher.Event.Updated, {
file: sourcePath,
event: "unlink",
})
await Bus.publish(FileWatcher.Event.Updated, {
file: targetPath,
event: targetExists ? "change" : "add",
})
FileTime.read(ctx.sessionID, targetPath)
changed = true
})
const file = deleted ? sourcePath : targetPath
const filediff = createFileDiff(file, before, after)
ctx.metadata({
metadata: {
diff,
filediff,
diagnostics,
edit_mode: HASHLINE_EDIT_MODE,
noop,
},
})
if (!deleted && (changed || noop === 0)) {
const result = await diagnosticsOutput(targetPath, noop > 0 ? "No changes applied." : "Edit applied successfully.")
diagnostics = result.diagnostics
return {
metadata: {
diagnostics,
diff,
filediff,
edit_mode: HASHLINE_EDIT_MODE,
noop,
},
title: `${path.relative(Instance.worktree, filePath)}`,
output,
title: `${path.relative(Instance.worktree, targetPath)}`,
output: result.output,
}
}
return {
metadata: {
diagnostics,
diff,
filediff,
edit_mode: HASHLINE_EDIT_MODE,
noop,
},
title: `${path.relative(Instance.worktree, file)}`,
output: deleted ? "Edit applied successfully." : "No changes applied.",
}
}
// Edit tool entry point. Accepts either the legacy oldString/newString payload
// or the experimental hashline payload; the two field sets are mutually
// exclusive (enforced by EditParams' superRefine).
export const EditTool = Tool.define("edit", {
  description: DESCRIPTION,
  parameters: EditParams,
  async execute(params, ctx) {
    if (!params.filePath) {
      throw new Error("filePath is required")
    }
    // Any legacy field present routes to the exact-string-replacement path.
    if (isLegacyParams(params)) {
      return executeLegacy(params, ctx)
    }
    const config = await Config.get()
    // Hashline payloads are gated behind an explicit experimental opt-in.
    if (config.experimental?.hashline_edit !== true) {
      throw new Error(
        "Hashline edit payload is disabled. Enable experimental.hashline_edit to use hashline operations.",
      )
    }
    const hashlineParams: HashlineEditParams = {
      filePath: params.filePath,
      edits: params.edits ?? [],
      delete: params.delete,
      rename: params.rename,
    }
    // Autocorrect comes from config or the OPENCODE_HL_AUTOCORRECT env var.
    return executeHashline(
      hashlineParams,
      ctx,
      config.experimental?.hashline_autocorrect === true || Bun.env.OPENCODE_HL_AUTOCORRECT === "1",
    )
  },
})

View File

@ -1,10 +1,30 @@
Performs exact string replacements in files.
Performs file edits with two supported payload schemas.
Usage:
- You must use your `Read` tool at least once in the conversation before editing. This tool will error if you attempt an edit without reading the file.
- When editing text from Read tool output, ensure you preserve the exact indentation (tabs/spaces) as it appears AFTER the line number prefix. The line number prefix format is: line number + colon + space (e.g., `1: `). Everything after that space is the actual file content to match. Never include any part of the line number prefix in the oldString or newString.
- You must use your `Read` tool at least once before editing an existing file. This tool rejects stale edits when file contents changed since read.
- ALWAYS prefer editing existing files in the codebase. NEVER write new files unless explicitly required.
- Only use emojis if the user explicitly requests it. Avoid adding emojis to files unless asked.
- The edit will FAIL if `oldString` is not found in the file with an error "oldString not found in content".
- The edit will FAIL if `oldString` is found multiple times in the file with an error "Found multiple matches for oldString. Provide more surrounding lines in oldString to identify the correct match." Either provide a larger string with more surrounding context to make it unique or use `replaceAll` to change every instance of `oldString`.
- Use `replaceAll` for replacing and renaming strings across the file. This parameter is useful if you want to rename a variable for instance.
Legacy schema (always supported):
- `{ filePath, oldString, newString, replaceAll? }`
- Exact replacement only.
- The edit fails if `oldString` is not found.
- The edit fails if `oldString` matches multiple locations and `replaceAll` is not true.
- Use `replaceAll: true` for global replacements.
Hashline schema (requires `experimental.hashline_edit: true`):
- `{ filePath, edits, delete?, rename? }`
- Do not mix legacy fields (`oldString/newString/replaceAll`) with hashline fields (`edits/delete/rename`) in one call.
- Use strict anchor references from `Read` output: `LINE#ID`.
- Optional cleanup behavior can be enabled with `experimental.hashline_autocorrect: true`.
- When `Read` returns `LINE#ID:<content>`, prefer hashline operations.
- Operations:
- `set_line { line, text }`
- `replace_lines { start_line, end_line, text }`
- `insert_after { line, text }`
- `insert_before { line, text }`
- `insert_between { after_line, before_line, text }`
- `append { text }`
- `prepend { text }`
- `replace { old_text, new_text, all? }`
- In hashline mode, provide the exact `LINE#ID` anchors from the latest `Read` result. Mismatched anchors are rejected and must be retried with updated references.

View File

@ -0,0 +1,621 @@
// hashline autocorrect heuristics in this file are inspired by
// https://github.com/can1357/oh-my-pi (mit license), adapted for opencode.
import z from "zod"
// 16-character alphabet for rendering line hashes; each of the two id
// characters encodes one nibble of an 8-bit line hash.
export const HASHLINE_ALPHABET = "ZPMQVRWSNKTXJBYH"
// Line ids are always exactly two characters.
const HASHLINE_ID_LENGTH = 2
// Matches a bare two-character hash id.
const HASHLINE_ID_REGEX = new RegExp(`^[${HASHLINE_ALPHABET}]{${HASHLINE_ID_LENGTH}}$`)
// Matches a "LINE#ID" reference; the lookahead allows end-of-input, whitespace, or ':'.
const HASHLINE_REF_REGEX = new RegExp(`(\\d+)#([${HASHLINE_ALPHABET}]{${HASHLINE_ID_LENGTH}})(?=$|\\s|:)`)
// Edit text may be a single string (split on newlines) or an explicit line array.
type TextValue = string | string[]
export const HashlineText = z.union([z.string(), z.array(z.string())])
// Discriminated union of all hashline edit operations. Line anchors are
// "LINE#ID" strings taken from Read output; text accepts a string or a line array.
export const HashlineEdit = z.discriminatedUnion("type", [
  // Replace exactly one anchored line.
  z
    .object({
      type: z.literal("set_line"),
      line: z.string(),
      text: HashlineText,
    })
    .strict(),
  // Replace an inclusive anchored line range.
  z
    .object({
      type: z.literal("replace_lines"),
      start_line: z.string(),
      end_line: z.string(),
      text: HashlineText,
    })
    .strict(),
  // Insert after an anchored line.
  z
    .object({
      type: z.literal("insert_after"),
      line: z.string(),
      text: HashlineText,
    })
    .strict(),
  // Insert before an anchored line.
  z
    .object({
      type: z.literal("insert_before"),
      line: z.string(),
      text: HashlineText,
    })
    .strict(),
  // Insert between two anchored lines.
  z
    .object({
      type: z.literal("insert_between"),
      after_line: z.string(),
      before_line: z.string(),
      text: HashlineText,
    })
    .strict(),
  // Append at end of file.
  z
    .object({
      type: z.literal("append"),
      text: HashlineText,
    })
    .strict(),
  // Prepend at start of file.
  z
    .object({
      type: z.literal("prepend"),
      text: HashlineText,
    })
    .strict(),
  // Plain substring replacement (no anchors); `all` permits multiple matches.
  z
    .object({
      type: z.literal("replace"),
      old_text: z.string(),
      new_text: HashlineText,
      all: z.boolean().optional(),
    })
    .strict(),
])
export type HashlineEdit = z.infer<typeof HashlineEdit>
/**
 * Compute the two-character id for a line: strip a trailing CR and all
 * whitespace, hash the remainder with xxHash32, and keep the low byte —
 * one alphabet character per nibble. The line number parameter is accepted
 * but deliberately unused, so the id depends on content only.
 */
export function hashlineID(lineNumber: number, line: string): string {
  void lineNumber
  const noCR = line.endsWith("\r") ? line.slice(0, -1) : line
  const canonical = noCR.replace(/\s+/g, "")
  const byte = Bun.hash.xxHash32(canonical) & 0xff
  return HASHLINE_ALPHABET[(byte >>> 4) & 0x0f] + HASHLINE_ALPHABET[byte & 0x0f]
}
/** Format the "LINE#ID" anchor for a 1-based line number and its content. */
export function hashlineRef(lineNumber: number, line: string): string {
  return `${lineNumber}#${hashlineID(lineNumber, line)}`
}
/** Format a full Read-style row: "LINE#ID:<content>". */
export function hashlineLine(lineNumber: number, line: string): string {
  return `${hashlineRef(lineNumber, line)}:${line}`
}
/**
 * Extract the first LINE#ID reference from `input`. Throws (with `label` in
 * the message) when no reference is present, the line number is not a
 * positive integer, or the id is not two alphabet characters.
 */
export function parseHashlineRef(input: string, label: string) {
  const found = HASHLINE_REF_REGEX.exec(input)
  if (!found) {
    throw new Error(`${label} must contain a LINE#ID reference`)
  }
  const lineNumber = Number.parseInt(found[1], 10)
  if (!Number.isInteger(lineNumber) || lineNumber < 1) {
    throw new Error(`${label} has invalid line number: ${found[1]}`)
  }
  const id = found[2]
  if (!HASHLINE_ID_REGEX.test(id)) {
    throw new Error(`${label} has invalid hash id: ${id}`)
  }
  return {
    raw: `${lineNumber}#${id}`,
    line: lineNumber,
    id,
  }
}
/** Normalize edit text to a line array; strings are split on LF or CRLF. */
function toLines(text: TextValue) {
  return Array.isArray(text) ? text : text.split(/\r?\n/)
}
// Autocorrect heuristics for text the model copied straight out of Read
// output or a unified diff. When at least half of the non-empty lines carry a
// "N#ID:" prefix (optionally preceded by ">>"/">>>" markers) or a single
// leading "+"/"-" marker, strip that prefix from every line. Hash prefixes
// take priority over diff markers when both are common.
const HASHLINE_PREFIX_RE = /^\s*(?:>>>|>>)?\s*\d+#[ZPMQVRWSNKTXJBYH]{2}:/
const DIFF_PLUS_RE = /^[+-](?![+-])/
function stripNewLinePrefixes(lines: string[]) {
  const nonEmpty = lines.filter((line) => line.length > 0)
  if (nonEmpty.length === 0) return lines
  const hashCount = nonEmpty.filter((line) => HASHLINE_PREFIX_RE.test(line)).length
  const plusCount = nonEmpty.filter((line) => DIFF_PLUS_RE.test(line)).length
  const stripHash = hashCount > 0 && hashCount * 2 >= nonEmpty.length
  const stripPlus = !stripHash && plusCount > 0 && plusCount * 2 >= nonEmpty.length
  if (stripHash) return lines.map((line) => line.replace(HASHLINE_PREFIX_RE, ""))
  if (stripPlus) return lines.map((line) => line.replace(DIFF_PLUS_RE, ""))
  return lines
}
/** True when a and b are equal exactly or after removing all whitespace. */
function equalsIgnoringWhitespace(a: string, b: string) {
  if (a === b) return true
  const squash = (s: string) => s.replace(/\s+/g, "")
  return squash(a) === squash(b)
}
/** Return the run of whitespace at the start of line ("" when none). */
function leadingWhitespace(line: string) {
  const found = /^\s*/.exec(line)
  return found ? found[0] : ""
}
/**
 * If `line` is non-empty and carries no leading indent of its own while the
 * template line does, prepend the template's indent. Repairs indentation the
 * model dropped when rewriting a line.
 */
function restoreLeadingIndent(template: string, line: string) {
  if (line.length === 0) return line
  const templateIndent = /^\s*/.exec(template)?.[0] ?? ""
  if (templateIndent.length === 0) return line
  const ownIndent = /^\s*/.exec(line)?.[0] ?? ""
  return ownIndent.length > 0 ? line : templateIndent + line
}
/**
 * For a same-length line replacement, restore each old line's leading indent
 * onto the corresponding new line (only where the new line lost it). Returns
 * the input array unchanged when nothing needed restoring.
 */
function restoreIndentForPairedReplacement(oldLines: string[], newLines: string[]) {
  if (oldLines.length !== newLines.length) return newLines
  const repaired = newLines.map((line, idx) => restoreLeadingIndent(oldLines[idx], line))
  const touched = repaired.some((line, idx) => line !== newLines[idx])
  return touched ? repaired : newLines
}
/** Remove every whitespace character (used to build canonical line keys). */
function stripAllWhitespace(s: string) {
  return s.split(/\s+/).join("")
}
// Undo model re-wrapping: when a span of 2-10 consecutive new lines is,
// ignoring whitespace, identical to exactly one original line (which itself
// occurs once in the old text), collapse that span back to the original line.
// Canonical keys shorter than 6 chars are ignored to avoid false positives.
function restoreOldWrappedLines(oldLines: string[], newLines: string[]) {
  if (oldLines.length === 0 || newLines.length < 2) return newLines
  // Canonical (whitespace-stripped) old-line text -> original line + occurrence count.
  const canonToOld = new Map<string, { line: string; count: number }>()
  for (const line of oldLines) {
    const canon = stripAllWhitespace(line)
    const bucket = canonToOld.get(canon)
    if (bucket) bucket.count++
    if (!bucket) canonToOld.set(canon, { line, count: 1 })
  }
  // Collect new-line spans whose canonical join matches a unique old line.
  const candidates: Array<{ start: number; len: number; replacement: string; canon: string }> = []
  for (let start = 0; start < newLines.length; start++) {
    for (let len = 2; len <= 10 && start + len <= newLines.length; len++) {
      const canonSpan = stripAllWhitespace(newLines.slice(start, start + len).join(""))
      const old = canonToOld.get(canonSpan)
      if (old && old.count === 1 && canonSpan.length >= 6) {
        candidates.push({
          start,
          len,
          replacement: old.line,
          canon: canonSpan,
        })
      }
    }
  }
  if (candidates.length === 0) return newLines
  // Keep only canonical keys matched by exactly one candidate span; ambiguous
  // keys (several spans canonicalizing identically) are left untouched.
  const canonCounts = new Map<string, number>()
  for (const candidate of candidates) {
    canonCounts.set(candidate.canon, (canonCounts.get(candidate.canon) ?? 0) + 1)
  }
  const unique = candidates.filter((candidate) => (canonCounts.get(candidate.canon) ?? 0) === 1)
  if (unique.length === 0) return newLines
  // Splice from the end of the array so earlier indices remain valid.
  unique.sort((a, b) => b.start - a.start)
  const out = [...newLines]
  for (const candidate of unique) {
    out.splice(candidate.start, candidate.len, candidate.replacement)
  }
  return out
}
/**
 * Drop a duplicated anchor line from the head of inserted text — the model
 * sometimes echoes the insert_after anchor as the first new line.
 */
function stripInsertAnchorEchoAfter(anchorLine: string, lines: string[]) {
  if (lines.length <= 1) return lines
  const echoed = equalsIgnoringWhitespace(lines[0], anchorLine)
  return echoed ? lines.slice(1) : lines
}
/**
 * Drop a duplicated anchor line from the tail of inserted text — the
 * insert_before anchor echoed as the last new line.
 */
function stripInsertAnchorEchoBefore(anchorLine: string, lines: string[]) {
  if (lines.length <= 1) return lines
  const lastIdx = lines.length - 1
  return equalsIgnoringWhitespace(lines[lastIdx], anchorLine) ? lines.slice(0, lastIdx) : lines
}
/**
 * For insert_between: drop echoes of both boundary anchors when present —
 * the after-line at the head and the before-line at the tail.
 */
function stripInsertBoundaryEcho(afterLine: string, beforeLine: string, lines: string[]) {
  let trimmed = lines
  if (trimmed.length > 1 && equalsIgnoringWhitespace(trimmed[0], afterLine)) trimmed = trimmed.slice(1)
  if (trimmed.length > 1 && equalsIgnoringWhitespace(trimmed[trimmed.length - 1], beforeLine)) {
    trimmed = trimmed.slice(0, -1)
  }
  return trimmed
}
// For replace_lines: when the replacement text also echoes the untouched
// context lines just outside the inclusive 1-based [startLine, endLine]
// range, trim those echoes. Only applies when the replacement is strictly
// longer than the range it replaces.
function stripRangeBoundaryEcho(fileLines: string[], startLine: number, endLine: number, lines: string[]) {
  const count = endLine - startLine + 1
  if (lines.length <= 1 || lines.length <= count) return lines
  let out = lines
  // Line just before the range (0-based index startLine - 2).
  const beforeIdx = startLine - 2
  if (beforeIdx >= 0 && equalsIgnoringWhitespace(out[0], fileLines[beforeIdx])) {
    out = out.slice(1)
  }
  // Line just after the range (0-based index endLine).
  const afterIdx = endLine
  if (
    afterIdx < fileLines.length &&
    out.length > 0 &&
    equalsIgnoringWhitespace(out[out.length - 1], fileLines[afterIdx])
  ) {
    out = out.slice(0, -1)
  }
  return out
}
/** Throw when the edit text is empty (an empty string, or an array joining to ""). */
function ensureText(text: TextValue, label: string) {
  const flattened = Array.isArray(text) ? text.join("") : text
  if (flattened.length === 0) throw new Error(`${label} must be non-empty`)
}
/**
 * Substring replacement for the "replace" hashline op. With all=false the
 * match must be unique — ambiguity raises so the caller can supply more
 * context. new_text may be a line array (joined with LF).
 */
function applyReplace(content: string, oldText: string, newText: TextValue, all = false) {
  if (oldText.length === 0) throw new Error("replace.old_text must be non-empty")
  const replacement = (Array.isArray(newText) ? newText : newText.split(/\r?\n/)).join("\n")
  const first = content.indexOf(oldText)
  if (first < 0) throw new Error(`replace.old_text not found: ${JSON.stringify(oldText)}`)
  if (all) return content.replaceAll(oldText, replacement)
  if (content.lastIndexOf(oldText) !== first) {
    throw new Error("replace.old_text matched multiple times. Set all=true or provide a more specific old_text.")
  }
  return content.slice(0, first) + replacement + content.slice(first + oldText.length)
}
// Render a small numbered context window (previous/target/next line) around a
// mismatched anchor, with ">>>" marking the target row. Lines are shown in
// full Read-style "LINE#ID:<content>" form so the model can re-anchor.
function mismatchContext(lines: string[], line: number) {
  if (lines.length === 0) return ">>> (file is empty)"
  const start = Math.max(1, line - 1)
  const end = Math.min(lines.length, line + 1)
  return Array.from({ length: end - start + 1 }, (_, idx) => start + idx)
    .map((num) => {
      const marker = num === line ? ">>>" : " "
      return `${marker} ${hashlineLine(num, lines[num - 1])}`
    })
    .join("\n")
}
// Throw a rich "file changed since last read" error: deduplicate the
// mismatched anchors, then for each one show the expected LINE#ID, the
// current context window (or an out-of-range notice), and the up-to-date
// anchor to retry with.
function throwMismatch(lines: string[], mismatches: Array<{ expected: string; line: number }>) {
  const seen = new Set<string>()
  const unique = mismatches.filter((m) => {
    const key = `${m.expected}:${m.line}`
    if (seen.has(key)) return false
    seen.add(key)
    return true
  })
  const body = unique
    .map((m) => {
      if (m.line < 1 || m.line > lines.length) {
        return [
          `>>> expected ${m.expected}`,
          `>>> current line ${m.line} is out of range (1-${Math.max(lines.length, 1)})`,
        ].join("\n")
      }
      const current = hashlineRef(m.line, lines[m.line - 1])
      return [`>>> expected ${m.expected}`, mismatchContext(lines, m.line), `>>> retry with ${current}`].join("\n")
    })
    .join("\n\n")
  throw new Error(
    [
      "Hashline edit rejected: file changed since last read. Re-read the file and retry with updated LINE#ID anchors.",
      body,
    ].join("\n\n"),
  )
}
/**
 * Verify every LINE#ID anchor still matches the current file lines; throws a
 * detailed mismatch error (via throwMismatch) listing stale references.
 */
function validateAnchors(lines: string[], refs: Array<{ raw: string; line: number; id: string }>) {
  const stale: Array<{ expected: string; line: number }> = []
  for (const ref of refs) {
    // Short-circuit: only hash in-range lines.
    const outOfRange = ref.line < 1 || ref.line > lines.length
    if (outOfRange || hashlineID(ref.line, lines[ref.line - 1]) !== ref.id) {
      stale.push({ expected: ref.raw, line: ref.line })
    }
  }
  if (stale.length > 0) throwMismatch(lines, stale)
}
/**
 * Split text into lines (LF or CRLF separators), recording whether a trailing
 * newline was present; the trailing empty segment is dropped.
 */
function splitLines(text: string) {
  if (text === "") {
    return { lines: [] as string[], trailing: false }
  }
  const trailing = text.endsWith("\n")
  const parts = text.split(/\r?\n/)
  if (trailing) parts.pop()
  return { lines: parts, trailing }
}
/**
 * Decode file bytes into hashline-editable form: detects a UTF-8 BOM, the
 * EOL style (CRLF when any CRLF is present, else LF), the line array, and the
 * trailing-newline flag. `raw` keeps the BOM; `text` has it stripped.
 */
export function parseHashlineContent(bytes: Buffer) {
  const raw = bytes.toString("utf8")
  const bom = raw.startsWith("\uFEFF")
  const text = bom ? raw.slice(1) : raw
  const eol = text.includes("\r\n") ? "\r\n" : "\n"
  const split = splitLines(text)
  return {
    bom,
    eol,
    trailing: split.trailing,
    lines: split.lines,
    text,
    raw,
  }
}
/**
 * Inverse of parseHashlineContent: join lines with the recorded EOL, re-append
 * the trailing newline (only for non-empty files), and restore the BOM.
 */
export function serializeHashlineContent(input: { lines: string[]; bom: boolean; eol: string; trailing: boolean }) {
  const body = input.lines.join(input.eol)
  const withTrailing = input.trailing && input.lines.length > 0 ? body + input.eol : body
  const text = input.bom ? `\uFEFF${withTrailing}` : withTrailing
  return {
    text,
    bytes: Buffer.from(text, "utf8"),
  }
}
// Normalized line-splice operation derived from a hashline edit.
// `start`/`del` are 0-based splice coordinates into the line array; `order`
// is the edit's position in the request. `sortLine`/`precedence` presumably
// drive deterministic application ordering — the consuming code continues
// past this view, so confirm against applyHashlineEdits. The optional *Line
// fields keep the original 1-based anchors for boundary-echo cleanup.
type Splice = {
  start: number
  del: number
  text: string[]
  order: number
  kind: "set_line" | "replace_lines" | "insert_after" | "insert_before" | "insert_between" | "append" | "prepend"
  sortLine: number
  precedence: number
  startLine?: number
  endLine?: number
  anchorLine?: number
  beforeLine?: number
  afterLine?: number
}
/**
 * Applies a batch of hashline edits to an in-memory line buffer.
 *
 * Line-addressed edits (set_line, replace_lines, insert_*) are converted to
 * Splice ops, their LINE#ID anchors are validated in one pass, and the ops
 * are applied bottom-up so an earlier splice never shifts the index of a
 * later one. Text-addressed `replace` edits run afterwards over the joined
 * content.
 *
 * @param input.lines current file lines (not mutated)
 * @param input.trailing whether the file ends with a newline
 * @param input.edits edits to apply, in request order
 * @param input.autocorrect explicit override; defaults to the
 *        OPENCODE_HL_AUTOCORRECT=1 environment flag
 * @returns the new lines plus the possibly-updated trailing-newline flag
 * @throws on stale anchors (via validateAnchors), invalid ranges, empty
 *         insert text (via ensureText), or malformed refs (parseHashlineRef)
 */
export function applyHashlineEdits(input: {
  lines: string[]
  trailing: boolean
  edits: HashlineEdit[]
  autocorrect?: boolean
}) {
  const lines = [...input.lines]
  // Snapshot kept untouched so autocorrect helpers compare against pre-edit text.
  const originalLines = [...input.lines]
  let trailing = input.trailing
  const refs: Array<{ raw: string; line: number; id: string }> = []
  // Text-based `replace` edits are deferred until all splices are done.
  const replaceOps: Array<Extract<HashlineEdit, { type: "replace" }>> = []
  const ops: Splice[] = []
  // `??` binds looser than `===`, so the env comparison is the fallback value.
  const autocorrect = input.autocorrect ?? Bun.env.OPENCODE_HL_AUTOCORRECT === "1"
  const parseText = (text: TextValue) => {
    const next = toLines(text)
    if (!autocorrect) return next
    // Drop "LINE#ID:"-style prefixes the model may have echoed back.
    return stripNewLinePrefixes(next)
  }
  input.edits.forEach((edit, order) => {
    if (edit.type === "replace") {
      replaceOps.push(edit)
      return
    }
    // append: splice at the current end; highest sortLine so it applies first.
    if (edit.type === "append") {
      ensureText(edit.text, "append.text")
      ops.push({
        start: lines.length,
        del: 0,
        text: parseText(edit.text),
        order,
        kind: "append",
        sortLine: lines.length + 1,
        precedence: 1,
      })
      return
    }
    // prepend: splice at index 0; sortLine 0 so it applies last.
    if (edit.type === "prepend") {
      ensureText(edit.text, "prepend.text")
      ops.push({
        start: 0,
        del: 0,
        text: parseText(edit.text),
        order,
        kind: "prepend",
        sortLine: 0,
        precedence: 2,
      })
      return
    }
    // set_line: replace exactly one anchored line.
    if (edit.type === "set_line") {
      const line = parseHashlineRef(edit.line, "set_line.line")
      refs.push(line)
      ops.push({
        start: line.line - 1,
        del: 1,
        text: parseText(edit.text),
        order,
        kind: "set_line",
        sortLine: line.line,
        precedence: 0,
        startLine: line.line,
        endLine: line.line,
      })
      return
    }
    // replace_lines: replace an inclusive anchored range; sorted by its end.
    if (edit.type === "replace_lines") {
      const start = parseHashlineRef(edit.start_line, "replace_lines.start_line")
      const end = parseHashlineRef(edit.end_line, "replace_lines.end_line")
      refs.push(start)
      refs.push(end)
      if (start.line > end.line) {
        throw new Error("replace_lines.start_line must be less than or equal to replace_lines.end_line")
      }
      ops.push({
        start: start.line - 1,
        del: end.line - start.line + 1,
        text: parseText(edit.text),
        order,
        kind: "replace_lines",
        sortLine: end.line,
        precedence: 0,
        startLine: start.line,
        endLine: end.line,
      })
      return
    }
    // insert_after: insert below the anchor; runs after a set_line on the
    // same line (precedence 1 vs 0).
    if (edit.type === "insert_after") {
      const line = parseHashlineRef(edit.line, "insert_after.line")
      ensureText(edit.text, "insert_after.text")
      refs.push(line)
      ops.push({
        start: line.line,
        del: 0,
        text: parseText(edit.text),
        order,
        kind: "insert_after",
        sortLine: line.line,
        precedence: 1,
        anchorLine: line.line,
      })
      return
    }
    // insert_before: insert above the anchor.
    if (edit.type === "insert_before") {
      const line = parseHashlineRef(edit.line, "insert_before.line")
      ensureText(edit.text, "insert_before.text")
      refs.push(line)
      ops.push({
        start: line.line - 1,
        del: 0,
        text: parseText(edit.text),
        order,
        kind: "insert_before",
        sortLine: line.line,
        precedence: 2,
        anchorLine: line.line,
      })
      return
    }
    // Remaining variant: insert_between two anchored (non-adjacent-safe) lines.
    const after = parseHashlineRef(edit.after_line, "insert_between.after_line")
    const before = parseHashlineRef(edit.before_line, "insert_between.before_line")
    ensureText(edit.text, "insert_between.text")
    refs.push(after)
    refs.push(before)
    if (after.line >= before.line) {
      throw new Error("insert_between.after_line must be less than insert_between.before_line")
    }
    ops.push({
      start: after.line,
      del: 0,
      text: parseText(edit.text),
      order,
      kind: "insert_between",
      sortLine: before.line,
      precedence: 3,
      afterLine: after.line,
      beforeLine: before.line,
    })
  })
  validateAnchors(lines, refs)
  // Apply bottom-up (descending sortLine) so splices higher in the file never
  // shift lower targets; precedence then request order break ties on a line.
  const sorted = [...ops].sort((a, b) => {
    if (a.sortLine !== b.sortLine) return b.sortLine - a.sortLine
    if (a.precedence !== b.precedence) return a.precedence - b.precedence
    return a.order - b.order
  })
  sorted.forEach((op) => {
    if (op.start < 0 || op.start > lines.length) {
      throw new Error(`line index ${op.start + 1} is out of range`)
    }
    let text = op.text
    if (autocorrect) {
      // NOTE(review): these helpers are defined elsewhere; presumably they
      // remove anchor/boundary lines the model re-echoed and repair wrapped
      // or re-indented replacements — confirm against their definitions.
      if (op.kind === "set_line" || op.kind === "replace_lines") {
        const start = op.startLine ?? op.start + 1
        const end = op.endLine ?? start + op.del - 1
        const old = originalLines.slice(start - 1, end)
        text = stripRangeBoundaryEcho(originalLines, start, end, text)
        text = restoreOldWrappedLines(old, text)
        text = restoreIndentForPairedReplacement(old, text)
      }
      // append never sets anchorLine, so this branch only fires for insert_after.
      if ((op.kind === "insert_after" || op.kind === "append") && op.anchorLine) {
        text = stripInsertAnchorEchoAfter(originalLines[op.anchorLine - 1], text)
      }
      if ((op.kind === "insert_before" || op.kind === "prepend") && op.anchorLine) {
        text = stripInsertAnchorEchoBefore(originalLines[op.anchorLine - 1], text)
      }
      if (op.kind === "insert_between" && op.afterLine && op.beforeLine) {
        text = stripInsertBoundaryEcho(originalLines[op.afterLine - 1], originalLines[op.beforeLine - 1], text)
      }
    }
    lines.splice(op.start, op.del, ...text)
  })
  if (replaceOps.length > 0) {
    // Replace edits operate on the joined text (LF only) after all splices.
    const content = `${lines.join("\n")}${trailing && lines.length > 0 ? "\n" : ""}`
    const replaced = replaceOps.reduce(
      (acc, op) =>
        // NOTE(review): third argument is string[] when autocorrect is on but
        // the raw TextValue otherwise — presumably applyReplace normalizes
        // both shapes; verify against its definition.
        applyReplace(acc, op.old_text, autocorrect ? stripNewLinePrefixes(toLines(op.new_text)) : op.new_text, op.all),
      content,
    )
    const split = splitLines(replaced)
    lines.splice(0, lines.length, ...split.lines)
    trailing = split.trailing
  }
  return {
    lines,
    trailing,
  }
}
/**
 * True when every edit only adds content (append/prepend), meaning the
 * target file may be created from scratch instead of read first.
 * An empty edit list vacuously qualifies.
 */
export function hashlineOnlyCreates(edits: HashlineEdit[]) {
  for (const edit of edits) {
    if (edit.type !== "append" && edit.type !== "prepend") return false
  }
  return true
}

View File

@ -11,6 +11,8 @@ import { Instance } from "../project/instance"
import { assertExternalDirectory } from "./external-directory"
import { InstructionPrompt } from "../session/instruction"
import { Filesystem } from "../util/filesystem"
import { Config } from "../config/config"
import { hashlineRef } from "./hashline"
const DEFAULT_READ_LIMIT = 2000
const MAX_LINE_LENGTH = 2000
@ -156,6 +158,7 @@ export const ReadTool = Tool.define("read", {
const offset = params.offset ?? 1
const start = offset - 1
const raw: string[] = []
const full: string[] = []
let bytes = 0
let lines = 0
let truncatedByBytes = false
@ -179,6 +182,7 @@ export const ReadTool = Tool.define("read", {
}
raw.push(line)
full.push(text)
bytes += size
}
} finally {
@ -190,8 +194,11 @@ export const ReadTool = Tool.define("read", {
throw new Error(`Offset ${offset} is out of range for this file (${lines} lines)`)
}
const useHashline = (await Config.get()).experimental?.hashline_edit === true
const content = raw.map((line, index) => {
return `${index + offset}: ${line}`
const lineNumber = index + offset
if (useHashline) return `${hashlineRef(lineNumber, full[index])}:${line}`
return `${lineNumber}: ${line}`
})
const preview = raw.slice(0, 20).join("\n")

View File

@ -7,7 +7,10 @@ Usage:
- To read later sections, call this tool again with a larger offset.
- Use the grep tool to find specific content in large files or files with long lines.
- If you are unsure of the correct file path, use the glob tool to look up filenames by glob pattern.
- Contents are returned with each line prefixed by its line number as `<line>: <content>`. For example, if a file has contents "foo\n", you will receive "1: foo\n". For directories, entries are returned one per line (without line numbers) with a trailing `/` for subdirectories.
- Contents are returned with a line prefix.
- Default format: `<line>: <content>` (example: `1: foo`).
- When `experimental.hashline_edit` is enabled: `LINE#ID:<content>` (example: `1#AB:foo`). Use these anchors for hashline edits.
- For directories, entries are returned one per line (without line numbers) with a trailing `/` for subdirectories.
- Any line longer than 2000 characters is truncated.
- Call this tool in parallel when you know there are multiple files you want to read.
- Avoid tiny repeated slices (30 line chunks). If you need more context, read a larger window.

View File

@ -133,6 +133,7 @@ export namespace ToolRegistry {
},
agent?: Agent.Info,
) {
const config = await Config.get()
const tools = await all()
const result = await Promise.all(
tools
@ -142,6 +143,11 @@ export namespace ToolRegistry {
return model.providerID === "opencode" || Flag.OPENCODE_ENABLE_EXA
}
if (config.experimental?.hashline_edit === true) {
if (t.id === "apply_patch") return false
return true
}
// use apply tool in same format as codex
const usePatch =
model.modelID.includes("gpt-") && !model.modelID.includes("oss") && !model.modelID.includes("gpt-4")

View File

@ -56,6 +56,28 @@ test("loads JSON config file", async () => {
})
})
// Both experimental hashline flags survive config parsing unchanged.
test("parses experimental.hashline_edit and experimental.hashline_autocorrect", async () => {
  await using tmp = await tmpdir({
    init: async (dir) => {
      await writeConfig(dir, {
        $schema: "https://opencode.ai/config.json",
        experimental: {
          hashline_edit: true,
          hashline_autocorrect: true,
        },
      })
    },
  })
  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      const config = await Config.get()
      expect(config.experimental?.hashline_edit).toBe(true)
      expect(config.experimental?.hashline_autocorrect).toBe(true)
    },
  })
})
test("loads JSONC config file", async () => {
await using tmp = await tmpdir({
init: async (dir) => {

View File

@ -5,6 +5,7 @@ import { EditTool } from "../../src/tool/edit"
import { Instance } from "../../src/project/instance"
import { tmpdir } from "../fixture/fixture"
import { FileTime } from "../../src/file/time"
import { hashlineLine, hashlineRef } from "../../src/tool/hashline"
const ctx = {
sessionID: "test-edit-session",
@ -493,4 +494,281 @@ describe("tool.edit", () => {
})
})
})
// End-to-end coverage of the edit tool's hashline payload: anchored line
// edits, autocorrect, file creation, delete/rename, and the disabled path.
describe("hashline payload", () => {
  // set_line replaces exactly the anchored line and reports hashline mode.
  test("replaces a single line in hashline mode", async () => {
    await using tmp = await tmpdir({
      config: {
        experimental: {
          hashline_edit: true,
        },
      },
      init: async (dir) => {
        await fs.writeFile(path.join(dir, "file.txt"), "a\nb\nc", "utf-8")
      },
    })
    const filepath = path.join(tmp.path, "file.txt")
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        FileTime.read(ctx.sessionID, filepath)
        const edit = await EditTool.init()
        const result = await edit.execute(
          {
            filePath: filepath,
            edits: [
              {
                type: "set_line",
                line: hashlineRef(2, "b"),
                text: "B",
              },
            ],
          },
          ctx,
        )
        const content = await fs.readFile(filepath, "utf-8")
        expect(content).toBe("a\nB\nc")
        expect(result.metadata.edit_mode).toBe("hashline")
      },
    })
  })
  // With autocorrect on, an echoed "LINE#ID:" prefix in the text is stripped.
  test("applies hashline autocorrect prefixes through config", async () => {
    await using tmp = await tmpdir({
      config: {
        experimental: {
          hashline_edit: true,
          hashline_autocorrect: true,
        },
      },
      init: async (dir) => {
        await fs.writeFile(path.join(dir, "file.txt"), "a\nb\nc", "utf-8")
      },
    })
    const filepath = path.join(tmp.path, "file.txt")
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        FileTime.read(ctx.sessionID, filepath)
        const edit = await EditTool.init()
        await edit.execute(
          {
            filePath: filepath,
            edits: [
              {
                type: "set_line",
                line: hashlineRef(2, "b"),
                text: hashlineLine(2, "B"),
              },
            ],
          },
          ctx,
        )
        const content = await fs.readFile(filepath, "utf-8")
        expect(content).toBe("a\nB\nc")
      },
    })
  })
  // Batched ranged replacement plus inserts around the replaced range.
  test("supports range replacement and insert modes", async () => {
    await using tmp = await tmpdir({
      config: {
        experimental: {
          hashline_edit: true,
        },
      },
      init: async (dir) => {
        await fs.writeFile(path.join(dir, "file.txt"), "a\nb\nc\nd", "utf-8")
      },
    })
    const filepath = path.join(tmp.path, "file.txt")
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        FileTime.read(ctx.sessionID, filepath)
        const edit = await EditTool.init()
        await edit.execute(
          {
            filePath: filepath,
            edits: [
              {
                type: "replace_lines",
                start_line: hashlineRef(2, "b"),
                end_line: hashlineRef(3, "c"),
                text: ["B", "C"],
              },
              {
                type: "insert_before",
                line: hashlineRef(2, "b"),
                text: "x",
              },
              {
                type: "insert_after",
                line: hashlineRef(3, "c"),
                text: "y",
              },
            ],
          },
          ctx,
        )
        const content = await fs.readFile(filepath, "utf-8")
        expect(content).toBe("a\nx\nB\nC\ny\nd")
      },
    })
  })
  // append/prepend alone are create-only, so a missing target is allowed.
  test("creates missing files from append/prepend operations", async () => {
    await using tmp = await tmpdir({
      config: {
        experimental: {
          hashline_edit: true,
        },
      },
    })
    const filepath = path.join(tmp.path, "created.txt")
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        const edit = await EditTool.init()
        await edit.execute(
          {
            filePath: filepath,
            edits: [
              {
                type: "prepend",
                text: "start",
              },
              {
                type: "append",
                text: "end",
              },
            ],
          },
          ctx,
        )
        const content = await fs.readFile(filepath, "utf-8")
        expect(content).toBe("start\nend")
      },
    })
  })
  // Any edit type other than append/prepend requires an existing file.
  test("rejects missing files for non-append/prepend edits", async () => {
    await using tmp = await tmpdir({
      config: {
        experimental: {
          hashline_edit: true,
        },
      },
    })
    const filepath = path.join(tmp.path, "missing.txt")
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        const edit = await EditTool.init()
        await expect(
          edit.execute(
            {
              filePath: filepath,
              edits: [
                {
                  type: "replace",
                  old_text: "a",
                  new_text: "b",
                },
              ],
            },
            ctx,
          ),
        ).rejects.toThrow("Missing file can only be created")
      },
    })
  })
  // rename applies edits then moves the file; delete removes it outright.
  test("supports delete and rename flows", async () => {
    await using tmp = await tmpdir({
      config: {
        experimental: {
          hashline_edit: true,
        },
      },
      init: async (dir) => {
        await fs.writeFile(path.join(dir, "src.txt"), "a\nb", "utf-8")
        await fs.writeFile(path.join(dir, "delete.txt"), "delete me", "utf-8")
      },
    })
    const source = path.join(tmp.path, "src.txt")
    const target = path.join(tmp.path, "renamed.txt")
    const doomed = path.join(tmp.path, "delete.txt")
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        const edit = await EditTool.init()
        FileTime.read(ctx.sessionID, source)
        await edit.execute(
          {
            filePath: source,
            rename: target,
            edits: [
              {
                type: "set_line",
                line: hashlineRef(2, "b"),
                text: "B",
              },
            ],
          },
          ctx,
        )
        expect(await fs.readFile(target, "utf-8")).toBe("a\nB")
        await expect(fs.stat(source)).rejects.toThrow()
        FileTime.read(ctx.sessionID, doomed)
        await edit.execute(
          {
            filePath: doomed,
            delete: true,
            edits: [],
          },
          ctx,
        )
        await expect(fs.stat(doomed)).rejects.toThrow()
      },
    })
  })
  // Without the experimental flag the hashline payload shape is refused.
  test("rejects hashline payload when experimental mode is disabled", async () => {
    await using tmp = await tmpdir({
      init: async (dir) => {
        await fs.writeFile(path.join(dir, "file.txt"), "a", "utf-8")
      },
    })
    const filepath = path.join(tmp.path, "file.txt")
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        const edit = await EditTool.init()
        await expect(
          edit.execute(
            {
              filePath: filepath,
              edits: [
                {
                  type: "append",
                  text: "b",
                },
              ],
            },
            ctx,
          ),
        ).rejects.toThrow("Hashline edit payload is disabled")
      },
    })
  })
})
})

View File

@ -0,0 +1,184 @@
import { describe, expect, test } from "bun:test"
import { applyHashlineEdits, hashlineID, hashlineLine, hashlineRef, parseHashlineRef } from "../../src/tool/hashline"
// Corrupts a LINE#ID ref by flipping its first id character (Z <-> P style),
// producing a well-formed ref that cannot match the original hash.
function swapID(ref: string) {
  const parts = ref.split("#")
  const line = parts[0]
  const id = parts[1]
  const flipped = (id.startsWith("Z") ? "P" : "Z") + id[1]
  return [line, flipped].join("#")
}
// Unit tests for the hashline primitives: hashing, ref parsing, batched
// splice application, and validation errors.
describe("tool.hashline", () => {
  // Hash ignores whitespace and the line number; output is two characters
  // from the 16-letter anchor alphabet.
  test("hash computation is stable and 2-char alphabet encoded", () => {
    const a = hashlineID(1, " const x = 1")
    const b = hashlineID(1, "constx=1")
    const c = hashlineID(99, "constx=1")
    expect(a).toBe(b)
    expect(a).toBe(c)
    expect(a).toMatch(/^[ZPMQVRWSNKTXJBYH]{2}$/)
  })
  // The env flag enables autocorrect when no explicit option is passed;
  // restored in finally so other tests are unaffected.
  test("autocorrect strips copied hashline prefixes when enabled", () => {
    const old = Bun.env.OPENCODE_HL_AUTOCORRECT
    Bun.env.OPENCODE_HL_AUTOCORRECT = "1"
    try {
      const result = applyHashlineEdits({
        lines: ["a"],
        trailing: false,
        edits: [
          {
            type: "set_line",
            line: hashlineRef(1, "a"),
            text: hashlineLine(1, "a"),
          },
        ],
      })
      expect(result.lines).toEqual(["a"])
    } finally {
      if (old === undefined) delete Bun.env.OPENCODE_HL_AUTOCORRECT
      else Bun.env.OPENCODE_HL_AUTOCORRECT = old
    }
  })
  // Leading junk before the ref is tolerated; lowercase ids are rejected.
  test("parses strict LINE#ID references with tolerant extraction", () => {
    const ref = parseHashlineRef(">>> 12#ZP:const value = 1", "line")
    expect(ref.line).toBe(12)
    expect(ref.id).toBe("ZP")
    expect(ref.raw).toBe("12#ZP")
    expect(() => parseHashlineRef("12#ab", "line")).toThrow("LINE#ID")
  })
  // A stale anchor produces an aggregated error that includes retry hints.
  test("aggregates mismatch errors with >>> context and retry refs", () => {
    const lines = ["alpha", "beta", "gamma"]
    const wrong = swapID(hashlineRef(2, lines[1]))
    expect(() =>
      applyHashlineEdits({
        lines,
        trailing: false,
        edits: [
          {
            type: "set_line",
            line: wrong,
            text: "BETA",
          },
        ],
      }),
    ).toThrow("changed since last read")
    expect(() =>
      applyHashlineEdits({
        lines,
        trailing: false,
        edits: [
          {
            type: "set_line",
            line: wrong,
            text: "BETA",
          },
        ],
      }),
    ).toThrow(">>> retry with")
  })
  // Anchors refer to the original file; bottom-up application keeps every
  // edit's indices valid despite the earlier splices.
  test("applies batched line edits bottom-up for stable results", () => {
    const lines = ["a", "b", "c", "d"]
    const one = hashlineRef(1, lines[0])
    const two = hashlineRef(2, lines[1])
    const three = hashlineRef(3, lines[2])
    const four = hashlineRef(4, lines[3])
    const result = applyHashlineEdits({
      lines,
      trailing: false,
      edits: [
        {
          type: "replace_lines",
          start_line: two,
          end_line: three,
          text: ["B", "C"],
        },
        {
          type: "insert_after",
          line: one,
          text: "A1",
        },
        {
          type: "set_line",
          line: four,
          text: "D",
        },
      ],
    })
    expect(result.lines).toEqual(["a", "A1", "B", "C", "D"])
  })
  // prepend always lands before append even when listed second.
  test("orders append and prepend deterministically on empty files", () => {
    const result = applyHashlineEdits({
      lines: [],
      trailing: false,
      edits: [
        {
          type: "append",
          text: "end",
        },
        {
          type: "prepend",
          text: "start",
        },
      ],
    })
    expect(result.lines).toEqual(["start", "end"])
  })
  // Inverted ranges, inverted between-bounds, and empty insert text all throw.
  test("validates ranges, between constraints, and non-empty insert text", () => {
    const lines = ["a", "b", "c"]
    const one = hashlineRef(1, lines[0])
    const two = hashlineRef(2, lines[1])
    expect(() =>
      applyHashlineEdits({
        lines,
        trailing: false,
        edits: [
          {
            type: "replace_lines",
            start_line: two,
            end_line: one,
            text: "x",
          },
        ],
      }),
    ).toThrow("start_line")
    expect(() =>
      applyHashlineEdits({
        lines,
        trailing: false,
        edits: [
          {
            type: "insert_between",
            after_line: two,
            before_line: one,
            text: "x",
          },
        ],
      }),
    ).toThrow("insert_between.after_line")
    expect(() =>
      applyHashlineEdits({
        lines,
        trailing: false,
        edits: [
          {
            type: "append",
            text: "",
          },
        ],
      }),
    ).toThrow("append.text")
  })
})

View File

@ -6,6 +6,7 @@ import { Filesystem } from "../../src/util/filesystem"
import { tmpdir } from "../fixture/fixture"
import { PermissionNext } from "../../src/permission/next"
import { Agent } from "../../src/agent/agent"
import { hashlineLine } from "../../src/tool/hashline"
const FIXTURES_DIR = path.join(import.meta.dir, "fixtures")
@ -443,6 +444,50 @@ root_type Monster;`
})
})
// Read tool output switches between "N: text" and "N#ID:text" prefixes
// based on the experimental.hashline_edit config flag.
describe("tool.read hashline output", () => {
  // Hashline mode replaces the legacy prefix entirely.
  test("returns LINE#ID prefixes when hashline mode is enabled", async () => {
    await using tmp = await tmpdir({
      config: {
        experimental: {
          hashline_edit: true,
        },
      },
      init: async (dir) => {
        await Bun.write(path.join(dir, "hashline.txt"), "foo\nbar")
      },
    })
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        const read = await ReadTool.init()
        const result = await read.execute({ filePath: path.join(tmp.path, "hashline.txt") }, ctx)
        expect(result.output).toContain(hashlineLine(1, "foo"))
        expect(result.output).toContain(hashlineLine(2, "bar"))
        expect(result.output).not.toContain("1: foo")
      },
    })
  })
  // Default (flag off) keeps the historical "N: text" format.
  test("keeps legacy line prefixes when hashline mode is disabled", async () => {
    await using tmp = await tmpdir({
      init: async (dir) => {
        await Bun.write(path.join(dir, "legacy.txt"), "foo\nbar")
      },
    })
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        const read = await ReadTool.init()
        const result = await read.execute({ filePath: path.join(tmp.path, "legacy.txt") }, ctx)
        expect(result.output).toContain("1: foo")
        expect(result.output).toContain("2: bar")
      },
    })
  })
})
describe("tool.read loaded instructions", () => {
test("loads AGENTS.md from parent directory and includes in metadata", async () => {
await using tmp = await tmpdir({

View File

@ -0,0 +1,64 @@
import { describe, expect, test } from "bun:test"
import { tmpdir } from "../fixture/fixture"
import { Instance } from "../../src/project/instance"
import { ToolRegistry } from "../../src/tool/registry"
// Registry routing: the experimental hashline flag forces the edit tool for
// every model and removes apply_patch; with the flag off, GPT models keep
// their apply_patch routing.
describe("tool.registry hashline routing", () => {
  // Applies to both GPT and non-GPT models when the flag is on.
  test.each([
    { providerID: "openai", modelID: "gpt-5" },
    { providerID: "anthropic", modelID: "claude-3-7-sonnet" },
  ])("disables apply_patch and enables edit when experimental hashline is on (%o)", async (model) => {
    await using tmp = await tmpdir({
      config: {
        experimental: {
          hashline_edit: true,
        },
      },
    })
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        const tools = await ToolRegistry.tools(model)
        const ids = tools.map((tool) => tool.id)
        expect(ids).toContain("edit")
        expect(ids).toContain("write")
        expect(ids).not.toContain("apply_patch")
      },
    })
  })
  // GPT models default to apply_patch (codex-style) when the flag is off.
  test("keeps existing GPT apply_patch routing when experimental hashline is off", async () => {
    await using tmp = await tmpdir()
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        const tools = await ToolRegistry.tools({
          providerID: "openai",
          modelID: "gpt-5",
        })
        const ids = tools.map((tool) => tool.id)
        expect(ids).toContain("apply_patch")
        expect(ids).not.toContain("edit")
      },
    })
  })
  // Non-GPT models already used the edit tool; flag off changes nothing.
  test("keeps existing non-GPT routing when experimental hashline is off", async () => {
    await using tmp = await tmpdir()
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        const tools = await ToolRegistry.tools({
          providerID: "anthropic",
          modelID: "claude-3-7-sonnet",
        })
        const ids = tools.map((tool) => tool.id)
        expect(ids).toContain("edit")
        expect(ids).not.toContain("apply_patch")
      },
    })
  })
})