fix(opencode): batch snapshot revert without reordering (#20564)

pull/20602/head
Kit Langton 2026-04-01 19:46:06 -04:00 committed by GitHub
parent 802d165572
commit 48db7cf07a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 173 additions and 11 deletions

View File

@ -301,28 +301,113 @@ export namespace Snapshot {
const revert = Effect.fnUntraced(function* (patches: Snapshot.Patch[]) {
  return yield* locked(
    Effect.gen(function* () {
      // Flatten the patches into an ordered op list, keeping only the FIRST
      // occurrence of each file so earlier patches take precedence while the
      // original patch order is preserved.
      const ops: { hash: string; file: string; rel: string }[] = []
      const seen = new Set<string>()
      for (const item of patches) {
        for (const file of item.files) {
          if (seen.has(file)) continue
          seen.add(file)
          ops.push({
            hash: item.hash,
            file,
            // git pathspecs use forward slashes, even on Windows.
            rel: path.relative(state.worktree, file).replaceAll("\\", "/"),
          })
        }
      }
      // Revert one file: try `git checkout <hash> -- <file>`. If that fails,
      // check whether the file existed in the snapshot; if it did, keep the
      // working-tree copy, otherwise delete it (it was created after the snapshot).
      const single = Effect.fnUntraced(function* (op: (typeof ops)[number]) {
        log.info("reverting", { file: op.file, hash: op.hash })
        const result = yield* git([...core, ...args(["checkout", op.hash, "--", op.file])], {
          cwd: state.worktree,
        })
        if (result.code === 0) return
        const tree = yield* git([...core, ...args(["ls-tree", op.hash, "--", op.rel])], {
          cwd: state.worktree,
        })
        if (tree.code === 0 && tree.text.trim()) {
          log.info("file existed in snapshot but checkout failed, keeping", { file: op.file, hash: op.hash })
          return
        }
        log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash })
        yield* remove(op.file)
      })
      // Two paths clash when one equals or is nested under the other — reverting
      // either can affect the other, so they must not share a batch.
      const clash = (a: string, b: string) => a === b || a.startsWith(`${b}/`) || b.startsWith(`${a}/`)
      for (let i = 0; i < ops.length; ) {
        const first = ops[i]!
        const run = [first]
        let j = i + 1
        // Only batch adjacent files when their paths cannot affect each other.
        while (j < ops.length && run.length < 100) {
          const next = ops[j]!
          if (next.hash !== first.hash) break
          if (run.some((item) => clash(item.rel, next.rel))) break
          run.push(next)
          j += 1
        }
        if (run.length === 1) {
          yield* single(first)
          i = j
          continue
        }
        // One ls-tree for the whole batch tells us which files existed in the
        // snapshot (checkout candidates) versus which must be deleted.
        const tree = yield* git(
          [...core, ...args(["ls-tree", "--name-only", first.hash, "--", ...run.map((item) => item.rel)])],
          {
            cwd: state.worktree,
          },
        )
        if (tree.code !== 0) {
          log.info("batched ls-tree failed, falling back to single-file revert", {
            hash: first.hash,
            files: run.length,
          })
          for (const op of run) {
            yield* single(op)
          }
          i = j
          continue
        }
        const have = new Set(
          tree.text
            .trim()
            .split("\n")
            .map((item) => item.trim())
            .filter(Boolean),
        )
        const list = run.filter((item) => have.has(item.rel))
        if (list.length) {
          log.info("reverting", { hash: first.hash, files: list.length })
          const result = yield* git(
            [...core, ...args(["checkout", first.hash, "--", ...list.map((item) => item.file)])],
            {
              cwd: state.worktree,
            },
          )
          if (result.code !== 0) {
            log.info("batched checkout failed, falling back to single-file revert", {
              hash: first.hash,
              files: list.length,
            })
            for (const op of run) {
              yield* single(op)
            }
            i = j
            continue
          }
        }
        // Anything in the batch that was absent from the snapshot gets removed.
        for (const op of run) {
          if (have.has(op.rel)) continue
          log.info("file did not exist in snapshot, deleting", { file: op.file, hash: op.hash })
          yield* remove(op.file)
        }
        i = j
      }
    }),
  )

View File

@ -1233,3 +1233,80 @@ test("revert with overlapping files across patches uses first patch hash", async
},
})
})
test("revert preserves patch order when the same hash appears again", async () => {
  await using tmp = await bootstrap()
  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      const root = tmp.path
      // Snapshot 1: `foo` is a directory holding `foo/bar`.
      await $`mkdir -p ${root}/foo`.quiet()
      await Filesystem.write(`${root}/foo/bar`, "v1")
      await Filesystem.write(`${root}/a.txt`, "v1")
      const first = await Snapshot.track()
      expect(first).toBeTruthy()
      // Snapshot 2: `foo` replaced by a regular file.
      await $`rm -rf ${root}/foo`.quiet()
      await Filesystem.write(`${root}/foo`, "v2")
      await Filesystem.write(`${root}/a.txt`, "v2")
      const second = await Snapshot.track()
      expect(second).toBeTruthy()
      // Working tree before revert: no `foo` at all, `a.txt` at v3.
      await $`rm -rf ${root}/foo`.quiet()
      await Filesystem.write(`${root}/a.txt`, "v3")
      // The same hash appears in patches 1 and 3 with a differing patch in
      // between; revert must honor the given order rather than grouping by hash.
      await Snapshot.revert([
        { hash: first!, files: [fwd(root, "a.txt")] },
        { hash: second!, files: [fwd(root, "foo")] },
        { hash: first!, files: [fwd(root, "foo", "bar")] },
      ])
      expect(await fs.readFile(`${root}/a.txt`, "utf-8")).toBe("v1")
      expect((await fs.stat(`${root}/foo`)).isDirectory()).toBe(true)
      expect(await fs.readFile(`${root}/foo/bar`, "utf-8")).toBe("v1")
    },
  })
})
test("revert handles large mixed batches across chunk boundaries", async () => {
  await using tmp = await bootstrap()
  await Instance.provide({
    directory: tmp.path,
    fn: async () => {
      const count = 140
      // 140 tracked files plus 140 files created after the snapshot — enough
      // to spill past a single batch and exercise chunk boundaries.
      const tracked = [...Array(count).keys()].map((i) => fwd(tmp.path, "batch", `${i}.txt`))
      const created = [...Array(count).keys()].map((i) => fwd(tmp.path, "fresh", `${i}.txt`))
      await $`mkdir -p ${tmp.path}/batch ${tmp.path}/fresh`.quiet()
      await Promise.all(tracked.map((file, i) => Filesystem.write(file, `base-${i}`)))
      const snap = await Snapshot.track()
      expect(snap).toBeTruthy()
      // Mutate every tracked file and add the post-snapshot ones.
      await Promise.all(tracked.map((file, i) => Filesystem.write(file, `next-${i}`)))
      await Promise.all(created.map((file, i) => Filesystem.write(file, `fresh-${i}`)))
      const patch = await Snapshot.patch(snap!)
      expect(patch.files.length).toBe(tracked.length + created.length)
      await Snapshot.revert([patch])
      // Tracked files return to their snapshot contents.
      for (const [i, file] of tracked.entries()) {
        expect(await fs.readFile(file, "utf-8")).toBe(`base-${i}`)
      }
      // Files absent from the snapshot must be removed by the revert.
      for (const file of created) {
        const exists = await fs
          .access(file)
          .then(() => true)
          .catch(() => false)
        expect(exists).toBe(false)
      }
    },
  })
})