diff --git a/packages/opencode/migration/0000_watery_shinobi_shaw.sql b/packages/opencode/migration/0000_normal_wind_dancer.sql
similarity index 98%
rename from packages/opencode/migration/0000_watery_shinobi_shaw.sql
rename to packages/opencode/migration/0000_normal_wind_dancer.sql
index a251a81e84..da429dbac1 100644
--- a/packages/opencode/migration/0000_watery_shinobi_shaw.sql
+++ b/packages/opencode/migration/0000_normal_wind_dancer.sql
@@ -14,6 +14,7 @@ CREATE TABLE `project` (
 CREATE TABLE `message` (
 	`id` text PRIMARY KEY NOT NULL,
 	`session_id` text NOT NULL,
+	`role` text NOT NULL,
 	`data` text NOT NULL,
 	FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
 );
@@ -22,6 +23,7 @@ CREATE INDEX `message_session_idx` ON `message` (`session_id`);--> statement-bre
 CREATE TABLE `part` (
 	`id` text PRIMARY KEY NOT NULL,
 	`message_id` text NOT NULL,
+	`type` text NOT NULL,
 	`data` text NOT NULL,
 	FOREIGN KEY (`message_id`) REFERENCES `message`(`id`) ON UPDATE no action ON DELETE cascade
 );
diff --git a/packages/opencode/migration/meta/0000_snapshot.json b/packages/opencode/migration/meta/0000_snapshot.json
index ee8461c187..2bc7d196b9 100644
--- a/packages/opencode/migration/meta/0000_snapshot.json
+++ b/packages/opencode/migration/meta/0000_snapshot.json
@@ -1,7 +1,7 @@
 {
   "version": "6",
   "dialect": "sqlite",
-  "id": "9970ec30-1179-4dd7-a0ab-6d0cf0f42219",
+  "id": "f7bf061b-aa6c-4b68-a29f-c210c54f109d",
   "prevId": "00000000-0000-0000-0000-000000000000",
   "tables": {
     "project": {
@@ -101,6 +101,13 @@
           "notNull": true,
           "autoincrement": false
         },
+        "role": {
+          "name": "role",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": true,
+          "autoincrement": false
+        },
         "data": {
           "name": "data",
           "type": "text",
@@ -154,6 +161,13 @@
           "notNull": true,
           "autoincrement": false
         },
+        "type": {
+          "name": "type",
+          "type": "text",
+          "primaryKey": false,
+          "notNull": true,
+          "autoincrement": false
+        },
         "data": {
           "name": "data",
           "type": "text",
diff --git a/packages/opencode/migration/meta/_journal.json b/packages/opencode/migration/meta/_journal.json
index 0d30f42477..7697a6da34 100644
--- a/packages/opencode/migration/meta/_journal.json
+++ b/packages/opencode/migration/meta/_journal.json
@@ -5,8 +5,8 @@
     {
       "idx": 0,
       "version": "6",
-      "when": 1768609466939,
-      "tag": "0000_watery_shinobi_shaw",
+      "when": 1768625754197,
+      "tag": "0000_normal_wind_dancer",
       "breakpoints": true
     }
   ]
diff --git a/packages/opencode/src/cli/cmd/import.ts b/packages/opencode/src/cli/cmd/import.ts
index 1f13bb689d..4e64dec500 100644
--- a/packages/opencode/src/cli/cmd/import.ts
+++ b/packages/opencode/src/cli/cmd/import.ts
@@ -111,30 +111,32 @@ export const ImportCommand = cmd({
       )
 
       for (const msg of exportData.messages) {
-        const { id: msgId, sessionID: msgSessionID, ...msgData } = msg.info
+        const { id: msgId, sessionID: msgSessionID, role: msgRole, ...msgData } = msg.info
         Database.use((db) =>
           db
             .insert(MessageTable)
             .values({
               id: msgId,
               sessionID: exportData.info.id,
+              role: msgRole,
               data: msgData,
             })
-            .onConflictDoUpdate({ target: MessageTable.id, set: { data: msgData } })
+            .onConflictDoUpdate({ target: MessageTable.id, set: { role: msgRole, data: msgData } })
             .run(),
         )
 
         for (const part of msg.parts) {
-          const { id: partId, messageID: _, sessionID: __, ...partData } = part
+          const { id: partId, messageID: _, sessionID: __, type: partType, ...partData } = part
           Database.use((db) =>
             db
              .insert(PartTable)
              .values({
                 id: partId,
                 messageID: msg.info.id,
+                type: partType,
                 data: partData,
               })
-              .onConflictDoUpdate({ target: PartTable.id, set: { data: partData } })
+              .onConflictDoUpdate({ target: PartTable.id, set: { type: partType, data: partData } })
              .run(),
           )
         }
diff --git a/packages/opencode/src/session/compaction.ts b/packages/opencode/src/session/compaction.ts
index fb38253029..b485ed8e61 100644
--- a/packages/opencode/src/session/compaction.ts
+++ b/packages/opencode/src/session/compaction.ts
@@ -2,7 +2,6 @@ import { BusEvent } from "@/bus/bus-event"
 import { Bus } from "@/bus"
 import { Session } from "."
 import { Identifier } from "../id/id"
-import { Instance } from "../project/instance"
 import { Provider } from "../provider/provider"
 import { MessageV2 } from "./message-v2"
 import z from "zod"
@@ -109,10 +108,6 @@ export namespace SessionCompaction {
       mode: "compaction",
       agent: "compaction",
       summary: true,
-      path: {
-        cwd: Instance.directory,
-        root: Instance.worktree,
-      },
       cost: 0,
       tokens: {
         output: 0,
diff --git a/packages/opencode/src/session/message-v2.ts b/packages/opencode/src/session/message-v2.ts
index 828d8db43b..4f9cac1b7d 100644
--- a/packages/opencode/src/session/message-v2.ts
+++ b/packages/opencode/src/session/message-v2.ts
@@ -372,10 +372,6 @@ export namespace MessageV2 {
        */
       mode: z.string(),
       agent: z.string(),
-      path: z.object({
-        cwd: z.string(),
-        root: z.string(),
-      }),
       summary: z.boolean().optional(),
       cost: z.number(),
       tokens: z.object({
@@ -633,7 +629,7 @@ export namespace MessageV2 {
           part: PartTable,
         })
         .from(MessageTable)
-        .leftJoin(PartTable, eq(PartTable.messageID, MessageTable.id))
+        .leftJoin(PartTable, eq(PartTable.message_id, MessageTable.id))
         .where(
           inArray(
             MessageTable.id,
@@ -650,13 +646,19 @@ export namespace MessageV2 {
       const first = group[0]
       if (!first) continue
       yield {
-        info: { ...first.message.data, id: first.message.id, sessionID: first.message.sessionID } as Info,
+        info: {
+          ...first.message.data,
+          role: first.message.role,
+          id: first.message.id,
+          sessionID: first.message.sessionID,
+        } as Info,
         parts: group
           .filter((row) => row.part)
           .map((row) => ({
             ...row.part!.data,
+            type: row.part!.type,
             id: row.part!.id,
-            messageID: row.part!.messageID,
+            messageID: row.part!.message_id,
             sessionID: first.message.sessionID,
           })) as Part[],
       }
@@ -672,17 +674,19 @@ export namespace MessageV2 {
       db
         .select({
           id: PartTable.id,
-          messageID: PartTable.messageID,
+          messageID: PartTable.message_id,
           sessionID: MessageTable.sessionID,
+          type: PartTable.type,
           data: PartTable.data,
         })
         .from(PartTable)
-        .innerJoin(MessageTable, eq(PartTable.messageID, MessageTable.id))
-        .where(eq(PartTable.messageID, messageID))
+        .innerJoin(MessageTable, eq(PartTable.message_id, MessageTable.id))
+        .where(eq(PartTable.message_id, messageID))
         .all(),
     )
     const result = rows.map((row) => ({
       ...row.data,
+      type: row.type,
       id: row.id,
       messageID: row.messageID,
       sessionID: row.sessionID,
@@ -704,7 +708,7 @@ export namespace MessageV2 {
           part: PartTable,
         })
         .from(MessageTable)
-        .leftJoin(PartTable, eq(PartTable.messageID, MessageTable.id))
+        .leftJoin(PartTable, eq(PartTable.message_id, MessageTable.id))
         .where(eq(MessageTable.id, input.messageID))
         .orderBy(PartTable.id)
         .all(),
@@ -712,13 +716,19 @@ export namespace MessageV2 {
     const first = rows[0]
     if (!first) throw new Error(`Message not found: ${input.messageID}`)
     return {
-      info: { ...first.message.data, id: first.message.id, sessionID: first.message.sessionID } as Info,
+      info: {
+        ...first.message.data,
+        role: first.message.role,
+        id: first.message.id,
+        sessionID: first.message.sessionID,
+      } as Info,
       parts: rows
         .filter((row) => row.part)
         .map((row) => ({
           ...row.part!.data,
+          type: row.part!.type,
           id: row.part!.id,
-          messageID: row.part!.messageID,
+          messageID: row.part!.message_id,
           sessionID: first.message.sessionID,
         })) as Part[],
     }
diff --git a/packages/opencode/src/session/prompt.ts b/packages/opencode/src/session/prompt.ts
index a8085b5831..721b375ea2 100644
--- a/packages/opencode/src/session/prompt.ts
+++ b/packages/opencode/src/session/prompt.ts
@@ -328,10 +328,6 @@ export namespace SessionPrompt {
           sessionID,
           mode: task.agent,
           agent: task.agent,
-          path: {
-            cwd: Instance.directory,
-            root: Instance.worktree,
-          },
           cost: 0,
           tokens: {
             input: 0,
@@ -530,10 +526,6 @@ export namespace SessionPrompt {
       role: "assistant",
       mode: agent.name,
       agent: agent.name,
-      path: {
-        cwd: Instance.directory,
-        root: Instance.worktree,
-      },
       cost: 0,
       tokens: {
         input: 0,
@@ -1387,10 +1379,6 @@ NOTE: At any point in time through this workflow you should feel free to ask the
       mode: input.agent,
       agent: input.agent,
       cost: 0,
-      path: {
-        cwd: Instance.directory,
-        root: Instance.worktree,
-      },
       time: {
         created: Date.now(),
       },
diff --git a/packages/opencode/src/storage/json-migration.ts b/packages/opencode/src/storage/json-migration.ts
index a08bfcc084..7db25c37c3 100644
--- a/packages/opencode/src/storage/json-migration.ts
+++ b/packages/opencode/src/storage/json-migration.ts
@@ -1,6 +1,5 @@
 import { Database } from "bun:sqlite"
 import { drizzle } from "drizzle-orm/bun-sqlite"
-import { eq } from "drizzle-orm"
 import { Global } from "../global"
 import { Log } from "../util/log"
 import { ProjectTable } from "../project/project.sql"
@@ -41,17 +40,29 @@ export function migrateFromJson(sqlite: Database, customStorageDir?: string) {
     errors: [] as string[],
   }
 
-  // Migrate projects first (no FK deps)
-  const projectGlob = new Bun.Glob("project/*.json")
-  for (const file of projectGlob.scanSync({ cwd: storageDir, absolute: true })) {
-    try {
-      const data = JSON.parse(fs.readFileSync(file, "utf-8"))
-      if (!data.id) {
-        stats.errors.push(`project missing id: ${file}`)
-        continue
-      }
-      db.insert(ProjectTable)
-        .values({
+  // Run entire migration in a single transaction for performance
+  sqlite.run("BEGIN TRANSACTION")
+
+  try {
+    // Track existing IDs to avoid repeated DB lookups
+    const projectIDs = new Set()
+    const sessionIDs = new Set()
+    const messageIDs = new Set()
+
+    // Migrate projects first (no FK deps)
+    const projectGlob = new Bun.Glob("project/*.json")
+    const projectFiles = Array.from(projectGlob.scanSync({ cwd: storageDir, absolute: true }))
+    const projectValues: (typeof ProjectTable.$inferInsert)[] = []
+
+    for (const file of projectFiles) {
+      try {
+        const data = JSON.parse(fs.readFileSync(file, "utf-8"))
+        if (!data.id) {
+          stats.errors.push(`project missing id: ${file}`)
+          continue
+        }
+        projectIDs.add(data.id)
+        projectValues.push({
           id: data.id,
           worktree: data.worktree ?? "/",
           vcs: data.vcs,
@@ -63,32 +74,35 @@ export function migrateFromJson(sqlite: Database, customStorageDir?: string) {
           time_initialized: data.time?.initialized,
           sandboxes: data.sandboxes ?? [],
         })
-        .onConflictDoNothing()
-        .run()
-      stats.projects++
-    } catch (e) {
-      stats.errors.push(`failed to migrate project ${file}: ${e}`)
+      } catch (e) {
+        stats.errors.push(`failed to migrate project ${file}: ${e}`)
+      }
     }
-  }
-  log.info("migrated projects", { count: stats.projects })
 
-  // Migrate sessions (depends on projects)
-  const sessionGlob = new Bun.Glob("session/*/*.json")
-  for (const file of sessionGlob.scanSync({ cwd: storageDir, absolute: true })) {
-    try {
-      const data = JSON.parse(fs.readFileSync(file, "utf-8"))
-      if (!data.id || !data.projectID) {
-        stats.errors.push(`session missing id or projectID: ${file}`)
-        continue
-      }
-      // Check if project exists (skip orphaned sessions)
-      const project = db.select().from(ProjectTable).where(eq(ProjectTable.id, data.projectID)).get()
-      if (!project) {
-        log.warn("skipping orphaned session", { sessionID: data.id, projectID: data.projectID })
-        continue
-      }
-      db.insert(SessionTable)
-        .values({
+    if (projectValues.length > 0) {
+      db.insert(ProjectTable).values(projectValues).onConflictDoNothing().run()
+      stats.projects = projectValues.length
+    }
+    log.info("migrated projects", { count: stats.projects })
+
+    // Migrate sessions (depends on projects)
+    const sessionGlob = new Bun.Glob("session/*/*.json")
+    const sessionFiles = Array.from(sessionGlob.scanSync({ cwd: storageDir, absolute: true }))
+    const sessionValues: (typeof SessionTable.$inferInsert)[] = []
+
+    for (const file of sessionFiles) {
+      try {
+        const data = JSON.parse(fs.readFileSync(file, "utf-8"))
+        if (!data.id || !data.projectID) {
+          stats.errors.push(`session missing id or projectID: ${file}`)
+          continue
+        }
+        if (!projectIDs.has(data.projectID)) {
+          log.warn("skipping orphaned session", { sessionID: data.id, projectID: data.projectID })
+          continue
+        }
+        sessionIDs.add(data.id)
+        sessionValues.push({
           id: data.id,
           projectID: data.projectID,
           parentID: data.parentID ?? null,
@@ -111,167 +125,195 @@ export function migrateFromJson(sqlite: Database, customStorageDir?: string) {
           time_compacting: data.time?.compacting ?? null,
           time_archived: data.time?.archived ?? null,
         })
-        .onConflictDoNothing()
-        .run()
-      stats.sessions++
-    } catch (e) {
-      stats.errors.push(`failed to migrate session ${file}: ${e}`)
+      } catch (e) {
+        stats.errors.push(`failed to migrate session ${file}: ${e}`)
+      }
     }
-  }
-  log.info("migrated sessions", { count: stats.sessions })
 
-  // Migrate messages (depends on sessions)
-  const messageGlob = new Bun.Glob("message/*/*.json")
-  for (const file of messageGlob.scanSync({ cwd: storageDir, absolute: true })) {
-    try {
-      const data = JSON.parse(fs.readFileSync(file, "utf-8"))
-      if (!data.id || !data.sessionID) {
-        stats.errors.push(`message missing id or sessionID: ${file}`)
-        continue
-      }
-      // Check if session exists
-      const session = db.select().from(SessionTable).where(eq(SessionTable.id, data.sessionID)).get()
-      if (!session) {
-        log.warn("skipping orphaned message", { messageID: data.id, sessionID: data.sessionID })
-        continue
-      }
-      const { id, sessionID, ...rest } = data
-      db.insert(MessageTable)
-        .values({
-          id,
-          sessionID,
-          data: rest,
-        })
-        .onConflictDoNothing()
-        .run()
-      stats.messages++
-    } catch (e) {
-      stats.errors.push(`failed to migrate message ${file}: ${e}`)
-    }
-  }
-  log.info("migrated messages", { count: stats.messages })
+    if (sessionValues.length > 0) {
+      db.insert(SessionTable).values(sessionValues).onConflictDoNothing().run()
+      stats.sessions = sessionValues.length
+    }
+    log.info("migrated sessions", { count: stats.sessions })
 
-  // Migrate parts (depends on messages)
-  const partGlob = new Bun.Glob("part/*/*.json")
-  for (const file of partGlob.scanSync({ cwd: storageDir, absolute: true })) {
-    try {
-      const data = JSON.parse(fs.readFileSync(file, "utf-8"))
-      if (!data.id || !data.messageID) {
-        stats.errors.push(`part missing id or messageID: ${file}`)
-        continue
-      }
-      // Check if message exists
-      const message = db.select().from(MessageTable).where(eq(MessageTable.id, data.messageID)).get()
-      if (!message) {
-        log.warn("skipping orphaned part", { partID: data.id, messageID: data.messageID })
-        continue
-      }
-      const { id, messageID, sessionID: _, ...rest } = data
-      db.insert(PartTable)
-        .values({
-          id,
-          messageID,
-          data: rest,
-        })
-        .onConflictDoNothing()
-        .run()
-      stats.parts++
-    } catch (e) {
-      stats.errors.push(`failed to migrate part ${file}: ${e}`)
-    }
-  }
-  log.info("migrated parts", { count: stats.parts })
+    // Migrate messages (depends on sessions)
+    const messageGlob = new Bun.Glob("message/*/*.json")
+    const messageFiles = Array.from(messageGlob.scanSync({ cwd: storageDir, absolute: true }))
+    const messageValues: (typeof MessageTable.$inferInsert)[] = []
 
-  // Migrate session diffs (use raw SQL since TypeScript schema doesn't match migration)
-  const diffGlob = new Bun.Glob("session_diff/*.json")
-  for (const file of diffGlob.scanSync({ cwd: storageDir, absolute: true })) {
-    try {
-      const data = JSON.parse(fs.readFileSync(file, "utf-8"))
-      const sessionID = path.basename(file, ".json")
-      // Check if session exists
-      const session = db.select().from(SessionTable).where(eq(SessionTable.id, sessionID)).get()
-      if (!session) {
-        log.warn("skipping orphaned session_diff", { sessionID })
-        continue
-      }
-      sqlite.run("INSERT OR IGNORE INTO session_diff (session_id, data) VALUES (?, ?)", [
-        sessionID,
-        JSON.stringify(data),
-      ])
-      stats.diffs++
-    } catch (e) {
-      stats.errors.push(`failed to migrate session_diff ${file}: ${e}`)
-    }
-  }
-  log.info("migrated session diffs", { count: stats.diffs })
+    for (const file of messageFiles) {
+      try {
+        const data = JSON.parse(fs.readFileSync(file, "utf-8"))
+        if (!data.id || !data.sessionID) {
+          stats.errors.push(`message missing id or sessionID: ${file}`)
+          continue
+        }
+        if (!sessionIDs.has(data.sessionID)) {
+          log.warn("skipping orphaned message", { messageID: data.id, sessionID: data.sessionID })
+          continue
+        }
+        messageIDs.add(data.id)
+        const { id, sessionID, role, ...rest } = data
+        messageValues.push({ id, sessionID, role, data: rest })
+      } catch (e) {
+        stats.errors.push(`failed to migrate message ${file}: ${e}`)
+      }
+    }
 
-  // Migrate todos
-  const todoGlob = new Bun.Glob("todo/*.json")
-  for (const file of todoGlob.scanSync({ cwd: storageDir, absolute: true })) {
-    try {
-      const data = JSON.parse(fs.readFileSync(file, "utf-8"))
-      const sessionID = path.basename(file, ".json")
-      const session = db.select().from(SessionTable).where(eq(SessionTable.id, sessionID)).get()
-      if (!session) {
-        log.warn("skipping orphaned todo", { sessionID })
-        continue
-      }
-      db.insert(TodoTable).values({ sessionID, data }).onConflictDoNothing().run()
-      stats.todos++
-    } catch (e) {
-      stats.errors.push(`failed to migrate todo ${file}: ${e}`)
-    }
-  }
-  log.info("migrated todos", { count: stats.todos })
+    if (messageValues.length > 0) {
+      db.insert(MessageTable).values(messageValues).onConflictDoNothing().run()
+      stats.messages = messageValues.length
+    }
+    log.info("migrated messages", { count: stats.messages })
+
+    // Migrate parts (depends on messages)
+    const partGlob = new Bun.Glob("part/*/*.json")
+    const partFiles = Array.from(partGlob.scanSync({ cwd: storageDir, absolute: true }))
+    const partValues: (typeof PartTable.$inferInsert)[] = []
+
+    for (const file of partFiles) {
+      try {
+        const data = JSON.parse(fs.readFileSync(file, "utf-8"))
+        if (!data.id || !data.messageID) {
+          stats.errors.push(`part missing id or messageID: ${file}`)
+          continue
+        }
+        if (!messageIDs.has(data.messageID)) {
+          log.warn("skipping orphaned part", { partID: data.id, messageID: data.messageID })
+          continue
+        }
+        const { id, messageID, sessionID: _, type, ...rest } = data
+        partValues.push({ id, message_id: messageID, type, data: rest })
+      } catch (e) {
+        stats.errors.push(`failed to migrate part ${file}: ${e}`)
+      }
+    }
+
+    if (partValues.length > 0) {
+      db.insert(PartTable).values(partValues).onConflictDoNothing().run()
+      stats.parts = partValues.length
+    }
+    log.info("migrated parts", { count: stats.parts })
+
+    // Migrate session diffs (use prepared statement for batch insert)
+    const diffGlob = new Bun.Glob("session_diff/*.json")
+    const diffFiles = Array.from(diffGlob.scanSync({ cwd: storageDir, absolute: true }))
+    const diffStmt = sqlite.prepare("INSERT OR IGNORE INTO session_diff (session_id, data) VALUES (?, ?)")
+
+    for (const file of diffFiles) {
+      try {
+        const data = JSON.parse(fs.readFileSync(file, "utf-8"))
+        const sessionID = path.basename(file, ".json")
+        if (!sessionIDs.has(sessionID)) {
+          log.warn("skipping orphaned session_diff", { sessionID })
+          continue
+        }
+        diffStmt.run(sessionID, JSON.stringify(data))
+        stats.diffs++
+      } catch (e) {
+        stats.errors.push(`failed to migrate session_diff ${file}: ${e}`)
+      }
+    }
+    log.info("migrated session diffs", { count: stats.diffs })
+
+    // Migrate todos
+    const todoGlob = new Bun.Glob("todo/*.json")
+    const todoFiles = Array.from(todoGlob.scanSync({ cwd: storageDir, absolute: true }))
+    const todoValues: (typeof TodoTable.$inferInsert)[] = []
+
+    for (const file of todoFiles) {
+      try {
+        const data = JSON.parse(fs.readFileSync(file, "utf-8"))
+        const sessionID = path.basename(file, ".json")
+        if (!sessionIDs.has(sessionID)) {
+          log.warn("skipping orphaned todo", { sessionID })
+          continue
+        }
+        todoValues.push({ sessionID, data })
+      } catch (e) {
+        stats.errors.push(`failed to migrate todo ${file}: ${e}`)
+      }
+    }
+
+    if (todoValues.length > 0) {
+      db.insert(TodoTable).values(todoValues).onConflictDoNothing().run()
+      stats.todos = todoValues.length
+    }
+    log.info("migrated todos", { count: stats.todos })
 
-  // Migrate permissions
-  const permGlob = new Bun.Glob("permission/*.json")
-  for (const file of permGlob.scanSync({ cwd: storageDir, absolute: true })) {
-    try {
-      const data = JSON.parse(fs.readFileSync(file, "utf-8"))
-      const projectID = path.basename(file, ".json")
-      const project = db.select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()
-      if (!project) {
-        log.warn("skipping orphaned permission", { projectID })
-        continue
-      }
-      db.insert(PermissionTable).values({ projectID, data }).onConflictDoNothing().run()
-      stats.permissions++
-    } catch (e) {
-      stats.errors.push(`failed to migrate permission ${file}: ${e}`)
-    }
-  }
-  log.info("migrated permissions", { count: stats.permissions })
+    // Migrate permissions
+    const permGlob = new Bun.Glob("permission/*.json")
+    const permFiles = Array.from(permGlob.scanSync({ cwd: storageDir, absolute: true }))
+    const permValues: (typeof PermissionTable.$inferInsert)[] = []
+
+    for (const file of permFiles) {
+      try {
+        const data = JSON.parse(fs.readFileSync(file, "utf-8"))
+        const projectID = path.basename(file, ".json")
+        if (!projectIDs.has(projectID)) {
+          log.warn("skipping orphaned permission", { projectID })
+          continue
+        }
+        permValues.push({ projectID, data })
+      } catch (e) {
+        stats.errors.push(`failed to migrate permission ${file}: ${e}`)
+      }
+    }
+
+    if (permValues.length > 0) {
+      db.insert(PermissionTable).values(permValues).onConflictDoNothing().run()
+      stats.permissions = permValues.length
+    }
+    log.info("migrated permissions", { count: stats.permissions })
 
-  // Migrate session shares
-  const shareGlob = new Bun.Glob("session_share/*.json")
-  for (const file of shareGlob.scanSync({ cwd: storageDir, absolute: true })) {
-    try {
-      const data = JSON.parse(fs.readFileSync(file, "utf-8"))
-      const sessionID = path.basename(file, ".json")
-      const session = db.select().from(SessionTable).where(eq(SessionTable.id, sessionID)).get()
-      if (!session) {
-        log.warn("skipping orphaned session_share", { sessionID })
-        continue
-      }
-      db.insert(SessionShareTable).values({ sessionID, data }).onConflictDoNothing().run()
-      stats.shares++
-    } catch (e) {
-      stats.errors.push(`failed to migrate session_share ${file}: ${e}`)
-    }
-  }
-  log.info("migrated session shares", { count: stats.shares })
+    // Migrate session shares
+    const shareGlob = new Bun.Glob("session_share/*.json")
+    const shareFiles = Array.from(shareGlob.scanSync({ cwd: storageDir, absolute: true }))
+    const shareValues: (typeof SessionShareTable.$inferInsert)[] = []
+
+    for (const file of shareFiles) {
+      try {
+        const data = JSON.parse(fs.readFileSync(file, "utf-8"))
+        const sessionID = path.basename(file, ".json")
+        if (!sessionIDs.has(sessionID)) {
+          log.warn("skipping orphaned session_share", { sessionID })
+          continue
+        }
+        shareValues.push({ sessionID, data })
+      } catch (e) {
+        stats.errors.push(`failed to migrate session_share ${file}: ${e}`)
+      }
+    }
+
+    if (shareValues.length > 0) {
+      db.insert(SessionShareTable).values(shareValues).onConflictDoNothing().run()
+      stats.shares = shareValues.length
+    }
+    log.info("migrated session shares", { count: stats.shares })
 
-  // Migrate shares (downloaded shared sessions, no FK)
-  const share2Glob = new Bun.Glob("share/*.json")
-  for (const file of share2Glob.scanSync({ cwd: storageDir, absolute: true })) {
-    try {
-      const data = JSON.parse(fs.readFileSync(file, "utf-8"))
-      const sessionID = path.basename(file, ".json")
-      db.insert(ShareTable).values({ sessionID, data }).onConflictDoNothing().run()
-    } catch (e) {
-      stats.errors.push(`failed to migrate share ${file}: ${e}`)
-    }
+    // Migrate shares (downloaded shared sessions, no FK)
+    const share2Glob = new Bun.Glob("share/*.json")
+    const share2Files = Array.from(share2Glob.scanSync({ cwd: storageDir, absolute: true }))
+    const share2Values: (typeof ShareTable.$inferInsert)[] = []
+
+    for (const file of share2Files) {
+      try {
+        const data = JSON.parse(fs.readFileSync(file, "utf-8"))
+        const sessionID = path.basename(file, ".json")
+        share2Values.push({ sessionID, data })
+      } catch (e) {
+        stats.errors.push(`failed to migrate share ${file}: ${e}`)
+      }
+    }
+
+    if (share2Values.length > 0) {
+      db.insert(ShareTable).values(share2Values).onConflictDoNothing().run()
+    }
+
+    sqlite.run("COMMIT")
+  } catch (e) {
+    sqlite.run("ROLLBACK")
+    throw e
   }
 
   // Mark migration complete
diff --git a/packages/opencode/src/storage/migrations.generated.ts b/packages/opencode/src/storage/migrations.generated.ts
index bb3286feb7..5ac9f44a2a 100644
--- a/packages/opencode/src/storage/migrations.generated.ts
+++ b/packages/opencode/src/storage/migrations.generated.ts
@@ -1,6 +1,6 @@
 // Auto-generated - do not edit
-import m0 from "../../migration/0000_watery_shinobi_shaw.sql" with { type: "text" }
+import m0 from "../../migration/0000_normal_wind_dancer.sql" with { type: "text" }
 
 export const migrations = [
-  { name: "0000_watery_shinobi_shaw", sql: m0 },
+  { name: "0000_normal_wind_dancer", sql: m0 },
 ]
diff --git a/packages/opencode/test/session/revert-compact.test.ts b/packages/opencode/test/session/revert-compact.test.ts
index de2b14573f..fc39fb42c1 100644
--- a/packages/opencode/test/session/revert-compact.test.ts
+++ b/packages/opencode/test/session/revert-compact.test.ts
@@ -1,8 +1,7 @@
-import { describe, expect, test, beforeEach, afterEach } from "bun:test"
+import { describe, expect, test } from "bun:test"
 import path from "path"
 import { Session } from "../../src/session"
 import { SessionRevert } from "../../src/session/revert"
-import { SessionCompaction } from "../../src/session/compaction"
 import { MessageV2 } from "../../src/session/message-v2"
 import { Log } from "../../src/util/log"
 import { Instance } from "../../src/project/instance"
@@ -53,10 +52,6 @@ describe("revert + compact workflow", () => {
         sessionID,
         mode: "default",
         agent: "default",
-        path: {
-          cwd: tmp.path,
-          root: tmp.path,
-        },
         cost: 0,
         tokens: {
           output: 0,
@@ -113,10 +108,6 @@ describe("revert + compact workflow", () => {
         sessionID,
         mode: "default",
         agent: "default",
-        path: {
-          cwd: tmp.path,
-          root: tmp.path,
-        },
         cost: 0,
         tokens: {
           output: 0,
@@ -227,10 +218,6 @@ describe("revert + compact workflow", () => {
         sessionID,
         mode: "default",
         agent: "default",
-        path: {
-          cwd: tmp.path,
-          root: tmp.path,
-        },
         cost: 0,
         tokens: {
           output: 0,
diff --git a/packages/opencode/test/storage/json-migration.test.ts b/packages/opencode/test/storage/json-migration.test.ts
index f918152cfd..b04c9a8f6f 100644
--- a/packages/opencode/test/storage/json-migration.test.ts
+++ b/packages/opencode/test/storage/json-migration.test.ts
@@ -384,7 +384,7 @@ describe("JSON to SQLite migration", () => {
     const db = drizzle(sqlite)
     const row = db.select().from(PartTable).where(eq(PartTable.id, fixtures.part.id)).get()
     expect(row?.id).toBe(fixtures.part.id)
-    expect(row?.messageID).toBe(fixtures.message.id)
+    expect(row?.message_id).toBe(fixtures.message.id)
   })
 
   test("skips orphaned part (missing message)", async () => {
diff --git a/src/storage/migrations.generated.ts b/src/storage/migrations.generated.ts
new file mode 100644
index 0000000000..560a13af8f
--- /dev/null
+++ b/src/storage/migrations.generated.ts
@@ -0,0 +1,2 @@
+// Auto-generated - do not edit
+export const migrations: { name: string; sql: string }[] = []