pull/8586/head
Dax Raad 2026-01-14 21:07:36 -05:00
parent af5e405391
commit 7bc8851fc4
34 changed files with 1360 additions and 403 deletions

View File

@@ -311,6 +311,7 @@
"clipboardy": "4.0.0",
"decimal.js": "10.5.0",
"diff": "catalog:",
"drizzle-orm": "0.45.1",
"fuzzysort": "3.1.0",
"gray-matter": "4.0.3",
"hono": "catalog:",
@@ -352,6 +353,7 @@
"@types/turndown": "5.0.5",
"@types/yargs": "17.0.33",
"@typescript/native-preview": "catalog:",
"drizzle-kit": "0.31.8",
"typescript": "catalog:",
"vscode-languageserver-types": "3.17.5",
"why-is-node-running": "3.2.2",
@@ -4414,6 +4416,10 @@
"opencode/@ai-sdk/openai-compatible": ["@ai-sdk/openai-compatible@1.0.30", "", { "dependencies": { "@ai-sdk/provider": "2.0.1", "@ai-sdk/provider-utils": "3.0.20" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-thubwhRtv9uicAxSWwNpinM7hiL/0CkhL/ymPaHuKvI494J7HIzn8KQZQ2ymRz284WTIZnI7VMyyejxW4RMM6w=="],
"opencode/drizzle-kit": ["drizzle-kit@0.31.8", "", { "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.4", "esbuild-register": "^3.5.0" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-O9EC/miwdnRDY10qRxM8P3Pg8hXe3LyU4ZipReKOgTwn4OqANmftj8XJz1UPUAS6NMHf0E2htjsbQujUTkncCg=="],
"opencode/drizzle-orm": ["drizzle-orm@0.45.1", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", "@prisma/client": "*", "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/sql.js": "*", "@upstash/redis": ">=1.34.7", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=7", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "knex": "*", "kysely": "*", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@tidbcloud/serverless", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@upstash/redis", "@vercel/postgres", "@xata.io/client", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "knex", "kysely", "mysql2", "pg", "postgres", "sql.js", "sqlite3"] }, "sha512-Te0FOdKIistGNPMq2jscdqngBRfBpC8uMFVwqjf6gtTVJHIQ/dosgV/CLBU2N4ZJBsXL5savCba9b0YJskKdcA=="],
"opencontrol/@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.6.1", "", { "dependencies": { "content-type": "^1.0.5", "cors": "^2.8.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^4.1.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" } }, "sha512-oxzMzYCkZHMntzuyerehK3fV6A2Kwh5BD6CGEJSVDU2QNEhfLOptf2X7esQgaHZXHZY0oHmMsOtIDLP71UJXgA=="],
"opencontrol/@tsconfig/bun": ["@tsconfig/bun@1.0.7", "", {}, "sha512-udGrGJBNQdXGVulehc1aWT73wkR9wdaGBtB6yL70RJsqwW/yJhIg6ZbRlPOfIUiFNrnBuYLBi9CSmMKfDC7dvA=="],
@@ -5020,6 +5026,8 @@
"lazystream/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="],
"opencode/drizzle-kit/esbuild": ["esbuild@0.25.12", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.12", "@esbuild/android-arm": "0.25.12", "@esbuild/android-arm64": "0.25.12", "@esbuild/android-x64": "0.25.12", "@esbuild/darwin-arm64": "0.25.12", "@esbuild/darwin-x64": "0.25.12", "@esbuild/freebsd-arm64": "0.25.12", "@esbuild/freebsd-x64": "0.25.12", "@esbuild/linux-arm": "0.25.12", "@esbuild/linux-arm64": "0.25.12", "@esbuild/linux-ia32": "0.25.12", "@esbuild/linux-loong64": "0.25.12", "@esbuild/linux-mips64el": "0.25.12", "@esbuild/linux-ppc64": "0.25.12", "@esbuild/linux-riscv64": "0.25.12", "@esbuild/linux-s390x": "0.25.12", "@esbuild/linux-x64": "0.25.12", "@esbuild/netbsd-arm64": "0.25.12", "@esbuild/netbsd-x64": "0.25.12", "@esbuild/openbsd-arm64": "0.25.12", "@esbuild/openbsd-x64": "0.25.12", "@esbuild/openharmony-arm64": "0.25.12", "@esbuild/sunos-x64": "0.25.12", "@esbuild/win32-arm64": "0.25.12", "@esbuild/win32-ia32": "0.25.12", "@esbuild/win32-x64": "0.25.12" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg=="],
"opencontrol/@modelcontextprotocol/sdk/express": ["express@5.1.0", "", { "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.0", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA=="],
"opencontrol/@modelcontextprotocol/sdk/pkce-challenge": ["pkce-challenge@4.1.0", "", {}, "sha512-ZBmhE1C9LcPoH9XZSdwiPtbPHZROwAnMy+kIFQVrnMCxY4Cudlz3gBOpzilgc0jOgRaiT3sIWfpMomW2ar2orQ=="],
@@ -5192,6 +5200,56 @@
"js-beautify/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
"opencode/drizzle-kit/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA=="],
"opencode/drizzle-kit/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.25.12", "", { "os": "android", "cpu": "arm" }, "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg=="],
"opencode/drizzle-kit/esbuild/@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.12", "", { "os": "android", "cpu": "arm64" }, "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg=="],
"opencode/drizzle-kit/esbuild/@esbuild/android-x64": ["@esbuild/android-x64@0.25.12", "", { "os": "android", "cpu": "x64" }, "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg=="],
"opencode/drizzle-kit/esbuild/@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.12", "", { "os": "darwin", "cpu": "arm64" }, "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg=="],
"opencode/drizzle-kit/esbuild/@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA=="],
"opencode/drizzle-kit/esbuild/@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.12", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg=="],
"opencode/drizzle-kit/esbuild/@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.12", "", { "os": "freebsd", "cpu": "x64" }, "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.12", "", { "os": "linux", "cpu": "arm" }, "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.12", "", { "os": "linux", "cpu": "ia32" }, "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.12", "", { "os": "linux", "cpu": "ppc64" }, "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.12", "", { "os": "linux", "cpu": "s390x" }, "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg=="],
"opencode/drizzle-kit/esbuild/@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.12", "", { "os": "linux", "cpu": "x64" }, "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw=="],
"opencode/drizzle-kit/esbuild/@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.12", "", { "os": "none", "cpu": "arm64" }, "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg=="],
"opencode/drizzle-kit/esbuild/@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.12", "", { "os": "none", "cpu": "x64" }, "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ=="],
"opencode/drizzle-kit/esbuild/@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.12", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A=="],
"opencode/drizzle-kit/esbuild/@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.12", "", { "os": "openbsd", "cpu": "x64" }, "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw=="],
"opencode/drizzle-kit/esbuild/@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.12", "", { "os": "sunos", "cpu": "x64" }, "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w=="],
"opencode/drizzle-kit/esbuild/@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.12", "", { "os": "win32", "cpu": "arm64" }, "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg=="],
"opencode/drizzle-kit/esbuild/@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.12", "", { "os": "win32", "cpu": "ia32" }, "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ=="],
"opencode/drizzle-kit/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.12", "", { "os": "win32", "cpu": "x64" }, "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA=="],
"opencontrol/@modelcontextprotocol/sdk/express/accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="],
"opencontrol/@modelcontextprotocol/sdk/express/body-parser": ["body-parser@2.2.0", "", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.0", "http-errors": "^2.0.0", "iconv-lite": "^0.6.3", "on-finished": "^2.4.1", "qs": "^6.14.0", "raw-body": "^3.0.0", "type-is": "^2.0.0" } }, "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg=="],

View File

@@ -2,3 +2,4 @@ research
dist
gen
app.log
src/storage/migrations.generated.ts

View File

@@ -0,0 +1,14 @@
import { defineConfig } from "drizzle-kit"
export default defineConfig({
dialect: "sqlite",
schema: [
"./src/project/project.sql.ts",
"./src/session/session.sql.ts",
"./src/session/message.sql.ts",
"./src/session/part.sql.ts",
"./src/session/session-aux.sql.ts",
"./src/share/share.sql.ts",
],
out: "./drizzle",
})
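
This config is what the migration tooling added below reads. A minimal sketch of the expected workflow when one of the schema files changes (the commands are standard drizzle-kit; the Bun wrapper is illustrative):

import { $ } from "bun"
// Diff the schema files above against ./drizzle and emit a new .sql migration
await $`bun drizzle-kit generate`
// Fail if schema and migrations have drifted (what script/check-migrations.ts automates)
await $`bun drizzle-kit check`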

View File

@@ -0,0 +1,64 @@
CREATE TABLE `project` (
`id` text PRIMARY KEY NOT NULL,
`data` text NOT NULL
);
--> statement-breakpoint
CREATE TABLE `session` (
`id` text PRIMARY KEY NOT NULL,
`project_id` text NOT NULL,
`parent_id` text,
`created_at` integer NOT NULL,
`updated_at` integer NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `session_project_idx` ON `session` (`project_id`);--> statement-breakpoint
CREATE INDEX `session_parent_idx` ON `session` (`parent_id`);--> statement-breakpoint
CREATE TABLE `message` (
`id` text PRIMARY KEY NOT NULL,
`session_id` text NOT NULL,
`created_at` integer NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `message_session_idx` ON `message` (`session_id`);--> statement-breakpoint
CREATE TABLE `part` (
`id` text PRIMARY KEY NOT NULL,
`message_id` text NOT NULL,
`session_id` text NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`message_id`) REFERENCES `message`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE INDEX `part_message_idx` ON `part` (`message_id`);--> statement-breakpoint
CREATE INDEX `part_session_idx` ON `part` (`session_id`);--> statement-breakpoint
CREATE TABLE `permission` (
`project_id` text PRIMARY KEY NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `session_diff` (
`session_id` text PRIMARY KEY NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `todo` (
`session_id` text PRIMARY KEY NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `session_share` (
`session_id` text PRIMARY KEY NOT NULL,
`data` text NOT NULL,
FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON UPDATE no action ON DELETE cascade
);
--> statement-breakpoint
CREATE TABLE `share` (
`session_id` text PRIMARY KEY NOT NULL,
`data` text NOT NULL
);
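
One detail worth calling out: each "--> statement-breakpoint" marker starts with "--", so SQLite treats it as a line comment. That is why the embedded runner later in this diff can hand the whole file to exec() without splitting it first. A minimal sketch, assuming the file is imported as text:

import { Database } from "bun:sqlite"
import sql from "./0000_initial.sql" with { type: "text" } // assumed relative path
const sqlite = new Database(":memory:")
sqlite.exec("PRAGMA foreign_keys = ON") // the cascades above are inert without this pragma
sqlite.exec(sql) // multi-statement script runs as-is; breakpoint markers are comments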

View File

@@ -0,0 +1,426 @@
{
"version": "6",
"dialect": "sqlite",
"id": "7255471a-8cff-422c-b0ef-419a2aa7d952",
"prevId": "00000000-0000-0000-0000-000000000000",
"tables": {
"project": {
"name": "project",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"session": {
"name": "session",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"project_id": {
"name": "project_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"parent_id": {
"name": "parent_id",
"type": "text",
"primaryKey": false,
"notNull": false,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"updated_at": {
"name": "updated_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {
"session_project_idx": {
"name": "session_project_idx",
"columns": [
"project_id"
],
"isUnique": false
},
"session_parent_idx": {
"name": "session_parent_idx",
"columns": [
"parent_id"
],
"isUnique": false
}
},
"foreignKeys": {
"session_project_id_project_id_fk": {
"name": "session_project_id_project_id_fk",
"tableFrom": "session",
"tableTo": "project",
"columnsFrom": [
"project_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"message": {
"name": "message",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"created_at": {
"name": "created_at",
"type": "integer",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {
"message_session_idx": {
"name": "message_session_idx",
"columns": [
"session_id"
],
"isUnique": false
}
},
"foreignKeys": {
"message_session_id_session_id_fk": {
"name": "message_session_id_session_id_fk",
"tableFrom": "message",
"tableTo": "session",
"columnsFrom": [
"session_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"part": {
"name": "part",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"message_id": {
"name": "message_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {
"part_message_idx": {
"name": "part_message_idx",
"columns": [
"message_id"
],
"isUnique": false
},
"part_session_idx": {
"name": "part_session_idx",
"columns": [
"session_id"
],
"isUnique": false
}
},
"foreignKeys": {
"part_message_id_message_id_fk": {
"name": "part_message_id_message_id_fk",
"tableFrom": "part",
"tableTo": "message",
"columnsFrom": [
"message_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"permission": {
"name": "permission",
"columns": {
"project_id": {
"name": "project_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"permission_project_id_project_id_fk": {
"name": "permission_project_id_project_id_fk",
"tableFrom": "permission",
"tableTo": "project",
"columnsFrom": [
"project_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"session_diff": {
"name": "session_diff",
"columns": {
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"session_diff_session_id_session_id_fk": {
"name": "session_diff_session_id_session_id_fk",
"tableFrom": "session_diff",
"tableTo": "session",
"columnsFrom": [
"session_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"todo": {
"name": "todo",
"columns": {
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"todo_session_id_session_id_fk": {
"name": "todo_session_id_session_id_fk",
"tableFrom": "todo",
"tableTo": "session",
"columnsFrom": [
"session_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"session_share": {
"name": "session_share",
"columns": {
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {
"session_share_session_id_session_id_fk": {
"name": "session_share_session_id_session_id_fk",
"tableFrom": "session_share",
"tableTo": "session",
"columnsFrom": [
"session_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
},
"share": {
"name": "share",
"columns": {
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": true,
"notNull": true,
"autoincrement": false
},
"data": {
"name": "data",
"type": "text",
"primaryKey": false,
"notNull": true,
"autoincrement": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"checkConstraints": {}
}
},
"views": {},
"enums": {},
"_meta": {
"schemas": {},
"tables": {},
"columns": {}
},
"internal": {
"indexes": {}
}
}

View File

@@ -0,0 +1,13 @@
{
"version": "7",
"dialect": "sqlite",
"entries": [
{
"idx": 0,
"version": "6",
"when": 1768425777461,
"tag": "0000_initial",
"breakpoints": true
}
]
}

View File

@@ -41,6 +41,7 @@
"@types/turndown": "5.0.5",
"@types/yargs": "17.0.33",
"@typescript/native-preview": "catalog:",
"drizzle-kit": "0.31.8",
"typescript": "catalog:",
"vscode-languageserver-types": "3.17.5",
"why-is-node-running": "3.2.2",
@@ -97,6 +98,7 @@
"clipboardy": "4.0.0",
"decimal.js": "10.5.0",
"diff": "catalog:",
"drizzle-orm": "0.45.1",
"fuzzysort": "3.1.0",
"gray-matter": "4.0.3",
"hono": "catalog:",

View File

@@ -99,6 +99,12 @@ const targets = singleFlag
})
: allTargets
// Check migrations are up to date and generate embedded migrations file
console.log("Checking migrations...")
await $`bun run script/check-migrations.ts`
console.log("Generating migrations embed...")
await $`bun run script/generate-migrations.ts`
await $`rm -rf dist`
const binaries: Record<string, string> = {}

View File

@@ -0,0 +1,16 @@
#!/usr/bin/env bun
import { $ } from "bun"
// drizzle-kit check compares schema to migrations, exits non-zero if drift
const result = await $`bun drizzle-kit check`.quiet().nothrow()
if (result.exitCode !== 0) {
console.error("Schema has changes not captured in migrations!")
console.error("Run: bun drizzle-kit generate")
console.error("")
console.error(result.stderr.toString())
process.exit(1)
}
console.log("Migrations are up to date")

View File

@@ -0,0 +1,49 @@
#!/usr/bin/env bun
import { Glob } from "bun"
import path from "path"
import fs from "fs"
const migrationsDir = "./drizzle"
const outFile = "./src/storage/migrations.generated.ts"
if (!fs.existsSync(migrationsDir)) {
console.log("No migrations directory found, creating empty migrations file")
await Bun.write(
outFile,
`// Auto-generated - do not edit
export const migrations: { name: string; sql: string }[] = []
`,
)
process.exit(0)
}
const files = Array.from(new Glob("*.sql").scanSync({ cwd: migrationsDir })).sort()
if (files.length === 0) {
console.log("No migrations found, creating empty migrations file")
await Bun.write(
outFile,
`// Auto-generated - do not edit
export const migrations: { name: string; sql: string }[] = []
`,
)
process.exit(0)
}
const imports = files.map((f, i) => `import m${i} from "../../drizzle/${f}" with { type: "text" }`).join("\n")
const entries = files.map((f, i) => ` { name: "${path.basename(f, ".sql")}", sql: m${i} },`).join("\n")
await Bun.write(
outFile,
`// Auto-generated - do not edit
${imports}
export const migrations = [
${entries}
]
`,
)
console.log(`Generated migrations file with ${files.length} migrations`)
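
For the single migration in this PR, the generated output should look roughly like this (reconstructed from the template above and the journal's "0000_initial" tag; illustrative, not committed):

// Auto-generated - do not edit
import m0 from "../../drizzle/0000_initial.sql" with { type: "text" }
export const migrations = [
  { name: "0000_initial", sql: m0 },
]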

View File

@@ -2,7 +2,10 @@ import type { Argv } from "yargs"
import { Session } from "../../session"
import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap"
import { Storage } from "../../storage/storage"
import { db } from "../../storage/db"
import { SessionTable } from "../../session/session.sql"
import { MessageTable } from "../../session/message.sql"
import { PartTable } from "../../session/part.sql"
import { Instance } from "../../project/instance"
import { EOL } from "os"
@@ -81,13 +84,42 @@ export const ImportCommand = cmd({
return
}
await Storage.write(["session", Instance.project.id, exportData.info.id], exportData.info)
db()
.insert(SessionTable)
.values({
id: exportData.info.id,
projectID: Instance.project.id,
parentID: exportData.info.parentID,
createdAt: exportData.info.time.created,
updatedAt: exportData.info.time.updated,
data: exportData.info,
})
.onConflictDoUpdate({ target: SessionTable.id, set: { data: exportData.info } })
.run()
for (const msg of exportData.messages) {
await Storage.write(["message", exportData.info.id, msg.info.id], msg.info)
db()
.insert(MessageTable)
.values({
id: msg.info.id,
sessionID: exportData.info.id,
createdAt: msg.info.time?.created ?? Date.now(),
data: msg.info,
})
.onConflictDoUpdate({ target: MessageTable.id, set: { data: msg.info } })
.run()
for (const part of msg.parts) {
await Storage.write(["part", msg.info.id, part.id], part)
db()
.insert(PartTable)
.values({
id: part.id,
messageID: msg.info.id,
sessionID: exportData.info.id,
data: part,
})
.onConflictDoUpdate({ target: PartTable.id, set: { data: part } })
.run()
}
}

View File

@@ -2,7 +2,9 @@ import type { Argv } from "yargs"
import { cmd } from "./cmd"
import { Session } from "../../session"
import { bootstrap } from "../bootstrap"
import { Storage } from "../../storage/storage"
import { db } from "../../storage/db"
import { ProjectTable } from "../../project/project.sql"
import { SessionTable } from "../../session/session.sql"
import { Project } from "../../project/project"
import { Instance } from "../../project/instance"
@@ -83,25 +85,8 @@ async function getCurrentProject(): Promise<Project.Info> {
}
async function getAllSessions(): Promise<Session.Info[]> {
const sessions: Session.Info[] = []
const projectKeys = await Storage.list(["project"])
const projects = await Promise.all(projectKeys.map((key) => Storage.read<Project.Info>(key)))
for (const project of projects) {
if (!project) continue
const sessionKeys = await Storage.list(["session", project.id])
const projectSessions = await Promise.all(sessionKeys.map((key) => Storage.read<Session.Info>(key)))
for (const session of projectSessions) {
if (session) {
sessions.push(session)
}
}
}
return sessions
const sessionRows = db().select().from(SessionTable).all()
return sessionRows.map((row) => row.data)
}
export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> {

View File

@@ -3,7 +3,9 @@ import { BusEvent } from "@/bus/bus-event"
import { Config } from "@/config/config"
import { Identifier } from "@/id/id"
import { Instance } from "@/project/instance"
import { Storage } from "@/storage/storage"
import { db } from "@/storage/db"
import { PermissionTable } from "@/session/session-aux.sql"
import { eq } from "drizzle-orm"
import { fn } from "@/util/fn"
import { Log } from "@/util/log"
import { Wildcard } from "@/util/wildcard"
@@ -107,7 +109,8 @@ export namespace PermissionNext {
const state = Instance.state(async () => {
const projectID = Instance.project.id
const stored = await Storage.read<Ruleset>(["permission", projectID]).catch(() => [] as Ruleset)
const row = db().select().from(PermissionTable).where(eq(PermissionTable.projectID, projectID)).get()
const stored = row?.data ?? ([] as Ruleset)
const pending: Record<
string,

View File

@@ -0,0 +1,7 @@
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import type { Project } from "./project"
export const ProjectTable = sqliteTable("project", {
id: text("id").primaryKey(),
data: text("data", { mode: "json" }).notNull().$type<Project.Info>(),
})
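
Because the data column is declared with mode: "json", drizzle stringifies on write and parses on read, which is what lets every call site in this PR treat row.data as a typed object rather than a string. A usage sketch (the "global" id is assumed):

import { db } from "../storage/db"
import { ProjectTable } from "./project.sql"
import { eq } from "drizzle-orm"
const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get()
if (row) console.log(row.data.worktree) // row.data is already a parsed Project.Info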

View File

@@ -3,10 +3,13 @@ import fs from "fs/promises"
import { Filesystem } from "../util/filesystem"
import path from "path"
import { $ } from "bun"
import { Storage } from "../storage/storage"
import { db } from "../storage/db"
import { ProjectTable } from "./project.sql"
import { SessionTable } from "../session/session.sql"
import { eq } from "drizzle-orm"
import { Log } from "../util/log"
import { Flag } from "@/flag/flag"
import { Session } from "../session"
import type { Session } from "../session"
import { work } from "../util/queue"
import { fn } from "@opencode-ai/util/fn"
import { BusEvent } from "@/bus/bus-event"
@@ -175,7 +178,8 @@ export namespace Project {
}
})
let existing = await Storage.read<Info>(["project", id]).catch(() => undefined)
const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, id)).get()
let existing = row?.data
if (!existing) {
existing = {
id,
@@ -208,7 +212,11 @@
}
if (sandbox !== result.worktree && !result.sandboxes.includes(sandbox)) result.sandboxes.push(sandbox)
result.sandboxes = result.sandboxes.filter((x) => existsSync(x))
await Storage.write<Info>(["project", id], result)
db()
.insert(ProjectTable)
.values({ id, data: result })
.onConflictDoUpdate({ target: ProjectTable.id, set: { data: result } })
.run()
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
@@ -249,42 +257,44 @@
}
async function migrateFromGlobal(newProjectID: string, worktree: string) {
const globalProject = await Storage.read<Info>(["project", "global"]).catch(() => undefined)
if (!globalProject) return
const globalRow = db().select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get()
if (!globalRow) return
const globalSessions = await Storage.list(["session", "global"]).catch(() => [])
const globalSessions = db().select().from(SessionTable).where(eq(SessionTable.projectID, "global")).all()
if (globalSessions.length === 0) return
log.info("migrating sessions from global", { newProjectID, worktree, count: globalSessions.length })
await work(10, globalSessions, async (key) => {
const sessionID = key[key.length - 1]
const session = await Storage.read<Session.Info>(key).catch(() => undefined)
await work(10, globalSessions, async (row) => {
const session = row.data as Session.Info
if (!session) return
if (session.directory && session.directory !== worktree) return
session.projectID = newProjectID
log.info("migrating session", { sessionID, from: "global", to: newProjectID })
await Storage.write(["session", newProjectID, sessionID], session)
await Storage.remove(key)
log.info("migrating session", { sessionID: session.id, from: "global", to: newProjectID })
db()
.update(SessionTable)
.set({ projectID: newProjectID, data: session })
.where(eq(SessionTable.id, session.id))
.run()
}).catch((error) => {
log.error("failed to migrate sessions from global to project", { error, projectId: newProjectID })
})
}
export async function setInitialized(projectID: string) {
await Storage.update<Info>(["project", projectID], (draft) => {
draft.time.initialized = Date.now()
})
export function setInitialized(projectID: string) {
const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()
if (!row) return
const data = { ...row.data, time: { ...row.data.time, initialized: Date.now() } }
db().update(ProjectTable).set({ data }).where(eq(ProjectTable.id, projectID)).run()
}
export async function list() {
const keys = await Storage.list(["project"])
const projects = await Promise.all(keys.map((x) => Storage.read<Info>(x)))
return projects.map((project) => ({
...project,
sandboxes: project.sandboxes?.filter((x) => existsSync(x)),
}))
export function list() {
return db()
.select()
.from(ProjectTable)
.all()
.map((row) => row.data)
}
export const update = fn(
@@ -295,43 +305,39 @@
commands: Info.shape.commands.optional(),
}),
async (input) => {
const result = await Storage.update<Info>(["project", input.projectID], (draft) => {
if (input.name !== undefined) draft.name = input.name
if (input.icon !== undefined) {
draft.icon = {
...draft.icon,
}
if (input.icon.url !== undefined) draft.icon.url = input.icon.url
if (input.icon.override !== undefined) draft.icon.override = input.icon.override || undefined
if (input.icon.color !== undefined) draft.icon.color = input.icon.color
}
if (input.commands?.start !== undefined) {
const start = input.commands.start || undefined
draft.commands = {
...(draft.commands ?? {}),
}
draft.commands.start = start
if (!draft.commands.start) draft.commands = undefined
}
draft.time.updated = Date.now()
})
const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get()
if (!row) throw new Error(`Project not found: ${input.projectID}`)
const data = { ...row.data }
if (input.name !== undefined) data.name = input.name
if (input.icon !== undefined) {
data.icon = { ...data.icon }
if (input.icon.url !== undefined) data.icon.url = input.icon.url
if (input.icon.override !== undefined) data.icon.override = input.icon.override || undefined
if (input.icon.color !== undefined) data.icon.color = input.icon.color
}
if (input.commands?.start !== undefined) {
const start = input.commands.start || undefined
data.commands = { ...(data.commands ?? {}) }
data.commands.start = start
if (!data.commands.start) data.commands = undefined
}
data.time.updated = Date.now()
db().update(ProjectTable).set({ data }).where(eq(ProjectTable.id, input.projectID)).run()
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
},
)
export async function sandboxes(projectID: string) {
const project = await Storage.read<Info>(["project", projectID]).catch(() => undefined)
if (!project?.sandboxes) return []
const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()
if (!row?.data.sandboxes) return []
const valid: string[] = []
for (const dir of project.sandboxes) {
for (const dir of row.data.sandboxes) {
const stat = await fs.stat(dir).catch(() => undefined)
if (stat?.isDirectory()) valid.push(dir)
}
@@ -339,33 +345,37 @@
}
export async function addSandbox(projectID: string, directory: string) {
const result = await Storage.update<Info>(["project", projectID], (draft) => {
const sandboxes = draft.sandboxes ?? []
if (!sandboxes.includes(directory)) sandboxes.push(directory)
draft.sandboxes = sandboxes
draft.time.updated = Date.now()
})
const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()
if (!row) throw new Error(`Project not found: ${projectID}`)
const data = { ...row.data }
const sandboxes = data.sandboxes ?? []
if (!sandboxes.includes(directory)) sandboxes.push(directory)
data.sandboxes = sandboxes
data.time.updated = Date.now()
db().update(ProjectTable).set({ data }).where(eq(ProjectTable.id, projectID)).run()
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
}
export async function removeSandbox(projectID: string, directory: string) {
const result = await Storage.update<Info>(["project", projectID], (draft) => {
const sandboxes = draft.sandboxes ?? []
draft.sandboxes = sandboxes.filter((sandbox) => sandbox !== directory)
draft.time.updated = Date.now()
})
const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()
if (!row) throw new Error(`Project not found: ${projectID}`)
const data = { ...row.data }
const sandboxes = data.sandboxes ?? []
data.sandboxes = sandboxes.filter((sandbox) => sandbox !== directory)
data.time.updated = Date.now()
db().update(ProjectTable).set({ data }).where(eq(ProjectTable.id, projectID)).run()
GlobalBus.emit("event", {
payload: {
type: Event.Updated.type,
properties: result,
properties: data,
},
})
return result
return data
}
}
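
The read, copy, mutate, write sequence above now repeats across setInitialized, update, addSandbox, and removeSandbox. A hypothetical helper in the spirit of the old Storage.update could fold them together; a sketch only, not part of this diff:

// Hypothetical: load a project row, let the caller mutate a copy, persist, return it.
function updateProject(projectID: string, editor: (draft: Project.Info) => void): Project.Info {
  const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()
  if (!row) throw new Error(`Project not found: ${projectID}`)
  const data = { ...row.data }
  editor(data)
  data.time.updated = Date.now()
  db().update(ProjectTable).set({ data }).where(eq(ProjectTable.id, projectID)).run()
  return data
}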

View File

@@ -1,6 +1,6 @@
import { resolver } from "hono-openapi"
import z from "zod"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"
export const ERRORS = {
400: {
@@ -25,7 +25,7 @@ export const ERRORS = {
description: "Not found",
content: {
"application/json": {
schema: resolver(Storage.NotFoundError.Schema),
schema: resolver(NotFoundError.Schema),
},
},
},

View File

@@ -31,7 +31,7 @@ import { ExperimentalRoutes } from "./routes/experimental"
import { ProviderRoutes } from "./routes/provider"
import { lazy } from "../util/lazy"
import { InstanceBootstrap } from "../project/bootstrap"
import { Storage } from "../storage/storage"
import { NotFoundError } from "../storage/db"
import type { ContentfulStatusCode } from "hono/utils/http-status"
import { websocket } from "hono/bun"
import { HTTPException } from "hono/http-exception"
@@ -65,7 +65,7 @@ export namespace Server {
})
if (err instanceof NamedError) {
let status: ContentfulStatusCode
if (err instanceof Storage.NotFoundError) status = 404
if (err instanceof NotFoundError) status = 404
else if (err instanceof Provider.ModelNotFoundError) status = 400
else if (err.name.startsWith("Worktree")) status = 400
else status = 500

View File

@@ -10,7 +10,13 @@ import { Flag } from "../flag/flag"
import { Identifier } from "../id/id"
import { Installation } from "../installation"
import { Storage } from "../storage/storage"
import { db, NotFoundError } from "../storage/db"
import { SessionTable } from "./session.sql"
import { MessageTable } from "./message.sql"
import { PartTable } from "./part.sql"
import { SessionDiffTable } from "./session-aux.sql"
import { ShareTable } from "../share/share.sql"
import { eq } from "drizzle-orm"
import { Log } from "../util/log"
import { MessageV2 } from "./message-v2"
import { Instance } from "../project/instance"
@@ -211,7 +217,17 @@ export namespace Session {
},
}
log.info("created", result)
await Storage.write(["session", Instance.project.id, result.id], result)
db()
.insert(SessionTable)
.values({
id: result.id,
projectID: result.projectID,
parentID: result.parentID,
createdAt: result.time.created,
updatedAt: result.time.updated,
data: result,
})
.run()
Bus.publish(Event.Created, {
info: result,
})
@@ -240,12 +256,14 @@
}
export const get = fn(Identifier.schema("session"), async (id) => {
const read = await Storage.read<Info>(["session", Instance.project.id, id])
return read as Info
const row = db().select().from(SessionTable).where(eq(SessionTable.id, id)).get()
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
return row.data
})
export const getShare = fn(Identifier.schema("session"), async (id) => {
return Storage.read<ShareInfo>(["share", id])
const row = db().select().from(ShareTable).where(eq(ShareTable.sessionID, id)).get()
return row?.data
})
export const share = fn(Identifier.schema("session"), async (id) => {
@@ -280,23 +298,24 @@
)
})
export async function update(id: string, editor: (session: Info) => void, options?: { touch?: boolean }) {
const project = Instance.project
const result = await Storage.update<Info>(["session", project.id, id], (draft) => {
editor(draft)
if (options?.touch !== false) {
draft.time.updated = Date.now()
}
})
export function update(id: string, editor: (session: Info) => void, options?: { touch?: boolean }) {
const row = db().select().from(SessionTable).where(eq(SessionTable.id, id)).get()
if (!row) throw new Error(`Session not found: ${id}`)
const data = { ...row.data }
editor(data)
if (options?.touch !== false) {
data.time.updated = Date.now()
}
db().update(SessionTable).set({ updatedAt: data.time.updated, data }).where(eq(SessionTable.id, id)).run()
Bus.publish(Event.Updated, {
info: result,
info: data,
})
return result
return data
}
export const diff = fn(Identifier.schema("session"), async (sessionID) => {
const diffs = await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
return diffs ?? []
const row = db().select().from(SessionDiffTable).where(eq(SessionDiffTable.sessionID, sessionID)).get()
return row?.data ?? []
})
export const messages = fn(
@@ -315,22 +334,17 @@
},
)
export async function* list() {
export function* list() {
const project = Instance.project
for (const item of await Storage.list(["session", project.id])) {
yield Storage.read<Info>(item)
const rows = db().select().from(SessionTable).where(eq(SessionTable.projectID, project.id)).all()
for (const row of rows) {
yield row.data
}
}
export const children = fn(Identifier.schema("session"), async (parentID) => {
const project = Instance.project
const result = [] as Session.Info[]
for (const item of await Storage.list(["session", project.id])) {
const session = await Storage.read<Info>(item)
if (session.parentID !== parentID) continue
result.push(session)
}
return result
const rows = db().select().from(SessionTable).where(eq(SessionTable.parentID, parentID)).all()
return rows.map((row) => row.data)
})
export const remove = fn(Identifier.schema("session"), async (sessionID) => {
@@ -341,13 +355,8 @@
await remove(child.id)
}
await unshare(sessionID).catch(() => {})
for (const msg of await Storage.list(["message", sessionID])) {
for (const part of await Storage.list(["part", msg.at(-1)!])) {
await Storage.remove(part)
}
await Storage.remove(msg)
}
await Storage.remove(["session", project.id, sessionID])
// CASCADE delete handles messages and parts automatically
db().delete(SessionTable).where(eq(SessionTable.id, sessionID)).run()
Bus.publish(Event.Deleted, {
info: session,
})
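
The single DELETE is enough because the schema declares ON DELETE cascade from message to session and from part to message, and db.ts turns on PRAGMA foreign_keys, which SQLite leaves off by default. A sanity sketch (assumed id):

db().delete(SessionTable).where(eq(SessionTable.id, "ses_demo")).run()
const orphans = db().select().from(MessageTable).where(eq(MessageTable.sessionID, "ses_demo")).all()
console.log(orphans.length) // 0: messages and, transitively, their parts cascaded away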
@@ -357,7 +366,17 @@
})
export const updateMessage = fn(MessageV2.Info, async (msg) => {
await Storage.write(["message", msg.sessionID, msg.id], msg)
const createdAt = msg.time.created
db()
.insert(MessageTable)
.values({
id: msg.id,
sessionID: msg.sessionID,
createdAt,
data: msg,
})
.onConflictDoUpdate({ target: MessageTable.id, set: { data: msg } })
.run()
Bus.publish(MessageV2.Event.Updated, {
info: msg,
})
@@ -370,7 +389,8 @@
messageID: Identifier.schema("message"),
}),
async (input) => {
await Storage.remove(["message", input.sessionID, input.messageID])
// CASCADE delete handles parts automatically
db().delete(MessageTable).where(eq(MessageTable.id, input.messageID)).run()
Bus.publish(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
@@ -386,7 +406,7 @@
partID: Identifier.schema("part"),
}),
async (input) => {
await Storage.remove(["part", input.messageID, input.partID])
db().delete(PartTable).where(eq(PartTable.id, input.partID)).run()
Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: input.sessionID,
messageID: input.messageID,
@@ -411,7 +431,16 @@
export const updatePart = fn(UpdatePartInput, async (input) => {
const part = "delta" in input ? input.part : input
const delta = "delta" in input ? input.delta : undefined
await Storage.write(["part", part.messageID, part.id], part)
db()
.insert(PartTable)
.values({
id: part.id,
messageID: part.messageID,
sessionID: part.sessionID,
data: part,
})
.onConflictDoUpdate({ target: PartTable.id, set: { data: part } })
.run()
Bus.publish(MessageV2.Event.PartUpdated, {
part,
delta,

View File

@@ -6,7 +6,10 @@ import { Identifier } from "../id/id"
import { LSP } from "../lsp"
import { Snapshot } from "@/snapshot"
import { fn } from "@/util/fn"
import { Storage } from "@/storage/storage"
import { db } from "@/storage/db"
import { MessageTable } from "./message.sql"
import { PartTable } from "./part.sql"
import { eq, desc } from "drizzle-orm"
import { ProviderTransform } from "@/provider/transform"
import { STATUS_CODES } from "http"
import { iife } from "@/util/iife"
@@ -607,21 +610,23 @@
}
export const stream = fn(Identifier.schema("session"), async function* (sessionID) {
const list = await Array.fromAsync(await Storage.list(["message", sessionID]))
for (let i = list.length - 1; i >= 0; i--) {
const rows = db()
.select()
.from(MessageTable)
.where(eq(MessageTable.sessionID, sessionID))
.orderBy(desc(MessageTable.createdAt))
.all()
for (const row of rows) {
yield await get({
sessionID,
messageID: list[i][2],
messageID: row.id,
})
}
})
export const parts = fn(Identifier.schema("message"), async (messageID) => {
const result = [] as MessageV2.Part[]
for (const item of await Storage.list(["part", messageID])) {
const read = await Storage.read<MessageV2.Part>(item)
result.push(read)
}
const rows = db().select().from(PartTable).where(eq(PartTable.messageID, messageID)).all()
const result = rows.map((row) => row.data)
result.sort((a, b) => (a.id > b.id ? 1 : -1))
return result
})
@@ -632,8 +637,10 @@
messageID: Identifier.schema("message"),
}),
async (input) => {
const row = db().select().from(MessageTable).where(eq(MessageTable.id, input.messageID)).get()
if (!row) throw new Error(`Message not found: ${input.messageID}`)
return {
info: await Storage.read<MessageV2.Info>(["message", input.sessionID, input.messageID]),
info: row.data,
parts: await parts(input.messageID),
}
},

View File

@@ -0,0 +1,16 @@
import { sqliteTable, text, integer, index } from "drizzle-orm/sqlite-core"
import { SessionTable } from "./session.sql"
import type { MessageV2 } from "./message-v2"
export const MessageTable = sqliteTable(
"message",
{
id: text("id").primaryKey(),
sessionID: text("session_id")
.notNull()
.references(() => SessionTable.id, { onDelete: "cascade" }),
createdAt: integer("created_at").notNull(),
data: text("data", { mode: "json" }).notNull().$type<MessageV2.Info>(),
},
(table) => [index("message_session_idx").on(table.sessionID)],
)

View File

@@ -0,0 +1,16 @@
import { sqliteTable, text, index } from "drizzle-orm/sqlite-core"
import { MessageTable } from "./message.sql"
import type { MessageV2 } from "./message-v2"
export const PartTable = sqliteTable(
"part",
{
id: text("id").primaryKey(),
messageID: text("message_id")
.notNull()
.references(() => MessageTable.id, { onDelete: "cascade" }),
// denormalized: no FK to session here; part_session_idx below keeps per-session part lookups cheap
sessionID: text("session_id").notNull(),
data: text("data", { mode: "json" }).notNull().$type<MessageV2.Part>(),
},
(table) => [index("part_message_idx").on(table.messageID), index("part_session_idx").on(table.sessionID)],
)

View File

@@ -5,7 +5,10 @@ import { MessageV2 } from "./message-v2"
import { Session } from "."
import { Log } from "../util/log"
import { splitWhen } from "remeda"
import { Storage } from "../storage/storage"
import { db } from "../storage/db"
import { MessageTable } from "./message.sql"
import { PartTable } from "./part.sql"
import { eq } from "drizzle-orm"
import { Bus } from "../bus"
import { SessionPrompt } from "./prompt"
import { SessionSummary } from "./summary"
@@ -97,7 +100,7 @@ export namespace SessionRevert {
const [preserve, remove] = splitWhen(msgs, (x) => x.info.id === messageID)
msgs = preserve
for (const msg of remove) {
await Storage.remove(["message", sessionID, msg.info.id])
db().delete(MessageTable).where(eq(MessageTable.id, msg.info.id)).run()
await Bus.publish(MessageV2.Event.Removed, { sessionID: sessionID, messageID: msg.info.id })
}
const last = preserve.at(-1)
@@ -106,7 +109,7 @@
const [preserveParts, removeParts] = splitWhen(last.parts, (x) => x.id === partID)
last.parts = preserveParts
for (const part of removeParts) {
await Storage.remove(["part", last.info.id, part.id])
db().delete(PartTable).where(eq(PartTable.id, part.id)).run()
await Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: sessionID,
messageID: last.info.id,

View File

@@ -0,0 +1,27 @@
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { SessionTable } from "./session.sql"
import { ProjectTable } from "../project/project.sql"
import type { Snapshot } from "@/snapshot"
import type { Todo } from "./todo"
import type { PermissionNext } from "@/permission/next"
export const SessionDiffTable = sqliteTable("session_diff", {
sessionID: text("session_id")
.primaryKey()
.references(() => SessionTable.id, { onDelete: "cascade" }),
data: text("data", { mode: "json" }).notNull().$type<Snapshot.FileDiff[]>(),
})
export const TodoTable = sqliteTable("todo", {
sessionID: text("session_id")
.primaryKey()
.references(() => SessionTable.id, { onDelete: "cascade" }),
data: text("data", { mode: "json" }).notNull().$type<Todo.Info[]>(),
})
export const PermissionTable = sqliteTable("permission", {
projectID: text("project_id")
.primaryKey()
.references(() => ProjectTable.id, { onDelete: "cascade" }),
data: text("data", { mode: "json" }).notNull().$type<PermissionNext.Ruleset>(),
})

View File

@@ -0,0 +1,18 @@
import { sqliteTable, text, integer, index } from "drizzle-orm/sqlite-core"
import { ProjectTable } from "../project/project.sql"
import type { Session } from "./index"
export const SessionTable = sqliteTable(
"session",
{
id: text("id").primaryKey(),
projectID: text("project_id")
.notNull()
.references(() => ProjectTable.id, { onDelete: "cascade" }),
parentID: text("parent_id"),
createdAt: integer("created_at").notNull(),
updatedAt: integer("updated_at").notNull(),
data: text("data", { mode: "json" }).notNull().$type<Session.Info>(),
},
(table) => [index("session_project_idx").on(table.projectID), index("session_parent_idx").on(table.parentID)],
)
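
A pattern worth naming here: project_id, parent_id, created_at, and updated_at are duplicated out of the JSON blob purely so queries can filter and sort without parsing data; the blob stays the source of truth. For example, listing a project's sessions newest-first (sketch, assumed project id):

import { db } from "../storage/db"
import { SessionTable } from "./session.sql"
import { eq, desc } from "drizzle-orm"
const sessions = db()
  .select()
  .from(SessionTable)
  .where(eq(SessionTable.projectID, "global"))
  .orderBy(desc(SessionTable.updatedAt))
  .all()
  .map((row) => row.data) // Session.Info objects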

View File

@@ -11,7 +11,9 @@ import { Snapshot } from "@/snapshot"
import { Log } from "@/util/log"
import path from "path"
import { Instance } from "@/project/instance"
import { Storage } from "@/storage/storage"
import { db } from "@/storage/db"
import { SessionDiffTable } from "./session-aux.sql"
import { eq } from "drizzle-orm"
import { Bus } from "@/bus"
import { LLM } from "./llm"
@@ -54,7 +56,11 @@ export namespace SessionSummary {
files: diffs.length,
}
})
await Storage.write(["session_diff", input.sessionID], diffs)
db()
.insert(SessionDiffTable)
.values({ sessionID: input.sessionID, data: diffs })
.onConflictDoUpdate({ target: SessionDiffTable.sessionID, set: { data: diffs } })
.run()
Bus.publish(Session.Event.Diff, {
sessionID: input.sessionID,
diff: diffs,
@@ -116,7 +122,8 @@
messageID: Identifier.schema("message").optional(),
}),
async (input) => {
return Storage.read<Snapshot.FileDiff[]>(["session_diff", input.sessionID]).catch(() => [])
const row = db().select().from(SessionDiffTable).where(eq(SessionDiffTable.sessionID, input.sessionID)).get()
return row?.data ?? []
},
)

View File

@@ -1,7 +1,9 @@
import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus"
import z from "zod"
import { Storage } from "../storage/storage"
import { db } from "../storage/db"
import { TodoTable } from "./session-aux.sql"
import { eq } from "drizzle-orm"
export namespace Todo {
export const Info = z
@@ -24,14 +26,17 @@
),
}
export async function update(input: { sessionID: string; todos: Info[] }) {
await Storage.write(["todo", input.sessionID], input.todos)
export function update(input: { sessionID: string; todos: Info[] }) {
db()
.insert(TodoTable)
.values({ sessionID: input.sessionID, data: input.todos })
.onConflictDoUpdate({ target: TodoTable.sessionID, set: { data: input.todos } })
.run()
Bus.publish(Event.Updated, input)
}
export async function get(sessionID: string) {
return Storage.read<Info[]>(["todo", sessionID])
.then((x) => x || [])
.catch(() => [])
export function get(sessionID: string) {
const row = db().select().from(TodoTable).where(eq(TodoTable.sessionID, sessionID)).get()
return row?.data ?? []
}
}
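
insert plus onConflictDoUpdate on the primary key is the upsert idiom this PR uses wherever Storage.write used to overwrite blindly; calling update twice inserts once and then updates in place (sketch, assumed session id):

Todo.update({ sessionID: "ses_demo", todos: [] }) // first call inserts the row
Todo.update({ sessionID: "ses_demo", todos: [] }) // second call hits the session_id primary key and only updates data
console.log(Todo.get("ses_demo")) // []; a missing row now yields [] instead of a caught read error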

View File

@@ -4,7 +4,9 @@ import { ulid } from "ulid"
import { Provider } from "@/provider/provider"
import { Session } from "@/session"
import { MessageV2 } from "@/session/message-v2"
import { Storage } from "@/storage/storage"
import { db } from "@/storage/db"
import { SessionShareTable } from "./share.sql"
import { eq } from "drizzle-orm"
import { Log } from "@/util/log"
import type * as SDK from "@opencode-ai/sdk/v2"
@@ -77,17 +79,18 @@
})
.then((x) => x.json())
.then((x) => x as { id: string; url: string; secret: string })
await Storage.write(["session_share", sessionID], result)
db()
.insert(SessionShareTable)
.values({ sessionID, data: result })
.onConflictDoUpdate({ target: SessionShareTable.sessionID, set: { data: result } })
.run()
fullSync(sessionID)
return result
}
function get(sessionID: string) {
return Storage.read<{
id: string
secret: string
url: string
}>(["session_share", sessionID])
const row = db().select().from(SessionShareTable).where(eq(SessionShareTable.sessionID, sessionID)).get()
return row?.data
}
type Data =
@@ -132,7 +135,7 @@
const queued = queue.get(sessionID)
if (!queued) return
queue.delete(sessionID)
const share = await get(sessionID).catch(() => undefined)
const share = get(sessionID)
if (!share) return
await fetch(`${await url()}/api/share/${share.id}/sync`, {
@@ -163,7 +166,7 @@
secret: share.secret,
}),
})
await Storage.remove(["session_share", sessionID])
db().delete(SessionShareTable).where(eq(SessionShareTable.sessionID, sessionID)).run()
}
async function fullSync(sessionID: string) {

View File

@@ -0,0 +1,19 @@
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { SessionTable } from "../session/session.sql"
import type { Session } from "../session"
export const SessionShareTable = sqliteTable("session_share", {
sessionID: text("session_id")
.primaryKey()
.references(() => SessionTable.id, { onDelete: "cascade" }),
data: text("data", { mode: "json" }).notNull().$type<{
id: string
secret: string
url: string
}>(),
})
export const ShareTable = sqliteTable("share", {
sessionID: text("session_id").primaryKey(),
data: text("data", { mode: "json" }).notNull().$type<Session.ShareInfo>(),
})

packages/opencode/src/sql.d.ts vendored 100644 (5 additions)
View File

@@ -0,0 +1,5 @@
// Type declarations for SQL file imports with { type: "text" }
declare module "*.sql" {
const content: string
export default content
}
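
This declaration exists for the generated migrations embed: Bun's with { type: "text" } import attribute inlines a .sql file as a string at build time, and the ambient module above gives TypeScript a type for it. For example (assumed path):

import sql from "../../drizzle/0000_initial.sql" with { type: "text" }
console.log(typeof sql) // "string": the raw migration SQL, embedded in the binary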

View File

@@ -0,0 +1,73 @@
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { lazy } from "../util/lazy"
import { Global } from "../global"
import { Log } from "../util/log"
import { migrations } from "./migrations.generated"
import { migrateFromJson } from "./json-migration"
import { NamedError } from "@opencode-ai/util/error"
import z from "zod"
import path from "path"
export const NotFoundError = NamedError.create(
"NotFoundError",
z.object({
message: z.string(),
}),
)
const log = Log.create({ service: "db" })
export type DB = ReturnType<typeof drizzle>
let initialized = false
const connection = lazy(() => {
const dbPath = path.join(Global.Path.data, "opencode.db")
log.info("opening database", { path: dbPath })
const sqlite = new Database(dbPath, { create: true })
sqlite.exec("PRAGMA journal_mode = WAL")
sqlite.exec("PRAGMA synchronous = NORMAL")
sqlite.exec("PRAGMA busy_timeout = 5000")
sqlite.exec("PRAGMA cache_size = -64000")
sqlite.exec("PRAGMA foreign_keys = ON")
runMigrations(sqlite)
// Run JSON migration asynchronously after schema is ready
if (!initialized) {
initialized = true
migrateFromJson(sqlite).catch((e) => log.error("json migration failed", { error: e }))
}
return drizzle(sqlite)
})
function runMigrations(sqlite: Database) {
sqlite.exec(`
CREATE TABLE IF NOT EXISTS _migrations (
name TEXT PRIMARY KEY,
applied_at INTEGER NOT NULL
)
`)
const applied = new Set(
sqlite
.query<{ name: string }, []>("SELECT name FROM _migrations")
.all()
.map((r) => r.name),
)
for (const migration of migrations) {
if (applied.has(migration.name)) continue
log.info("applying migration", { name: migration.name })
sqlite.exec(migration.sql)
sqlite.run("INSERT INTO _migrations (name, applied_at) VALUES (?, ?)", [migration.name, Date.now()])
}
}
export function db() {
return connection()
}
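
Everything above sits behind lazy, so the first db() caller pays for opening the file, the PRAGMAs, migrations, and the one-time JSON import, and every later call reuses the same handle. Typical usage elsewhere in this PR (assumed id):

import { db } from "@/storage/db"
import { SessionTable } from "@/session/session.sql"
import { eq } from "drizzle-orm"
const session = db().select().from(SessionTable).where(eq(SessionTable.id, "ses_demo")).get()?.data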

View File

@@ -0,0 +1,267 @@
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { eq } from "drizzle-orm"
import { Global } from "../global"
import { Log } from "../util/log"
import { ProjectTable } from "../project/project.sql"
import { SessionTable } from "../session/session.sql"
import { MessageTable } from "../session/message.sql"
import { PartTable } from "../session/part.sql"
import { SessionDiffTable, TodoTable, PermissionTable } from "../session/session-aux.sql"
import { SessionShareTable, ShareTable } from "../share/share.sql"
import path from "path"
const log = Log.create({ service: "json-migration" })
export async function migrateFromJson(sqlite: Database) {
const storageDir = path.join(Global.Path.data, "storage")
const migrationMarker = path.join(storageDir, "sqlite-migrated")
if (await Bun.file(migrationMarker).exists()) {
log.info("json migration already completed")
return
}
if (!(await Bun.file(path.join(storageDir, "migration")).exists())) {
log.info("no json storage found, skipping migration")
await Bun.write(migrationMarker, Date.now().toString())
return
}
log.info("starting json to sqlite migration", { storageDir })
const db = drizzle(sqlite)
const stats = {
projects: 0,
sessions: 0,
messages: 0,
parts: 0,
diffs: 0,
todos: 0,
permissions: 0,
shares: 0,
errors: [] as string[],
}
// Migrate projects first (no FK deps)
const projectGlob = new Bun.Glob("project/*.json")
for await (const file of projectGlob.scan({ cwd: storageDir, absolute: true })) {
try {
const data = await Bun.file(file).json()
if (!data.id) {
stats.errors.push(`project missing id: ${file}`)
continue
}
db.insert(ProjectTable).values({ id: data.id, data }).onConflictDoNothing().run()
stats.projects++
} catch (e) {
stats.errors.push(`failed to migrate project ${file}: ${e}`)
}
}
log.info("migrated projects", { count: stats.projects })
// Migrate sessions (depends on projects)
const sessionGlob = new Bun.Glob("session/*/*.json")
for await (const file of sessionGlob.scan({ cwd: storageDir, absolute: true })) {
try {
const data = await Bun.file(file).json()
if (!data.id || !data.projectID) {
stats.errors.push(`session missing id or projectID: ${file}`)
continue
}
// Check if project exists (skip orphaned sessions)
const project = db.select().from(ProjectTable).where(eq(ProjectTable.id, data.projectID)).get()
if (!project) {
log.warn("skipping orphaned session", { sessionID: data.id, projectID: data.projectID })
continue
}
db.insert(SessionTable)
.values({
id: data.id,
projectID: data.projectID,
parentID: data.parentID,
createdAt: data.time?.created ?? Date.now(),
updatedAt: data.time?.updated ?? Date.now(),
data,
})
.onConflictDoNothing()
.run()
stats.sessions++
} catch (e) {
stats.errors.push(`failed to migrate session ${file}: ${e}`)
}
}
log.info("migrated sessions", { count: stats.sessions })
// Migrate messages (depends on sessions)
const messageGlob = new Bun.Glob("message/*/*.json")
for await (const file of messageGlob.scan({ cwd: storageDir, absolute: true })) {
try {
const data = await Bun.file(file).json()
if (!data.id || !data.sessionID) {
stats.errors.push(`message missing id or sessionID: ${file}`)
continue
}
// Check if session exists
const session = db.select().from(SessionTable).where(eq(SessionTable.id, data.sessionID)).get()
if (!session) {
log.warn("skipping orphaned message", { messageID: data.id, sessionID: data.sessionID })
continue
}
db.insert(MessageTable)
.values({
id: data.id,
sessionID: data.sessionID,
createdAt: data.time?.created ?? Date.now(),
data,
})
.onConflictDoNothing()
.run()
stats.messages++
} catch (e) {
stats.errors.push(`failed to migrate message ${file}: ${e}`)
}
}
log.info("migrated messages", { count: stats.messages })
// Migrate parts (depends on messages)
const partGlob = new Bun.Glob("part/*/*.json")
for await (const file of partGlob.scan({ cwd: storageDir, absolute: true })) {
try {
const data = await Bun.file(file).json()
if (!data.id || !data.messageID || !data.sessionID) {
stats.errors.push(`part missing id, messageID, or sessionID: ${file}`)
continue
}
// Check if message exists
const message = db.select().from(MessageTable).where(eq(MessageTable.id, data.messageID)).get()
if (!message) {
log.warn("skipping orphaned part", { partID: data.id, messageID: data.messageID })
continue
}
db.insert(PartTable)
.values({
id: data.id,
messageID: data.messageID,
sessionID: data.sessionID,
data,
})
.onConflictDoNothing()
.run()
stats.parts++
} catch (e) {
stats.errors.push(`failed to migrate part ${file}: ${e}`)
}
}
log.info("migrated parts", { count: stats.parts })
// Migrate session diffs
const diffGlob = new Bun.Glob("session_diff/*.json")
for await (const file of diffGlob.scan({ cwd: storageDir, absolute: true })) {
try {
const data = await Bun.file(file).json()
const sessionID = path.basename(file, ".json")
// Check if session exists
const session = db.select().from(SessionTable).where(eq(SessionTable.id, sessionID)).get()
if (!session) {
log.warn("skipping orphaned session_diff", { sessionID })
continue
}
db.insert(SessionDiffTable).values({ sessionID, data }).onConflictDoNothing().run()
stats.diffs++
} catch (e) {
stats.errors.push(`failed to migrate session_diff ${file}: ${e}`)
}
}
log.info("migrated session diffs", { count: stats.diffs })
// Migrate todos
const todoGlob = new Bun.Glob("todo/*.json")
for await (const file of todoGlob.scan({ cwd: storageDir, absolute: true })) {
try {
const data = await Bun.file(file).json()
const sessionID = path.basename(file, ".json")
const session = db.select().from(SessionTable).where(eq(SessionTable.id, sessionID)).get()
if (!session) {
log.warn("skipping orphaned todo", { sessionID })
continue
}
db.insert(TodoTable).values({ sessionID, data }).onConflictDoNothing().run()
stats.todos++
} catch (e) {
stats.errors.push(`failed to migrate todo ${file}: ${e}`)
}
}
log.info("migrated todos", { count: stats.todos })
// Migrate permissions
const permGlob = new Bun.Glob("permission/*.json")
for await (const file of permGlob.scan({ cwd: storageDir, absolute: true })) {
try {
const data = await Bun.file(file).json()
const projectID = path.basename(file, ".json")
const project = db.select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()
if (!project) {
log.warn("skipping orphaned permission", { projectID })
continue
}
db.insert(PermissionTable).values({ projectID, data }).onConflictDoNothing().run()
stats.permissions++
} catch (e) {
stats.errors.push(`failed to migrate permission ${file}: ${e}`)
}
}
log.info("migrated permissions", { count: stats.permissions })
// Migrate session shares
const shareGlob = new Bun.Glob("session_share/*.json")
for await (const file of shareGlob.scan({ cwd: storageDir, absolute: true })) {
try {
const data = await Bun.file(file).json()
const sessionID = path.basename(file, ".json")
const session = db.select().from(SessionTable).where(eq(SessionTable.id, sessionID)).get()
if (!session) {
log.warn("skipping orphaned session_share", { sessionID })
continue
}
db.insert(SessionShareTable).values({ sessionID, data }).onConflictDoNothing().run()
stats.shares++
} catch (e) {
stats.errors.push(`failed to migrate session_share ${file}: ${e}`)
}
}
log.info("migrated session shares", { count: stats.shares })
// Migrate shares (downloaded shared sessions, no FK)
const share2Glob = new Bun.Glob("share/*.json")
for await (const file of share2Glob.scan({ cwd: storageDir, absolute: true })) {
try {
const data = await Bun.file(file).json()
const sessionID = path.basename(file, ".json")
db.insert(ShareTable).values({ sessionID, data }).onConflictDoNothing().run()
} catch (e) {
stats.errors.push(`failed to migrate share ${file}: ${e}`)
}
}
// Mark migration complete
await Bun.write(migrationMarker, Date.now().toString())
log.info("json migration complete", {
projects: stats.projects,
sessions: stats.sessions,
messages: stats.messages,
parts: stats.parts,
diffs: stats.diffs,
todos: stats.todos,
permissions: stats.permissions,
shares: stats.shares,
errorCount: stats.errors.length,
})
if (stats.errors.length > 0) {
log.warn("migration errors", { errors: stats.errors.slice(0, 20) })
}
return stats
}
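The whole import is guarded by a marker file, so re-running it is a no-op, and each record type is checked against its parent before insert, so orphans are skipped instead of tripping foreign keys. The idempotency guard in isolation, as a minimal sketch assuming a storageDir; runOnce is a hypothetical wrapper, and the real code additionally requires the legacy "migration" counter file to exist:

import path from "path"

async function runOnce(storageDir: string, work: () => Promise<void>) {
  const marker = path.join(storageDir, "sqlite-migrated")
  if (await Bun.file(marker).exists()) return // already migrated, nothing to do
  await work()
  await Bun.write(marker, Date.now().toString()) // record completion
}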

View File

@ -1,227 +0,0 @@
import { Log } from "../util/log"
import path from "path"
import fs from "fs/promises"
import { Global } from "../global"
import { Filesystem } from "../util/filesystem"
import { lazy } from "../util/lazy"
import { Lock } from "../util/lock"
import { $ } from "bun"
import { NamedError } from "@opencode-ai/util/error"
import z from "zod"
export namespace Storage {
const log = Log.create({ service: "storage" })
type Migration = (dir: string) => Promise<void>
export const NotFoundError = NamedError.create(
"NotFoundError",
z.object({
message: z.string(),
}),
)
const MIGRATIONS: Migration[] = [
async (dir) => {
const project = path.resolve(dir, "../project")
if (!(await Filesystem.isDir(project))) return
for await (const projectDir of new Bun.Glob("*").scan({
cwd: project,
onlyFiles: false,
})) {
log.info(`migrating project ${projectDir}`)
let projectID = projectDir
const fullProjectDir = path.join(project, projectDir)
let worktree = "/"
if (projectID !== "global") {
for await (const msgFile of new Bun.Glob("storage/session/message/*/*.json").scan({
cwd: path.join(project, projectDir),
absolute: true,
})) {
const json = await Bun.file(msgFile).json()
worktree = json.path?.root
if (worktree) break
}
if (!worktree) continue
if (!(await Filesystem.isDir(worktree))) continue
const [id] = await $`git rev-list --max-parents=0 --all`
.quiet()
.nothrow()
.cwd(worktree)
.text()
.then((x) =>
x
.split("\n")
.filter(Boolean)
.map((x) => x.trim())
.toSorted(),
)
if (!id) continue
projectID = id
await Bun.write(
path.join(dir, "project", projectID + ".json"),
JSON.stringify({
id,
vcs: "git",
worktree,
time: {
created: Date.now(),
initialized: Date.now(),
},
}),
)
log.info(`migrating sessions for project ${projectID}`)
for await (const sessionFile of new Bun.Glob("storage/session/info/*.json").scan({
cwd: fullProjectDir,
absolute: true,
})) {
const dest = path.join(dir, "session", projectID, path.basename(sessionFile))
log.info("copying", {
sessionFile,
dest,
})
const session = await Bun.file(sessionFile).json()
await Bun.write(dest, JSON.stringify(session))
log.info(`migrating messages for session ${session.id}`)
for await (const msgFile of new Bun.Glob(`storage/session/message/${session.id}/*.json`).scan({
cwd: fullProjectDir,
absolute: true,
})) {
const dest = path.join(dir, "message", session.id, path.basename(msgFile))
log.info("copying", {
msgFile,
dest,
})
const message = await Bun.file(msgFile).json()
await Bun.write(dest, JSON.stringify(message))
log.info(`migrating parts for message ${message.id}`)
for await (const partFile of new Bun.Glob(`storage/session/part/${session.id}/${message.id}/*.json`).scan(
{
cwd: fullProjectDir,
absolute: true,
},
)) {
const dest = path.join(dir, "part", message.id, path.basename(partFile))
const part = await Bun.file(partFile).json()
log.info("copying", {
partFile,
dest,
})
await Bun.write(dest, JSON.stringify(part))
}
}
}
}
}
},
async (dir) => {
for await (const item of new Bun.Glob("session/*/*.json").scan({
cwd: dir,
absolute: true,
})) {
const session = await Bun.file(item).json()
if (!session.projectID) continue
if (!session.summary?.diffs) continue
const { diffs } = session.summary
await Bun.file(path.join(dir, "session_diff", session.id + ".json")).write(JSON.stringify(diffs))
await Bun.file(path.join(dir, "session", session.projectID, session.id + ".json")).write(
JSON.stringify({
...session,
summary: {
additions: diffs.reduce((sum: any, x: any) => sum + x.additions, 0),
deletions: diffs.reduce((sum: any, x: any) => sum + x.deletions, 0),
},
}),
)
}
},
]
const state = lazy(async () => {
const dir = path.join(Global.Path.data, "storage")
const migration = await Bun.file(path.join(dir, "migration"))
.json()
.then((x) => parseInt(x))
.catch(() => 0)
for (let index = migration; index < MIGRATIONS.length; index++) {
log.info("running migration", { index })
const migration = MIGRATIONS[index]
await migration(dir).catch(() => log.error("failed to run migration", { index }))
await Bun.write(path.join(dir, "migration"), (index + 1).toString())
}
return {
dir,
}
})
export async function remove(key: string[]) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
await fs.unlink(target).catch(() => {})
})
}
export async function read<T>(key: string[]) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.read(target)
const result = await Bun.file(target).json()
return result as T
})
}
export async function update<T>(key: string[], fn: (draft: T) => void) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.write(target)
const content = await Bun.file(target).json()
fn(content)
await Bun.write(target, JSON.stringify(content, null, 2))
return content as T
})
}
export async function write<T>(key: string[], content: T) {
const dir = await state().then((x) => x.dir)
const target = path.join(dir, ...key) + ".json"
return withErrorHandling(async () => {
using _ = await Lock.write(target)
await Bun.write(target, JSON.stringify(content, null, 2))
})
}
async function withErrorHandling<T>(body: () => Promise<T>) {
return body().catch((e) => {
if (!(e instanceof Error)) throw e
const errnoException = e as NodeJS.ErrnoException
if (errnoException.code === "ENOENT") {
throw new NotFoundError({ message: `Resource not found: ${errnoException.path}` })
}
throw e
})
}
const glob = new Bun.Glob("**/*")
export async function list(prefix: string[]) {
const dir = await state().then((x) => x.dir)
try {
const result = await Array.fromAsync(
glob.scan({
cwd: path.join(dir, ...prefix),
onlyFiles: true,
}),
).then((results) => results.map((x) => [...prefix, ...x.slice(0, -5).split(path.sep)]))
result.sort()
return result
} catch {
return []
}
}
}
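The deleted Storage namespace resolved key arrays to JSON files on disk (["project", id] becomes project/<id>.json) and threw NotFoundError on ENOENT. Under the new layer each key prefix maps to a table, and a missing row surfaces as undefined from .get() rather than a thrown error. A before/after sketch with a hypothetical project id:

import { eq } from "drizzle-orm"
import { db } from "./storage/db"
import { ProjectTable } from "./project/project.sql"

const projectID = "prj_123" // example id
// Before: await Storage.read<Project.Info>(["project", projectID]) read project/prj_123.json.
// After: the same record is a typed row; .get() yields undefined instead of throwing.
const info = db().select().from(ProjectTable).where(eq(ProjectTable.id, projectID)).get()?.data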

View File

@ -2,7 +2,6 @@ import { test, expect } from "bun:test"
import os from "os"
import { PermissionNext } from "../../src/permission/next"
import { Instance } from "../../src/project/instance"
-import { Storage } from "../../src/storage/storage"
import { tmpdir } from "../fixture/fixture"
// fromConfig tests

View File

@ -1,7 +1,9 @@
import { describe, expect, test } from "bun:test"
import { Project } from "../../src/project/project"
import { Log } from "../../src/util/log"
-import { Storage } from "../../src/storage/storage"
+import { db } from "../../src/storage/db"
+import { ProjectTable } from "../../src/project/project.sql"
+import { eq } from "drizzle-orm"
import { $ } from "bun"
import path from "path"
import { tmpdir } from "../fixture/fixture"
@ -99,11 +101,12 @@ describe("Project.discover", () => {
await Project.discover(project)
-const updated = await Storage.read<Project.Info>(["project", project.id])
-expect(updated.icon).toBeDefined()
-expect(updated.icon?.url).toStartWith("data:")
-expect(updated.icon?.url).toContain("base64")
-expect(updated.icon?.color).toBeUndefined()
+const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, project.id)).get()
+const updated = row?.data
+expect(updated?.icon).toBeDefined()
+expect(updated?.icon?.url).toStartWith("data:")
+expect(updated?.icon?.url).toContain("base64")
+expect(updated?.icon?.color).toBeUndefined()
})
test("should not discover non-image files", async () => {
@ -114,7 +117,8 @@ describe("Project.discover", () => {
await Project.discover(project)
-const updated = await Storage.read<Project.Info>(["project", project.id])
-expect(updated.icon).toBeUndefined()
+const row = db().select().from(ProjectTable).where(eq(ProjectTable.id, project.id)).get()
+const updated = row?.data
+expect(updated?.icon).toBeUndefined()
})
})