sqlite again (#10597)

Co-authored-by: Github Action <action@github.com>
Co-authored-by: opencode-agent[bot] <opencode-agent[bot]@users.noreply.github.com>
Co-authored-by: Brendan Allan <git@brendonovich.dev>
pull/13559/head
Dax 2026-02-13 23:19:02 -05:00 committed by GitHub
parent d018903887
commit 6d95f0d14c
61 changed files with 5756 additions and 878 deletions

View File

@ -16,15 +16,12 @@ wip:
For anything in the packages/web use the docs: prefix.
For anything in the packages/app use the ignore: prefix.
prefer to explain WHY something was done from an end user perspective instead of
WHAT was done.
do not do generic messages like "improved agent experience" be very specific
about what user facing changes were made
if there are changes do a git pull --rebase
if there are conflicts DO NOT FIX THEM. notify me and I will fix them
## GIT DIFF

View File

@ -110,3 +110,4 @@ const table = sqliteTable("session", {
- Avoid mocks as much as possible
- Test actual implementation, do not duplicate logic into tests
- Tests cannot run from repo root (guard: `do-not-run-tests-from-root`); run from package dirs like `packages/opencode`.

bun.lock (551 lines changed)

File diff suppressed because it is too large

View File

@ -40,6 +40,8 @@
"@tailwindcss/vite": "4.1.11", "@tailwindcss/vite": "4.1.11",
"diff": "8.0.2", "diff": "8.0.2",
"dompurify": "3.3.1", "dompurify": "3.3.1",
"drizzle-kit": "1.0.0-beta.12-a5629fb",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"ai": "5.0.124", "ai": "5.0.124",
"hono": "4.10.7", "hono": "4.10.7",
"hono-openapi": "1.1.2", "hono-openapi": "1.1.2",

View File

@ -231,6 +231,24 @@ export function applyDirectoryEvent(input: {
}
break
}
case "message.part.delta": {
const props = event.properties as { messageID: string; partID: string; field: string; delta: string }
const parts = input.store.part[props.messageID]
if (!parts) break
const result = Binary.search(parts, props.partID, (p) => p.id)
if (!result.found) break
input.setStore(
"part",
props.messageID,
produce((draft) => {
const part = draft[result.index]
const field = props.field as keyof typeof part
const existing = part[field] as string | undefined
;(part[field] as string) = (existing ?? "") + props.delta
}),
)
break
}
case "vcs.branch.updated": { case "vcs.branch.updated": {
const props = event.properties as { branch: string } const props = event.properties as { branch: string }
if (input.store.vcs?.branch === props.branch) break if (input.store.vcs?.branch === props.branch) break
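Note: the Binary.search helper used in the delta handler above is not part of this diff. As a rough, illustrative sketch only (not the project's actual implementation), a lower-bound binary search with the { found, index } shape used above could look like this in TypeScript:

// Sketch only: binary search over an array sorted by a string key,
// returning the matching index or the insertion point.
export function search<T>(items: T[], id: string, key: (item: T) => string): { found: boolean; index: number } {
  let low = 0
  let high = items.length
  while (low < high) {
    const mid = (low + high) >>> 1
    if (key(items[mid]) < id) low = mid + 1
    else high = mid
  }
  return { found: low < items.length && key(items[low]) === id, index: low }
}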

View File

@ -12,7 +12,7 @@
"@opencode-ai/console-resource": "workspace:*", "@opencode-ai/console-resource": "workspace:*",
"@planetscale/database": "1.19.0", "@planetscale/database": "1.19.0",
"aws4fetch": "1.0.20", "aws4fetch": "1.0.20",
"drizzle-orm": "0.41.0", "drizzle-orm": "catalog:",
"postgres": "3.4.7", "postgres": "3.4.7",
"stripe": "18.0.0", "stripe": "18.0.0",
"ulid": "catalog:", "ulid": "catalog:",
@ -44,7 +44,7 @@
"@tsconfig/node22": "22.0.2", "@tsconfig/node22": "22.0.2",
"@types/bun": "1.3.0", "@types/bun": "1.3.0",
"@types/node": "catalog:", "@types/node": "catalog:",
"drizzle-kit": "0.30.5", "drizzle-kit": "catalog:",
"mysql2": "3.14.4", "mysql2": "3.14.4",
"typescript": "catalog:", "typescript": "catalog:",
"@typescript/native-preview": "catalog:" "@typescript/native-preview": "catalog:"

View File

@ -4,7 +4,6 @@ export * from "drizzle-orm"
import { Client } from "@planetscale/database"
import { MySqlTransaction, type MySqlTransactionConfig } from "drizzle-orm/mysql-core"
import type { ExtractTablesWithRelations } from "drizzle-orm"
import type { PlanetScalePreparedQueryHKT, PlanetscaleQueryResultHKT } from "drizzle-orm/planetscale-serverless"
import { Context } from "../context"
import { memo } from "../util/memo"
@ -14,7 +13,7 @@ export namespace Database {
PlanetscaleQueryResultHKT,
PlanetScalePreparedQueryHKT,
Record<string, never>,
ExtractTablesWithRelations<Record<string, never>>
any
>
const client = memo(() => {
@ -23,7 +22,7 @@ export namespace Database {
username: Resource.Database.username,
password: Resource.Database.password,
})
const db = drizzle(result, {})
const db = drizzle({ client: result })
return db
})

View File

@ -1,27 +1,10 @@
# opencode agent guidelines
## Build/Test Commands
- **Install**: `bun install`
- **Run**: `bun run --conditions=browser ./src/index.ts`
- **Typecheck**: `bun run typecheck` (npm run typecheck)
- **Test**: `bun test` (runs all tests)
- **Single test**: `bun test test/tool/tool.test.ts` (specific test file)
## Code Style
- **Runtime**: Bun with TypeScript ESM modules
- **Imports**: Use relative imports for local modules, named imports preferred
- **Types**: Zod schemas for validation, TypeScript interfaces for structure
- **Naming**: camelCase for variables/functions, PascalCase for classes/namespaces
- **Error handling**: Use Result patterns, avoid throwing exceptions in tools
- **File structure**: Namespace-based organization (e.g., `Tool.define()`, `Session.create()`)
## Architecture
- **Tools**: Implement `Tool.Info` interface with `execute()` method
- **Context**: Pass `sessionID` in tool context, use `App.provide()` for DI
- **Validation**: All inputs validated with Zod schemas
- **Logging**: Use `Log.create({ service: "name" })` pattern
- **Storage**: Use `Storage` namespace for persistence
- **API Client**: The TypeScript TUI (built with SolidJS + OpenTUI) communicates with the OpenCode server using `@opencode-ai/sdk`. When adding/modifying server endpoints in `packages/opencode/src/server/server.ts`, run `./script/generate.ts` to regenerate the SDK and related files.
# opencode database guide
## Database
- **Schema**: Drizzle schema lives in `src/**/*.sql.ts`.
- **Naming**: tables and columns use snake_case; join columns are `<entity>_id`; indexes are `<table>_<column>_idx`.
- **Migrations**: generated by Drizzle Kit using `drizzle.config.ts` (schema: `./src/**/*.sql.ts`, output: `./migration`).
- **Command**: `bun run db generate --name <slug>`.
- **Output**: creates `migration/<timestamp>_<slug>/migration.sql` and `snapshot.json`.
- **Tests**: migration tests should read the per-folder layout (no `_journal.json`).

View File

@ -0,0 +1,10 @@
import { defineConfig } from "drizzle-kit"
export default defineConfig({
dialect: "sqlite",
schema: "./src/**/*.sql.ts",
out: "./migration",
dbCredentials: {
url: "/home/thdxr/.local/share/opencode/opencode.db",
},
})

View File

@ -0,0 +1,90 @@
CREATE TABLE `project` (
`id` text PRIMARY KEY,
`worktree` text NOT NULL,
`vcs` text,
`name` text,
`icon_url` text,
`icon_color` text,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`time_initialized` integer,
`sandboxes` text NOT NULL
);
--> statement-breakpoint
CREATE TABLE `message` (
`id` text PRIMARY KEY,
`session_id` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_message_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `part` (
`id` text PRIMARY KEY,
`message_id` text NOT NULL,
`session_id` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_part_message_id_message_id_fk` FOREIGN KEY (`message_id`) REFERENCES `message`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `permission` (
`project_id` text PRIMARY KEY,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_permission_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `session` (
`id` text PRIMARY KEY,
`project_id` text NOT NULL,
`parent_id` text,
`slug` text NOT NULL,
`directory` text NOT NULL,
`title` text NOT NULL,
`version` text NOT NULL,
`share_url` text,
`summary_additions` integer,
`summary_deletions` integer,
`summary_files` integer,
`summary_diffs` text,
`revert` text,
`permission` text,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
`time_compacting` integer,
`time_archived` integer,
CONSTRAINT `fk_session_project_id_project_id_fk` FOREIGN KEY (`project_id`) REFERENCES `project`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `todo` (
`session_id` text NOT NULL,
`content` text NOT NULL,
`status` text NOT NULL,
`priority` text NOT NULL,
`position` integer NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
CONSTRAINT `todo_pk` PRIMARY KEY(`session_id`, `position`),
CONSTRAINT `fk_todo_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE TABLE `session_share` (
`session_id` text PRIMARY KEY,
`id` text NOT NULL,
`secret` text NOT NULL,
`url` text NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
CONSTRAINT `fk_session_share_session_id_session_id_fk` FOREIGN KEY (`session_id`) REFERENCES `session`(`id`) ON DELETE CASCADE
);
--> statement-breakpoint
CREATE INDEX `message_session_idx` ON `message` (`session_id`);--> statement-breakpoint
CREATE INDEX `part_message_idx` ON `part` (`message_id`);--> statement-breakpoint
CREATE INDEX `part_session_idx` ON `part` (`session_id`);--> statement-breakpoint
CREATE INDEX `session_project_idx` ON `session` (`project_id`);--> statement-breakpoint
CREATE INDEX `session_parent_idx` ON `session` (`parent_id`);--> statement-breakpoint
CREATE INDEX `todo_session_idx` ON `todo` (`session_id`);
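The `*.sql.ts` schema files that produce this DDL are not included in this excerpt. As an approximation only, a Drizzle sqlite-core definition matching the generated `project` table above might look like the following sketch (column names are taken from the SQL; everything else is assumed and may differ from the real `src/**/*.sql.ts` file):

import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core"

// Sketch only: mirrors the generated `project` DDL above.
export const ProjectTable = sqliteTable("project", {
  id: text("id").primaryKey(),
  worktree: text("worktree").notNull(),
  vcs: text("vcs"),
  name: text("name"),
  icon_url: text("icon_url"),
  icon_color: text("icon_color"),
  time_created: integer("time_created").notNull(),
  time_updated: integer("time_updated").notNull(),
  time_initialized: integer("time_initialized"),
  sandboxes: text("sandboxes").notNull(),
})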

View File

@ -0,0 +1,796 @@
{
"version": "7",
"dialect": "sqlite",
"id": "068758ed-a97a-46f6-8a59-6c639ae7c20c",
"prevIds": ["00000000-0000-0000-0000-000000000000"],
"ddl": [
{
"name": "project",
"entityType": "tables"
},
{
"name": "message",
"entityType": "tables"
},
{
"name": "part",
"entityType": "tables"
},
{
"name": "permission",
"entityType": "tables"
},
{
"name": "session",
"entityType": "tables"
},
{
"name": "todo",
"entityType": "tables"
},
{
"name": "session_share",
"entityType": "tables"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "worktree",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "vcs",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "name",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_url",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_color",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_initialized",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "sandboxes",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "message_id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "parent_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "slug",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "directory",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "title",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "version",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "share_url",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_additions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_deletions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_files",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_diffs",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "revert",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "permission",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_compacting",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_archived",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "content",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "status",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "priority",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "position",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "secret",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "url",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session_share"
},
{
"columns": ["session_id"],
"tableTo": "session",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_message_session_id_session_id_fk",
"entityType": "fks",
"table": "message"
},
{
"columns": ["message_id"],
"tableTo": "message",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_part_message_id_message_id_fk",
"entityType": "fks",
"table": "part"
},
{
"columns": ["project_id"],
"tableTo": "project",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_permission_project_id_project_id_fk",
"entityType": "fks",
"table": "permission"
},
{
"columns": ["project_id"],
"tableTo": "project",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_project_id_project_id_fk",
"entityType": "fks",
"table": "session"
},
{
"columns": ["session_id"],
"tableTo": "session",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_todo_session_id_session_id_fk",
"entityType": "fks",
"table": "todo"
},
{
"columns": ["session_id"],
"tableTo": "session",
"columnsTo": ["id"],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_share_session_id_session_id_fk",
"entityType": "fks",
"table": "session_share"
},
{
"columns": ["session_id", "position"],
"nameExplicit": false,
"name": "todo_pk",
"entityType": "pks",
"table": "todo"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "project_pk",
"table": "project",
"entityType": "pks"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "message_pk",
"table": "message",
"entityType": "pks"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "part_pk",
"table": "part",
"entityType": "pks"
},
{
"columns": ["project_id"],
"nameExplicit": false,
"name": "permission_pk",
"table": "permission",
"entityType": "pks"
},
{
"columns": ["id"],
"nameExplicit": false,
"name": "session_pk",
"table": "session",
"entityType": "pks"
},
{
"columns": ["session_id"],
"nameExplicit": false,
"name": "session_share_pk",
"table": "session_share",
"entityType": "pks"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "message_session_idx",
"entityType": "indexes",
"table": "message"
},
{
"columns": [
{
"value": "message_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_message_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_session_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "project_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_project_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "parent_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_parent_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "todo_session_idx",
"entityType": "indexes",
"table": "todo"
}
],
"renames": []
}

View File

@ -0,0 +1 @@
ALTER TABLE `project` ADD `commands` text;

View File

@ -0,0 +1,847 @@
{
"version": "7",
"dialect": "sqlite",
"id": "8bc2d11d-97fa-4ba8-8bfa-6c5956c49aeb",
"prevIds": [
"068758ed-a97a-46f6-8a59-6c639ae7c20c"
],
"ddl": [
{
"name": "project",
"entityType": "tables"
},
{
"name": "message",
"entityType": "tables"
},
{
"name": "part",
"entityType": "tables"
},
{
"name": "permission",
"entityType": "tables"
},
{
"name": "session",
"entityType": "tables"
},
{
"name": "todo",
"entityType": "tables"
},
{
"name": "session_share",
"entityType": "tables"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "worktree",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "vcs",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "name",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_url",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_color",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_initialized",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "sandboxes",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "commands",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "message_id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "parent_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "slug",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "directory",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "title",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "version",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "share_url",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_additions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_deletions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_files",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_diffs",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "revert",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "permission",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_compacting",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_archived",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "content",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "status",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "priority",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "position",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "secret",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "url",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session_share"
},
{
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_message_session_id_session_id_fk",
"entityType": "fks",
"table": "message"
},
{
"columns": [
"message_id"
],
"tableTo": "message",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_part_message_id_message_id_fk",
"entityType": "fks",
"table": "part"
},
{
"columns": [
"project_id"
],
"tableTo": "project",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_permission_project_id_project_id_fk",
"entityType": "fks",
"table": "permission"
},
{
"columns": [
"project_id"
],
"tableTo": "project",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_project_id_project_id_fk",
"entityType": "fks",
"table": "session"
},
{
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_todo_session_id_session_id_fk",
"entityType": "fks",
"table": "todo"
},
{
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_share_session_id_session_id_fk",
"entityType": "fks",
"table": "session_share"
},
{
"columns": [
"session_id",
"position"
],
"nameExplicit": false,
"name": "todo_pk",
"entityType": "pks",
"table": "todo"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "project_pk",
"table": "project",
"entityType": "pks"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "message_pk",
"table": "message",
"entityType": "pks"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "part_pk",
"table": "part",
"entityType": "pks"
},
{
"columns": [
"project_id"
],
"nameExplicit": false,
"name": "permission_pk",
"table": "permission",
"entityType": "pks"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "session_pk",
"table": "session",
"entityType": "pks"
},
{
"columns": [
"session_id"
],
"nameExplicit": false,
"name": "session_share_pk",
"table": "session_share",
"entityType": "pks"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "message_session_idx",
"entityType": "indexes",
"table": "message"
},
{
"columns": [
{
"value": "message_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_message_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_session_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "project_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_project_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "parent_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_parent_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "todo_session_idx",
"entityType": "indexes",
"table": "todo"
}
],
"renames": []
}

View File

@ -0,0 +1,11 @@
CREATE TABLE `control_account` (
`email` text NOT NULL,
`url` text NOT NULL,
`access_token` text NOT NULL,
`refresh_token` text NOT NULL,
`token_expiry` integer,
`active` integer NOT NULL,
`time_created` integer NOT NULL,
`time_updated` integer NOT NULL,
CONSTRAINT `control_account_pk` PRIMARY KEY(`email`, `url`)
);

View File

@ -0,0 +1,941 @@
{
"version": "7",
"dialect": "sqlite",
"id": "d2736e43-700f-4e9e-8151-9f2f0d967bc8",
"prevIds": [
"8bc2d11d-97fa-4ba8-8bfa-6c5956c49aeb"
],
"ddl": [
{
"name": "control_account",
"entityType": "tables"
},
{
"name": "project",
"entityType": "tables"
},
{
"name": "message",
"entityType": "tables"
},
{
"name": "part",
"entityType": "tables"
},
{
"name": "permission",
"entityType": "tables"
},
{
"name": "session",
"entityType": "tables"
},
{
"name": "todo",
"entityType": "tables"
},
{
"name": "session_share",
"entityType": "tables"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "email",
"entityType": "columns",
"table": "control_account"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "url",
"entityType": "columns",
"table": "control_account"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "access_token",
"entityType": "columns",
"table": "control_account"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "refresh_token",
"entityType": "columns",
"table": "control_account"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "token_expiry",
"entityType": "columns",
"table": "control_account"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "active",
"entityType": "columns",
"table": "control_account"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "control_account"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "control_account"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "worktree",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "vcs",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "name",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_url",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "icon_color",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "project"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_initialized",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "sandboxes",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "commands",
"entityType": "columns",
"table": "project"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "message"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "message"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "message_id",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "part"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "part"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "permission"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "data",
"entityType": "columns",
"table": "permission"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "project_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "parent_id",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "slug",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "directory",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "title",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "version",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "share_url",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_additions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_deletions",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_files",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "summary_diffs",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "revert",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "permission",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_compacting",
"entityType": "columns",
"table": "session"
},
{
"type": "integer",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_archived",
"entityType": "columns",
"table": "session"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "content",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "status",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "priority",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "position",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "todo"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "todo"
},
{
"type": "text",
"notNull": false,
"autoincrement": false,
"default": null,
"generated": null,
"name": "session_id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "id",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "secret",
"entityType": "columns",
"table": "session_share"
},
{
"type": "text",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "url",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_created",
"entityType": "columns",
"table": "session_share"
},
{
"type": "integer",
"notNull": true,
"autoincrement": false,
"default": null,
"generated": null,
"name": "time_updated",
"entityType": "columns",
"table": "session_share"
},
{
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_message_session_id_session_id_fk",
"entityType": "fks",
"table": "message"
},
{
"columns": [
"message_id"
],
"tableTo": "message",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_part_message_id_message_id_fk",
"entityType": "fks",
"table": "part"
},
{
"columns": [
"project_id"
],
"tableTo": "project",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_permission_project_id_project_id_fk",
"entityType": "fks",
"table": "permission"
},
{
"columns": [
"project_id"
],
"tableTo": "project",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_project_id_project_id_fk",
"entityType": "fks",
"table": "session"
},
{
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_todo_session_id_session_id_fk",
"entityType": "fks",
"table": "todo"
},
{
"columns": [
"session_id"
],
"tableTo": "session",
"columnsTo": [
"id"
],
"onUpdate": "NO ACTION",
"onDelete": "CASCADE",
"nameExplicit": false,
"name": "fk_session_share_session_id_session_id_fk",
"entityType": "fks",
"table": "session_share"
},
{
"columns": [
"email",
"url"
],
"nameExplicit": false,
"name": "control_account_pk",
"entityType": "pks",
"table": "control_account"
},
{
"columns": [
"session_id",
"position"
],
"nameExplicit": false,
"name": "todo_pk",
"entityType": "pks",
"table": "todo"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "project_pk",
"table": "project",
"entityType": "pks"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "message_pk",
"table": "message",
"entityType": "pks"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "part_pk",
"table": "part",
"entityType": "pks"
},
{
"columns": [
"project_id"
],
"nameExplicit": false,
"name": "permission_pk",
"table": "permission",
"entityType": "pks"
},
{
"columns": [
"id"
],
"nameExplicit": false,
"name": "session_pk",
"table": "session",
"entityType": "pks"
},
{
"columns": [
"session_id"
],
"nameExplicit": false,
"name": "session_share_pk",
"table": "session_share",
"entityType": "pks"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "message_session_idx",
"entityType": "indexes",
"table": "message"
},
{
"columns": [
{
"value": "message_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_message_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "part_session_idx",
"entityType": "indexes",
"table": "part"
},
{
"columns": [
{
"value": "project_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_project_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "parent_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "session_parent_idx",
"entityType": "indexes",
"table": "session"
},
{
"columns": [
{
"value": "session_id",
"isExpression": false
}
],
"isUnique": false,
"where": null,
"origin": "manual",
"name": "todo_session_idx",
"entityType": "indexes",
"table": "todo"
}
],
"renames": []
}

View File

@ -15,7 +15,8 @@
"lint": "echo 'Running lint checks...' && bun test --coverage", "lint": "echo 'Running lint checks...' && bun test --coverage",
"format": "echo 'Formatting code...' && bun run --prettier --write src/**/*.ts", "format": "echo 'Formatting code...' && bun run --prettier --write src/**/*.ts",
"docs": "echo 'Generating documentation...' && find src -name '*.ts' -exec echo 'Processing: {}' \\;", "docs": "echo 'Generating documentation...' && find src -name '*.ts' -exec echo 'Processing: {}' \\;",
"deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'" "deploy": "echo 'Deploying application...' && bun run build && echo 'Deployment completed successfully'",
"db": "bun drizzle-kit"
}, },
"bin": { "bin": {
"opencode": "./bin/opencode" "opencode": "./bin/opencode"
@ -42,6 +43,8 @@
"@types/turndown": "5.0.5", "@types/turndown": "5.0.5",
"@types/yargs": "17.0.33", "@types/yargs": "17.0.33",
"@typescript/native-preview": "catalog:", "@typescript/native-preview": "catalog:",
"drizzle-kit": "1.0.0-beta.12-a5629fb",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"typescript": "catalog:", "typescript": "catalog:",
"vscode-languageserver-types": "3.17.5", "vscode-languageserver-types": "3.17.5",
"why-is-node-running": "3.2.2", "why-is-node-running": "3.2.2",
@ -100,6 +103,7 @@
"clipboardy": "4.0.0", "clipboardy": "4.0.0",
"decimal.js": "10.5.0", "decimal.js": "10.5.0",
"diff": "catalog:", "diff": "catalog:",
"drizzle-orm": "1.0.0-beta.12-a5629fb",
"fuzzysort": "3.1.0", "fuzzysort": "3.1.0",
"gray-matter": "4.0.3", "gray-matter": "4.0.3",
"hono": "catalog:", "hono": "catalog:",
@ -122,5 +126,8 @@
"yargs": "18.0.0", "yargs": "18.0.0",
"zod": "catalog:", "zod": "catalog:",
"zod-to-json-schema": "3.24.5" "zod-to-json-schema": "3.24.5"
},
"overrides": {
"drizzle-orm": "1.0.0-beta.12-a5629fb"
}
}

View File

@ -25,6 +25,32 @@ await Bun.write(
)
console.log("Generated models-snapshot.ts")
// Load migrations from migration directories
const migrationDirs = (await fs.promises.readdir(path.join(dir, "migration"), { withFileTypes: true }))
.filter((entry) => entry.isDirectory() && /^\d{4}\d{2}\d{2}\d{2}\d{2}\d{2}/.test(entry.name))
.map((entry) => entry.name)
.sort()
const migrations = await Promise.all(
migrationDirs.map(async (name) => {
const file = path.join(dir, "migration", name, "migration.sql")
const sql = await Bun.file(file).text()
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(name)
const timestamp = match
? Date.UTC(
Number(match[1]),
Number(match[2]) - 1,
Number(match[3]),
Number(match[4]),
Number(match[5]),
Number(match[6]),
)
: 0
return { sql, timestamp }
}),
)
console.log(`Loaded ${migrations.length} migrations`)
const singleFlag = process.argv.includes("--single")
const baselineFlag = process.argv.includes("--baseline")
const skipInstall = process.argv.includes("--skip-install")
@ -156,6 +182,7 @@ for (const item of targets) {
entrypoints: ["./src/index.ts", parserWorker, workerPath],
define: {
OPENCODE_VERSION: `'${Script.version}'`,
OPENCODE_MIGRATIONS: JSON.stringify(migrations),
OTUI_TREE_SITTER_WORKER_PATH: bunfsRoot + workerRelativePath,
OPENCODE_WORKER_PATH: workerPath,
OPENCODE_CHANNEL: `'${Script.channel}'`,
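The define above bakes the per-folder migration SQL and timestamps into the build as OPENCODE_MIGRATIONS; the runtime code that applies them is not shown in this excerpt. A minimal sketch of how such an embedded list could be applied in timestamp order with bun:sqlite, assuming a hypothetical bookkeeping table and that the define is exposed as a global:

import { Database } from "bun:sqlite"

// Assumption: OPENCODE_MIGRATIONS is the compile-time define from the build script above.
declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[]

export function migrate(db: Database) {
  // Hypothetical bookkeeping table; the actual runtime may track applied migrations differently.
  db.exec("CREATE TABLE IF NOT EXISTS migration (timestamp INTEGER PRIMARY KEY)")
  const applied = new Set(
    db
      .query("SELECT timestamp FROM migration")
      .all()
      .map((row: any) => row.timestamp),
  )
  for (const item of [...OPENCODE_MIGRATIONS].sort((a, b) => a.timestamp - b.timestamp)) {
    if (applied.has(item.timestamp)) continue
    db.exec(item.sql)
    db.query("INSERT INTO migration (timestamp) VALUES (?)").run(item.timestamp)
  }
}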

View File

@ -0,0 +1,16 @@
#!/usr/bin/env bun
import { $ } from "bun"
// drizzle-kit check compares schema to migrations, exits non-zero if drift
const result = await $`bun drizzle-kit check`.quiet().nothrow()
if (result.exitCode !== 0) {
console.error("Schema has changes not captured in migrations!")
console.error("Run: bun drizzle-kit generate")
console.error("")
console.error(result.stderr.toString())
process.exit(1)
}
console.log("Migrations are up to date")

View File

@ -435,46 +435,68 @@ export namespace ACP {
return
}
}
if (part.type === "text") {
const delta = props.delta
if (delta && part.ignored !== true) {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_message_chunk",
content: {
type: "text",
text: delta,
},
},
})
.catch((error) => {
log.error("failed to send text to ACP", { error })
})
}
return
}
if (part.type === "reasoning") {
const delta = props.delta
if (delta) {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_thought_chunk",
content: {
type: "text",
text: delta,
},
},
})
.catch((error) => {
log.error("failed to send reasoning to ACP", { error })
})
}
}
return
}
case "message.part.delta": {
const props = event.properties
const session = this.sessionManager.tryGet(props.sessionID)
if (!session) return
const sessionId = session.id
const message = await this.sdk.session
.message(
{
sessionID: props.sessionID,
messageID: props.messageID,
directory: session.cwd,
},
{ throwOnError: true },
)
.then((x) => x.data)
.catch((error) => {
log.error("unexpected error when fetching message", { error })
return undefined
})
if (!message || message.info.role !== "assistant") return
const part = message.parts.find((p) => p.id === props.partID)
if (!part) return
if (part.type === "text" && props.field === "text" && part.ignored !== true) {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_message_chunk",
content: {
type: "text",
text: props.delta,
},
},
})
.catch((error) => {
log.error("failed to send text delta to ACP", { error })
})
return
}
if (part.type === "reasoning" && props.field === "text") {
await this.connection
.sessionUpdate({
sessionId,
update: {
sessionUpdate: "agent_thought_chunk",
content: {
type: "text",
text: props.delta,
},
},
})
.catch((error) => {
log.error("failed to send reasoning delta to ACP", { error })
})
}
return
}

View File

@ -3,7 +3,8 @@ import type { Session as SDKSession, Message, Part } from "@opencode-ai/sdk/v2"
import { Session } from "../../session" import { Session } from "../../session"
import { cmd } from "./cmd" import { cmd } from "./cmd"
import { bootstrap } from "../bootstrap" import { bootstrap } from "../bootstrap"
import { Storage } from "../../storage/storage" import { Database } from "../../storage/db"
import { SessionTable, MessageTable, PartTable } from "../../session/session.sql"
import { Instance } from "../../project/instance" import { Instance } from "../../project/instance"
import { ShareNext } from "../../share/share-next" import { ShareNext } from "../../share/share-next"
import { EOL } from "os" import { EOL } from "os"
@ -130,13 +131,35 @@ export const ImportCommand = cmd({
return return
} }
await Storage.write(["session", Instance.project.id, exportData.info.id], exportData.info) Database.use((db) => db.insert(SessionTable).values(Session.toRow(exportData.info)).onConflictDoNothing().run())
for (const msg of exportData.messages) { for (const msg of exportData.messages) {
await Storage.write(["message", exportData.info.id, msg.info.id], msg.info) Database.use((db) =>
db
.insert(MessageTable)
.values({
id: msg.info.id,
session_id: exportData.info.id,
time_created: msg.info.time?.created ?? Date.now(),
data: msg.info,
})
.onConflictDoNothing()
.run(),
)
for (const part of msg.parts) { for (const part of msg.parts) {
await Storage.write(["part", msg.info.id, part.id], part) Database.use((db) =>
db
.insert(PartTable)
.values({
id: part.id,
message_id: msg.info.id,
session_id: exportData.info.id,
data: part,
})
.onConflictDoNothing()
.run(),
)
} }
} }

View File

@ -2,7 +2,8 @@ import type { Argv } from "yargs"
import { cmd } from "./cmd" import { cmd } from "./cmd"
import { Session } from "../../session" import { Session } from "../../session"
import { bootstrap } from "../bootstrap" import { bootstrap } from "../bootstrap"
import { Storage } from "../../storage/storage" import { Database } from "../../storage/db"
import { SessionTable } from "../../session/session.sql"
import { Project } from "../../project/project" import { Project } from "../../project/project"
import { Instance } from "../../project/instance" import { Instance } from "../../project/instance"
@ -87,25 +88,8 @@ async function getCurrentProject(): Promise<Project.Info> {
} }
async function getAllSessions(): Promise<Session.Info[]> {
  const rows = Database.use((db) => db.select().from(SessionTable).all())
  return rows.map((row) => Session.fromRow(row))
}
export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> { export async function aggregateSessionStats(days?: number, projectFilter?: string): Promise<SessionStats> {

View File

@ -299,6 +299,24 @@ export const { use: useSync, provider: SyncProvider } = createSimpleContext({
break break
} }
case "message.part.delta": {
const parts = store.part[event.properties.messageID]
if (!parts) break
const result = Binary.search(parts, event.properties.partID, (p) => p.id)
if (!result.found) break
setStore(
"part",
event.properties.messageID,
produce((draft) => {
const part = draft[result.index]
const field = event.properties.field as keyof typeof part
const existing = part[field] as string | undefined
;(part[field] as string) = (existing ?? "") + event.properties.delta
}),
)
break
}
case "message.part.removed": { case "message.part.removed": {
const parts = store.part[event.properties.messageID] const parts = store.part[event.properties.messageID]
const result = Binary.search(parts, event.properties.partID, (p) => p.id) const result = Binary.search(parts, event.properties.partID, (p) => p.id)

View File

@ -2042,8 +2042,8 @@ function ApplyPatch(props: ToolProps<typeof ApplyPatchTool>) {
</For> </For>
</Match> </Match>
<Match when={true}> <Match when={true}>
<InlineTool icon="%" pending="Preparing apply_patch..." complete={false} part={props.part}> <InlineTool icon="%" pending="Preparing patch..." complete={false} part={props.part}>
apply_patch Patch
</InlineTool> </InlineTool>
</Match> </Match>
</Switch> </Switch>

View File

@ -31,6 +31,7 @@ import { Event } from "../server/event"
import { PackageRegistry } from "@/bun/registry" import { PackageRegistry } from "@/bun/registry"
import { proxied } from "@/util/proxied" import { proxied } from "@/util/proxied"
import { iife } from "@/util/iife" import { iife } from "@/util/iife"
import { Control } from "@/control"
export namespace Config { export namespace Config {
const ModelId = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" }) const ModelId = z.string().meta({ $ref: "https://models.dev/model-schema.json#/$defs/Model" })
@ -53,7 +54,7 @@ export namespace Config {
const managedConfigDir = process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || getManagedConfigDir() const managedConfigDir = process.env.OPENCODE_TEST_MANAGED_CONFIG_DIR || getManagedConfigDir()
// Custom merge function that concatenates array fields instead of replacing them // Custom merge function that concatenates array fields instead of replacing them
function merge(target: Info, source: Info): Info {
const merged = mergeDeep(target, source) const merged = mergeDeep(target, source)
if (target.plugin && source.plugin) { if (target.plugin && source.plugin) {
merged.plugin = Array.from(new Set([...target.plugin, ...source.plugin])) merged.plugin = Array.from(new Set([...target.plugin, ...source.plugin]))
@ -88,20 +89,21 @@ export namespace Config {
const remoteConfig = wellknown.config ?? {} const remoteConfig = wellknown.config ?? {}
// Add $schema to prevent load() from trying to write back to a non-existent file // Add $schema to prevent load() from trying to write back to a non-existent file
if (!remoteConfig.$schema) remoteConfig.$schema = "https://opencode.ai/config.json" if (!remoteConfig.$schema) remoteConfig.$schema = "https://opencode.ai/config.json"
        result = merge(result, await load(JSON.stringify(remoteConfig), `${key}/.well-known/opencode`))
log.debug("loaded remote config from well-known", { url: key }) log.debug("loaded remote config from well-known", { url: key })
} }
} }
const token = await Control.token()
if (token) {
}
// Global user config overrides remote config. // Global user config overrides remote config.
    result = merge(result, await global())
// Custom config path overrides global config. // Custom config path overrides global config.
if (Flag.OPENCODE_CONFIG) { if (Flag.OPENCODE_CONFIG) {
      result = merge(result, await loadFile(Flag.OPENCODE_CONFIG))
log.debug("loaded custom config", { path: Flag.OPENCODE_CONFIG }) log.debug("loaded custom config", { path: Flag.OPENCODE_CONFIG })
} }
@ -110,7 +112,7 @@ export namespace Config {
for (const file of ["opencode.jsonc", "opencode.json"]) { for (const file of ["opencode.jsonc", "opencode.json"]) {
const found = await Filesystem.findUp(file, Instance.directory, Instance.worktree) const found = await Filesystem.findUp(file, Instance.directory, Instance.worktree)
for (const resolved of found.toReversed()) { for (const resolved of found.toReversed()) {
        result = merge(result, await loadFile(resolved))
} }
} }
} }
@ -153,7 +155,7 @@ export namespace Config {
if (dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) { if (dir.endsWith(".opencode") || dir === Flag.OPENCODE_CONFIG_DIR) {
for (const file of ["opencode.jsonc", "opencode.json"]) { for (const file of ["opencode.jsonc", "opencode.json"]) {
log.debug(`loading config from ${path.join(dir, file)}`) log.debug(`loading config from ${path.join(dir, file)}`)
          result = merge(result, await loadFile(path.join(dir, file)))
// to satisfy the type checker // to satisfy the type checker
result.agent ??= {} result.agent ??= {}
result.mode ??= {} result.mode ??= {}
@ -176,7 +178,7 @@ export namespace Config {
// Inline config content overrides all non-managed config sources. // Inline config content overrides all non-managed config sources.
if (Flag.OPENCODE_CONFIG_CONTENT) { if (Flag.OPENCODE_CONFIG_CONTENT) {
      result = merge(result, JSON.parse(Flag.OPENCODE_CONFIG_CONTENT))
log.debug("loaded custom config from OPENCODE_CONFIG_CONTENT") log.debug("loaded custom config from OPENCODE_CONFIG_CONTENT")
} }
@ -186,7 +188,7 @@ export namespace Config {
// This way it only loads config file and not skills/plugins/commands // This way it only loads config file and not skills/plugins/commands
if (existsSync(managedConfigDir)) { if (existsSync(managedConfigDir)) {
for (const file of ["opencode.jsonc", "opencode.json"]) { for (const file of ["opencode.jsonc", "opencode.json"]) {
        result = merge(result, await loadFile(path.join(managedConfigDir, file)))
} }
} }

View File

@ -0,0 +1,22 @@
import { sqliteTable, text, integer, primaryKey, uniqueIndex } from "drizzle-orm/sqlite-core"
import { eq } from "drizzle-orm"
import { Timestamps } from "@/storage/schema.sql"
export const ControlAccountTable = sqliteTable(
"control_account",
{
email: text().notNull(),
url: text().notNull(),
access_token: text().notNull(),
refresh_token: text().notNull(),
token_expiry: integer(),
active: integer({ mode: "boolean" })
.notNull()
.$default(() => false),
...Timestamps,
},
(table) => [
primaryKey({ columns: [table.email, table.url] }),
// uniqueIndex("control_account_active_idx").on(table.email).where(eq(table.active, true)),
],
)

View File

@ -0,0 +1,67 @@
import { eq, and } from "drizzle-orm"
import { Database } from "@/storage/db"
import { ControlAccountTable } from "./control.sql"
import z from "zod"
export * from "./control.sql"
export namespace Control {
export const Account = z.object({
email: z.string(),
url: z.string(),
})
export type Account = z.infer<typeof Account>
function fromRow(row: (typeof ControlAccountTable)["$inferSelect"]): Account {
return {
email: row.email,
url: row.url,
}
}
export function account(): Account | undefined {
const row = Database.use((db) =>
db.select().from(ControlAccountTable).where(eq(ControlAccountTable.active, true)).get(),
)
return row ? fromRow(row) : undefined
}
export async function token(): Promise<string | undefined> {
const row = Database.use((db) =>
db.select().from(ControlAccountTable).where(eq(ControlAccountTable.active, true)).get(),
)
if (!row) return undefined
if (row.token_expiry && row.token_expiry > Date.now()) return row.access_token
const res = await fetch(`${row.url}/oauth/token`, {
method: "POST",
headers: { "Content-Type": "application/x-www-form-urlencoded" },
body: new URLSearchParams({
grant_type: "refresh_token",
refresh_token: row.refresh_token,
}).toString(),
})
if (!res.ok) return
const json = (await res.json()) as {
access_token: string
refresh_token?: string
expires_in?: number
}
Database.use((db) =>
db
.update(ControlAccountTable)
.set({
access_token: json.access_token,
refresh_token: json.refresh_token ?? row.refresh_token,
token_expiry: json.expires_in ? Date.now() + json.expires_in * 1000 : undefined,
})
.where(and(eq(ControlAccountTable.email, row.email), eq(ControlAccountTable.url, row.url)))
.run(),
)
return json.access_token
}
}
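// Note (editor sketch, not part of the commit): callers of Control.token() are not included in
// this diff. A minimal example of attaching the (possibly refreshed) token to an outgoing request;
// the helper name is hypothetical:
import { Control } from "@/control"

async function authorizedFetch(input: string, init: RequestInit = {}) {
  const token = await Control.token()
  const headers = new Headers(init.headers)
  if (token) headers.set("Authorization", `Bearer ${token}`)
  return fetch(input, { ...init, headers })
}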

View File

@ -26,6 +26,10 @@ import { EOL } from "os"
import { WebCommand } from "./cli/cmd/web" import { WebCommand } from "./cli/cmd/web"
import { PrCommand } from "./cli/cmd/pr" import { PrCommand } from "./cli/cmd/pr"
import { SessionCommand } from "./cli/cmd/session" import { SessionCommand } from "./cli/cmd/session"
import path from "path"
import { Global } from "./global"
import { JsonMigration } from "./storage/json-migration"
import { Database } from "./storage/db"
process.on("unhandledRejection", (e) => { process.on("unhandledRejection", (e) => {
Log.Default.error("rejection", { Log.Default.error("rejection", {
@ -74,6 +78,43 @@ const cli = yargs(hideBin(process.argv))
version: Installation.VERSION, version: Installation.VERSION,
args: process.argv.slice(2), args: process.argv.slice(2),
}) })
const marker = path.join(Global.Path.data, "opencode.db")
if (!(await Bun.file(marker).exists())) {
console.log("Performing one time database migration, may take a few minutes...")
const tty = process.stdout.isTTY
const width = 36
const orange = "\x1b[38;5;214m"
const muted = "\x1b[0;2m"
const reset = "\x1b[0m"
let last = -1
if (tty) process.stdout.write("\x1b[?25l")
try {
await JsonMigration.run(Database.Client().$client, {
progress: (event) => {
const percent = Math.floor((event.current / event.total) * 100)
if (percent === last && event.current !== event.total) return
last = percent
if (tty) {
const fill = Math.round((percent / 100) * width)
const bar = `${"■".repeat(fill)}${"・".repeat(width - fill)}`
process.stdout.write(
`\r${orange}${bar} ${percent.toString().padStart(3)}%${reset} ${muted}${event.label.padEnd(12)} ${event.current}/${event.total}${reset}`,
)
if (event.current === event.total) process.stdout.write("\n")
} else {
console.log(`sqlite-migration:${percent}`)
}
},
})
} finally {
if (tty) process.stdout.write("\x1b[?25h")
else {
console.log(`sqlite-migration:done`)
}
}
console.log("Database migration complete.")
}
}) })
.usage("\n" + UI.logo()) .usage("\n" + UI.logo())
.completion("completion", "generate shell completion script") .completion("completion", "generate shell completion script")
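// Note (editor sketch, not part of the commit): when stdout is not a TTY the migration prints
// machine-readable lines ("sqlite-migration:<percent>" and "sqlite-migration:done"). A wrapper
// process could surface that as progress; `proc` and `reportProgress` are hypothetical:
function watchMigrationProgress(proc: { stdout: AsyncIterable<Uint8Array> }, reportProgress: (value: string) => void) {
  return (async () => {
    const decoder = new TextDecoder()
    for await (const chunk of proc.stdout) {
      for (const line of decoder.decode(chunk).split("\n")) {
        const match = /^sqlite-migration:(\d+|done)$/.exec(line.trim())
        if (match) reportProgress(match[1])
      }
    }
  })()
}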

View File

@ -3,7 +3,8 @@ import { BusEvent } from "@/bus/bus-event"
import { Config } from "@/config/config" import { Config } from "@/config/config"
import { Identifier } from "@/id/id" import { Identifier } from "@/id/id"
import { Instance } from "@/project/instance" import { Instance } from "@/project/instance"
import { Storage } from "@/storage/storage" import { Database, eq } from "@/storage/db"
import { PermissionTable } from "@/session/session.sql"
import { fn } from "@/util/fn" import { fn } from "@/util/fn"
import { Log } from "@/util/log" import { Log } from "@/util/log"
import { Wildcard } from "@/util/wildcard" import { Wildcard } from "@/util/wildcard"
@ -105,9 +106,12 @@ export namespace PermissionNext {
), ),
} }
  const state = Instance.state(() => {
    const projectID = Instance.project.id
    const row = Database.use((db) =>
      db.select().from(PermissionTable).where(eq(PermissionTable.project_id, projectID)).get(),
    )
    const stored = row?.data ?? ([] as Ruleset)
const pending: Record< const pending: Record<
string, string,
@ -222,7 +226,8 @@ export namespace PermissionNext {
// TODO: we don't save the permission ruleset to disk yet until there's // TODO: we don't save the permission ruleset to disk yet until there's
// UI to manage it // UI to manage it
// await Storage.write(["permission", Instance.project.id], s.approved) // db().insert(PermissionTable).values({ projectID: Instance.project.id, data: s.approved })
// .onConflictDoUpdate({ target: PermissionTable.projectID, set: { data: s.approved } }).run()
return return
} }
}, },
@ -275,6 +280,7 @@ export namespace PermissionNext {
} }
export async function list() { export async function list() {
    const s = await state()
    return Object.values(s.pending).map((x) => x.info)
} }
} }

View File

@ -1,5 +1,4 @@
import { Plugin } from "../plugin" import { Plugin } from "../plugin"
import { Share } from "../share/share"
import { Format } from "../format" import { Format } from "../format"
import { LSP } from "../lsp" import { LSP } from "../lsp"
import { FileWatcher } from "../file/watcher" import { FileWatcher } from "../file/watcher"
@ -17,7 +16,6 @@ import { Truncate } from "../tool/truncation"
export async function InstanceBootstrap() { export async function InstanceBootstrap() {
Log.Default.info("bootstrapping", { directory: Instance.directory }) Log.Default.info("bootstrapping", { directory: Instance.directory })
await Plugin.init() await Plugin.init()
Share.init()
ShareNext.init() ShareNext.init()
Format.init() Format.init()
await LSP.init() await LSP.init()

View File

@ -0,0 +1,15 @@
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core"
import { Timestamps } from "@/storage/schema.sql"
export const ProjectTable = sqliteTable("project", {
id: text().primaryKey(),
worktree: text().notNull(),
vcs: text(),
name: text(),
icon_url: text(),
icon_color: text(),
...Timestamps,
time_initialized: integer(),
sandboxes: text({ mode: "json" }).notNull().$type<string[]>(),
commands: text({ mode: "json" }).$type<{ start?: string }>(),
})

View File

@ -1,11 +1,11 @@
import z from "zod" import z from "zod"
import fs from "fs/promises"
import { Filesystem } from "../util/filesystem" import { Filesystem } from "../util/filesystem"
import path from "path" import path from "path"
import { Storage } from "../storage/storage" import { Database, eq } from "../storage/db"
import { ProjectTable } from "./project.sql"
import { SessionTable } from "../session/session.sql"
import { Log } from "../util/log" import { Log } from "../util/log"
import { Flag } from "@/flag/flag" import { Flag } from "@/flag/flag"
import { Session } from "../session"
import { work } from "../util/queue" import { work } from "../util/queue"
import { fn } from "@opencode-ai/util/fn" import { fn } from "@opencode-ai/util/fn"
import { BusEvent } from "@/bus/bus-event" import { BusEvent } from "@/bus/bus-event"
@ -50,10 +50,33 @@ export namespace Project {
Updated: BusEvent.define("project.updated", Info), Updated: BusEvent.define("project.updated", Info),
} }
type Row = typeof ProjectTable.$inferSelect
export function fromRow(row: Row): Info {
const icon =
row.icon_url || row.icon_color
? { url: row.icon_url ?? undefined, color: row.icon_color ?? undefined }
: undefined
return {
id: row.id,
worktree: row.worktree,
vcs: row.vcs ? Info.shape.vcs.parse(row.vcs) : undefined,
name: row.name ?? undefined,
icon,
time: {
created: row.time_created,
updated: row.time_updated,
initialized: row.time_initialized ?? undefined,
},
sandboxes: row.sandboxes,
commands: row.commands ?? undefined,
}
}
export async function fromDirectory(directory: string) { export async function fromDirectory(directory: string) {
log.info("fromDirectory", { directory }) log.info("fromDirectory", { directory })
const { id, sandbox, worktree, vcs } = await iife(async () => { const data = await iife(async () => {
const matches = Filesystem.up({ targets: [".git"], start: directory }) const matches = Filesystem.up({ targets: [".git"], start: directory })
const dotgit = await matches.next().then((x) => x.value) const dotgit = await matches.next().then((x) => x.value)
await matches.return() await matches.return()
@ -169,47 +192,73 @@ export namespace Project {
} }
}) })
let existing = await Storage.read<Info>(["project", id]).catch(() => undefined) const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, data.id)).get())
if (!existing) { const existing = await iife(async () => {
existing = { if (row) return fromRow(row)
id, const fresh: Info = {
worktree, id: data.id,
vcs: vcs as Info["vcs"], worktree: data.worktree,
vcs: data.vcs as Info["vcs"],
sandboxes: [], sandboxes: [],
time: { time: {
created: Date.now(), created: Date.now(),
updated: Date.now(), updated: Date.now(),
}, },
} }
if (id !== "global") { if (data.id !== "global") {
await migrateFromGlobal(id, worktree) await migrateFromGlobal(data.id, data.worktree)
} }
} return fresh
})
// migrate old projects before sandboxes
if (!existing.sandboxes) existing.sandboxes = []
if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY) discover(existing) if (Flag.OPENCODE_EXPERIMENTAL_ICON_DISCOVERY) discover(existing)
const result: Info = { const result: Info = {
...existing, ...existing,
worktree, worktree: data.worktree,
vcs: vcs as Info["vcs"], vcs: data.vcs as Info["vcs"],
time: { time: {
...existing.time, ...existing.time,
updated: Date.now(), updated: Date.now(),
}, },
} }
if (sandbox !== result.worktree && !result.sandboxes.includes(sandbox)) result.sandboxes.push(sandbox) if (data.sandbox !== result.worktree && !result.sandboxes.includes(data.sandbox))
result.sandboxes.push(data.sandbox)
result.sandboxes = result.sandboxes.filter((x) => existsSync(x)) result.sandboxes = result.sandboxes.filter((x) => existsSync(x))
await Storage.write<Info>(["project", id], result) const insert = {
id: result.id,
worktree: result.worktree,
vcs: result.vcs ?? null,
name: result.name,
icon_url: result.icon?.url,
icon_color: result.icon?.color,
time_created: result.time.created,
time_updated: result.time.updated,
time_initialized: result.time.initialized,
sandboxes: result.sandboxes,
commands: result.commands,
}
const updateSet = {
worktree: result.worktree,
vcs: result.vcs ?? null,
name: result.name,
icon_url: result.icon?.url,
icon_color: result.icon?.color,
time_updated: result.time.updated,
time_initialized: result.time.initialized,
sandboxes: result.sandboxes,
commands: result.commands,
}
Database.use((db) =>
db.insert(ProjectTable).values(insert).onConflictDoUpdate({ target: ProjectTable.id, set: updateSet }).run(),
)
GlobalBus.emit("event", { GlobalBus.emit("event", {
payload: { payload: {
type: Event.Updated.type, type: Event.Updated.type,
properties: result, properties: result,
}, },
}) })
return { project: result, sandbox } return { project: result, sandbox: data.sandbox }
} }
export async function discover(input: Info) { export async function discover(input: Info) {
@ -242,43 +291,54 @@ export namespace Project {
return return
} }
  async function migrateFromGlobal(id: string, worktree: string) {
    const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, "global")).get())
    if (!row) return

    const sessions = Database.use((db) =>
      db.select().from(SessionTable).where(eq(SessionTable.project_id, "global")).all(),
    )
    if (sessions.length === 0) return

    log.info("migrating sessions from global", { newProjectID: id, worktree, count: sessions.length })
    await work(10, sessions, async (row) => {
      // Skip sessions that belong to a different directory
      if (row.directory && row.directory !== worktree) return

      log.info("migrating session", { sessionID: row.id, from: "global", to: id })
      Database.use((db) => db.update(SessionTable).set({ project_id: id }).where(eq(SessionTable.id, row.id)).run())
    }).catch((error) => {
      log.error("failed to migrate sessions from global to project", { error, projectId: id })
    })
  }
  export function setInitialized(id: string) {
    Database.use((db) =>
      db
        .update(ProjectTable)
        .set({
          time_initialized: Date.now(),
        })
        .where(eq(ProjectTable.id, id))
        .run(),
    )
} }
  export function list() {
    return Database.use((db) =>
      db
        .select()
        .from(ProjectTable)
        .all()
        .map((row) => fromRow(row)),
    )
  }
export function get(id: string): Info | undefined {
const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
if (!row) return undefined
return fromRow(row)
} }
export const update = fn( export const update = fn(
@ -289,77 +349,90 @@ export namespace Project {
commands: Info.shape.commands.optional(), commands: Info.shape.commands.optional(),
}), }),
async (input) => { async (input) => {
const result = await Storage.update<Info>(["project", input.projectID], (draft) => { const result = Database.use((db) =>
if (input.name !== undefined) draft.name = input.name db
if (input.icon !== undefined) { .update(ProjectTable)
draft.icon = { .set({
...draft.icon, name: input.name,
} icon_url: input.icon?.url,
if (input.icon.url !== undefined) draft.icon.url = input.icon.url icon_color: input.icon?.color,
if (input.icon.override !== undefined) draft.icon.override = input.icon.override || undefined commands: input.commands,
if (input.icon.color !== undefined) draft.icon.color = input.icon.color time_updated: Date.now(),
} })
.where(eq(ProjectTable.id, input.projectID))
if (input.commands?.start !== undefined) { .returning()
const start = input.commands.start || undefined .get(),
draft.commands = { )
...(draft.commands ?? {}), if (!result) throw new Error(`Project not found: ${input.projectID}`)
} const data = fromRow(result)
draft.commands.start = start
if (!draft.commands.start) draft.commands = undefined
}
draft.time.updated = Date.now()
})
GlobalBus.emit("event", { GlobalBus.emit("event", {
payload: { payload: {
type: Event.Updated.type, type: Event.Updated.type,
properties: result, properties: data,
}, },
}) })
return result return data
}, },
) )
  export async function sandboxes(id: string) {
    const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
    if (!row) return []
    const data = fromRow(row)

    const valid: string[] = []
    for (const dir of data.sandboxes) {
      const stat = await Bun.file(dir)
        .stat()
        .catch(() => undefined)
      if (stat?.isDirectory()) valid.push(dir)
    }
    return valid
  }
export async function addSandbox(projectID: string, directory: string) { export async function addSandbox(id: string, directory: string) {
const result = await Storage.update<Info>(["project", projectID], (draft) => { const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
const sandboxes = draft.sandboxes ?? [] if (!row) throw new Error(`Project not found: ${id}`)
if (!sandboxes.includes(directory)) sandboxes.push(directory) const sandboxes = [...row.sandboxes]
draft.sandboxes = sandboxes if (!sandboxes.includes(directory)) sandboxes.push(directory)
draft.time.updated = Date.now() const result = Database.use((db) =>
}) db
.update(ProjectTable)
.set({ sandboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
const data = fromRow(result)
GlobalBus.emit("event", { GlobalBus.emit("event", {
payload: { payload: {
type: Event.Updated.type, type: Event.Updated.type,
properties: result, properties: data,
}, },
}) })
return result return data
} }
export async function removeSandbox(projectID: string, directory: string) { export async function removeSandbox(id: string, directory: string) {
const result = await Storage.update<Info>(["project", projectID], (draft) => { const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, id)).get())
const sandboxes = draft.sandboxes ?? [] if (!row) throw new Error(`Project not found: ${id}`)
draft.sandboxes = sandboxes.filter((sandbox) => sandbox !== directory) const sandboxes = row.sandboxes.filter((s) => s !== directory)
draft.time.updated = Date.now() const result = Database.use((db) =>
}) db
.update(ProjectTable)
.set({ sandboxes, time_updated: Date.now() })
.where(eq(ProjectTable.id, id))
.returning()
.get(),
)
if (!result) throw new Error(`Project not found: ${id}`)
const data = fromRow(result)
GlobalBus.emit("event", { GlobalBus.emit("event", {
payload: { payload: {
type: Event.Updated.type, type: Event.Updated.type,
properties: result, properties: data,
}, },
}) })
return result return data
} }
} }

View File

@ -1,6 +1,6 @@
import { resolver } from "hono-openapi" import { resolver } from "hono-openapi"
import z from "zod" import z from "zod"
import { Storage } from "../storage/storage" import { NotFoundError } from "../storage/db"
export const ERRORS = { export const ERRORS = {
400: { 400: {
@ -25,7 +25,7 @@ export const ERRORS = {
description: "Not found", description: "Not found",
content: { content: {
"application/json": { "application/json": {
        schema: resolver(NotFoundError.Schema),
}, },
}, },
}, },

View File

@ -3,7 +3,7 @@ import { describeRoute, validator, resolver } from "hono-openapi"
import { upgradeWebSocket } from "hono/bun" import { upgradeWebSocket } from "hono/bun"
import z from "zod" import z from "zod"
import { Pty } from "@/pty" import { Pty } from "@/pty"
import { Storage } from "../../storage/storage" import { NotFoundError } from "../../storage/db"
import { errors } from "../error" import { errors } from "../error"
import { lazy } from "../../util/lazy" import { lazy } from "../../util/lazy"
@ -76,7 +76,7 @@ export const PtyRoutes = lazy(() =>
async (c) => { async (c) => {
const info = Pty.get(c.req.valid("param").ptyID) const info = Pty.get(c.req.valid("param").ptyID)
if (!info) { if (!info) {
          throw new NotFoundError({ message: "Session not found" })
} }
return c.json(info) return c.json(info)
}, },

View File

@ -276,18 +276,15 @@ export const SessionRoutes = lazy(() =>
const sessionID = c.req.valid("param").sessionID const sessionID = c.req.valid("param").sessionID
const updates = c.req.valid("json") const updates = c.req.valid("json")
        let session = await Session.get(sessionID)
        if (updates.title !== undefined) {
          session = await Session.setTitle({ sessionID, title: updates.title })
        }
        if (updates.time?.archived !== undefined) {
          session = await Session.setArchived({ sessionID, time: updates.time.archived })
        }

        return c.json(session)
}, },
) )
.post( .post(

View File

@ -31,7 +31,7 @@ import { ExperimentalRoutes } from "./routes/experimental"
import { ProviderRoutes } from "./routes/provider" import { ProviderRoutes } from "./routes/provider"
import { lazy } from "../util/lazy" import { lazy } from "../util/lazy"
import { InstanceBootstrap } from "../project/bootstrap" import { InstanceBootstrap } from "../project/bootstrap"
import { Storage } from "../storage/storage" import { NotFoundError } from "../storage/db"
import type { ContentfulStatusCode } from "hono/utils/http-status" import type { ContentfulStatusCode } from "hono/utils/http-status"
import { websocket } from "hono/bun" import { websocket } from "hono/bun"
import { HTTPException } from "hono/http-exception" import { HTTPException } from "hono/http-exception"
@ -65,7 +65,7 @@ export namespace Server {
}) })
if (err instanceof NamedError) { if (err instanceof NamedError) {
let status: ContentfulStatusCode let status: ContentfulStatusCode
      if (err instanceof NotFoundError) status = 404
else if (err instanceof Provider.ModelNotFoundError) status = 400 else if (err instanceof Provider.ModelNotFoundError) status = 400
else if (err.name.startsWith("Worktree")) status = 400 else if (err.name.startsWith("Worktree")) status = 400
else status = 500 else status = 500

View File

@ -10,7 +10,9 @@ import { Flag } from "../flag/flag"
import { Identifier } from "../id/id" import { Identifier } from "../id/id"
import { Installation } from "../installation" import { Installation } from "../installation"
import { Storage } from "../storage/storage" import { Database, NotFoundError, eq, and, or, like } from "../storage/db"
import { SessionTable, MessageTable, PartTable } from "./session.sql"
import { Storage } from "@/storage/storage"
import { Log } from "../util/log" import { Log } from "../util/log"
import { MessageV2 } from "./message-v2" import { MessageV2 } from "./message-v2"
import { Instance } from "../project/instance" import { Instance } from "../project/instance"
@ -41,6 +43,64 @@ export namespace Session {
).test(title) ).test(title)
} }
type SessionRow = typeof SessionTable.$inferSelect
export function fromRow(row: SessionRow): Info {
const summary =
row.summary_additions !== null || row.summary_deletions !== null || row.summary_files !== null
? {
additions: row.summary_additions ?? 0,
deletions: row.summary_deletions ?? 0,
files: row.summary_files ?? 0,
diffs: row.summary_diffs ?? undefined,
}
: undefined
const share = row.share_url ? { url: row.share_url } : undefined
const revert = row.revert ?? undefined
return {
id: row.id,
slug: row.slug,
projectID: row.project_id,
directory: row.directory,
parentID: row.parent_id ?? undefined,
title: row.title,
version: row.version,
summary,
share,
revert,
permission: row.permission ?? undefined,
time: {
created: row.time_created,
updated: row.time_updated,
compacting: row.time_compacting ?? undefined,
archived: row.time_archived ?? undefined,
},
}
}
export function toRow(info: Info) {
return {
id: info.id,
project_id: info.projectID,
parent_id: info.parentID,
slug: info.slug,
directory: info.directory,
title: info.title,
version: info.version,
share_url: info.share?.url,
summary_additions: info.summary?.additions,
summary_deletions: info.summary?.deletions,
summary_files: info.summary?.files,
summary_diffs: info.summary?.diffs,
revert: info.revert ?? null,
permission: info.permission,
time_created: info.time.created,
time_updated: info.time.updated,
time_compacting: info.time.compacting,
time_archived: info.time.archived,
}
}
function getForkedTitle(title: string): string { function getForkedTitle(title: string): string {
const match = title.match(/^(.+) \(fork #(\d+)\)$/) const match = title.match(/^(.+) \(fork #(\d+)\)$/)
if (match) { if (match) {
@ -94,16 +154,6 @@ export namespace Session {
}) })
export type Info = z.output<typeof Info> export type Info = z.output<typeof Info>
export const ShareInfo = z
.object({
secret: z.string(),
url: z.string(),
})
.meta({
ref: "SessionShare",
})
export type ShareInfo = z.output<typeof ShareInfo>
export const Event = { export const Event = {
Created: BusEvent.define( Created: BusEvent.define(
"session.created", "session.created",
@ -200,8 +250,17 @@ export namespace Session {
) )
  export const touch = fn(Identifier.schema("session"), async (sessionID) => {
    const now = Date.now()
    Database.use((db) => {
      const row = db
        .update(SessionTable)
        .set({ time_updated: now })
        .where(eq(SessionTable.id, sessionID))
        .returning()
        .get()
      if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
      const info = fromRow(row)
      Database.effect(() => Bus.publish(Event.Updated, { info }))
    })
  })
@ -227,21 +286,19 @@ export namespace Session {
}, },
} }
log.info("created", result) log.info("created", result)
await Storage.write(["session", Instance.project.id, result.id], result) Database.use((db) => {
Bus.publish(Event.Created, { db.insert(SessionTable).values(toRow(result)).run()
info: result, Database.effect(() =>
Bus.publish(Event.Created, {
info: result,
}),
)
}) })
const cfg = await Config.get() const cfg = await Config.get()
if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto")) if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto"))
share(result.id) share(result.id).catch(() => {
.then((share) => { // Silently ignore sharing errors during session creation
update(result.id, (draft) => { })
draft.share = share
})
})
.catch(() => {
// Silently ignore sharing errors during session creation
})
Bus.publish(Event.Updated, { Bus.publish(Event.Updated, {
info: result, info: result,
}) })
@ -256,12 +313,9 @@ export namespace Session {
} }
  export const get = fn(Identifier.schema("session"), async (id) => {
    const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get())
    if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
    return fromRow(row)
  })
export const share = fn(Identifier.schema("session"), async (id) => { export const share = fn(Identifier.schema("session"), async (id) => {
@ -271,15 +325,12 @@ export namespace Session {
} }
const { ShareNext } = await import("@/share/share-next") const { ShareNext } = await import("@/share/share-next")
const share = await ShareNext.create(id) const share = await ShareNext.create(id)
    Database.use((db) => {
      const row = db.update(SessionTable).set({ share_url: share.url }).where(eq(SessionTable.id, id)).returning().get()
      if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
      const info = fromRow(row)
      Database.effect(() => Bus.publish(Event.Updated, { info }))
    })
return share return share
}) })
@ -287,32 +338,155 @@ export namespace Session {
// Use ShareNext to remove the share (same as share function uses ShareNext to create) // Use ShareNext to remove the share (same as share function uses ShareNext to create)
const { ShareNext } = await import("@/share/share-next") const { ShareNext } = await import("@/share/share-next")
await ShareNext.remove(id) await ShareNext.remove(id)
    Database.use((db) => {
      const row = db.update(SessionTable).set({ share_url: null }).where(eq(SessionTable.id, id)).returning().get()
      if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
      const info = fromRow(row)
      Database.effect(() => Bus.publish(Event.Updated, { info }))
    })
}) })
  export const setTitle = fn(
    z.object({
      sessionID: Identifier.schema("session"),
      title: z.string(),
    }),
    async (input) => {
      return Database.use((db) => {
const row = db
.update(SessionTable)
.set({ title: input.title })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)
export const setArchived = fn(
z.object({
sessionID: Identifier.schema("session"),
time: z.number().optional(),
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({ time_archived: input.time })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)
export const setPermission = fn(
z.object({
sessionID: Identifier.schema("session"),
permission: PermissionNext.Ruleset,
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({ permission: input.permission, time_updated: Date.now() })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)
export const setRevert = fn(
z.object({
sessionID: Identifier.schema("session"),
revert: Info.shape.revert,
summary: Info.shape.summary,
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({
revert: input.revert ?? null,
summary_additions: input.summary?.additions,
summary_deletions: input.summary?.deletions,
summary_files: input.summary?.files,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)
export const clearRevert = fn(Identifier.schema("session"), async (sessionID) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({
revert: null,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
    })
  })

  export const setSummary = fn(
    z.object({
      sessionID: Identifier.schema("session"),
summary: Info.shape.summary,
}),
async (input) => {
return Database.use((db) => {
const row = db
.update(SessionTable)
.set({
summary_additions: input.summary?.additions,
summary_deletions: input.summary?.deletions,
summary_files: input.summary?.files,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
},
)
  export const diff = fn(Identifier.schema("session"), async (sessionID) => {
    try {
      return await Storage.read<Snapshot.FileDiff[]>(["session_diff", sessionID])
    } catch {
      return []
    }
  })
export const messages = fn( export const messages = fn(
@ -331,25 +505,37 @@ export namespace Session {
}, },
) )
  export function* list() {
    const project = Instance.project
    const rel = path.relative(Instance.worktree, Instance.directory)
    const suffix = path.sep + rel
    const rows = Database.use((db) =>
      db
        .select()
        .from(SessionTable)
        .where(
          and(
            eq(SessionTable.project_id, project.id),
            or(eq(SessionTable.directory, Instance.directory), like(SessionTable.directory, `%${suffix}`)),
          ),
        )
        .all(),
    )
    for (const row of rows) {
      yield fromRow(row)
    }
  }
  export const children = fn(Identifier.schema("session"), async (parentID) => {
    const project = Instance.project
    const rows = Database.use((db) =>
      db
        .select()
        .from(SessionTable)
        .where(and(eq(SessionTable.project_id, project.id), eq(SessionTable.parent_id, parentID)))
        .all(),
    )
    return rows.map(fromRow)
  })
export const remove = fn(Identifier.schema("session"), async (sessionID) => { export const remove = fn(Identifier.schema("session"), async (sessionID) => {
@ -360,15 +546,14 @@ export namespace Session {
await remove(child.id) await remove(child.id)
} }
await unshare(sessionID).catch(() => {}) await unshare(sessionID).catch(() => {})
for (const msg of await Storage.list(["message", sessionID])) { // CASCADE delete handles messages and parts automatically
for (const part of await Storage.list(["part", msg.at(-1)!])) { Database.use((db) => {
await Storage.remove(part) db.delete(SessionTable).where(eq(SessionTable.id, sessionID)).run()
} Database.effect(() =>
await Storage.remove(msg) Bus.publish(Event.Deleted, {
} info: session,
await Storage.remove(["session", project.id, sessionID]) }),
Bus.publish(Event.Deleted, { )
info: session,
}) })
} catch (e) { } catch (e) {
log.error(e) log.error(e)
@ -376,9 +561,23 @@ export namespace Session {
}) })
  export const updateMessage = fn(MessageV2.Info, async (msg) => {
    const time_created = msg.role === "user" ? msg.time.created : msg.time.created
    const { id, sessionID, ...data } = msg
    Database.use((db) => {
      db.insert(MessageTable)
        .values({
          id,
          session_id: sessionID,
          time_created,
          data,
        })
        .onConflictDoUpdate({ target: MessageTable.id, set: { data } })
        .run()
      Database.effect(() =>
        Bus.publish(MessageV2.Event.Updated, {
          info: msg,
        }),
      )
    })
    return msg
  })
@ -389,10 +588,15 @@ export namespace Session {
messageID: Identifier.schema("message"), messageID: Identifier.schema("message"),
}), }),
async (input) => { async (input) => {
await Storage.remove(["message", input.sessionID, input.messageID]) // CASCADE delete handles parts automatically
Bus.publish(MessageV2.Event.Removed, { Database.use((db) => {
sessionID: input.sessionID, db.delete(MessageTable).where(eq(MessageTable.id, input.messageID)).run()
messageID: input.messageID, Database.effect(() =>
Bus.publish(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
}),
)
}) })
return input.messageID return input.messageID
}, },
@ -405,39 +609,58 @@ export namespace Session {
partID: Identifier.schema("part"), partID: Identifier.schema("part"),
}), }),
async (input) => { async (input) => {
await Storage.remove(["part", input.messageID, input.partID]) Database.use((db) => {
Bus.publish(MessageV2.Event.PartRemoved, { db.delete(PartTable).where(eq(PartTable.id, input.partID)).run()
sessionID: input.sessionID, Database.effect(() =>
messageID: input.messageID, Bus.publish(MessageV2.Event.PartRemoved, {
partID: input.partID, sessionID: input.sessionID,
messageID: input.messageID,
partID: input.partID,
}),
)
}) })
return input.partID return input.partID
}, },
) )
  const UpdatePartInput = MessageV2.Part

  export const updatePart = fn(UpdatePartInput, async (part) => {
    const { id, messageID, sessionID, ...data } = part
    const time = Date.now()
    Database.use((db) => {
      db.insert(PartTable)
        .values({
          id,
          message_id: messageID,
          session_id: sessionID,
          time_created: time,
          data,
        })
        .onConflictDoUpdate({ target: PartTable.id, set: { data } })
        .run()
      Database.effect(() =>
        Bus.publish(MessageV2.Event.PartUpdated, {
          part,
        }),
      )
    })
    return part
  })
export const updatePartDelta = fn(
z.object({
sessionID: z.string(),
messageID: z.string(),
partID: z.string(),
field: z.string(),
delta: z.string(),
}),
async (input) => {
Bus.publish(MessageV2.Event.PartDelta, input)
},
)
export const getUsage = fn( export const getUsage = fn(
z.object({ z.object({
model: z.custom<Provider.Model>(), model: z.custom<Provider.Model>(),

View File

@ -6,6 +6,10 @@ import { Identifier } from "../id/id"
import { LSP } from "../lsp" import { LSP } from "../lsp"
import { Snapshot } from "@/snapshot" import { Snapshot } from "@/snapshot"
import { fn } from "@/util/fn" import { fn } from "@/util/fn"
import { Database, eq, desc, inArray } from "@/storage/db"
import { MessageTable, PartTable } from "./session.sql"
import { ProviderTransform } from "@/provider/transform"
import { STATUS_CODES } from "http"
import { Storage } from "@/storage/storage" import { Storage } from "@/storage/storage"
import { ProviderError } from "@/provider/error" import { ProviderError } from "@/provider/error"
import { iife } from "@/util/iife" import { iife } from "@/util/iife"
@ -456,7 +460,16 @@ export namespace MessageV2 {
"message.part.updated", "message.part.updated",
z.object({ z.object({
part: Part, part: Part,
      }),
    ),
PartDelta: BusEvent.define(
"message.part.delta",
z.object({
sessionID: z.string(),
messageID: z.string(),
partID: z.string(),
field: z.string(),
delta: z.string(),
}), }),
), ),
PartRemoved: BusEvent.define( PartRemoved: BusEvent.define(
@ -701,23 +714,65 @@ export namespace MessageV2 {
} }
export const stream = fn(Identifier.schema("session"), async function* (sessionID) { export const stream = fn(Identifier.schema("session"), async function* (sessionID) {
    const size = 50
    let offset = 0
    while (true) {
      const rows = Database.use((db) =>
        db
}) .select()
.from(MessageTable)
.where(eq(MessageTable.session_id, sessionID))
.orderBy(desc(MessageTable.time_created))
.limit(size)
.offset(offset)
.all(),
)
if (rows.length === 0) break
const ids = rows.map((row) => row.id)
const partsByMessage = new Map<string, MessageV2.Part[]>()
if (ids.length > 0) {
const partRows = Database.use((db) =>
db
.select()
.from(PartTable)
.where(inArray(PartTable.message_id, ids))
.orderBy(PartTable.message_id, PartTable.id)
.all(),
)
for (const row of partRows) {
const part = {
...row.data,
id: row.id,
sessionID: row.session_id,
messageID: row.message_id,
} as MessageV2.Part
const list = partsByMessage.get(row.message_id)
if (list) list.push(part)
else partsByMessage.set(row.message_id, [part])
}
}
for (const row of rows) {
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
yield {
info,
parts: partsByMessage.get(row.id) ?? [],
}
}
offset += rows.length
if (rows.length < size) break
} }
}) })
  export const parts = fn(Identifier.schema("message"), async (message_id) => {
    const rows = Database.use((db) =>
      db.select().from(PartTable).where(eq(PartTable.message_id, message_id)).orderBy(PartTable.id).all(),
    )
    return rows.map(
      (row) => ({ ...row.data, id: row.id, sessionID: row.session_id, messageID: row.message_id }) as MessageV2.Part,
    )
  })
export const get = fn( export const get = fn(
@ -726,8 +781,11 @@ export namespace MessageV2 {
messageID: Identifier.schema("message"), messageID: Identifier.schema("message"),
}), }),
async (input): Promise<WithParts> => { async (input): Promise<WithParts> => {
const row = Database.use((db) => db.select().from(MessageTable).where(eq(MessageTable.id, input.messageID)).get())
if (!row) throw new Error(`Message not found: ${input.messageID}`)
const info = { ...row.data, id: row.id, sessionID: row.session_id } as MessageV2.Info
return { return {
        info,
parts: await parts(input.messageID), parts: await parts(input.messageID),
} }
}, },

View File

@ -63,17 +63,19 @@ export namespace SessionProcessor {
if (value.id in reasoningMap) { if (value.id in reasoningMap) {
continue continue
} }
            const reasoningPart = {
              id: Identifier.ascending("part"),
              messageID: input.assistantMessage.id,
              sessionID: input.assistantMessage.sessionID,
              type: "reasoning" as const,
              text: "",
              time: {
                start: Date.now(),
              },
              metadata: value.providerMetadata,
            }
            reasoningMap[value.id] = reasoningPart
            await Session.updatePart(reasoningPart)
break break
case "reasoning-delta": case "reasoning-delta":
@ -81,7 +83,13 @@ export namespace SessionProcessor {
const part = reasoningMap[value.id] const part = reasoningMap[value.id]
part.text += value.text part.text += value.text
if (value.providerMetadata) part.metadata = value.providerMetadata if (value.providerMetadata) part.metadata = value.providerMetadata
              await Session.updatePartDelta({
                sessionID: part.sessionID,
                messageID: part.messageID,
                partID: part.id,
                field: "text",
                delta: value.text,
              })
} }
break break
@ -288,17 +296,20 @@ export namespace SessionProcessor {
            },
            metadata: value.providerMetadata,
          }
+         await Session.updatePart(currentText)
          break
        case "text-delta":
          if (currentText) {
            currentText.text += value.text
            if (value.providerMetadata) currentText.metadata = value.providerMetadata
-           if (currentText.text)
-             await Session.updatePart({
-               part: currentText,
-               delta: value.text,
-             })
+           await Session.updatePartDelta({
+             sessionID: currentText.sessionID,
+             messageID: currentText.messageID,
+             partID: currentText.id,
+             field: "text",
+             delta: value.text,
+           })
          }
break break

View File

@ -174,9 +174,7 @@ export namespace SessionPrompt {
} }
    if (permissions.length > 0) {
      session.permission = permissions
-     await Session.update(session.id, (draft) => {
-       draft.permission = permissions
-     })
+     await Session.setPermission({ sessionID: session.id, permission: permissions })
    }
if (input.noReply === true) { if (input.noReply === true) {
@ -1946,21 +1944,16 @@ NOTE: At any point in time through this workflow you should feel free to ask the
], ],
}) })
    const text = await result.text.catch((err) => log.error("failed to generate title", { error: err }))
-   if (text)
-     return Session.update(
-       input.session.id,
-       (draft) => {
-         const cleaned = text
-           .replace(/<think>[\s\S]*?<\/think>\s*/g, "")
-           .split("\n")
-           .map((line) => line.trim())
-           .find((line) => line.length > 0)
-         if (!cleaned) return
+   if (text) {
+     const cleaned = text
+       .replace(/<think>[\s\S]*?<\/think>\s*/g, "")
+       .split("\n")
+       .map((line) => line.trim())
+       .find((line) => line.length > 0)
+     if (!cleaned) return
        const title = cleaned.length > 100 ? cleaned.substring(0, 97) + "..." : cleaned
-         draft.title = title
-       },
-       { touch: false },
-     )
+     return Session.setTitle({ sessionID: input.session.id, title })
+   }
  }
} }

View File

@ -4,8 +4,9 @@ import { Snapshot } from "../snapshot"
import { MessageV2 } from "./message-v2" import { MessageV2 } from "./message-v2"
import { Session } from "." import { Session } from "."
import { Log } from "../util/log" import { Log } from "../util/log"
-import { splitWhen } from "remeda"
-import { Storage } from "../storage/storage"
+import { Database, eq } from "../storage/db"
+import { MessageTable, PartTable } from "./session.sql"
+import { Storage } from "@/storage/storage"
import { Bus } from "../bus" import { Bus } from "../bus"
import { SessionPrompt } from "./prompt" import { SessionPrompt } from "./prompt"
import { SessionSummary } from "./summary" import { SessionSummary } from "./summary"
@ -65,13 +66,14 @@ export namespace SessionRevert {
sessionID: input.sessionID, sessionID: input.sessionID,
diff: diffs, diff: diffs,
}) })
-     return Session.update(input.sessionID, (draft) => {
-       draft.revert = revert
-       draft.summary = {
+     return Session.setRevert({
+       sessionID: input.sessionID,
+       revert,
+       summary: {
          additions: diffs.reduce((sum, x) => sum + x.additions, 0),
          deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
          files: diffs.length,
-       }
+       },
      })
} }
return session return session
@ -83,39 +85,54 @@ export namespace SessionRevert {
const session = await Session.get(input.sessionID) const session = await Session.get(input.sessionID)
if (!session.revert) return session if (!session.revert) return session
if (session.revert.snapshot) await Snapshot.restore(session.revert.snapshot) if (session.revert.snapshot) await Snapshot.restore(session.revert.snapshot)
-   const next = await Session.update(input.sessionID, (draft) => {
-     draft.revert = undefined
-   })
-   return next
+   return Session.clearRevert(input.sessionID)
} }
export async function cleanup(session: Session.Info) { export async function cleanup(session: Session.Info) {
if (!session.revert) return if (!session.revert) return
const sessionID = session.id const sessionID = session.id
-   let msgs = await Session.messages({ sessionID })
+   const msgs = await Session.messages({ sessionID })
    const messageID = session.revert.messageID
-   const [preserve, remove] = splitWhen(msgs, (x) => x.info.id === messageID)
-   msgs = preserve
+   const preserve = [] as MessageV2.WithParts[]
+   const remove = [] as MessageV2.WithParts[]
+   let target: MessageV2.WithParts | undefined
+   for (const msg of msgs) {
+     if (msg.info.id < messageID) {
+       preserve.push(msg)
+       continue
+     }
+     if (msg.info.id > messageID) {
+       remove.push(msg)
+       continue
+     }
+     if (session.revert.partID) {
+       preserve.push(msg)
+       target = msg
+       continue
+     }
+     remove.push(msg)
+   }
    for (const msg of remove) {
-     await Storage.remove(["message", sessionID, msg.info.id])
+     Database.use((db) => db.delete(MessageTable).where(eq(MessageTable.id, msg.info.id)).run())
      await Bus.publish(MessageV2.Event.Removed, { sessionID: sessionID, messageID: msg.info.id })
    }
-   const last = preserve.at(-1)
-   if (session.revert.partID && last) {
+   if (session.revert.partID && target) {
      const partID = session.revert.partID
-     const [preserveParts, removeParts] = splitWhen(last.parts, (x) => x.id === partID)
-     last.parts = preserveParts
-     for (const part of removeParts) {
-       await Storage.remove(["part", last.info.id, part.id])
-       await Bus.publish(MessageV2.Event.PartRemoved, {
-         sessionID: sessionID,
-         messageID: last.info.id,
-         partID: part.id,
-       })
-     }
+     const removeStart = target.parts.findIndex((part) => part.id === partID)
+     if (removeStart >= 0) {
+       const preserveParts = target.parts.slice(0, removeStart)
+       const removeParts = target.parts.slice(removeStart)
+       target.parts = preserveParts
+       for (const part of removeParts) {
+         Database.use((db) => db.delete(PartTable).where(eq(PartTable.id, part.id)).run())
+         await Bus.publish(MessageV2.Event.PartRemoved, {
+           sessionID: sessionID,
+           messageID: target.info.id,
+           partID: part.id,
+         })
+       }
+     }
    }
-   await Session.update(sessionID, (draft) => {
-     draft.revert = undefined
-   })
+   await Session.clearRevert(sessionID)
  }
} }

View File

@ -0,0 +1,88 @@
import { sqliteTable, text, integer, index, primaryKey } from "drizzle-orm/sqlite-core"
import { ProjectTable } from "../project/project.sql"
import type { MessageV2 } from "./message-v2"
import type { Snapshot } from "@/snapshot"
import type { PermissionNext } from "@/permission/next"
import { Timestamps } from "@/storage/schema.sql"
type PartData = Omit<MessageV2.Part, "id" | "sessionID" | "messageID">
type InfoData = Omit<MessageV2.Info, "id" | "sessionID">
export const SessionTable = sqliteTable(
"session",
{
id: text().primaryKey(),
project_id: text()
.notNull()
.references(() => ProjectTable.id, { onDelete: "cascade" }),
parent_id: text(),
slug: text().notNull(),
directory: text().notNull(),
title: text().notNull(),
version: text().notNull(),
share_url: text(),
summary_additions: integer(),
summary_deletions: integer(),
summary_files: integer(),
summary_diffs: text({ mode: "json" }).$type<Snapshot.FileDiff[]>(),
revert: text({ mode: "json" }).$type<{ messageID: string; partID?: string; snapshot?: string; diff?: string }>(),
permission: text({ mode: "json" }).$type<PermissionNext.Ruleset>(),
...Timestamps,
time_compacting: integer(),
time_archived: integer(),
},
(table) => [index("session_project_idx").on(table.project_id), index("session_parent_idx").on(table.parent_id)],
)
export const MessageTable = sqliteTable(
"message",
{
id: text().primaryKey(),
session_id: text()
.notNull()
.references(() => SessionTable.id, { onDelete: "cascade" }),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<InfoData>(),
},
(table) => [index("message_session_idx").on(table.session_id)],
)
export const PartTable = sqliteTable(
"part",
{
id: text().primaryKey(),
message_id: text()
.notNull()
.references(() => MessageTable.id, { onDelete: "cascade" }),
session_id: text().notNull(),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<PartData>(),
},
(table) => [index("part_message_idx").on(table.message_id), index("part_session_idx").on(table.session_id)],
)
export const TodoTable = sqliteTable(
"todo",
{
session_id: text()
.notNull()
.references(() => SessionTable.id, { onDelete: "cascade" }),
content: text().notNull(),
status: text().notNull(),
priority: text().notNull(),
position: integer().notNull(),
...Timestamps,
},
(table) => [
primaryKey({ columns: [table.session_id, table.position] }),
index("todo_session_idx").on(table.session_id),
],
)
export const PermissionTable = sqliteTable("permission", {
project_id: text()
.primaryKey()
.references(() => ProjectTable.id, { onDelete: "cascade" }),
...Timestamps,
data: text({ mode: "json" }).notNull().$type<PermissionNext.Ruleset>(),
})
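
A minimal sketch of how these tables might be read together with drizzle, for orientation only; the database path, the `loadSession` helper, and the session id are assumptions, not part of this change:

```ts
// Sketch: load one session plus its messages and parts using the tables above.
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { eq, asc } from "drizzle-orm"
import { SessionTable, MessageTable, PartTable } from "./session.sql"

const db = drizzle({ client: new Database("opencode.db") })

export function loadSession(sessionID: string) {
  const session = db.select().from(SessionTable).where(eq(SessionTable.id, sessionID)).get()
  if (!session) return
  const messages = db
    .select()
    .from(MessageTable)
    .where(eq(MessageTable.session_id, sessionID))
    .orderBy(asc(MessageTable.id))
    .all()
  const parts = db
    .select()
    .from(PartTable)
    .where(eq(PartTable.session_id, sessionID))
    .orderBy(asc(PartTable.id))
    .all()
  return { session, messages, parts }
}
```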

View File

@ -90,12 +90,13 @@ export namespace SessionSummary {
async function summarizeSession(input: { sessionID: string; messages: MessageV2.WithParts[] }) { async function summarizeSession(input: { sessionID: string; messages: MessageV2.WithParts[] }) {
const diffs = await computeDiff({ messages: input.messages }) const diffs = await computeDiff({ messages: input.messages })
-   await Session.update(input.sessionID, (draft) => {
-     draft.summary = {
+   await Session.setSummary({
+     sessionID: input.sessionID,
+     summary: {
        additions: diffs.reduce((sum, x) => sum + x.additions, 0),
        deletions: diffs.reduce((sum, x) => sum + x.deletions, 0),
        files: diffs.length,
-     }
+     },
    })
await Storage.write(["session_diff", input.sessionID], diffs) await Storage.write(["session_diff", input.sessionID], diffs)
Bus.publish(Session.Event.Diff, { Bus.publish(Session.Event.Diff, {

View File

@ -1,7 +1,8 @@
import { BusEvent } from "@/bus/bus-event" import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus" import { Bus } from "@/bus"
import z from "zod" import z from "zod"
import { Storage } from "../storage/storage" import { Database, eq, asc } from "../storage/db"
import { TodoTable } from "./session.sql"
export namespace Todo { export namespace Todo {
export const Info = z export const Info = z
@ -9,7 +10,6 @@ export namespace Todo {
content: z.string().describe("Brief description of the task"), content: z.string().describe("Brief description of the task"),
status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"), status: z.string().describe("Current status of the task: pending, in_progress, completed, cancelled"),
priority: z.string().describe("Priority level of the task: high, medium, low"), priority: z.string().describe("Priority level of the task: high, medium, low"),
id: z.string().describe("Unique identifier for the todo item"),
}) })
.meta({ ref: "Todo" }) .meta({ ref: "Todo" })
export type Info = z.infer<typeof Info> export type Info = z.infer<typeof Info>
@ -24,14 +24,33 @@ export namespace Todo {
), ),
} }
-  export async function update(input: { sessionID: string; todos: Info[] }) {
-    await Storage.write(["todo", input.sessionID], input.todos)
+  export function update(input: { sessionID: string; todos: Info[] }) {
+    Database.transaction((db) => {
+      db.delete(TodoTable).where(eq(TodoTable.session_id, input.sessionID)).run()
+      if (input.todos.length === 0) return
+      db.insert(TodoTable)
+        .values(
+          input.todos.map((todo, position) => ({
+            session_id: input.sessionID,
+            content: todo.content,
+            status: todo.status,
+            priority: todo.priority,
+            position,
+          })),
+        )
+        .run()
+    })
    Bus.publish(Event.Updated, input)
  }

-  export async function get(sessionID: string) {
-    return Storage.read<Info[]>(["todo", sessionID])
-      .then((x) => x || [])
-      .catch(() => [])
+  export function get(sessionID: string) {
+    const rows = Database.use((db) =>
+      db.select().from(TodoTable).where(eq(TodoTable.session_id, sessionID)).orderBy(asc(TodoTable.position)).all(),
+    )
+    return rows.map((row) => ({
+      content: row.content,
+      status: row.status,
+      priority: row.priority,
+    }))
  }
} }
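
A small usage sketch for the reworked todo store; the import path and session id are placeholders, not taken from this change:

```ts
// Hypothetical call site: replace a session's todo list, then read it back ordered by position.
import { Todo } from "./todo"

Todo.update({
  sessionID: "ses_example",
  todos: [
    { content: "write migration", status: "completed", priority: "high" },
    { content: "update docs", status: "pending", priority: "low" },
  ],
})

// Rows come back without ids, in the order they were inserted.
const todos = Todo.get("ses_example")
```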

View File

@ -4,7 +4,8 @@ import { ulid } from "ulid"
import { Provider } from "@/provider/provider" import { Provider } from "@/provider/provider"
import { Session } from "@/session" import { Session } from "@/session"
import { MessageV2 } from "@/session/message-v2" import { MessageV2 } from "@/session/message-v2"
import { Storage } from "@/storage/storage" import { Database, eq } from "@/storage/db"
import { SessionShareTable } from "./share.sql"
import { Log } from "@/util/log" import { Log } from "@/util/log"
import type * as SDK from "@opencode-ai/sdk/v2" import type * as SDK from "@opencode-ai/sdk/v2"
@ -77,17 +78,26 @@ export namespace ShareNext {
}) })
.then((x) => x.json()) .then((x) => x.json())
.then((x) => x as { id: string; url: string; secret: string }) .then((x) => x as { id: string; url: string; secret: string })
await Storage.write(["session_share", sessionID], result) Database.use((db) =>
db
.insert(SessionShareTable)
.values({ session_id: sessionID, id: result.id, secret: result.secret, url: result.url })
.onConflictDoUpdate({
target: SessionShareTable.session_id,
set: { id: result.id, secret: result.secret, url: result.url },
})
.run(),
)
fullSync(sessionID) fullSync(sessionID)
return result return result
} }
function get(sessionID: string) { function get(sessionID: string) {
return Storage.read<{ const row = Database.use((db) =>
id: string db.select().from(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).get(),
secret: string )
url: string if (!row) return
}>(["session_share", sessionID]) return { id: row.id, secret: row.secret, url: row.url }
} }
type Data = type Data =
@ -132,7 +142,7 @@ export namespace ShareNext {
const queued = queue.get(sessionID) const queued = queue.get(sessionID)
if (!queued) return if (!queued) return
queue.delete(sessionID) queue.delete(sessionID)
-   const share = await get(sessionID).catch(() => undefined)
+   const share = get(sessionID)
if (!share) return if (!share) return
await fetch(`${await url()}/api/share/${share.id}/sync`, { await fetch(`${await url()}/api/share/${share.id}/sync`, {
@ -152,7 +162,7 @@ export namespace ShareNext {
export async function remove(sessionID: string) { export async function remove(sessionID: string) {
if (disabled) return if (disabled) return
log.info("removing share", { sessionID }) log.info("removing share", { sessionID })
-   const share = await get(sessionID)
+   const share = get(sessionID)
if (!share) return if (!share) return
await fetch(`${await url()}/api/share/${share.id}`, { await fetch(`${await url()}/api/share/${share.id}`, {
method: "DELETE", method: "DELETE",
@ -163,7 +173,7 @@ export namespace ShareNext {
secret: share.secret, secret: share.secret,
}), }),
}) })
await Storage.remove(["session_share", sessionID]) Database.use((db) => db.delete(SessionShareTable).where(eq(SessionShareTable.session_id, sessionID)).run())
} }
async function fullSync(sessionID: string) { async function fullSync(sessionID: string) {

View File

@ -0,0 +1,13 @@
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { SessionTable } from "../session/session.sql"
import { Timestamps } from "@/storage/schema.sql"
export const SessionShareTable = sqliteTable("session_share", {
session_id: text()
.primaryKey()
.references(() => SessionTable.id, { onDelete: "cascade" }),
id: text().notNull(),
secret: text().notNull(),
url: text().notNull(),
...Timestamps,
})

View File

@ -1,92 +0,0 @@
import { Bus } from "../bus"
import { Installation } from "../installation"
import { Session } from "../session"
import { MessageV2 } from "../session/message-v2"
import { Log } from "../util/log"
export namespace Share {
const log = Log.create({ service: "share" })
let queue: Promise<void> = Promise.resolve()
const pending = new Map<string, any>()
export async function sync(key: string, content: any) {
if (disabled) return
const [root, ...splits] = key.split("/")
if (root !== "session") return
const [sub, sessionID] = splits
if (sub === "share") return
const share = await Session.getShare(sessionID).catch(() => {})
if (!share) return
const { secret } = share
pending.set(key, content)
queue = queue
.then(async () => {
const content = pending.get(key)
if (content === undefined) return
pending.delete(key)
return fetch(`${URL}/share_sync`, {
method: "POST",
body: JSON.stringify({
sessionID: sessionID,
secret,
key: key,
content,
}),
})
})
.then((x) => {
if (x) {
log.info("synced", {
key: key,
status: x.status,
})
}
})
}
export function init() {
Bus.subscribe(Session.Event.Updated, async (evt) => {
await sync("session/info/" + evt.properties.info.id, evt.properties.info)
})
Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
await sync("session/message/" + evt.properties.info.sessionID + "/" + evt.properties.info.id, evt.properties.info)
})
Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => {
await sync(
"session/part/" +
evt.properties.part.sessionID +
"/" +
evt.properties.part.messageID +
"/" +
evt.properties.part.id,
evt.properties.part,
)
})
}
export const URL =
process.env["OPENCODE_API"] ??
(Installation.isPreview() || Installation.isLocal() ? "https://api.dev.opencode.ai" : "https://api.opencode.ai")
const disabled = process.env["OPENCODE_DISABLE_SHARE"] === "true" || process.env["OPENCODE_DISABLE_SHARE"] === "1"
export async function create(sessionID: string) {
if (disabled) return { url: "", secret: "" }
return fetch(`${URL}/share_create`, {
method: "POST",
body: JSON.stringify({ sessionID: sessionID }),
})
.then((x) => x.json())
.then((x) => x as { url: string; secret: string })
}
export async function remove(sessionID: string, secret: string) {
if (disabled) return {}
return fetch(`${URL}/share_delete`, {
method: "POST",
body: JSON.stringify({ sessionID, secret }),
}).then((x) => x.json())
}
}

packages/opencode/src/sql.d.ts
View File

@ -0,0 +1,4 @@
declare module "*.sql" {
const content: string
export default content
}
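
This ambient declaration lets TypeScript treat raw `.sql` files as string imports. A sketch of what that enables; the migration folder name below is illustrative, not an actual path from this change:

```ts
// With the declaration above, a .sql file can be imported as plain text.
import migrationSql from "../migration/0000_example/migration.sql"

console.log(migrationSql.length) // the raw SQL as a string
```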

View File

@ -0,0 +1,140 @@
import { Database as BunDatabase } from "bun:sqlite"
import { drizzle, type SQLiteBunDatabase } from "drizzle-orm/bun-sqlite"
import { migrate } from "drizzle-orm/bun-sqlite/migrator"
import { type SQLiteTransaction } from "drizzle-orm/sqlite-core"
export * from "drizzle-orm"
import { Context } from "../util/context"
import { lazy } from "../util/lazy"
import { Global } from "../global"
import { Log } from "../util/log"
import { NamedError } from "@opencode-ai/util/error"
import z from "zod"
import path from "path"
import { readFileSync, readdirSync } from "fs"
import * as schema from "./schema"
declare const OPENCODE_MIGRATIONS: { sql: string; timestamp: number }[] | undefined
export const NotFoundError = NamedError.create(
"NotFoundError",
z.object({
message: z.string(),
}),
)
const log = Log.create({ service: "db" })
export namespace Database {
type Schema = typeof schema
export type Transaction = SQLiteTransaction<"sync", void, Schema>
type Client = SQLiteBunDatabase<Schema>
type Journal = { sql: string; timestamp: number }[]
function time(tag: string) {
const match = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/.exec(tag)
if (!match) return 0
return Date.UTC(
Number(match[1]),
Number(match[2]) - 1,
Number(match[3]),
Number(match[4]),
Number(match[5]),
Number(match[6]),
)
}
function migrations(dir: string): Journal {
const dirs = readdirSync(dir, { withFileTypes: true })
.filter((entry) => entry.isDirectory())
.map((entry) => entry.name)
const sql = dirs
.map((name) => {
const file = path.join(dir, name, "migration.sql")
if (!Bun.file(file).size) return
return {
sql: readFileSync(file, "utf-8"),
timestamp: time(name),
}
})
.filter(Boolean) as Journal
return sql.sort((a, b) => a.timestamp - b.timestamp)
}
export const Client = lazy(() => {
log.info("opening database", { path: path.join(Global.Path.data, "opencode.db") })
const sqlite = new BunDatabase(path.join(Global.Path.data, "opencode.db"), { create: true })
sqlite.run("PRAGMA journal_mode = WAL")
sqlite.run("PRAGMA synchronous = NORMAL")
sqlite.run("PRAGMA busy_timeout = 5000")
sqlite.run("PRAGMA cache_size = -64000")
sqlite.run("PRAGMA foreign_keys = ON")
const db = drizzle({ client: sqlite, schema })
// Apply schema migrations
const entries =
typeof OPENCODE_MIGRATIONS !== "undefined"
? OPENCODE_MIGRATIONS
: migrations(path.join(import.meta.dirname, "../../migration"))
if (entries.length > 0) {
log.info("applying migrations", {
count: entries.length,
mode: typeof OPENCODE_MIGRATIONS !== "undefined" ? "bundled" : "dev",
})
migrate(db, entries)
}
return db
})
export type TxOrDb = Transaction | Client
const ctx = Context.create<{
tx: TxOrDb
effects: (() => void | Promise<void>)[]
}>("database")
export function use<T>(callback: (trx: TxOrDb) => T): T {
try {
return callback(ctx.use().tx)
} catch (err) {
if (err instanceof Context.NotFound) {
const effects: (() => void | Promise<void>)[] = []
const result = ctx.provide({ effects, tx: Client() }, () => callback(Client()))
for (const effect of effects) effect()
return result
}
throw err
}
}
export function effect(fn: () => any | Promise<any>) {
try {
ctx.use().effects.push(fn)
} catch {
fn()
}
}
export function transaction<T>(callback: (tx: TxOrDb) => T): T {
try {
return callback(ctx.use().tx)
} catch (err) {
if (err instanceof Context.NotFound) {
const effects: (() => void | Promise<void>)[] = []
const result = Client().transaction((tx) => {
return ctx.provide({ tx, effects }, () => callback(tx))
})
for (const effect of effects) effect()
return result
}
throw err
}
}
}
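
A usage sketch for the context-scoped helpers above: `use` runs against the shared connection (or the ambient transaction if one is open), `transaction` opens one and re-enters it for nested calls, and `effect` defers work until the surrounding transaction finishes. The import paths, the `archive` helper, and the logging call are assumptions for illustration:

```ts
// Sketch, not part of the change: write a row inside a transaction and defer a
// side effect until the outermost transaction has completed.
import { Database, eq } from "./db"
import { SessionTable } from "../session/session.sql"

function archive(sessionID: string) {
  Database.transaction((tx) => {
    tx.update(SessionTable).set({ time_archived: Date.now() }).where(eq(SessionTable.id, sessionID)).run()
    // Runs after the outermost transaction completes (immediately if there is none).
    Database.effect(() => console.log("archived", sessionID))
  })
}

// Reads outside a transaction fall back to the lazily-opened shared client.
const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, "ses_example")).get())
```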

View File

@ -0,0 +1,437 @@
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { Global } from "../global"
import { Log } from "../util/log"
import { ProjectTable } from "../project/project.sql"
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../session/session.sql"
import { SessionShareTable } from "../share/share.sql"
import path from "path"
import { existsSync } from "fs"
export namespace JsonMigration {
const log = Log.create({ service: "json-migration" })
export type Progress = {
current: number
total: number
label: string
}
type Options = {
progress?: (event: Progress) => void
}
export async function run(sqlite: Database, options?: Options) {
const storageDir = path.join(Global.Path.data, "storage")
if (!existsSync(storageDir)) {
log.info("storage directory does not exist, skipping migration")
return {
projects: 0,
sessions: 0,
messages: 0,
parts: 0,
todos: 0,
permissions: 0,
shares: 0,
errors: [] as string[],
}
}
log.info("starting json to sqlite migration", { storageDir })
const start = performance.now()
const db = drizzle({ client: sqlite })
// Optimize SQLite for bulk inserts
sqlite.exec("PRAGMA journal_mode = WAL")
sqlite.exec("PRAGMA synchronous = OFF")
sqlite.exec("PRAGMA cache_size = 10000")
sqlite.exec("PRAGMA temp_store = MEMORY")
const stats = {
projects: 0,
sessions: 0,
messages: 0,
parts: 0,
todos: 0,
permissions: 0,
shares: 0,
errors: [] as string[],
}
const orphans = {
sessions: 0,
todos: 0,
permissions: 0,
shares: 0,
}
const errs = stats.errors
const batchSize = 1000
const now = Date.now()
async function list(pattern: string) {
const items: string[] = []
const scan = new Bun.Glob(pattern)
for await (const file of scan.scan({ cwd: storageDir, absolute: true })) {
items.push(file)
}
return items
}
async function read(files: string[], start: number, end: number) {
const count = end - start
const tasks = new Array(count)
for (let i = 0; i < count; i++) {
tasks[i] = Bun.file(files[start + i]).json()
}
const results = await Promise.allSettled(tasks)
const items = new Array(count)
for (let i = 0; i < results.length; i++) {
const result = results[i]
if (result.status === "fulfilled") {
items[i] = result.value
continue
}
errs.push(`failed to read ${files[start + i]}: ${result.reason}`)
}
return items
}
function insert(values: any[], table: any, label: string) {
if (values.length === 0) return 0
try {
db.insert(table).values(values).onConflictDoNothing().run()
return values.length
} catch (e) {
errs.push(`failed to migrate ${label} batch: ${e}`)
return 0
}
}
// Pre-scan all files upfront to avoid repeated glob operations
log.info("scanning files...")
const [projectFiles, sessionFiles, messageFiles, partFiles, todoFiles, permFiles, shareFiles] = await Promise.all([
list("project/*.json"),
list("session/*/*.json"),
list("message/*/*.json"),
list("part/*/*.json"),
list("todo/*.json"),
list("permission/*.json"),
list("session_share/*.json"),
])
log.info("file scan complete", {
projects: projectFiles.length,
sessions: sessionFiles.length,
messages: messageFiles.length,
parts: partFiles.length,
todos: todoFiles.length,
permissions: permFiles.length,
shares: shareFiles.length,
})
const total = Math.max(
1,
projectFiles.length +
sessionFiles.length +
messageFiles.length +
partFiles.length +
todoFiles.length +
permFiles.length +
shareFiles.length,
)
const progress = options?.progress
let current = 0
const step = (label: string, count: number) => {
current = Math.min(total, current + count)
progress?.({ current, total, label })
}
progress?.({ current, total, label: "starting" })
sqlite.exec("BEGIN TRANSACTION")
// Migrate projects first (no FK deps)
const projectIds = new Set<string>()
const projectValues = [] as any[]
for (let i = 0; i < projectFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, projectFiles.length)
const batch = await read(projectFiles, i, end)
projectValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
if (!data?.id) {
errs.push(`project missing id: ${projectFiles[i + j]}`)
continue
}
projectIds.add(data.id)
projectValues.push({
id: data.id,
worktree: data.worktree ?? "/",
vcs: data.vcs,
name: data.name ?? undefined,
icon_url: data.icon?.url,
icon_color: data.icon?.color,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
time_initialized: data.time?.initialized,
sandboxes: data.sandboxes ?? [],
commands: data.commands,
})
}
stats.projects += insert(projectValues, ProjectTable, "project")
step("projects", end - i)
}
log.info("migrated projects", { count: stats.projects, duration: Math.round(performance.now() - start) })
// Migrate sessions (depends on projects)
const sessionIds = new Set<string>()
const sessionValues = [] as any[]
for (let i = 0; i < sessionFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, sessionFiles.length)
const batch = await read(sessionFiles, i, end)
sessionValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
if (!data?.id || !data?.projectID) {
errs.push(`session missing id or projectID: ${sessionFiles[i + j]}`)
continue
}
if (!projectIds.has(data.projectID)) {
orphans.sessions++
continue
}
sessionIds.add(data.id)
sessionValues.push({
id: data.id,
project_id: data.projectID,
parent_id: data.parentID ?? null,
slug: data.slug ?? "",
directory: data.directory ?? "",
title: data.title ?? "",
version: data.version ?? "",
share_url: data.share?.url ?? null,
summary_additions: data.summary?.additions ?? null,
summary_deletions: data.summary?.deletions ?? null,
summary_files: data.summary?.files ?? null,
summary_diffs: data.summary?.diffs ?? null,
revert: data.revert ?? null,
permission: data.permission ?? null,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
time_compacting: data.time?.compacting ?? null,
time_archived: data.time?.archived ?? null,
})
}
stats.sessions += insert(sessionValues, SessionTable, "session")
step("sessions", end - i)
}
log.info("migrated sessions", { count: stats.sessions })
if (orphans.sessions > 0) {
log.warn("skipped orphaned sessions", { count: orphans.sessions })
}
// Migrate messages using pre-scanned file map
const allMessageFiles = [] as string[]
const allMessageSessions = [] as string[]
const messageSessions = new Map<string, string>()
for (const file of messageFiles) {
const sessionID = path.basename(path.dirname(file))
if (!sessionIds.has(sessionID)) continue
allMessageFiles.push(file)
allMessageSessions.push(sessionID)
}
for (let i = 0; i < allMessageFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, allMessageFiles.length)
const batch = await read(allMessageFiles, i, end)
const values = new Array(batch.length)
let count = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const file = allMessageFiles[i + j]
const id = data.id ?? path.basename(file, ".json")
if (!id) {
errs.push(`message missing id: ${file}`)
continue
}
const sessionID = allMessageSessions[i + j]
messageSessions.set(id, sessionID)
const rest = data
delete rest.id
delete rest.sessionID
values[count++] = {
id,
session_id: sessionID,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
data: rest,
}
}
values.length = count
stats.messages += insert(values, MessageTable, "message")
step("messages", end - i)
}
log.info("migrated messages", { count: stats.messages })
// Migrate parts using pre-scanned file map
for (let i = 0; i < partFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, partFiles.length)
const batch = await read(partFiles, i, end)
const values = new Array(batch.length)
let count = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const file = partFiles[i + j]
const id = data.id ?? path.basename(file, ".json")
const messageID = data.messageID ?? path.basename(path.dirname(file))
if (!id || !messageID) {
          errs.push(`part missing id/messageID: ${file}`)
continue
}
const sessionID = messageSessions.get(messageID)
if (!sessionID) {
errs.push(`part missing message session: ${file}`)
continue
}
if (!sessionIds.has(sessionID)) continue
const rest = data
delete rest.id
delete rest.messageID
delete rest.sessionID
values[count++] = {
id,
message_id: messageID,
session_id: sessionID,
time_created: data.time?.created ?? now,
time_updated: data.time?.updated ?? now,
data: rest,
}
}
values.length = count
stats.parts += insert(values, PartTable, "part")
step("parts", end - i)
}
log.info("migrated parts", { count: stats.parts })
// Migrate todos
const todoSessions = todoFiles.map((file) => path.basename(file, ".json"))
for (let i = 0; i < todoFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, todoFiles.length)
const batch = await read(todoFiles, i, end)
const values = [] as any[]
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const sessionID = todoSessions[i + j]
if (!sessionIds.has(sessionID)) {
orphans.todos++
continue
}
if (!Array.isArray(data)) {
errs.push(`todo not an array: ${todoFiles[i + j]}`)
continue
}
for (let position = 0; position < data.length; position++) {
const todo = data[position]
if (!todo?.content || !todo?.status || !todo?.priority) continue
values.push({
session_id: sessionID,
content: todo.content,
status: todo.status,
priority: todo.priority,
position,
time_created: now,
time_updated: now,
})
}
}
stats.todos += insert(values, TodoTable, "todo")
step("todos", end - i)
}
log.info("migrated todos", { count: stats.todos })
if (orphans.todos > 0) {
log.warn("skipped orphaned todos", { count: orphans.todos })
}
// Migrate permissions
const permProjects = permFiles.map((file) => path.basename(file, ".json"))
const permValues = [] as any[]
for (let i = 0; i < permFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, permFiles.length)
const batch = await read(permFiles, i, end)
permValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const projectID = permProjects[i + j]
if (!projectIds.has(projectID)) {
orphans.permissions++
continue
}
permValues.push({ project_id: projectID, data })
}
stats.permissions += insert(permValues, PermissionTable, "permission")
step("permissions", end - i)
}
log.info("migrated permissions", { count: stats.permissions })
if (orphans.permissions > 0) {
log.warn("skipped orphaned permissions", { count: orphans.permissions })
}
// Migrate session shares
const shareSessions = shareFiles.map((file) => path.basename(file, ".json"))
const shareValues = [] as any[]
for (let i = 0; i < shareFiles.length; i += batchSize) {
const end = Math.min(i + batchSize, shareFiles.length)
const batch = await read(shareFiles, i, end)
shareValues.length = 0
for (let j = 0; j < batch.length; j++) {
const data = batch[j]
if (!data) continue
const sessionID = shareSessions[i + j]
if (!sessionIds.has(sessionID)) {
orphans.shares++
continue
}
if (!data?.id || !data?.secret || !data?.url) {
errs.push(`session_share missing id/secret/url: ${shareFiles[i + j]}`)
continue
}
shareValues.push({ session_id: sessionID, id: data.id, secret: data.secret, url: data.url })
}
stats.shares += insert(shareValues, SessionShareTable, "session_share")
step("shares", end - i)
}
log.info("migrated session shares", { count: stats.shares })
if (orphans.shares > 0) {
log.warn("skipped orphaned session shares", { count: orphans.shares })
}
sqlite.exec("COMMIT")
log.info("json migration complete", {
projects: stats.projects,
sessions: stats.sessions,
messages: stats.messages,
parts: stats.parts,
todos: stats.todos,
permissions: stats.permissions,
shares: stats.shares,
errorCount: stats.errors.length,
duration: Math.round(performance.now() - start),
})
if (stats.errors.length > 0) {
log.warn("migration errors", { errors: stats.errors.slice(0, 20) })
}
progress?.({ current: total, total, label: "complete" })
return stats
}
}
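
A sketch of how the one-time importer might be driven from startup code; the database path and the progress handling are assumptions, since the importer reads the JSON storage directory under `Global.Path.data` on its own:

```ts
// Sketch: run the JSON -> SQLite import against a fresh handle and report progress.
import { Database } from "bun:sqlite"
import { JsonMigration } from "./json-migration"

const sqlite = new Database("/tmp/opencode-import.db", { create: true })
const stats = await JsonMigration.run(sqlite, {
  progress: ({ current, total, label }) => console.log(`${label}: ${current}/${total}`),
})
console.log("imported", stats.sessions, "sessions with", stats.errors.length, "errors")
sqlite.close()
```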

View File

@ -0,0 +1,10 @@
import { integer } from "drizzle-orm/sqlite-core"
export const Timestamps = {
time_created: integer()
.notNull()
.$default(() => Date.now()),
time_updated: integer()
.notNull()
.$onUpdate(() => Date.now()),
}
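
A short sketch of how this helper is meant to be spread into table definitions; the table below is hypothetical and only shows that the two timestamp columns come along with defaults on insert and refresh on update:

```ts
import { sqliteTable, text } from "drizzle-orm/sqlite-core"
import { Timestamps } from "@/storage/schema.sql"

// Hypothetical table: time_created and time_updated are added by the spread.
export const ExampleTable = sqliteTable("example", {
  id: text().primaryKey(),
  label: text().notNull(),
  ...Timestamps,
})
```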

View File

@ -0,0 +1,4 @@
export { ControlAccountTable } from "../control/control.sql"
export { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../session/session.sql"
export { SessionShareTable } from "../share/share.sql"
export { ProjectTable } from "../project/project.sql"

View File

@ -4,9 +4,14 @@ export function lazy<T>(fn: () => T) {
const result = (): T => { const result = (): T => {
if (loaded) return value as T if (loaded) return value as T
-   loaded = true
-   value = fn()
-   return value as T
+   try {
+     value = fn()
+     loaded = true
+     return value as T
+   } catch (e) {
+     // Don't mark as loaded if initialization failed
+     throw e
+   }
} }
result.reset = () => { result.reset = () => {
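
The change above keeps a failed initializer from being cached. A minimal behavioral sketch, assuming the helper's existing signature; the import path and the `connect` example are placeholders:

```ts
// Sketch: the first call throws and is NOT memoized, so a later call can succeed.
import { lazy } from "../util/lazy"

let attempts = 0
const connect = lazy(() => {
  attempts++
  if (attempts === 1) throw new Error("transient failure")
  return "connected"
})

try {
  connect()
} catch {}
console.log(connect()) // "connected": the failure was not cached
```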

View File

@ -7,7 +7,8 @@ import { Global } from "../global"
import { Instance } from "../project/instance" import { Instance } from "../project/instance"
import { InstanceBootstrap } from "../project/bootstrap" import { InstanceBootstrap } from "../project/bootstrap"
import { Project } from "../project/project" import { Project } from "../project/project"
import { Storage } from "../storage/storage" import { Database, eq } from "../storage/db"
import { ProjectTable } from "../project/project.sql"
import { fn } from "../util/fn" import { fn } from "../util/fn"
import { Log } from "../util/log" import { Log } from "../util/log"
import { BusEvent } from "@/bus/bus-event" import { BusEvent } from "@/bus/bus-event"
@ -307,7 +308,8 @@ export namespace Worktree {
} }
async function runStartScripts(directory: string, input: { projectID: string; extra?: string }) { async function runStartScripts(directory: string, input: { projectID: string; extra?: string }) {
-   const project = await Storage.read<Project.Info>(["project", input.projectID]).catch(() => undefined)
+   const row = Database.use((db) => db.select().from(ProjectTable).where(eq(ProjectTable.id, input.projectID)).get())
+   const project = row ? Project.fromRow(row) : undefined
const startup = project?.commands?.start?.trim() ?? "" const startup = project?.commands?.start?.trim() ?? ""
const ok = await runStartScript(directory, startup, "project") const ok = await runStartScript(directory, startup, "project")
if (!ok) return false if (!ok) return false

View File

@ -122,12 +122,20 @@ function createFakeAgent() {
messages: async () => { messages: async () => {
return { data: [] } return { data: [] }
}, },
message: async () => { message: async (params?: any) => {
// Return a message with parts that can be looked up by partID
return { return {
data: { data: {
info: { info: {
role: "assistant", role: "assistant",
}, },
parts: [
{
id: params?.messageID ? `${params.messageID}_part` : "part_1",
type: "text",
text: "",
},
],
}, },
} }
}, },
@ -193,7 +201,7 @@ function createFakeAgent() {
} }
describe("acp.agent event subscription", () => { describe("acp.agent event subscription", () => {
test("routes message.part.updated by the event sessionID (no cross-session pollution)", async () => { test("routes message.part.delta by the event sessionID (no cross-session pollution)", async () => {
await using tmp = await tmpdir() await using tmp = await tmpdir()
await Instance.provide({ await Instance.provide({
directory: tmp.path, directory: tmp.path,
@ -207,14 +215,12 @@ describe("acp.agent event subscription", () => {
controller.push({ controller.push({
directory: cwd, directory: cwd,
payload: { payload: {
type: "message.part.updated", type: "message.part.delta",
properties: { properties: {
part: { sessionID: sessionB,
sessionID: sessionB, messageID: "msg_1",
messageID: "msg_1", partID: "msg_1_part",
type: "text", field: "text",
synthetic: false,
},
delta: "hello", delta: "hello",
}, },
}, },
@ -230,7 +236,7 @@ describe("acp.agent event subscription", () => {
}) })
}) })
test("keeps concurrent sessions isolated when message.part.updated events are interleaved", async () => { test("keeps concurrent sessions isolated when message.part.delta events are interleaved", async () => {
await using tmp = await tmpdir() await using tmp = await tmpdir()
await Instance.provide({ await Instance.provide({
directory: tmp.path, directory: tmp.path,
@ -248,14 +254,12 @@ describe("acp.agent event subscription", () => {
controller.push({ controller.push({
directory: cwd, directory: cwd,
payload: { payload: {
type: "message.part.updated", type: "message.part.delta",
properties: { properties: {
part: { sessionID: sessionId,
sessionID: sessionId, messageID,
messageID, partID: `${messageID}_part`,
type: "text", field: "text",
synthetic: false,
},
delta, delta,
}, },
}, },
@ -402,14 +406,12 @@ describe("acp.agent event subscription", () => {
controller.push({ controller.push({
directory: cwd, directory: cwd,
payload: { payload: {
type: "message.part.updated", type: "message.part.delta",
properties: { properties: {
part: { sessionID: sessionB,
sessionID: sessionB, messageID: "msg_b",
messageID: "msg_b", partID: "msg_b_part",
type: "text", field: "text",
synthetic: false,
},
delta: "session_b_message", delta: "session_b_message",
}, },
}, },

View File

@ -2,7 +2,6 @@ import { test, expect } from "bun:test"
import os from "os" import os from "os"
import { PermissionNext } from "../../src/permission/next" import { PermissionNext } from "../../src/permission/next"
import { Instance } from "../../src/project/instance" import { Instance } from "../../src/project/instance"
import { Storage } from "../../src/storage/storage"
import { tmpdir } from "../fixture/fixture" import { tmpdir } from "../fixture/fixture"
// fromConfig tests // fromConfig tests

View File

@ -1,63 +1,70 @@
// IMPORTANT: Set env vars BEFORE any imports from src/ directory // IMPORTANT: Set env vars BEFORE any imports from src/ directory
// xdg-basedir reads env vars at import time, so we must set these first // xdg-basedir reads env vars at import time, so we must set these first
import os from "os" import os from "os";
import path from "path" import path from "path";
import fs from "fs/promises" import fs from "fs/promises";
import fsSync from "fs" import fsSync from "fs";
import { afterAll } from "bun:test" import { afterAll } from "bun:test";
const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid) // Set XDG env vars FIRST, before any src/ imports
await fs.mkdir(dir, { recursive: true }) const dir = path.join(os.tmpdir(), "opencode-test-data-" + process.pid);
await fs.mkdir(dir, { recursive: true });
afterAll(() => { afterAll(() => {
fsSync.rmSync(dir, { recursive: true, force: true }) fsSync.rmSync(dir, { recursive: true, force: true });
}) });
process.env["XDG_DATA_HOME"] = path.join(dir, "share");
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache");
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config");
process.env["XDG_STATE_HOME"] = path.join(dir, "state");
process.env["OPENCODE_MODELS_PATH"] = path.join(
import.meta.dir,
"tool",
"fixtures",
"models-api.json",
);
// Set test home directory to isolate tests from user's actual home directory // Set test home directory to isolate tests from user's actual home directory
// This prevents tests from picking up real user configs/skills from ~/.claude/skills // This prevents tests from picking up real user configs/skills from ~/.claude/skills
const testHome = path.join(dir, "home") const testHome = path.join(dir, "home");
await fs.mkdir(testHome, { recursive: true }) await fs.mkdir(testHome, { recursive: true });
process.env["OPENCODE_TEST_HOME"] = testHome process.env["OPENCODE_TEST_HOME"] = testHome;
// Set test managed config directory to isolate tests from system managed settings // Set test managed config directory to isolate tests from system managed settings
const testManagedConfigDir = path.join(dir, "managed") const testManagedConfigDir = path.join(dir, "managed");
process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir process.env["OPENCODE_TEST_MANAGED_CONFIG_DIR"] = testManagedConfigDir;
process.env["XDG_DATA_HOME"] = path.join(dir, "share")
process.env["XDG_CACHE_HOME"] = path.join(dir, "cache")
process.env["XDG_CONFIG_HOME"] = path.join(dir, "config")
process.env["XDG_STATE_HOME"] = path.join(dir, "state")
process.env["OPENCODE_MODELS_PATH"] = path.join(import.meta.dir, "tool", "fixtures", "models-api.json")
// Write the cache version file to prevent global/index.ts from clearing the cache // Write the cache version file to prevent global/index.ts from clearing the cache
const cacheDir = path.join(dir, "cache", "opencode") const cacheDir = path.join(dir, "cache", "opencode");
await fs.mkdir(cacheDir, { recursive: true }) await fs.mkdir(cacheDir, { recursive: true });
await fs.writeFile(path.join(cacheDir, "version"), "14") await fs.writeFile(path.join(cacheDir, "version"), "14");
// Clear provider env vars to ensure clean test state // Clear provider env vars to ensure clean test state
delete process.env["ANTHROPIC_API_KEY"] delete process.env["ANTHROPIC_API_KEY"];
delete process.env["OPENAI_API_KEY"] delete process.env["OPENAI_API_KEY"];
delete process.env["GOOGLE_API_KEY"] delete process.env["GOOGLE_API_KEY"];
delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"] delete process.env["GOOGLE_GENERATIVE_AI_API_KEY"];
delete process.env["AZURE_OPENAI_API_KEY"] delete process.env["AZURE_OPENAI_API_KEY"];
delete process.env["AWS_ACCESS_KEY_ID"] delete process.env["AWS_ACCESS_KEY_ID"];
delete process.env["AWS_PROFILE"] delete process.env["AWS_PROFILE"];
delete process.env["AWS_REGION"] delete process.env["AWS_REGION"];
delete process.env["AWS_BEARER_TOKEN_BEDROCK"] delete process.env["AWS_BEARER_TOKEN_BEDROCK"];
delete process.env["OPENROUTER_API_KEY"] delete process.env["OPENROUTER_API_KEY"];
delete process.env["GROQ_API_KEY"] delete process.env["GROQ_API_KEY"];
delete process.env["MISTRAL_API_KEY"] delete process.env["MISTRAL_API_KEY"];
delete process.env["PERPLEXITY_API_KEY"] delete process.env["PERPLEXITY_API_KEY"];
delete process.env["TOGETHER_API_KEY"] delete process.env["TOGETHER_API_KEY"];
delete process.env["XAI_API_KEY"] delete process.env["XAI_API_KEY"];
delete process.env["DEEPSEEK_API_KEY"] delete process.env["DEEPSEEK_API_KEY"];
delete process.env["FIREWORKS_API_KEY"] delete process.env["FIREWORKS_API_KEY"];
delete process.env["CEREBRAS_API_KEY"] delete process.env["CEREBRAS_API_KEY"];
delete process.env["SAMBANOVA_API_KEY"] delete process.env["SAMBANOVA_API_KEY"];
// Now safe to import from src/ // Now safe to import from src/
const { Log } = await import("../src/util/log") const { Log } = await import("../src/util/log");
Log.init({ Log.init({
print: false, print: false,
dev: true, dev: true,
level: "DEBUG", level: "DEBUG",
}) });

View File

@ -1,10 +1,10 @@
import { describe, expect, mock, test } from "bun:test" import { describe, expect, mock, test } from "bun:test"
import type { Project as ProjectNS } from "../../src/project/project" import { Project } from "../../src/project/project"
import { Log } from "../../src/util/log" import { Log } from "../../src/util/log"
import { Storage } from "../../src/storage/storage"
import { $ } from "bun" import { $ } from "bun"
import path from "path" import path from "path"
import { tmpdir } from "../fixture/fixture" import { tmpdir } from "../fixture/fixture"
import { GlobalBus } from "../../src/bus/global"
Log.init({ print: false }) Log.init({ print: false })
@ -152,38 +152,51 @@ describe("Project.fromDirectory with worktrees", () => {
const p = await loadProject() const p = await loadProject()
await using tmp = await tmpdir({ git: true }) await using tmp = await tmpdir({ git: true })
const worktreePath = path.join(tmp.path, "..", "worktree-test") const worktreePath = path.join(tmp.path, "..", path.basename(tmp.path) + "-worktree")
await $`git worktree add ${worktreePath} -b test-branch`.cwd(tmp.path).quiet() try {
await $`git worktree add ${worktreePath} -b test-branch-${Date.now()}`.cwd(tmp.path).quiet()
const { project, sandbox } = await p.fromDirectory(worktreePath) const { project, sandbox } = await p.fromDirectory(worktreePath)
expect(project.worktree).toBe(tmp.path) expect(project.worktree).toBe(tmp.path)
expect(sandbox).toBe(worktreePath) expect(sandbox).toBe(worktreePath)
expect(project.sandboxes).toContain(worktreePath) expect(project.sandboxes).toContain(worktreePath)
expect(project.sandboxes).not.toContain(tmp.path) expect(project.sandboxes).not.toContain(tmp.path)
} finally {
await $`git worktree remove ${worktreePath}`.cwd(tmp.path).quiet() await $`git worktree remove ${worktreePath}`
.cwd(tmp.path)
.quiet()
.catch(() => {})
}
}) })
test("should accumulate multiple worktrees in sandboxes", async () => { test("should accumulate multiple worktrees in sandboxes", async () => {
const p = await loadProject() const p = await loadProject()
await using tmp = await tmpdir({ git: true }) await using tmp = await tmpdir({ git: true })
const worktree1 = path.join(tmp.path, "..", "worktree-1") const worktree1 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt1")
const worktree2 = path.join(tmp.path, "..", "worktree-2") const worktree2 = path.join(tmp.path, "..", path.basename(tmp.path) + "-wt2")
await $`git worktree add ${worktree1} -b branch-1`.cwd(tmp.path).quiet() try {
await $`git worktree add ${worktree2} -b branch-2`.cwd(tmp.path).quiet() await $`git worktree add ${worktree1} -b branch-${Date.now()}`.cwd(tmp.path).quiet()
await $`git worktree add ${worktree2} -b branch-${Date.now() + 1}`.cwd(tmp.path).quiet()
await p.fromDirectory(worktree1) await p.fromDirectory(worktree1)
const { project } = await p.fromDirectory(worktree2) const { project } = await p.fromDirectory(worktree2)
expect(project.worktree).toBe(tmp.path) expect(project.worktree).toBe(tmp.path)
expect(project.sandboxes).toContain(worktree1) expect(project.sandboxes).toContain(worktree1)
expect(project.sandboxes).toContain(worktree2) expect(project.sandboxes).toContain(worktree2)
expect(project.sandboxes).not.toContain(tmp.path) expect(project.sandboxes).not.toContain(tmp.path)
} finally {
await $`git worktree remove ${worktree1}`.cwd(tmp.path).quiet() await $`git worktree remove ${worktree1}`
await $`git worktree remove ${worktree2}`.cwd(tmp.path).quiet() .cwd(tmp.path)
.quiet()
.catch(() => {})
await $`git worktree remove ${worktree2}`
.cwd(tmp.path)
.quiet()
.catch(() => {})
}
}) })
}) })
@ -198,11 +211,12 @@ describe("Project.discover", () => {
await p.discover(project) await p.discover(project)
const updated = await Storage.read<ProjectNS.Info>(["project", project.id]) const updated = Project.get(project.id)
expect(updated.icon).toBeDefined() expect(updated).toBeDefined()
expect(updated.icon?.url).toStartWith("data:") expect(updated!.icon).toBeDefined()
expect(updated.icon?.url).toContain("base64") expect(updated!.icon?.url).toStartWith("data:")
expect(updated.icon?.color).toBeUndefined() expect(updated!.icon?.url).toContain("base64")
expect(updated!.icon?.color).toBeUndefined()
}) })
test("should not discover non-image files", async () => { test("should not discover non-image files", async () => {
@ -214,7 +228,120 @@ describe("Project.discover", () => {
await p.discover(project) await p.discover(project)
const updated = await Storage.read<ProjectNS.Info>(["project", project.id]) const updated = Project.get(project.id)
expect(updated.icon).toBeUndefined() expect(updated).toBeDefined()
expect(updated!.icon).toBeUndefined()
})
})
describe("Project.update", () => {
test("should update name", async () => {
await using tmp = await tmpdir({ git: true })
const { project } = await Project.fromDirectory(tmp.path)
const updated = await Project.update({
projectID: project.id,
name: "New Project Name",
})
expect(updated.name).toBe("New Project Name")
const fromDb = Project.get(project.id)
expect(fromDb?.name).toBe("New Project Name")
})
test("should update icon url", async () => {
await using tmp = await tmpdir({ git: true })
const { project } = await Project.fromDirectory(tmp.path)
const updated = await Project.update({
projectID: project.id,
icon: { url: "https://example.com/icon.png" },
})
expect(updated.icon?.url).toBe("https://example.com/icon.png")
const fromDb = Project.get(project.id)
expect(fromDb?.icon?.url).toBe("https://example.com/icon.png")
})
test("should update icon color", async () => {
await using tmp = await tmpdir({ git: true })
const { project } = await Project.fromDirectory(tmp.path)
const updated = await Project.update({
projectID: project.id,
icon: { color: "#ff0000" },
})
expect(updated.icon?.color).toBe("#ff0000")
const fromDb = Project.get(project.id)
expect(fromDb?.icon?.color).toBe("#ff0000")
})
test("should update commands", async () => {
await using tmp = await tmpdir({ git: true })
const { project } = await Project.fromDirectory(tmp.path)
const updated = await Project.update({
projectID: project.id,
commands: { start: "npm run dev" },
})
expect(updated.commands?.start).toBe("npm run dev")
const fromDb = Project.get(project.id)
expect(fromDb?.commands?.start).toBe("npm run dev")
})
test("should throw error when project not found", async () => {
await using tmp = await tmpdir({ git: true })
await expect(
Project.update({
projectID: "nonexistent-project-id",
name: "Should Fail",
}),
).rejects.toThrow("Project not found: nonexistent-project-id")
})
test("should emit GlobalBus event on update", async () => {
await using tmp = await tmpdir({ git: true })
const { project } = await Project.fromDirectory(tmp.path)
let eventFired = false
let eventPayload: any = null
GlobalBus.on("event", (data) => {
eventFired = true
eventPayload = data
})
await Project.update({
projectID: project.id,
name: "Updated Name",
})
expect(eventFired).toBe(true)
expect(eventPayload.payload.type).toBe("project.updated")
expect(eventPayload.payload.properties.name).toBe("Updated Name")
})
test("should update multiple fields at once", async () => {
await using tmp = await tmpdir({ git: true })
const { project } = await Project.fromDirectory(tmp.path)
const updated = await Project.update({
projectID: project.id,
name: "Multi Update",
icon: { url: "https://example.com/favicon.ico", color: "#00ff00" },
commands: { start: "make start" },
})
expect(updated.name).toBe("Multi Update")
expect(updated.icon?.url).toBe("https://example.com/favicon.ico")
expect(updated.icon?.color).toBe("#00ff00")
expect(updated.commands?.start).toBe("make start")
}) })
}) })

View File

@ -0,0 +1,687 @@
import { describe, test, expect, beforeEach, afterEach } from "bun:test"
import { Database } from "bun:sqlite"
import { drizzle } from "drizzle-orm/bun-sqlite"
import { migrate } from "drizzle-orm/bun-sqlite/migrator"
import path from "path"
import fs from "fs/promises"
import { readFileSync, readdirSync } from "fs"
import { JsonMigration } from "../../src/storage/json-migration"
import { Global } from "../../src/global"
import { ProjectTable } from "../../src/project/project.sql"
import { SessionTable, MessageTable, PartTable, TodoTable, PermissionTable } from "../../src/session/session.sql"
import { SessionShareTable } from "../../src/share/share.sql"
// Test fixtures
const fixtures = {
project: {
id: "proj_test123abc",
name: "Test Project",
worktree: "/test/path",
vcs: "git" as const,
sandboxes: [],
},
session: {
id: "ses_test456def",
projectID: "proj_test123abc",
slug: "test-session",
directory: "/test/path",
title: "Test Session",
version: "1.0.0",
time: { created: 1700000000000, updated: 1700000001000 },
},
message: {
id: "msg_test789ghi",
sessionID: "ses_test456def",
role: "user" as const,
agent: "default",
model: { providerID: "openai", modelID: "gpt-4" },
time: { created: 1700000000000 },
},
part: {
id: "prt_testabc123",
messageID: "msg_test789ghi",
sessionID: "ses_test456def",
type: "text" as const,
text: "Hello, world!",
},
}
// Helper to create test storage directory structure
async function setupStorageDir() {
const storageDir = path.join(Global.Path.data, "storage")
await fs.rm(storageDir, { recursive: true, force: true })
await fs.mkdir(path.join(storageDir, "project"), { recursive: true })
await fs.mkdir(path.join(storageDir, "session", "proj_test123abc"), { recursive: true })
await fs.mkdir(path.join(storageDir, "message", "ses_test456def"), { recursive: true })
await fs.mkdir(path.join(storageDir, "part", "msg_test789ghi"), { recursive: true })
await fs.mkdir(path.join(storageDir, "session_diff"), { recursive: true })
await fs.mkdir(path.join(storageDir, "todo"), { recursive: true })
await fs.mkdir(path.join(storageDir, "permission"), { recursive: true })
await fs.mkdir(path.join(storageDir, "session_share"), { recursive: true })
// Create legacy marker to indicate JSON storage exists
await Bun.write(path.join(storageDir, "migration"), "1")
return storageDir
}
async function writeProject(storageDir: string, project: Record<string, unknown>) {
await Bun.write(path.join(storageDir, "project", `${project.id}.json`), JSON.stringify(project))
}
async function writeSession(storageDir: string, projectID: string, session: Record<string, unknown>) {
await Bun.write(path.join(storageDir, "session", projectID, `${session.id}.json`), JSON.stringify(session))
}
// Helper to create in-memory test database with schema
function createTestDb() {
const sqlite = new Database(":memory:")
sqlite.exec("PRAGMA foreign_keys = ON")
// Apply schema migrations using drizzle migrate
const dir = path.join(import.meta.dirname, "../../migration")
const entries = readdirSync(dir, { withFileTypes: true })
const migrations = entries
.filter((entry) => entry.isDirectory())
.map((entry) => ({
sql: readFileSync(path.join(dir, entry.name, "migration.sql"), "utf-8"),
timestamp: Number(entry.name.split("_")[0]),
}))
.sort((a, b) => a.timestamp - b.timestamp)
migrate(drizzle({ client: sqlite }), migrations)
return sqlite
}
describe("JSON to SQLite migration", () => {
let storageDir: string
let sqlite: Database
beforeEach(async () => {
storageDir = await setupStorageDir()
sqlite = createTestDb()
})
afterEach(async () => {
sqlite.close()
await fs.rm(storageDir, { recursive: true, force: true })
})
test("migrates project", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/test/path",
vcs: "git",
name: "Test Project",
time: { created: 1700000000000, updated: 1700000001000 },
sandboxes: ["/test/sandbox"],
})
const stats = await JsonMigration.run(sqlite)
expect(stats?.projects).toBe(1)
const db = drizzle({ client: sqlite })
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1)
expect(projects[0].id).toBe("proj_test123abc")
expect(projects[0].worktree).toBe("/test/path")
expect(projects[0].name).toBe("Test Project")
expect(projects[0].sandboxes).toEqual(["/test/sandbox"])
})
test("migrates project with commands", async () => {
await writeProject(storageDir, {
id: "proj_with_commands",
worktree: "/test/path",
vcs: "git",
name: "Project With Commands",
time: { created: 1700000000000, updated: 1700000001000 },
sandboxes: ["/test/sandbox"],
commands: { start: "npm run dev" },
})
const stats = await JsonMigration.run(sqlite)
expect(stats?.projects).toBe(1)
const db = drizzle({ client: sqlite })
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1)
expect(projects[0].id).toBe("proj_with_commands")
expect(projects[0].commands).toEqual({ start: "npm run dev" })
})
test("migrates project without commands field", async () => {
await writeProject(storageDir, {
id: "proj_no_commands",
worktree: "/test/path",
vcs: "git",
name: "Project Without Commands",
time: { created: 1700000000000, updated: 1700000001000 },
sandboxes: [],
})
const stats = await JsonMigration.run(sqlite)
expect(stats?.projects).toBe(1)
const db = drizzle({ client: sqlite })
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1)
expect(projects[0].id).toBe("proj_no_commands")
expect(projects[0].commands).toBeNull()
})
test("migrates session with individual columns", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/test/path",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", {
id: "ses_test456def",
projectID: "proj_test123abc",
slug: "test-session",
directory: "/test/dir",
title: "Test Session Title",
version: "1.0.0",
time: { created: 1700000000000, updated: 1700000001000 },
summary: { additions: 10, deletions: 5, files: 3 },
share: { url: "https://example.com/share" },
})
await JsonMigration.run(sqlite)
const db = drizzle({ client: sqlite })
const sessions = db.select().from(SessionTable).all()
expect(sessions.length).toBe(1)
expect(sessions[0].id).toBe("ses_test456def")
expect(sessions[0].project_id).toBe("proj_test123abc")
expect(sessions[0].slug).toBe("test-session")
expect(sessions[0].title).toBe("Test Session Title")
expect(sessions[0].summary_additions).toBe(10)
expect(sessions[0].summary_deletions).toBe(5)
expect(sessions[0].share_url).toBe("https://example.com/share")
})
test("migrates messages and parts", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
await Bun.write(
path.join(storageDir, "message", "ses_test456def", "msg_test789ghi.json"),
JSON.stringify({ ...fixtures.message }),
)
await Bun.write(
path.join(storageDir, "part", "msg_test789ghi", "prt_testabc123.json"),
JSON.stringify({ ...fixtures.part }),
)
const stats = await JsonMigration.run(sqlite)
expect(stats?.messages).toBe(1)
expect(stats?.parts).toBe(1)
const db = drizzle({ client: sqlite })
const messages = db.select().from(MessageTable).all()
expect(messages.length).toBe(1)
expect(messages[0].id).toBe("msg_test789ghi")
const parts = db.select().from(PartTable).all()
expect(parts.length).toBe(1)
expect(parts[0].id).toBe("prt_testabc123")
})
test("migrates legacy parts without ids in body", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
await Bun.write(
path.join(storageDir, "message", "ses_test456def", "msg_test789ghi.json"),
JSON.stringify({
role: "user",
agent: "default",
model: { providerID: "openai", modelID: "gpt-4" },
time: { created: 1700000000000 },
}),
)
await Bun.write(
path.join(storageDir, "part", "msg_test789ghi", "prt_testabc123.json"),
JSON.stringify({
type: "text",
text: "Hello, world!",
}),
)
const stats = await JsonMigration.run(sqlite)
expect(stats?.messages).toBe(1)
expect(stats?.parts).toBe(1)
const db = drizzle({ client: sqlite })
const messages = db.select().from(MessageTable).all()
expect(messages.length).toBe(1)
expect(messages[0].id).toBe("msg_test789ghi")
expect(messages[0].session_id).toBe("ses_test456def")
expect(messages[0].data).not.toHaveProperty("id")
expect(messages[0].data).not.toHaveProperty("sessionID")
const parts = db.select().from(PartTable).all()
expect(parts.length).toBe(1)
expect(parts[0].id).toBe("prt_testabc123")
expect(parts[0].message_id).toBe("msg_test789ghi")
expect(parts[0].session_id).toBe("ses_test456def")
expect(parts[0].data).not.toHaveProperty("id")
expect(parts[0].data).not.toHaveProperty("messageID")
expect(parts[0].data).not.toHaveProperty("sessionID")
})
test("skips orphaned sessions (no parent project)", async () => {
await Bun.write(
path.join(storageDir, "session", "proj_test123abc", "ses_orphan.json"),
JSON.stringify({
id: "ses_orphan",
projectID: "proj_nonexistent",
slug: "orphan",
directory: "/",
title: "Orphan",
version: "1.0.0",
time: { created: Date.now(), updated: Date.now() },
}),
)
const stats = await JsonMigration.run(sqlite)
expect(stats?.sessions).toBe(0)
})
test("is idempotent (running twice doesn't duplicate)", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await JsonMigration.run(sqlite)
await JsonMigration.run(sqlite)
const db = drizzle({ client: sqlite })
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1) // Still only 1 due to onConflictDoNothing
})
test("migrates todos", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
// Create todo file (named by sessionID, contains array of todos)
await Bun.write(
path.join(storageDir, "todo", "ses_test456def.json"),
JSON.stringify([
{
id: "todo_1",
content: "First todo",
status: "pending",
priority: "high",
},
{
id: "todo_2",
content: "Second todo",
status: "completed",
priority: "medium",
},
]),
)
const stats = await JsonMigration.run(sqlite)
expect(stats?.todos).toBe(2)
const db = drizzle({ client: sqlite })
const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
expect(todos.length).toBe(2)
expect(todos[0].content).toBe("First todo")
expect(todos[0].status).toBe("pending")
expect(todos[0].priority).toBe("high")
expect(todos[0].position).toBe(0)
expect(todos[1].content).toBe("Second todo")
expect(todos[1].position).toBe(1)
})
test("todos are ordered by position", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
await Bun.write(
path.join(storageDir, "todo", "ses_test456def.json"),
JSON.stringify([
{ content: "Third", status: "pending", priority: "low" },
{ content: "First", status: "pending", priority: "high" },
{ content: "Second", status: "in_progress", priority: "medium" },
]),
)
await JsonMigration.run(sqlite)
const db = drizzle({ client: sqlite })
const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
expect(todos.length).toBe(3)
expect(todos[0].content).toBe("Third")
expect(todos[0].position).toBe(0)
expect(todos[1].content).toBe("First")
expect(todos[1].position).toBe(1)
expect(todos[2].content).toBe("Second")
expect(todos[2].position).toBe(2)
})
test("migrates permissions", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
// Create permission file (named by projectID, contains array of rules)
const permissionData = [
{ permission: "file.read", pattern: "/test/file1.ts", action: "allow" as const },
{ permission: "file.write", pattern: "/test/file2.ts", action: "ask" as const },
{ permission: "command.run", pattern: "npm install", action: "deny" as const },
]
await Bun.write(path.join(storageDir, "permission", "proj_test123abc.json"), JSON.stringify(permissionData))
const stats = await JsonMigration.run(sqlite)
expect(stats?.permissions).toBe(1)
const db = drizzle({ client: sqlite })
const permissions = db.select().from(PermissionTable).all()
expect(permissions.length).toBe(1)
expect(permissions[0].project_id).toBe("proj_test123abc")
expect(permissions[0].data).toEqual(permissionData)
})
test("migrates session shares", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
// Create session share file (named by sessionID)
await Bun.write(
path.join(storageDir, "session_share", "ses_test456def.json"),
JSON.stringify({
id: "share_123",
secret: "supersecretkey",
url: "https://share.example.com/ses_test456def",
}),
)
const stats = await JsonMigration.run(sqlite)
expect(stats?.shares).toBe(1)
const db = drizzle({ client: sqlite })
const shares = db.select().from(SessionShareTable).all()
expect(shares.length).toBe(1)
expect(shares[0].session_id).toBe("ses_test456def")
expect(shares[0].id).toBe("share_123")
expect(shares[0].secret).toBe("supersecretkey")
expect(shares[0].url).toBe("https://share.example.com/ses_test456def")
})
test("returns empty stats when storage directory does not exist", async () => {
await fs.rm(storageDir, { recursive: true, force: true })
const stats = await JsonMigration.run(sqlite)
expect(stats.projects).toBe(0)
expect(stats.sessions).toBe(0)
expect(stats.messages).toBe(0)
expect(stats.parts).toBe(0)
expect(stats.todos).toBe(0)
expect(stats.permissions).toBe(0)
expect(stats.shares).toBe(0)
expect(stats.errors).toEqual([])
})
test("continues when a JSON file is unreadable and records an error", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await Bun.write(path.join(storageDir, "project", "broken.json"), "{ invalid json")
const stats = await JsonMigration.run(sqlite)
expect(stats.projects).toBe(1)
expect(stats.errors.some((x) => x.includes("failed to read") && x.includes("broken.json"))).toBe(true)
const db = drizzle({ client: sqlite })
const projects = db.select().from(ProjectTable).all()
expect(projects.length).toBe(1)
expect(projects[0].id).toBe("proj_test123abc")
})
test("skips invalid todo entries while preserving source positions", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
await Bun.write(
path.join(storageDir, "todo", "ses_test456def.json"),
JSON.stringify([
{ content: "keep-0", status: "pending", priority: "high" },
{ content: "drop-1", priority: "low" },
{ content: "keep-2", status: "completed", priority: "medium" },
]),
)
const stats = await JsonMigration.run(sqlite)
expect(stats.todos).toBe(2)
const db = drizzle({ client: sqlite })
const todos = db.select().from(TodoTable).orderBy(TodoTable.position).all()
expect(todos.length).toBe(2)
expect(todos[0].content).toBe("keep-0")
expect(todos[0].position).toBe(0)
expect(todos[1].content).toBe("keep-2")
expect(todos[1].position).toBe(2)
})
test("skips orphaned todos, permissions, and shares", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/",
time: { created: Date.now(), updated: Date.now() },
sandboxes: [],
})
await writeSession(storageDir, "proj_test123abc", { ...fixtures.session })
await Bun.write(
path.join(storageDir, "todo", "ses_test456def.json"),
JSON.stringify([{ content: "valid", status: "pending", priority: "high" }]),
)
await Bun.write(
path.join(storageDir, "todo", "ses_missing.json"),
JSON.stringify([{ content: "orphan", status: "pending", priority: "high" }]),
)
await Bun.write(
path.join(storageDir, "permission", "proj_test123abc.json"),
JSON.stringify([{ permission: "file.read" }]),
)
await Bun.write(
path.join(storageDir, "permission", "proj_missing.json"),
JSON.stringify([{ permission: "file.write" }]),
)
await Bun.write(
path.join(storageDir, "session_share", "ses_test456def.json"),
JSON.stringify({ id: "share_ok", secret: "secret", url: "https://ok.example.com" }),
)
await Bun.write(
path.join(storageDir, "session_share", "ses_missing.json"),
JSON.stringify({ id: "share_missing", secret: "secret", url: "https://missing.example.com" }),
)
const stats = await JsonMigration.run(sqlite)
expect(stats.todos).toBe(1)
expect(stats.permissions).toBe(1)
expect(stats.shares).toBe(1)
const db = drizzle({ client: sqlite })
expect(db.select().from(TodoTable).all().length).toBe(1)
expect(db.select().from(PermissionTable).all().length).toBe(1)
expect(db.select().from(SessionShareTable).all().length).toBe(1)
})
test("handles mixed corruption and partial validity in one migration run", async () => {
await writeProject(storageDir, {
id: "proj_test123abc",
worktree: "/ok",
time: { created: 1700000000000, updated: 1700000001000 },
sandboxes: [],
})
await Bun.write(
path.join(storageDir, "project", "proj_missing_id.json"),
JSON.stringify({ worktree: "/bad", sandboxes: [] }),
)
await Bun.write(path.join(storageDir, "project", "proj_broken.json"), "{ nope")
await writeSession(storageDir, "proj_test123abc", {
id: "ses_test456def",
projectID: "proj_test123abc",
slug: "ok",
directory: "/ok",
title: "Ok",
version: "1",
time: { created: 1700000000000, updated: 1700000001000 },
})
await Bun.write(
path.join(storageDir, "session", "proj_test123abc", "ses_missing_project.json"),
JSON.stringify({
id: "ses_missing_project",
slug: "bad",
directory: "/bad",
title: "Bad",
version: "1",
}),
)
await Bun.write(
path.join(storageDir, "session", "proj_test123abc", "ses_orphan.json"),
JSON.stringify({
id: "ses_orphan",
projectID: "proj_missing",
slug: "orphan",
directory: "/bad",
title: "Orphan",
version: "1",
}),
)
await Bun.write(
path.join(storageDir, "message", "ses_test456def", "msg_ok.json"),
JSON.stringify({ role: "user", time: { created: 1700000000000 } }),
)
await Bun.write(path.join(storageDir, "message", "ses_test456def", "msg_broken.json"), "{ nope")
await Bun.write(
path.join(storageDir, "message", "ses_missing", "msg_orphan.json"),
JSON.stringify({ role: "user", time: { created: 1700000000000 } }),
)
await Bun.write(
path.join(storageDir, "part", "msg_ok", "part_ok.json"),
JSON.stringify({ type: "text", text: "ok" }),
)
await Bun.write(
path.join(storageDir, "part", "msg_missing", "part_missing_message.json"),
JSON.stringify({ type: "text", text: "bad" }),
)
await Bun.write(path.join(storageDir, "part", "msg_ok", "part_broken.json"), "{ nope")
await Bun.write(
path.join(storageDir, "todo", "ses_test456def.json"),
JSON.stringify([
{ content: "ok", status: "pending", priority: "high" },
{ content: "skip", status: "pending" },
]),
)
await Bun.write(
path.join(storageDir, "todo", "ses_missing.json"),
JSON.stringify([{ content: "orphan", status: "pending", priority: "high" }]),
)
await Bun.write(path.join(storageDir, "todo", "ses_broken.json"), "{ nope")
await Bun.write(
path.join(storageDir, "permission", "proj_test123abc.json"),
JSON.stringify([{ permission: "file.read" }]),
)
await Bun.write(
path.join(storageDir, "permission", "proj_missing.json"),
JSON.stringify([{ permission: "file.write" }]),
)
await Bun.write(path.join(storageDir, "permission", "proj_broken.json"), "{ nope")
await Bun.write(
path.join(storageDir, "session_share", "ses_test456def.json"),
JSON.stringify({ id: "share_ok", secret: "secret", url: "https://ok.example.com" }),
)
await Bun.write(
path.join(storageDir, "session_share", "ses_missing.json"),
JSON.stringify({ id: "share_orphan", secret: "secret", url: "https://missing.example.com" }),
)
await Bun.write(path.join(storageDir, "session_share", "ses_broken.json"), "{ nope")
const stats = await JsonMigration.run(sqlite)
expect(stats.projects).toBe(1)
expect(stats.sessions).toBe(1)
expect(stats.messages).toBe(1)
expect(stats.parts).toBe(1)
expect(stats.todos).toBe(1)
expect(stats.permissions).toBe(1)
expect(stats.shares).toBe(1)
expect(stats.errors.length).toBeGreaterThanOrEqual(6)
const db = drizzle({ client: sqlite })
expect(db.select().from(ProjectTable).all().length).toBe(1)
expect(db.select().from(SessionTable).all().length).toBe(1)
expect(db.select().from(MessageTable).all().length).toBe(1)
expect(db.select().from(PartTable).all().length).toBe(1)
expect(db.select().from(TodoTable).all().length).toBe(1)
expect(db.select().from(PermissionTable).all().length).toBe(1)
expect(db.select().from(SessionShareTable).all().length).toBe(1)
})
})

View File

View File

@@ -525,7 +525,17 @@ export type EventMessagePartUpdated = {
   type: "message.part.updated"
   properties: {
     part: Part
-    delta?: string
+  }
+}
+
+export type EventMessagePartDelta = {
+  type: "message.part.delta"
+  properties: {
+    sessionID: string
+    messageID: string
+    partID: string
+    field: string
+    delta: string
   }
 }
 
@@ -695,10 +705,6 @@ export type Todo = {
    * Priority level of the task: high, medium, low
    */
   priority: string
-  /**
-   * Unique identifier for the todo item
-   */
-  id: string
 }
 
 export type EventTodoUpdated = {
@@ -948,6 +954,7 @@ export type Event =
   | EventMessageUpdated
   | EventMessageRemoved
   | EventMessagePartUpdated
+  | EventMessagePartDelta
   | EventMessagePartRemoved
   | EventPermissionAsked
   | EventPermissionReplied
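
The consumer-facing change in this file is the new `EventMessagePartDelta` type: rather than re-sending the whole part on every streaming update, the server can now emit just the text appended to one field of one part. As a minimal sketch (not the app's actual handler; `PartDeltaEvent`, `applyPartDelta`, and the `Map` of parts are assumptions made for illustration), a client could fold such an event into local state like this:

type PartDeltaEvent = {
  type: "message.part.delta"
  properties: { sessionID: string; messageID: string; partID: string; field: string; delta: string }
}

// Look up the part by id and append the delta to whichever string field the
// event names; a missing or non-string field starts from an empty string.
function applyPartDelta(parts: Map<string, Record<string, unknown>>, event: PartDeltaEvent) {
  const part = parts.get(event.properties.partID)
  if (!part) return
  const existing = part[event.properties.field]
  part[event.properties.field] = (typeof existing === "string" ? existing : "") + event.properties.delta
}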

View File

@@ -7338,12 +7338,40 @@
         "properties": {
           "part": {
             "$ref": "#/components/schemas/Part"
+          }
+        },
+        "required": ["part"]
+      }
+    },
+    "required": ["type", "properties"]
+  },
+  "Event.message.part.delta": {
+    "type": "object",
+    "properties": {
+      "type": {
+        "type": "string",
+        "const": "message.part.delta"
+      },
+      "properties": {
+        "type": "object",
+        "properties": {
+          "sessionID": {
+            "type": "string"
+          },
+          "messageID": {
+            "type": "string"
+          },
+          "partID": {
+            "type": "string"
+          },
+          "field": {
+            "type": "string"
           },
           "delta": {
             "type": "string"
           }
         },
-        "required": ["part"]
+        "required": ["sessionID", "messageID", "partID", "field", "delta"]
       }
     },
     "required": ["type", "properties"]
@@ -7757,13 +7785,9 @@
         "priority": {
           "description": "Priority level of the task: high, medium, low",
           "type": "string"
-        },
-        "id": {
-          "description": "Unique identifier for the todo item",
-          "type": "string"
         }
       },
-      "required": ["content", "status", "priority", "id"]
+      "required": ["content", "status", "priority"]
     },
     "Event.todo.updated": {
       "type": "object",
@@ -8434,6 +8458,9 @@
         {
           "$ref": "#/components/schemas/Event.message.part.updated"
         },
+        {
+          "$ref": "#/components/schemas/Event.message.part.delta"
+        },
         {
           "$ref": "#/components/schemas/Event.message.part.removed"
         },
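
For reference, an event conforming to the new `Event.message.part.delta` schema would look roughly like the object below; every id value is invented for illustration, and `text` is simply a plausible string field to stream.

// Hypothetical payload matching the Event.message.part.delta schema above.
const exampleDelta = {
  type: "message.part.delta",
  properties: {
    sessionID: "ses_example",
    messageID: "msg_example",
    partID: "prt_example",
    field: "text",
    delta: " world",
  },
} as const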