feat(core): initial implementation of syncing (#17814)

pull/19142/head
James Long 2026-03-25 10:47:40 -04:00 committed by GitHub
parent 0c0c6f3bdb
commit b0017bf1b9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
32 changed files with 4403 additions and 1760 deletions

View File

@ -0,0 +1,13 @@
CREATE TABLE `event_sequence` (
`aggregate_id` text PRIMARY KEY,
`seq` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `event` (
`id` text PRIMARY KEY,
`aggregate_id` text NOT NULL,
`seq` integer NOT NULL,
`type` text NOT NULL,
`data` text NOT NULL,
CONSTRAINT `fk_event_aggregate_id_event_sequence_aggregate_id_fk` FOREIGN KEY (`aggregate_id`) REFERENCES `event_sequence`(`aggregate_id`) ON DELETE CASCADE
);

File diff suppressed because it is too large Load Diff

View File

@ -8,6 +8,7 @@ import { AccessToken, AccountID, AccountRepoError, Info, OrgID, RefreshToken } f
export type AccountRow = (typeof AccountTable)["$inferSelect"] export type AccountRow = (typeof AccountTable)["$inferSelect"]
type DbClient = Parameters<typeof Database.use>[0] extends (db: infer T) => unknown ? T : never type DbClient = Parameters<typeof Database.use>[0] extends (db: infer T) => unknown ? T : never
type DbTransactionCallback<A> = Parameters<typeof Database.transaction<A>>[0]
const ACCOUNT_STATE_ID = 1 const ACCOUNT_STATE_ID = 1
@ -42,13 +43,13 @@ export class AccountRepo extends ServiceMap.Service<AccountRepo, AccountRepo.Ser
Effect.gen(function* () { Effect.gen(function* () {
const decode = Schema.decodeUnknownSync(Info) const decode = Schema.decodeUnknownSync(Info)
const query = <A>(f: (db: DbClient) => A) => const query = <A>(f: DbTransactionCallback<A>) =>
Effect.try({ Effect.try({
try: () => Database.use(f), try: () => Database.use(f),
catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }),
}) })
const tx = <A>(f: (db: DbClient) => A) => const tx = <A>(f: DbTransactionCallback<A>) =>
Effect.try({ Effect.try({
try: () => Database.transaction(f), try: () => Database.transaction(f),
catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }), catch: (cause) => new AccountRepoError({ message: "Database operation failed", cause }),

View File

@ -1,10 +1,7 @@
import z from "zod" import z from "zod"
import type { ZodType } from "zod" import type { ZodType } from "zod"
import { Log } from "../util/log"
export namespace BusEvent { export namespace BusEvent {
const log = Log.create({ service: "event" })
export type Definition = ReturnType<typeof define> export type Definition = ReturnType<typeof define>
const registry = new Map<string, Definition>() const registry = new Map<string, Definition>()

View File

@ -890,7 +890,7 @@ export const GithubRunCommand = cmd({
} }
let text = "" let text = ""
Bus.subscribe(MessageV2.Event.PartUpdated, async (evt) => { Bus.subscribe(MessageV2.Event.PartUpdated, (evt) => {
if (evt.properties.part.sessionID !== session.id) return if (evt.properties.part.sessionID !== session.id) return
//if (evt.properties.part.messageID === messageID) return //if (evt.properties.part.messageID === messageID) return
const part = evt.properties.part const part = evt.properties.part

View File

@ -710,7 +710,7 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
}) })
}) })
sdk.event.on(SessionApi.Event.Deleted.type, (evt) => { sdk.event.on("session.deleted", (evt) => {
if (route.data.type === "session" && route.data.sessionID === evt.properties.info.id) { if (route.data.type === "session" && route.data.sessionID === evt.properties.info.id) {
route.navigate({ type: "home" }) route.navigate({ type: "home" })
toast.show({ toast.show({
@ -720,7 +720,7 @@ function App(props: { onSnapshot?: () => Promise<string[]> }) {
} }
}) })
sdk.event.on(SessionApi.Event.Error.type, (evt) => { sdk.event.on("session.error", (evt) => {
const error = evt.properties.error const error = evt.properties.error
if (error && typeof error === "object" && error.name === "MessageAbortedError") return if (error && typeof error === "object" && error.name === "MessageAbortedError") return
const message = (() => { const message = (() => {

View File

@ -3,6 +3,7 @@ import { randomBytes } from "crypto"
export namespace Identifier { export namespace Identifier {
const prefixes = { const prefixes = {
event: "evt",
session: "ses", session: "ses",
message: "msg", message: "msg",
permission: "per", permission: "per",

View File

@ -3,7 +3,6 @@ import { Config } from "../config/config"
import { Bus } from "../bus" import { Bus } from "../bus"
import { Log } from "../util/log" import { Log } from "../util/log"
import { createOpencodeClient } from "@opencode-ai/sdk" import { createOpencodeClient } from "@opencode-ai/sdk"
import { Server } from "../server/server"
import { BunProc } from "../bun" import { BunProc } from "../bun"
import { Flag } from "../flag/flag" import { Flag } from "../flag/flag"
import { CodexAuthPlugin } from "./codex" import { CodexAuthPlugin } from "./codex"
@ -58,6 +57,8 @@ export namespace Plugin {
const hooks: Hooks[] = [] const hooks: Hooks[] = []
yield* Effect.promise(async () => { yield* Effect.promise(async () => {
const { Server } = await import("../server/server")
const client = createOpencodeClient({ const client = createOpencodeClient({
baseUrl: "http://localhost:4096", baseUrl: "http://localhost:4096",
directory: ctx.directory, directory: ctx.directory,

View File

@ -0,0 +1,28 @@
import z from "zod"
import sessionProjectors from "../session/projectors"
import { SyncEvent } from "@/sync"
import { Session } from "@/session"
import { SessionTable } from "@/session/session.sql"
import { Database, eq } from "@/storage/db"
export function initProjectors() {
SyncEvent.init({
projectors: sessionProjectors,
convertEvent: (type, data) => {
if (type === "session.updated") {
const id = (data as z.infer<typeof Session.Event.Updated.schema>).sessionID
const row = Database.use((db) => db.select().from(SessionTable).where(eq(SessionTable.id, id)).get())
if (!row) return data
return {
sessionID: id,
info: Session.fromRow(row),
}
}
return data
},
})
}
initProjectors()

View File

@ -6,7 +6,6 @@ import { BusEvent } from "@/bus/bus-event"
import { Bus } from "@/bus" import { Bus } from "@/bus"
import { lazy } from "../../util/lazy" import { lazy } from "../../util/lazy"
import { AsyncQueue } from "../../util/queue" import { AsyncQueue } from "../../util/queue"
import { Instance } from "@/project/instance"
const log = Log.create({ service: "server" }) const log = Log.create({ service: "server" })
@ -53,13 +52,6 @@ export const EventRoutes = lazy(() =>
) )
}, 10_000) }, 10_000)
const unsub = Bus.subscribeAll((event) => {
q.push(JSON.stringify(event))
if (event.type === Bus.InstanceDisposed.type) {
stop()
}
})
const stop = () => { const stop = () => {
if (done) return if (done) return
done = true done = true
@ -69,6 +61,13 @@ export const EventRoutes = lazy(() =>
log.info("event disconnected") log.info("event disconnected")
} }
const unsub = Bus.subscribeAll((event) => {
q.push(JSON.stringify(event))
if (event.type === Bus.InstanceDisposed.type) {
stop()
}
})
stream.onAbort(stop) stream.onAbort(stop)
try { try {

View File

@ -1,9 +1,9 @@
import { Hono } from "hono" import { Hono, type Context } from "hono"
import { describeRoute, validator, resolver } from "hono-openapi" import { describeRoute, resolver, validator } from "hono-openapi"
import { streamSSE } from "hono/streaming" import { streamSSE } from "hono/streaming"
import z from "zod" import z from "zod"
import { Bus } from "../../bus"
import { BusEvent } from "@/bus/bus-event" import { BusEvent } from "@/bus/bus-event"
import { SyncEvent } from "@/sync"
import { GlobalBus } from "@/bus/global" import { GlobalBus } from "@/bus/global"
import { AsyncQueue } from "@/util/queue" import { AsyncQueue } from "@/util/queue"
import { Instance } from "../../project/instance" import { Instance } from "../../project/instance"
@ -17,6 +17,56 @@ const log = Log.create({ service: "server" })
export const GlobalDisposedEvent = BusEvent.define("global.disposed", z.object({})) export const GlobalDisposedEvent = BusEvent.define("global.disposed", z.object({}))
async function streamEvents(c: Context, subscribe: (q: AsyncQueue<string | null>) => () => void) {
return streamSSE(c, async (stream) => {
const q = new AsyncQueue<string | null>()
let done = false
q.push(
JSON.stringify({
payload: {
type: "server.connected",
properties: {},
},
}),
)
// Send heartbeat every 10s to prevent stalled proxy streams.
const heartbeat = setInterval(() => {
q.push(
JSON.stringify({
payload: {
type: "server.heartbeat",
properties: {},
},
}),
)
}, 10_000)
const stop = () => {
if (done) return
done = true
clearInterval(heartbeat)
unsub()
q.push(null)
log.info("global event disconnected")
}
const unsub = subscribe(q)
stream.onAbort(stop)
try {
for await (const data of q) {
if (data === null) return
await stream.writeSSE({ data })
}
} finally {
stop()
}
})
}
export const GlobalRoutes = lazy(() => export const GlobalRoutes = lazy(() =>
new Hono() new Hono()
.get( .get(
@ -70,55 +120,58 @@ export const GlobalRoutes = lazy(() =>
log.info("global event connected") log.info("global event connected")
c.header("X-Accel-Buffering", "no") c.header("X-Accel-Buffering", "no")
c.header("X-Content-Type-Options", "nosniff") c.header("X-Content-Type-Options", "nosniff")
return streamSSE(c, async (stream) => {
const q = new AsyncQueue<string | null>()
let done = false
q.push(
JSON.stringify({
payload: {
type: "server.connected",
properties: {},
},
}),
)
// Send heartbeat every 10s to prevent stalled proxy streams.
const heartbeat = setInterval(() => {
q.push(
JSON.stringify({
payload: {
type: "server.heartbeat",
properties: {},
},
}),
)
}, 10_000)
return streamEvents(c, (q) => {
async function handler(event: any) { async function handler(event: any) {
q.push(JSON.stringify(event)) q.push(JSON.stringify(event))
} }
GlobalBus.on("event", handler) GlobalBus.on("event", handler)
return () => GlobalBus.off("event", handler)
const stop = () => { })
if (done) return },
done = true )
clearInterval(heartbeat) .get(
GlobalBus.off("event", handler) "/sync-event",
q.push(null) describeRoute({
log.info("event disconnected") summary: "Subscribe to global sync events",
} description: "Get global sync events",
operationId: "global.sync-event.subscribe",
stream.onAbort(stop) responses: {
200: {
try { description: "Event stream",
for await (const data of q) { content: {
if (data === null) return "text/event-stream": {
await stream.writeSSE({ data }) schema: resolver(
} z
} finally { .object({
stop() payload: SyncEvent.payloads(),
} })
.meta({
ref: "SyncEvent",
}),
),
},
},
},
},
}),
async (c) => {
log.info("global sync event connected")
c.header("X-Accel-Buffering", "no")
c.header("X-Content-Type-Options", "nosniff")
return streamEvents(c, (q) => {
return SyncEvent.subscribeAll(({ def, event }) => {
// TODO: don't pass def, just pass the type (and it should
// be versioned)
q.push(
JSON.stringify({
payload: {
...event,
type: SyncEvent.versionedType(def.type, def.version),
},
}),
)
})
}) })
}, },
) )

View File

@ -281,14 +281,14 @@ export const SessionRoutes = lazy(() =>
const sessionID = c.req.valid("param").sessionID const sessionID = c.req.valid("param").sessionID
const updates = c.req.valid("json") const updates = c.req.valid("json")
let session = await Session.get(sessionID)
if (updates.title !== undefined) { if (updates.title !== undefined) {
session = await Session.setTitle({ sessionID, title: updates.title }) await Session.setTitle({ sessionID, title: updates.title })
} }
if (updates.time?.archived !== undefined) { if (updates.time?.archived !== undefined) {
session = await Session.setArchived({ sessionID, time: updates.time.archived }) await Session.setArchived({ sessionID, time: updates.time.archived })
} }
const session = await Session.get(sessionID)
return c.json(session) return c.json(session)
}, },
) )

View File

@ -44,6 +44,7 @@ import { PermissionRoutes } from "./routes/permission"
import { GlobalRoutes } from "./routes/global" import { GlobalRoutes } from "./routes/global"
import { MDNS } from "./mdns" import { MDNS } from "./mdns"
import { lazy } from "@/util/lazy" import { lazy } from "@/util/lazy"
import { initProjectors } from "./projectors"
// @ts-ignore This global is needed to prevent ai-sdk from logging warnings to stdout https://github.com/vercel/ai/blob/2dc67e0ef538307f21368db32d5a12345d98831b/packages/ai/src/logger/log-warnings.ts#L85 // @ts-ignore This global is needed to prevent ai-sdk from logging warnings to stdout https://github.com/vercel/ai/blob/2dc67e0ef538307f21368db32d5a12345d98831b/packages/ai/src/logger/log-warnings.ts#L85
globalThis.AI_SDK_LOG_WARNINGS = false globalThis.AI_SDK_LOG_WARNINGS = false
@ -51,6 +52,8 @@ globalThis.AI_SDK_LOG_WARNINGS = false
const csp = (hash = "") => const csp = (hash = "") =>
`default-src 'self'; script-src 'self' 'wasm-unsafe-eval'${hash ? ` 'sha256-${hash}'` : ""}; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; media-src 'self' data:; connect-src 'self' data:` `default-src 'self'; script-src 'self' 'wasm-unsafe-eval'${hash ? ` 'sha256-${hash}'` : ""}; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; media-src 'self' data:; connect-src 'self' data:`
initProjectors()
export namespace Server { export namespace Server {
const log = Log.create({ service: "server" }) const log = Log.create({ service: "server" })

View File

@ -9,12 +9,14 @@ import { Config } from "../config/config"
import { Flag } from "../flag/flag" import { Flag } from "../flag/flag"
import { Installation } from "../installation" import { Installation } from "../installation"
import { Database, NotFoundError, eq, and, or, gte, isNull, desc, like, inArray, lt } from "../storage/db" import { Database, NotFoundError, eq, and, gte, isNull, desc, like, inArray, lt } from "../storage/db"
import { SyncEvent } from "../sync"
import type { SQL } from "../storage/db" import type { SQL } from "../storage/db"
import { SessionTable, MessageTable, PartTable } from "./session.sql" import { SessionTable } from "./session.sql"
import { ProjectTable } from "../project/project.sql" import { ProjectTable } from "../project/project.sql"
import { Storage } from "@/storage/storage" import { Storage } from "@/storage/storage"
import { Log } from "../util/log" import { Log } from "../util/log"
import { updateSchema } from "../util/update-schema"
import { MessageV2 } from "./message-v2" import { MessageV2 } from "./message-v2"
import { Instance } from "../project/instance" import { Instance } from "../project/instance"
import { SessionPrompt } from "./prompt" import { SessionPrompt } from "./prompt"
@ -182,24 +184,40 @@ export namespace Session {
export type GlobalInfo = z.output<typeof GlobalInfo> export type GlobalInfo = z.output<typeof GlobalInfo>
export const Event = { export const Event = {
Created: BusEvent.define( Created: SyncEvent.define({
"session.created", type: "session.created",
z.object({ version: 1,
aggregate: "sessionID",
schema: z.object({
sessionID: SessionID.zod,
info: Info, info: Info,
}), }),
), }),
Updated: BusEvent.define( Updated: SyncEvent.define({
"session.updated", type: "session.updated",
z.object({ version: 1,
aggregate: "sessionID",
schema: z.object({
sessionID: SessionID.zod,
info: updateSchema(Info).extend({
share: updateSchema(Info.shape.share.unwrap()).optional(),
time: updateSchema(Info.shape.time).optional(),
}),
}),
busSchema: z.object({
sessionID: SessionID.zod,
info: Info, info: Info,
}), }),
), }),
Deleted: BusEvent.define( Deleted: SyncEvent.define({
"session.deleted", type: "session.deleted",
z.object({ version: 1,
aggregate: "sessionID",
schema: z.object({
sessionID: SessionID.zod,
info: Info, info: Info,
}), }),
), }),
Diff: BusEvent.define( Diff: BusEvent.define(
"session.diff", "session.diff",
z.object({ z.object({
@ -280,18 +298,8 @@ export namespace Session {
) )
export const touch = fn(SessionID.zod, async (sessionID) => { export const touch = fn(SessionID.zod, async (sessionID) => {
const now = Date.now() const time = Date.now()
Database.use((db) => { SyncEvent.run(Event.Updated, { sessionID, info: { time: { updated: time } } })
const row = db
.update(SessionTable)
.set({ time_updated: now })
.where(eq(SessionTable.id, sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
}) })
export async function createNext(input: { export async function createNext(input: {
@ -318,22 +326,25 @@ export namespace Session {
}, },
} }
log.info("created", result) log.info("created", result)
Database.use((db) => {
db.insert(SessionTable).values(toRow(result)).run() SyncEvent.run(Event.Created, { sessionID: result.id, info: result })
Database.effect(() =>
Bus.publish(Event.Created, {
info: result,
}),
)
})
const cfg = await Config.get() const cfg = await Config.get()
if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto")) if (!result.parentID && (Flag.OPENCODE_AUTO_SHARE || cfg.share === "auto")) {
share(result.id).catch(() => { share(result.id).catch(() => {
// Silently ignore sharing errors during session creation // Silently ignore sharing errors during session creation
}) })
Bus.publish(Event.Updated, { }
info: result,
}) if (!Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) {
// This only exist for backwards compatibility. We should not be
// manually publishing this event; it is a sync event now
Bus.publish(Event.Updated, {
sessionID: result.id,
info: result,
})
}
return result return result
} }
@ -357,12 +368,9 @@ export namespace Session {
} }
const { ShareNext } = await import("@/share/share-next") const { ShareNext } = await import("@/share/share-next")
const share = await ShareNext.create(id) const share = await ShareNext.create(id)
Database.use((db) => {
const row = db.update(SessionTable).set({ share_url: share.url }).where(eq(SessionTable.id, id)).returning().get() SyncEvent.run(Event.Updated, { sessionID: id, info: { share: { url: share.url } } })
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
return share return share
}) })
@ -370,12 +378,8 @@ export namespace Session {
// Use ShareNext to remove the share (same as share function uses ShareNext to create) // Use ShareNext to remove the share (same as share function uses ShareNext to create)
const { ShareNext } = await import("@/share/share-next") const { ShareNext } = await import("@/share/share-next")
await ShareNext.remove(id) await ShareNext.remove(id)
Database.use((db) => {
const row = db.update(SessionTable).set({ share_url: null }).where(eq(SessionTable.id, id)).returning().get() SyncEvent.run(Event.Updated, { sessionID: id, info: { share: { url: null } } })
if (!row) throw new NotFoundError({ message: `Session not found: ${id}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
})
}) })
export const setTitle = fn( export const setTitle = fn(
@ -384,18 +388,7 @@ export namespace Session {
title: z.string(), title: z.string(),
}), }),
async (input) => { async (input) => {
return Database.use((db) => { SyncEvent.run(Event.Updated, { sessionID: input.sessionID, info: { title: input.title } })
const row = db
.update(SessionTable)
.set({ title: input.title })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
}, },
) )
@ -405,18 +398,7 @@ export namespace Session {
time: z.number().optional(), time: z.number().optional(),
}), }),
async (input) => { async (input) => {
return Database.use((db) => { SyncEvent.run(Event.Updated, { sessionID: input.sessionID, info: { time: { archived: input.time } } })
const row = db
.update(SessionTable)
.set({ time_archived: input.time })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
})
}, },
) )
@ -426,17 +408,9 @@ export namespace Session {
permission: Permission.Ruleset, permission: Permission.Ruleset,
}), }),
async (input) => { async (input) => {
return Database.use((db) => { SyncEvent.run(Event.Updated, {
const row = db sessionID: input.sessionID,
.update(SessionTable) info: { permission: input.permission, time: { updated: Date.now() } },
.set({ permission: input.permission, time_updated: Date.now() })
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
}) })
}, },
) )
@ -448,42 +422,24 @@ export namespace Session {
summary: Info.shape.summary, summary: Info.shape.summary,
}), }),
async (input) => { async (input) => {
return Database.use((db) => { SyncEvent.run(Event.Updated, {
const row = db sessionID: input.sessionID,
.update(SessionTable) info: {
.set({ summary: input.summary,
revert: input.revert ?? null, time: { updated: Date.now() },
summary_additions: input.summary?.additions, revert: input.revert,
summary_deletions: input.summary?.deletions, },
summary_files: input.summary?.files,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
}) })
}, },
) )
export const clearRevert = fn(SessionID.zod, async (sessionID) => { export const clearRevert = fn(SessionID.zod, async (sessionID) => {
return Database.use((db) => { SyncEvent.run(Event.Updated, {
const row = db sessionID,
.update(SessionTable) info: {
.set({ time: { updated: Date.now() },
revert: null, revert: null,
time_updated: Date.now(), },
})
.where(eq(SessionTable.id, sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
}) })
}) })
@ -493,22 +449,12 @@ export namespace Session {
summary: Info.shape.summary, summary: Info.shape.summary,
}), }),
async (input) => { async (input) => {
return Database.use((db) => { SyncEvent.run(Event.Updated, {
const row = db sessionID: input.sessionID,
.update(SessionTable) info: {
.set({ time: { updated: Date.now() },
summary_additions: input.summary?.additions, summary: input.summary,
summary_deletions: input.summary?.deletions, },
summary_files: input.summary?.files,
time_updated: Date.now(),
})
.where(eq(SessionTable.id, input.sessionID))
.returning()
.get()
if (!row) throw new NotFoundError({ message: `Session not found: ${input.sessionID}` })
const info = fromRow(row)
Database.effect(() => Bus.publish(Event.Updated, { info }))
return info
}) })
}, },
) )
@ -662,46 +608,28 @@ export namespace Session {
}) })
export const remove = fn(SessionID.zod, async (sessionID) => { export const remove = fn(SessionID.zod, async (sessionID) => {
const project = Instance.project
try { try {
const session = await get(sessionID) const session = await get(sessionID)
for (const child of await children(sessionID)) { for (const child of await children(sessionID)) {
await remove(child.id) await remove(child.id)
} }
await unshare(sessionID).catch(() => {}) await unshare(sessionID).catch(() => {})
// CASCADE delete handles messages and parts automatically
Database.use((db) => { SyncEvent.run(Event.Deleted, { sessionID, info: session })
db.delete(SessionTable).where(eq(SessionTable.id, sessionID)).run()
Database.effect(() => // Eagerly remove event sourcing data to free up space
Bus.publish(Event.Deleted, { SyncEvent.remove(sessionID)
info: session,
}),
)
})
} catch (e) { } catch (e) {
log.error(e) log.error(e)
} }
}) })
export const updateMessage = fn(MessageV2.Info, async (msg) => { export const updateMessage = fn(MessageV2.Info, async (msg) => {
const time_created = msg.time.created SyncEvent.run(MessageV2.Event.Updated, {
const { id, sessionID, ...data } = msg sessionID: msg.sessionID,
Database.use((db) => { info: msg,
db.insert(MessageTable)
.values({
id,
session_id: sessionID,
time_created,
data,
})
.onConflictDoUpdate({ target: MessageTable.id, set: { data } })
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.Updated, {
info: msg,
}),
)
}) })
return msg return msg
}) })
@ -711,17 +639,9 @@ export namespace Session {
messageID: MessageID.zod, messageID: MessageID.zod,
}), }),
async (input) => { async (input) => {
// CASCADE delete handles parts automatically SyncEvent.run(MessageV2.Event.Removed, {
Database.use((db) => { sessionID: input.sessionID,
db.delete(MessageTable) messageID: input.messageID,
.where(and(eq(MessageTable.id, input.messageID), eq(MessageTable.session_id, input.sessionID)))
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.Removed, {
sessionID: input.sessionID,
messageID: input.messageID,
}),
)
}) })
return input.messageID return input.messageID
}, },
@ -734,17 +654,10 @@ export namespace Session {
partID: PartID.zod, partID: PartID.zod,
}), }),
async (input) => { async (input) => {
Database.use((db) => { SyncEvent.run(MessageV2.Event.PartRemoved, {
db.delete(PartTable) sessionID: input.sessionID,
.where(and(eq(PartTable.id, input.partID), eq(PartTable.session_id, input.sessionID))) messageID: input.messageID,
.run() partID: input.partID,
Database.effect(() =>
Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: input.sessionID,
messageID: input.messageID,
partID: input.partID,
}),
)
}) })
return input.partID return input.partID
}, },
@ -753,24 +666,10 @@ export namespace Session {
const UpdatePartInput = MessageV2.Part const UpdatePartInput = MessageV2.Part
export const updatePart = fn(UpdatePartInput, async (part) => { export const updatePart = fn(UpdatePartInput, async (part) => {
const { id, messageID, sessionID, ...data } = part SyncEvent.run(MessageV2.Event.PartUpdated, {
const time = Date.now() sessionID: part.sessionID,
Database.use((db) => { part: structuredClone(part),
db.insert(PartTable) time: Date.now(),
.values({
id,
message_id: messageID,
session_id: sessionID,
time_created: time,
data,
})
.onConflictDoUpdate({ target: PartTable.id, set: { data } })
.run()
Database.effect(() =>
Bus.publish(MessageV2.Event.PartUpdated, {
part: structuredClone(part),
}),
)
}) })
return part return part
}) })

View File

@ -6,11 +6,9 @@ import { APICallError, convertToModelMessages, LoadAPIKeyError, type ModelMessag
import { LSP } from "../lsp" import { LSP } from "../lsp"
import { Snapshot } from "@/snapshot" import { Snapshot } from "@/snapshot"
import { fn } from "@/util/fn" import { fn } from "@/util/fn"
import { SyncEvent } from "../sync"
import { Database, NotFoundError, and, desc, eq, inArray, lt, or } from "@/storage/db" import { Database, NotFoundError, and, desc, eq, inArray, lt, or } from "@/storage/db"
import { MessageTable, PartTable, SessionTable } from "./session.sql" import { MessageTable, PartTable, SessionTable } from "./session.sql"
import { ProviderTransform } from "@/provider/transform"
import { STATUS_CODES } from "http"
import { Storage } from "@/storage/storage"
import { ProviderError } from "@/provider/error" import { ProviderError } from "@/provider/error"
import { iife } from "@/util/iife" import { iife } from "@/util/iife"
import type { SystemError } from "bun" import type { SystemError } from "bun"
@ -449,25 +447,34 @@ export namespace MessageV2 {
export type Info = z.infer<typeof Info> export type Info = z.infer<typeof Info>
export const Event = { export const Event = {
Updated: BusEvent.define( Updated: SyncEvent.define({
"message.updated", type: "message.updated",
z.object({ version: 1,
aggregate: "sessionID",
schema: z.object({
sessionID: SessionID.zod,
info: Info, info: Info,
}), }),
), }),
Removed: BusEvent.define( Removed: SyncEvent.define({
"message.removed", type: "message.removed",
z.object({ version: 1,
aggregate: "sessionID",
schema: z.object({
sessionID: SessionID.zod, sessionID: SessionID.zod,
messageID: MessageID.zod, messageID: MessageID.zod,
}), }),
), }),
PartUpdated: BusEvent.define( PartUpdated: SyncEvent.define({
"message.part.updated", type: "message.part.updated",
z.object({ version: 1,
aggregate: "sessionID",
schema: z.object({
sessionID: SessionID.zod,
part: Part, part: Part,
time: z.number(),
}), }),
), }),
PartDelta: BusEvent.define( PartDelta: BusEvent.define(
"message.part.delta", "message.part.delta",
z.object({ z.object({
@ -478,14 +485,16 @@ export namespace MessageV2 {
delta: z.string(), delta: z.string(),
}), }),
), ),
PartRemoved: BusEvent.define( PartRemoved: SyncEvent.define({
"message.part.removed", type: "message.part.removed",
z.object({ version: 1,
aggregate: "sessionID",
schema: z.object({
sessionID: SessionID.zod, sessionID: SessionID.zod,
messageID: MessageID.zod, messageID: MessageID.zod,
partID: PartID.zod, partID: PartID.zod,
}), }),
), }),
} }
export const WithParts = z.object({ export const WithParts = z.object({

View File

@ -0,0 +1,116 @@
import { NotFoundError, eq, and } from "../storage/db"
import { SyncEvent } from "@/sync"
import { Session } from "./index"
import { MessageV2 } from "./message-v2"
import { SessionTable, MessageTable, PartTable } from "./session.sql"
import { ProjectTable } from "../project/project.sql"
export type DeepPartial<T> = T extends object ? { [K in keyof T]?: DeepPartial<T[K]> | null } : T
function grab<T extends object, K1 extends keyof T, X>(
obj: T,
field1: K1,
cb?: (val: NonNullable<T[K1]>) => X,
): X | undefined {
if (obj == undefined || !(field1 in obj)) return undefined
const val = obj[field1]
if (val && typeof val === "object" && cb) {
return cb(val)
}
if (val === undefined) {
throw new Error(
"Session update failure: pass `null` to clear a field instead of `undefined`: " + JSON.stringify(obj),
)
}
return val as X | undefined
}
export function toPartialRow(info: DeepPartial<Session.Info>) {
const obj = {
id: grab(info, "id"),
project_id: grab(info, "projectID"),
workspace_id: grab(info, "workspaceID"),
parent_id: grab(info, "parentID"),
slug: grab(info, "slug"),
directory: grab(info, "directory"),
title: grab(info, "title"),
version: grab(info, "version"),
share_url: grab(info, "share", (v) => grab(v, "url")),
summary_additions: grab(info, "summary", (v) => grab(v, "additions")),
summary_deletions: grab(info, "summary", (v) => grab(v, "deletions")),
summary_files: grab(info, "summary", (v) => grab(v, "files")),
summary_diffs: grab(info, "summary", (v) => grab(v, "diffs")),
revert: grab(info, "revert"),
permission: grab(info, "permission"),
time_created: grab(info, "time", (v) => grab(v, "created")),
time_updated: grab(info, "time", (v) => grab(v, "updated")),
time_compacting: grab(info, "time", (v) => grab(v, "compacting")),
time_archived: grab(info, "time", (v) => grab(v, "archived")),
}
return Object.fromEntries(Object.entries(obj).filter(([_, val]) => val !== undefined))
}
// Projectors for session/message/part sync events. Each entry applies the
// event's mutation to the local database; they are installed via
// `SyncEvent.init` elsewhere.
export default [
  // A freshly-created session becomes a new row.
  SyncEvent.project(Session.Event.Created, (db, payload) => {
    const row = Session.toRow(payload.info)
    db.insert(SessionTable).values(row).run()
  }),
  // Apply a partial update; fail loudly if the session row is missing.
  SyncEvent.project(Session.Event.Updated, (db, payload) => {
    const updated = db
      .update(SessionTable)
      .set(toPartialRow(payload.info))
      .where(eq(SessionTable.id, payload.sessionID))
      .returning()
      .get()
    if (!updated) throw new NotFoundError({ message: `Session not found: ${payload.sessionID}` })
  }),
  SyncEvent.project(Session.Event.Deleted, (db, payload) => {
    db.delete(SessionTable).where(eq(SessionTable.id, payload.sessionID)).run()
  }),
  // Upsert a message; `data` holds everything except the id/session columns.
  SyncEvent.project(MessageV2.Event.Updated, (db, payload) => {
    const { id, sessionID, ...data } = payload.info
    db.insert(MessageTable)
      .values({
        id,
        session_id: sessionID,
        time_created: payload.info.time.created,
        data,
      })
      .onConflictDoUpdate({ target: MessageTable.id, set: { data } })
      .run()
  }),
  SyncEvent.project(MessageV2.Event.Removed, (db, payload) => {
    db.delete(MessageTable)
      .where(and(eq(MessageTable.id, payload.messageID), eq(MessageTable.session_id, payload.sessionID)))
      .run()
  }),
  SyncEvent.project(MessageV2.Event.PartRemoved, (db, payload) => {
    db.delete(PartTable)
      .where(and(eq(PartTable.id, payload.partID), eq(PartTable.session_id, payload.sessionID)))
      .run()
  }),
  // Upsert a message part, keyed by part id.
  SyncEvent.project(MessageV2.Event.PartUpdated, (db, payload) => {
    const { id, messageID, sessionID, ...data } = payload.part
    db.insert(PartTable)
      .values({
        id,
        message_id: messageID,
        session_id: sessionID,
        time_created: payload.time,
        data,
      })
      .onConflictDoUpdate({ target: PartTable.id, set: { data } })
      .run()
  }),
]

View File

@ -4,8 +4,7 @@ import { Snapshot } from "../snapshot"
import { MessageV2 } from "./message-v2" import { MessageV2 } from "./message-v2"
import { Session } from "." import { Session } from "."
import { Log } from "../util/log" import { Log } from "../util/log"
import { Database, eq } from "../storage/db" import { SyncEvent } from "../sync"
import { MessageTable, PartTable } from "./session.sql"
import { Storage } from "@/storage/storage" import { Storage } from "@/storage/storage"
import { Bus } from "../bus" import { Bus } from "../bus"
import { SessionPrompt } from "./prompt" import { SessionPrompt } from "./prompt"
@ -113,8 +112,10 @@ export namespace SessionRevert {
remove.push(msg) remove.push(msg)
} }
for (const msg of remove) { for (const msg of remove) {
Database.use((db) => db.delete(MessageTable).where(eq(MessageTable.id, msg.info.id)).run()) SyncEvent.run(MessageV2.Event.Removed, {
await Bus.publish(MessageV2.Event.Removed, { sessionID: sessionID, messageID: msg.info.id }) sessionID: sessionID,
messageID: msg.info.id,
})
} }
if (session.revert.partID && target) { if (session.revert.partID && target) {
const partID = session.revert.partID const partID = session.revert.partID
@ -124,8 +125,7 @@ export namespace SessionRevert {
const removeParts = target.parts.slice(removeStart) const removeParts = target.parts.slice(removeStart)
target.parts = preserveParts target.parts = preserveParts
for (const part of removeParts) { for (const part of removeParts) {
Database.use((db) => db.delete(PartTable).where(eq(PartTable.id, part.id)).run()) SyncEvent.run(MessageV2.Event.PartRemoved, {
await Bus.publish(MessageV2.Event.PartRemoved, {
sessionID: sessionID, sessionID: sessionID,
messageID: target.info.id, messageID: target.info.id,
partID: part.id, partID: part.id,

View File

@ -66,29 +66,28 @@ export namespace ShareNext {
export async function init() { export async function init() {
if (disabled) return if (disabled) return
Bus.subscribe(Session.Event.Updated, async (evt) => { Bus.subscribe(Session.Event.Updated, async (evt) => {
await sync(evt.properties.info.id, [ const session = await Session.get(evt.properties.sessionID)
await sync(session.id, [
{ {
type: "session", type: "session",
data: evt.properties.info, data: session,
}, },
]) ])
}) })
Bus.subscribe(MessageV2.Event.Updated, async (evt) => { Bus.subscribe(MessageV2.Event.Updated, async (evt) => {
await sync(evt.properties.info.sessionID, [ const info = evt.properties.info
await sync(info.sessionID, [
{ {
type: "message", type: "message",
data: evt.properties.info, data: evt.properties.info,
}, },
]) ])
if (evt.properties.info.role === "user") { if (info.role === "user") {
await sync(evt.properties.info.sessionID, [ await sync(info.sessionID, [
{ {
type: "model", type: "model",
data: [ data: [await Provider.getModel(info.model.providerID, info.model.modelID).then((m) => m)],
await Provider.getModel(evt.properties.info.model.providerID, evt.properties.info.model.modelID).then(
(m) => m,
),
],
}, },
]) ])
} }

View File

@ -27,16 +27,20 @@ export const NotFoundError = NamedError.create(
const log = Log.create({ service: "db" }) const log = Log.create({ service: "db" })
export namespace Database { export namespace Database {
export const Path = iife(() => { export function getChannelPath() {
if (Flag.OPENCODE_DB) {
if (path.isAbsolute(Flag.OPENCODE_DB)) return Flag.OPENCODE_DB
return path.join(Global.Path.data, Flag.OPENCODE_DB)
}
const channel = Installation.CHANNEL const channel = Installation.CHANNEL
if (["latest", "beta"].includes(channel) || Flag.OPENCODE_DISABLE_CHANNEL_DB) if (["latest", "beta"].includes(channel) || Flag.OPENCODE_DISABLE_CHANNEL_DB)
return path.join(Global.Path.data, "opencode.db") return path.join(Global.Path.data, "opencode.db")
const safe = channel.replace(/[^a-zA-Z0-9._-]/g, "-") const safe = channel.replace(/[^a-zA-Z0-9._-]/g, "-")
return path.join(Global.Path.data, `opencode-${safe}.db`) return path.join(Global.Path.data, `opencode-${safe}.db`)
}
export const Path = iife(() => {
if (Flag.OPENCODE_DB) {
if (Flag.OPENCODE_DB === ":memory:" || path.isAbsolute(Flag.OPENCODE_DB)) return Flag.OPENCODE_DB
return path.join(Global.Path.data, Flag.OPENCODE_DB)
}
return getChannelPath()
}) })
export type Transaction = SQLiteTransaction<"sync", void> export type Transaction = SQLiteTransaction<"sync", void>
@ -145,17 +149,27 @@ export namespace Database {
} }
} }
export function transaction<T>(callback: (tx: TxOrDb) => T): T { type NotPromise<T> = T extends Promise<any> ? never : T
export function transaction<T>(
callback: (tx: TxOrDb) => NotPromise<T>,
options?: {
behavior?: "deferred" | "immediate" | "exclusive"
},
): NotPromise<T> {
try { try {
return callback(ctx.use().tx) return callback(ctx.use().tx)
} catch (err) { } catch (err) {
if (err instanceof Context.NotFound) { if (err instanceof Context.NotFound) {
const effects: (() => void | Promise<void>)[] = [] const effects: (() => void | Promise<void>)[] = []
const result = (Client().transaction as any)((tx: TxOrDb) => { const result = Client().transaction(
return ctx.provide({ tx, effects }, () => callback(tx)) (tx: TxOrDb) => {
}) return ctx.provide({ tx, effects }, () => callback(tx))
},
{ behavior: options?.behavior },
)
for (const effect of effects) effect() for (const effect of effects) effect()
return result return result as NotPromise<T>
} }
throw err throw err
} }

View File

@ -0,0 +1,181 @@
tl;dr All of these APIs work, are properly type-checked, and sync events are backwards compatible with `Bus`:
```ts
// The schema from `Updated` typechecks the object correctly
SyncEvent.run(Updated, { sessionID: id, info: { title: "foo"} })
// `subscribeAll` passes a generic sync event
SyncEvent.subscribeAll(event => {
// These will be type-checked correctly
event.id
event.seq
// This will be unknown because we are listening for all events,
// and this API is only used to record them
event.data
})
// This works, but you shouldn't publish sync event like this (should fail in the future)
Bus.publish(Updated, { sessionID: id, info: { title: "foo"} })
// Update event is fully type-checked
Bus.subscribe(Updated, event => event.properties.info.title)
// Update event is fully type-checked
client.subscribe("session.updated", evt => evt.properties.info.title)
```
# Goal
## Syncing with only one writer
This system defines a basic event sourcing system for session replayability. The goal is to allow for one device to control and modify the session, and allow multiple other devices to "sync" session data. The sync works by getting a log of events to replay and replaying them locally.
Because only one device is allowed to write, we don't need any kind of sophisticated distributed system clocks or causal ordering. We implement total ordering with a simple sequence id (a number) and increment it by one every time we generate an event.
## Bus event integration and backwards compatibility
This initial implementation aims to be fully backwards compatible. We should be able to land this without any visible changes to the user.
An existing `Bus` abstraction to send events already exists. We already send events like `session.created` through the system. We should not duplicate this.
The difference in event sourcing is events are sent _before_ the mutation happens, and "projectors" handle the effects and perform the mutations. This difference is subtle, and a necessary change for syncing to work.
So the goal is:
- Introduce a new syncing abstraction to handle event sourcing and projectors
- Seamlessly integrate these new events into the same existing `Bus` abstraction
- Maintain full backwards compatibility to reduce risk
## My approach
This directory introduces a new abstraction: `SyncEvent`. This handles all of the event sourcing.
There are now "sync events" which are different than "bus events". Bus events are defined like this:
```ts
const Diff = BusEvent.define(
"session.diff",
z.object({
sessionID: SessionID.zod,
diff: Snapshot.FileDiff.array(),
}),
)
```
You can do `Bus.publish(Diff, { ... })` to push these events, and `Bus.subscribe(Diff, handler)` to listen to them.
Sync events are a lower-level abstraction which are similar, but also handle the requirements for recording and replaying. Defining them looks like this:
```ts
const Created = SyncEvent.define({
type: "session.created",
version: 1,
aggregate: "sessionID",
schema: z.object({
sessionID: SessionID.zod,
info: Info,
}),
})
```
Not too different, except they track a version and an "aggregate" field (will explain that later).
You do this to run an event, which is kind of like `Bus.publish` except that it runs through the event sourcing system:
```
SyncEvent.run(Created, { ... })
```
The data passed as the second argument is properly type-checked based on the schema defined in `Created`.
Importantly, **sync events automatically re-publish as bus events**. This makes them backwards compatible, and allows the `Bus` to still be the single abstraction that the system uses to listen for individual events.
**We have upgraded many of the session events to be sync events** (all of the ones that mutate the db). Sync and bus events are largely compatible. Here are the differences:
### Event shape
- The shape of the events are slightly different. A sync event has the `type`, `id`, `seq`, `aggregateID`, and `data` fields. A bus event has the `type` and `properties` fields. `data` and `properties` are largely the same thing. This conversion is automatically handled when the sync system re-publishes the event through the bus.
The reason for this is because sync events need to track more information. I chose not to copy the `properties` naming to more clearly disambiguate the event types.
### Event flow
There is no way to subscribe to individual sync events in `SyncEvent`. You can use `subscribeAll` to receive _all_ of the events, which is needed for clients that want to record them.
To listen for individual events, use `Bus.subscribe`. You can pass in a sync event definition to it: `Bus.subscribe(Created, handler)`. This is fully supported.
You should never "publish" a sync event however: `Bus.publish(Created, ...)`. I would like to force this to be a type error in the future. You should never be touching the db directly, and should not be manually handling these events.
### Backwards compatibility
The system installs projectors in `server/projectors.js`. It calls `SyncEvent.init` to do this. It also installs a hook for dynamically converting an event at runtime (`convertEvent`).
This allows you to "reshape" an event from the sync system before it's published to the bus. This should be avoided, but might be necessary for temporary backwards compat.
The only time we use this is the `session.updated` event. Previously this event contained the entire session object. The sync event only contains the fields that were updated. We convert the event to contain the full object for backwards compatibility (but ideally we'd remove this).
It's very important that types are correct when working with events. Event definitions have a `schema` which carries the definition of the event shape (provided by a zod schema, inferred into a TypeScript type). Examples:
```ts
// The schema from `Updated` typechecks the object correctly
SyncEvent.run(Updated, { sessionID: id, info: { title: "foo"} })
// `subscribeAll` passes a generic sync event
SyncEvent.subscribeAll(event => {
// These will be type-checked correctly
event.id
event.seq
// This will be unknown because we are listening for all events,
// and this API is only used to record them
event.data
})
// This works, but you shouldn't publish sync event like this (should fail in the future)
Bus.publish(Updated, { sessionID: id, info: { title: "foo"} })
// Update event is fully type-checked
Bus.subscribe(Updated, event => event.properties.info.title)
// Update event is fully type-checked
client.subscribe("session.updated", evt => evt.properties.info.title)
```
The last two examples look similar to `SyncEvent.run`, but they were the cause of a lot of grief. Those are existing APIs that we can't break, but we are passing in the new sync event definitions to these APIs, which sometimes have a different event shape.
I previously mentioned the runtime conversion of events, but we still need the types to work! To do that, the `define` API supports an optional `busSchema` prop to give it the schema for backwards compatibility. For example this is the full definition of `Session.Update`:
```ts
const Update = SyncEvent.define({
type: "session.updated",
version: 1,
aggregate: "sessionID",
schema: z.object({
sessionID: SessionID.zod,
info: partialSchema(Info)
}),
busSchema: z.object({
sessionID: SessionID.zod,
info: Info,
}),
})
```
*Important*: the conversion done in `convertEvent` is not automatically type-checked with `busSchema`. It's very important they match, but because we need this at type-checking time this needs to live here.
Internally, the way this works is `busSchema` is stored on a `properties` field which is what the bus system expects. Doing this made everything with `Bus` "just work". This is why you can pass a sync event to the bus APIs.
*Alternatives*
These are some other paths I explored:
* Providing a way to subscribe to individual sync events, and change all the instances of `Bus.subscribe` in our code to it. Then you are directly only working with sync events always.
* Two big problems. First, `Bus` is instance-scoped, and we'd need to make the sync event system instance-scoped too for backwards compat. If we didn't, those listeners would get calls for events they weren't expecting.
* Second, we can't change consumers of our SDK. So they still have to use the old events, and we might as well stick with them for consistency
* Directly add sync event support to bus system
* I explored adding sync events to the bus, but due to backwards compat, it only made it more complicated (still need to support both shapes)
* I explored a `convertSchema` function to convert the event schema at runtime so we didn't need `busSchema`
* Fatal flaw: we need type-checking done earlier. We can't do this at run-time. This worked for consumers of our SDK (because it gets generated TS types from the converted schema) but breaks for our internal usage of `Bus.subscribe` calls
I explored many other permutations of the above solutions. What we have today I think is the best balance of backwards compatibility while opening a path forward for the new events.

View File

@ -0,0 +1,16 @@
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core"
// Latest sequence number assigned per aggregate (e.g. per session).
// Used to totally order events within an aggregate and to detect
// gaps or already-applied events when replaying.
export const EventSequenceTable = sqliteTable("event_sequence", {
  aggregate_id: text().notNull().primaryKey(),
  seq: integer().notNull(),
})
// Append-only log of sync events. `type` holds the versioned event type
// (the unversioned type plus a version suffix) and `data` is the event
// payload stored as JSON. Rows cascade-delete when the aggregate's
// sequence row is removed.
export const EventTable = sqliteTable("event", {
  id: text().primaryKey(),
  aggregate_id: text()
    .notNull()
    .references(() => EventSequenceTable.aggregate_id, { onDelete: "cascade" }),
  seq: integer().notNull(),
  type: text().notNull(),
  data: text({ mode: "json" }).$type<Record<string, unknown>>().notNull(),
})

View File

@ -0,0 +1,263 @@
import z from "zod"
import type { ZodObject } from "zod"
import { EventEmitter } from "events"
import { Database, eq } from "@/storage/db"
import { Bus as ProjectBus } from "@/bus"
import { BusEvent } from "@/bus/bus-event"
import { EventSequenceTable, EventTable } from "./event.sql"
import { EventID } from "./schema"
import { Flag } from "@/flag/flag"
export namespace SyncEvent {
  /**
   * A sync event definition. Compared to a plain bus event it also carries a
   * `version` (old versions stay registered for replay) and an `aggregate`
   * field name identifying which string property of the payload groups and
   * sequences events (e.g. "sessionID").
   */
  export type Definition = {
    type: string
    version: number
    aggregate: string
    schema: z.ZodObject
    // This is temporary and only exists for compatibility with bus
    // event definitions
    properties: z.ZodObject
  }
  /** A concrete event instance: unique id plus a per-aggregate sequence number. */
  export type Event<Def extends Definition = Definition> = {
    id: string
    seq: number
    aggregateID: string
    data: z.infer<Def["schema"]>
  }
  /** An event with its versioned type attached, as stored and exchanged. */
  export type SerializedEvent<Def extends Definition = Definition> = Event<Def> & { type: string }
  // A projector applies an event's mutation to the database.
  type ProjectorFunc = (db: Database.TxOrDb, data: unknown) => void
  // All known definitions, keyed by versioned type (e.g. "session.updated.1").
  export const registry = new Map<string, Definition>()
  // Installed by `init`; maps each definition to its projector.
  let projectors: Map<Definition, ProjectorFunc> | undefined
  // Latest known version per (unversioned) event type.
  const versions = new Map<string, number>()
  // Once frozen (in `init`), defining new events is an error.
  let frozen = false
  // Optional hook to reshape a payload before re-publishing on the bus
  // (backwards compatibility only; see `init`).
  let convertEvent: (type: string, event: Event["data"]) => Promise<Record<string, unknown>> | Record<string, unknown>
  // Internal emitter backing `subscribeAll` (used by clients recording events).
  const Bus = new EventEmitter<{ event: [{ def: Definition; event: Event }] }>()
  /**
   * Resets init-time state (projectors, freeze flag, convert hook) so `init`
   * may be called again. Note: `registry` and `versions` are not cleared —
   * module-level `define` calls only run once, so definitions persist.
   */
  export function reset() {
    frozen = false
    projectors = undefined
    convertEvent = (_, data) => data
  }
  /**
   * Installs projectors and freezes the system. Call once, after every
   * `define` has run.
   */
  export function init(input: { projectors: Array<[Definition, ProjectorFunc]>; convertEvent?: typeof convertEvent }) {
    projectors = new Map(input.projectors)
    // Install all the latest event defs to the bus. We only ever emit
    // latest versions from code, and keep around old versions for
    // replaying. Replaying does not go through the bus, and it
    // simplifies the bus to only use unversioned latest events
    for (let [type, version] of versions.entries()) {
      let def = registry.get(versionedType(type, version))!
      BusEvent.define(def.type, def.properties || def.schema)
    }
    // Freeze the system so it clearly errors if events are defined
    // after `init` which would cause bugs
    frozen = true
    convertEvent = input.convertEvent || ((_, data) => data)
  }
  /**
   * Builds the versioned type string used as the registry/storage key,
   * e.g. ("session.updated", 1) -> "session.updated.1". Without a version
   * the type is returned unchanged.
   */
  export function versionedType<A extends string>(type: A): A
  // Fixed: the declared return type previously used a "/" separator while
  // the implementation (and persisted rows) use ".".
  export function versionedType<A extends string, B extends number>(type: A, version: B): `${A}.${B}`
  export function versionedType(type: string, version?: number) {
    // `== null` rather than truthiness so a version of 0 is still honored
    return version == null ? type : `${type}.${version}`
  }
  /**
   * Defines a new sync event. `aggregate` must name a string field of
   * `schema`; its value groups events into one totally-ordered sequence.
   * `busSchema` optionally declares the (different) shape used when the
   * event is re-published on the bus — it must match what `convertEvent`
   * produces for this type (this is not checked automatically).
   */
  export function define<
    Type extends string,
    Agg extends string,
    Schema extends ZodObject<Record<Agg, z.ZodType<string>>>,
    BusSchema extends ZodObject = Schema,
  >(input: { type: Type; version: number; aggregate: Agg; schema: Schema; busSchema?: BusSchema }) {
    if (frozen) {
      throw new Error("Error defining sync event: sync system has been frozen")
    }
    const def = {
      type: input.type,
      version: input.version,
      aggregate: input.aggregate,
      schema: input.schema,
      properties: input.busSchema ? input.busSchema : input.schema,
    }
    versions.set(def.type, Math.max(def.version, versions.get(def.type) || 0))
    registry.set(versionedType(def.type, def.version), def)
    return def
  }
  /** Pairs a definition with its (type-checked) projector for `init`. */
  export function project<Def extends Definition>(
    def: Def,
    func: (db: Database.TxOrDb, data: Event<Def>["data"]) => void,
  ): [Definition, ProjectorFunc] {
    return [def, func as ProjectorFunc]
  }
  /**
   * Applies one event: runs its projector and (behind the experimental
   * workspaces flag) persists the event + sequence row, all in one
   * transaction. Emission to `subscribeAll` listeners and bus publishing
   * happen as post-commit effects.
   */
  function process<Def extends Definition>(def: Def, event: Event<Def>, options: { publish: boolean }) {
    if (projectors == null) {
      throw new Error("No projectors available. Call `SyncEvent.init` to install projectors")
    }
    const projector = projectors.get(def)
    if (!projector) {
      throw new Error(`Projector not found for event: ${def.type}`)
    }
    // idempotent: need to ignore any events already logged
    Database.transaction((tx) => {
      projector(tx, event.data)
      if (Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) {
        tx.insert(EventSequenceTable)
          .values({
            aggregate_id: event.aggregateID,
            seq: event.seq,
          })
          .onConflictDoUpdate({
            target: EventSequenceTable.aggregate_id,
            set: { seq: event.seq },
          })
          .run()
        tx.insert(EventTable)
          .values({
            id: event.id,
            seq: event.seq,
            aggregate_id: event.aggregateID,
            type: versionedType(def.type, def.version),
            data: event.data as Record<string, unknown>,
          })
          .run()
      }
      Database.effect(() => {
        Bus.emit("event", {
          def,
          event,
        })
        if (options.publish) {
          // NOTE(review): the bus definition is published with `def.schema`,
          // but `convertEvent` may reshape the payload to the `busSchema`
          // shape stored on `def.properties` — confirm whether `properties`
          // should be used here instead.
          const result = convertEvent(def.type, event.data)
          if (result instanceof Promise) {
            result.then((data) => {
              ProjectBus.publish({ type: def.type, properties: def.schema }, data)
            })
          } else {
            ProjectBus.publish({ type: def.type, properties: def.schema }, result)
          }
        }
      })
    })
  }
  // TODO:
  //
  // * Support applying multiple events at one time. One transaction,
  //   and it validates all the sequence ids
  // * when loading events from db, apply zod validation to ensure shape
  /**
   * Applies an externally-produced event. Already-seen sequence numbers are
   * ignored (idempotent); a gap in the sequence is an error. Does not
   * re-publish on the bus unless `republish` is set.
   */
  export function replay(event: SerializedEvent, options?: { republish: boolean }) {
    const def = registry.get(event.type)
    if (!def) {
      throw new Error(`Unknown event type: ${event.type}`)
    }
    const row = Database.use((db) =>
      db
        .select({ seq: EventSequenceTable.seq })
        .from(EventSequenceTable)
        .where(eq(EventSequenceTable.aggregate_id, event.aggregateID))
        .get(),
    )
    const latest = row?.seq ?? -1
    if (event.seq <= latest) {
      return
    }
    const expected = latest + 1
    if (event.seq !== expected) {
      throw new Error(`Sequence mismatch for aggregate "${event.aggregateID}": expected ${expected}, got ${event.seq}`)
    }
    process(def, event, { publish: !!options?.republish })
  }
  /**
   * Runs a new event through the sourcing system: allocates the next
   * sequence number for its aggregate, projects it, records it, and
   * re-publishes it on the bus. Only the latest version of a type may run.
   */
  export function run<Def extends Definition>(def: Def, data: Event<Def>["data"]) {
    const agg = (data as Record<string, string>)[def.aggregate]
    // This should never happen: we've enforced it via typescript in
    // the definition
    if (agg == null) {
      throw new Error(`SyncEvent.run: "${def.aggregate}" required but not found: ${JSON.stringify(data)}`)
    }
    if (def.version !== versions.get(def.type)) {
      throw new Error(`SyncEvent.run: running old versions of events is not allowed: ${def.type}`)
    }
    // Note that this is an "immediate" transaction which is critical.
    // We need to make sure we can safely read and write with nothing
    // else changing the data from under us
    Database.transaction(
      (tx) => {
        const id = EventID.ascending()
        const row = tx
          .select({ seq: EventSequenceTable.seq })
          .from(EventSequenceTable)
          .where(eq(EventSequenceTable.aggregate_id, agg))
          .get()
        const seq = row?.seq != null ? row.seq + 1 : 0
        const event = { id, seq, aggregateID: agg, data }
        process(def, event, { publish: true })
      },
      {
        behavior: "immediate",
      },
    )
  }
  /** Deletes an aggregate's entire event log and its sequence counter. */
  export function remove(aggregateID: string) {
    Database.transaction((tx) => {
      tx.delete(EventSequenceTable).where(eq(EventSequenceTable.aggregate_id, aggregateID)).run()
      tx.delete(EventTable).where(eq(EventTable.aggregate_id, aggregateID)).run()
    })
  }
  /**
   * Subscribes to every sync event (used by clients recording them).
   * Returns an unsubscribe function. To listen for individual events,
   * use the bus instead.
   */
  export function subscribeAll(handler: (event: { def: Definition; event: Event }) => void) {
    Bus.on("event", handler)
    return () => Bus.off("event", handler)
  }
  /** Builds a zod union describing every registered event payload (for API schemas). */
  export function payloads() {
    return z
      .union(
        registry
          .entries()
          .map(([type, def]) => {
            return z
              .object({
                type: z.literal(type),
                aggregate: z.literal(def.aggregate),
                data: def.schema,
              })
              .meta({
                ref: "SyncEvent" + "." + def.type,
              })
          })
          .toArray() as any,
      )
      .meta({
        ref: "SyncEvent",
      })
  }
}

View File

@ -0,0 +1,14 @@
import { Schema } from "effect"
import z from "zod"
import { Identifier } from "@/id/id"
import { withStatics } from "@/util/schema"
// Branded string type for sync-event ids, with static helpers attached:
// `make` wraps an existing id, `ascending` mints a new time-ordered
// "event"-prefixed identifier, and `zod` exposes an equivalent zod schema
// for use in zod-based event definitions.
export const EventID = Schema.String.pipe(
  Schema.brand("EventID"),
  withStatics((s) => ({
    make: (id: string) => s.makeUnsafe(id),
    ascending: (id?: string) => s.makeUnsafe(Identifier.ascending("event", id)),
    zod: Identifier.schema("event").pipe(z.custom<Schema.Schema.Type<typeof s>>()),
  })),
)

View File

@ -0,0 +1,13 @@
import z from "zod"
/**
 * Derives an "update" schema from an object schema: every field becomes
 * optional (omit = untouched) and nullable (null = clear the field).
 *
 * Fixed: the implementation previously applied only `.nullable()`, leaving
 * every field required — the `as unknown as` cast hid the mismatch with the
 * declared `ZodOptional<ZodNullable<T[K]>>` mapped type.
 */
export function updateSchema<T extends z.ZodRawShape>(schema: z.ZodObject<T>) {
  const next = {} as {
    [K in keyof T]: z.ZodOptional<z.ZodNullable<T[K]>>
  }
  // `.required()` first strips any pre-existing optionality so the
  // nullable/optional wrappers apply uniformly to the base field types.
  for (const [k, v] of Object.entries(schema.required().shape) as [keyof T & string, z.ZodTypeAny][]) {
    // Cast needed because Object.entries erases the per-key field type.
    next[k] = v.nullable().optional() as unknown as (typeof next)[typeof k]
  }
  return z.object(next)
}

View File

@ -60,6 +60,8 @@ function toolEvent(
const payload: EventMessagePartUpdated = { const payload: EventMessagePartUpdated = {
type: "message.part.updated", type: "message.part.updated",
properties: { properties: {
sessionID: sessionId,
time: Date.now(),
part: { part: {
id: `part_${opts.callID}`, id: `part_${opts.callID}`,
sessionID: sessionId, sessionID: sessionId,

View File

@ -74,11 +74,17 @@ delete process.env["SAMBANOVA_API_KEY"]
delete process.env["OPENCODE_SERVER_PASSWORD"] delete process.env["OPENCODE_SERVER_PASSWORD"]
delete process.env["OPENCODE_SERVER_USERNAME"] delete process.env["OPENCODE_SERVER_USERNAME"]
// Use in-memory sqlite
process.env["OPENCODE_DB"] = ":memory:"
// Now safe to import from src/ // Now safe to import from src/
const { Log } = await import("../src/util/log") const { Log } = await import("../src/util/log")
const { initProjectors } = await import("../src/server/projectors")
Log.init({ Log.init({
print: false, print: false,
dev: true, dev: true,
level: "DEBUG", level: "DEBUG",
}) })
initProjectors()

View File

@ -10,8 +10,8 @@ import { MessageID, PartID } from "../../src/session/schema"
const projectRoot = path.join(__dirname, "../..") const projectRoot = path.join(__dirname, "../..")
Log.init({ print: false }) Log.init({ print: false })
describe("session.started event", () => { describe("session.created event", () => {
test("should emit session.started event when session is created", async () => { test("should emit session.created event when session is created", async () => {
await Instance.provide({ await Instance.provide({
directory: projectRoot, directory: projectRoot,
fn: async () => { fn: async () => {
@ -41,14 +41,14 @@ describe("session.started event", () => {
}) })
}) })
test("session.started event should be emitted before session.updated", async () => { test("session.created event should be emitted before session.updated", async () => {
await Instance.provide({ await Instance.provide({
directory: projectRoot, directory: projectRoot,
fn: async () => { fn: async () => {
const events: string[] = [] const events: string[] = []
const unsubStarted = Bus.subscribe(Session.Event.Created, () => { const unsubCreated = Bus.subscribe(Session.Event.Created, () => {
events.push("started") events.push("created")
}) })
const unsubUpdated = Bus.subscribe(Session.Event.Updated, () => { const unsubUpdated = Bus.subscribe(Session.Event.Updated, () => {
@ -59,12 +59,12 @@ describe("session.started event", () => {
await new Promise((resolve) => setTimeout(resolve, 100)) await new Promise((resolve) => setTimeout(resolve, 100))
unsubStarted() unsubCreated()
unsubUpdated() unsubUpdated()
expect(events).toContain("started") expect(events).toContain("created")
expect(events).toContain("updated") expect(events).toContain("updated")
expect(events.indexOf("started")).toBeLessThan(events.indexOf("updated")) expect(events.indexOf("created")).toBeLessThan(events.indexOf("updated"))
await Session.remove(session.id) await Session.remove(session.id)
}, },

View File

@ -6,14 +6,9 @@ import { Database } from "../../src/storage/db"
describe("Database.Path", () => { describe("Database.Path", () => {
test("returns database path for the current channel", () => { test("returns database path for the current channel", () => {
const db = process.env["OPENCODE_DB"] const expected = ["latest", "beta"].includes(Installation.CHANNEL)
const expected = db ? path.join(Global.Path.data, "opencode.db")
? path.isAbsolute(db) : path.join(Global.Path.data, `opencode-${Installation.CHANNEL.replace(/[^a-zA-Z0-9._-]/g, "-")}.db`)
? db expect(Database.getChannelPath()).toBe(expected)
: path.join(Global.Path.data, db)
: ["latest", "beta"].includes(Installation.CHANNEL)
? path.join(Global.Path.data, "opencode.db")
: path.join(Global.Path.data, `opencode-${Installation.CHANNEL.replace(/[^a-zA-Z0-9._-]/g, "-")}.db`)
expect(Database.Path).toBe(expected)
}) })
}) })

View File

@ -0,0 +1,187 @@
import { describe, test, expect, beforeEach, afterEach, afterAll } from "bun:test"
import { tmpdir } from "../fixture/fixture"
import z from "zod"
import { Bus } from "../../src/bus"
import { Instance } from "../../src/project/instance"
import { SyncEvent } from "../../src/sync"
import { Database } from "../../src/storage/db"
import { EventTable } from "../../src/sync/event.sql"
import { Identifier } from "../../src/id/id"
import { Flag } from "../../src/flag/flag"
import { initProjectors } from "../../src/server/projectors"
// Sync-event persistence is gated behind the experimental workspaces flag;
// force it on for these tests and restore the original value afterwards.
const original = Flag.OPENCODE_EXPERIMENTAL_WORKSPACES
beforeEach(() => {
  // Close any open handle so each test starts against a fresh database.
  Database.close()
  // @ts-expect-error don't do this normally, but it works
  Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = true
})
afterEach(() => {
  // @ts-expect-error don't do this normally, but it works
  Flag.OPENCODE_EXPERIMENTAL_WORKSPACES = original
})
/**
 * Wraps a test body so it runs inside a fresh `Instance` rooted at a
 * temporary directory (cleaned up automatically via `await using`).
 */
function withInstance(body: () => void | Promise<void>) {
  return async () => {
    await using tmp = await tmpdir()
    await Instance.provide({
      directory: tmp.path,
      fn: async () => {
        await body()
      },
    })
  }
}
describe("SyncEvent", () => {
  // Re-initializes the sync system with two toy event definitions and no-op
  // projectors. `reset` clears projectors/freeze state so `init` can run again.
  function setup() {
    SyncEvent.reset()
    const Created = SyncEvent.define({
      type: "item.created",
      version: 1,
      aggregate: "id",
      schema: z.object({ id: z.string(), name: z.string() }),
    })
    const Sent = SyncEvent.define({
      type: "item.sent",
      version: 1,
      aggregate: "item_id",
      schema: z.object({ item_id: z.string(), to: z.string() }),
    })
    SyncEvent.init({
      projectors: [SyncEvent.project(Created, () => {}), SyncEvent.project(Sent, () => {})],
    })
    return { Created, Sent }
  }
  // Restore the real projectors so later test files see production wiring.
  afterAll(() => {
    SyncEvent.reset()
    initProjectors()
  })
  describe("run", () => {
    test(
      "inserts event row",
      withInstance(() => {
        const { Created } = setup()
        SyncEvent.run(Created, { id: "evt_1", name: "first" })
        const rows = Database.use((db) => db.select().from(EventTable).all())
        expect(rows).toHaveLength(1)
        // Stored type is the versioned form: "<type>.<version>"
        expect(rows[0].type).toBe("item.created.1")
        expect(rows[0].aggregate_id).toBe("evt_1")
      }),
    )
    test(
      "increments seq per aggregate",
      withInstance(() => {
        const { Created } = setup()
        SyncEvent.run(Created, { id: "evt_1", name: "first" })
        SyncEvent.run(Created, { id: "evt_1", name: "second" })
        const rows = Database.use((db) => db.select().from(EventTable).all())
        expect(rows).toHaveLength(2)
        expect(rows[1].seq).toBe(rows[0].seq + 1)
      }),
    )
    test(
      // NOTE(review): the name mentions agg(), but the aggregate is
      // configured via the `aggregate` field of the definition.
      "uses custom aggregate field from agg()",
      withInstance(() => {
        const { Sent } = setup()
        SyncEvent.run(Sent, { item_id: "evt_1", to: "james" })
        const rows = Database.use((db) => db.select().from(EventTable).all())
        expect(rows).toHaveLength(1)
        expect(rows[0].aggregate_id).toBe("evt_1")
      }),
    )
    test(
      "emits events",
      withInstance(async () => {
        const { Created } = setup()
        const events: Array<{
          type: string
          properties: { id: string; name: string }
        }> = []
        // Sync events are re-published through the regular bus, in the
        // bus shape (`properties`, unversioned `type`).
        const unsub = Bus.subscribeAll((event) => events.push(event))
        SyncEvent.run(Created, { id: "evt_1", name: "test" })
        expect(events).toHaveLength(1)
        expect(events[0]).toEqual({
          type: "item.created",
          properties: {
            id: "evt_1",
            name: "test",
          },
        })
        unsub()
      }),
    )
  })
  describe("replay", () => {
    test(
      "inserts event from external payload",
      withInstance(() => {
        const id = Identifier.descending("message")
        SyncEvent.replay({
          id: "evt_1",
          type: "item.created.1",
          seq: 0,
          aggregateID: id,
          data: { id, name: "replayed" },
        })
        const rows = Database.use((db) => db.select().from(EventTable).all())
        expect(rows).toHaveLength(1)
        expect(rows[0].aggregate_id).toBe(id)
      }),
    )
    test(
      "throws on sequence mismatch",
      withInstance(() => {
        const id = Identifier.descending("message")
        SyncEvent.replay({
          id: "evt_1",
          type: "item.created.1",
          seq: 0,
          aggregateID: id,
          data: { id, name: "first" },
        })
        // seq jumps from 0 to 5 — a gap, which replay must reject.
        expect(() =>
          SyncEvent.replay({
            id: "evt_1",
            type: "item.created.1",
            seq: 5,
            aggregateID: id,
            data: { id, name: "bad" },
          }),
        ).toThrow(/Sequence mismatch/)
      }),
    )
    test(
      "throws on unknown event type",
      withInstance(() => {
        expect(() =>
          SyncEvent.replay({
            id: "evt_1",
            type: "unknown.event.1",
            seq: 0,
            aggregateID: "x",
            data: {},
          }),
        ).toThrow(/Unknown event type/)
      }),
    )
  })
})

View File

@ -46,6 +46,7 @@ import type {
GlobalDisposeResponses, GlobalDisposeResponses,
GlobalEventResponses, GlobalEventResponses,
GlobalHealthResponses, GlobalHealthResponses,
GlobalSyncEventSubscribeResponses,
GlobalUpgradeErrors, GlobalUpgradeErrors,
GlobalUpgradeResponses, GlobalUpgradeResponses,
InstanceDisposeResponses, InstanceDisposeResponses,
@ -230,6 +231,20 @@ class HeyApiRegistry<T> {
} }
} }
export class SyncEvent extends HeyApiClient {
  /**
   * Subscribe to global sync events.
   *
   * Opens a server-sent-events stream from `/global/sync-event`.
   */
  public subscribe<ThrowOnError extends boolean = false>(options?: Options<never, ThrowOnError>) {
    const client = options?.client ?? this.client
    return client.sse.get<GlobalSyncEventSubscribeResponses, unknown, ThrowOnError>({
      url: "/global/sync-event",
      ...options,
    })
  }
}
export class Config extends HeyApiClient { export class Config extends HeyApiClient {
/** /**
* Get global configuration * Get global configuration
@ -329,6 +344,11 @@ export class Global extends HeyApiClient {
}) })
} }
private _syncEvent?: SyncEvent
// Lazily construct the SyncEvent sub-client on first access, reusing
// this client's underlying HTTP client.
get syncEvent(): SyncEvent {
return (this._syncEvent ??= new SyncEvent({ client: this.client }))
}
private _config?: Config private _config?: Config
get config(): Config { get config(): Config {
return (this._config ??= new Config({ client: this.client })) return (this._config ??= new Config({ client: this.client }))

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff