diff --git a/.gitignore b/.gitignore index d4dc7ecb5a..b07ff307d4 100644 --- a/.gitignore +++ b/.gitignore @@ -25,12 +25,19 @@ pnpm-debug.log* # pnpm workspace .pnpm-store/ +platform/.platform-dev-token # TypeScript *.tsbuildinfo dist/ frontend/build/ !frontend/packages/icons/dist/ +platform/api/openapi.json +platform/api/.env +platform/api/.env.* +!platform/api/.env.example +!platform/api/.env.*.example +platform/vercel-health-check/.next/ # IDE .vscode/ @@ -44,8 +51,10 @@ Thumbs.db secrets/**/* -tf/.terraform/**/* -tf/.terraform.lock.hcl +*.tfstate* +**/.terraform/**/* +**/.terraform.lock.hcl +platform/tf/terraform.tfvars # Frontend frontend/.env.* @@ -81,3 +90,6 @@ examples/*/public/ # Native addon binaries *.node + +# Sandbox deploy bundle (built by scripts/docker/build-sandbox.sh) +.sandbox-deploy/ diff --git a/Cargo.lock b/Cargo.lock index e4a1905a4b..8e0948fcd6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -938,6 +938,15 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "cookie" version = "0.18.1" @@ -1041,6 +1050,16 @@ dependencies = [ "typenum", ] +[[package]] +name = "ctor" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" +dependencies = [ + "quote", + "syn 2.0.104", +] + [[package]] name = "curve25519-dalek" version = "4.1.3" @@ -2605,6 +2624,17 @@ dependencies = [ "zstd-sys", ] +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + [[package]] name = "libz-sys" version = "1.1.22" @@ -2861,6 +2891,66 @@ dependencies = [ "vbare", ] +[[package]] +name = "napi" +version = "2.16.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55740c4ae1d8696773c78fdafd5d0e5fe9bc9f1b071c7ba493ba5c413a9184f3" +dependencies = [ + "bitflags", + "ctor", + "napi-derive", + "napi-sys", + "once_cell", + "serde", + "serde_json", + "tokio", +] + +[[package]] +name = "napi-build" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d376940fd5b723c6893cd1ee3f33abbfd86acb1cd1ec079f3ab04a2a3bc4d3b1" + +[[package]] +name = "napi-derive" +version = "2.16.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cbe2585d8ac223f7d34f13701434b9d5f4eb9c332cccce8dee57ea18ab8ab0c" +dependencies = [ + "cfg-if", + "convert_case", + "napi-derive-backend", + "proc-macro2", + "quote", + "syn 2.0.104", +] + +[[package]] +name = "napi-derive-backend" +version = "1.0.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1639aaa9eeb76e91c6ae66da8ce3e89e921cd3885e99ec85f4abacae72fc91bf" +dependencies = [ + "convert_case", + "once_cell", + "proc-macro2", + "quote", + "regex", + "semver", + "syn 2.0.104", +] + +[[package]] +name = "napi-sys" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "427802e8ec3a734331fec1035594a210ce1ff4dc5bc1950530920ab717964ea3" +dependencies = [ + "libloading", +] + [[package]] name = "native-tls" version = "0.2.14" @@ -5096,6 +5186,40 @@ dependencies = [ "tracing", ] +[[package]] +name = "rivetkit-native" +version = "2.2.1" +dependencies = [ + "anyhow", + "async-trait", + "base64 0.22.1", + "hex", + "libsqlite3-sys", + "napi", + "napi-build", + "napi-derive", + "rivet-envoy-client", + "rivet-envoy-protocol", + 
"rivetkit-sqlite-native", + "serde", + "serde_json", + "tokio", + "tracing", + "tracing-subscriber", + "uuid", +] + +[[package]] +name = "rivetkit-sqlite-native" +version = "2.1.6" +dependencies = [ + "async-trait", + "getrandom 0.2.16", + "libsqlite3-sys", + "tokio", + "tracing", +] + [[package]] name = "rocksdb" version = "0.24.0" diff --git a/Cargo.toml b/Cargo.toml index feed1aeaf1..4b7b531119 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -148,6 +148,10 @@ members = [ version = "0.26.0" features = [ "rustls-tls-native-roots" ] + [workspace.dependencies.rdkafka] + version = "0.38.0" + features = [ "ssl" ] + [workspace.dependencies.vergen] version = "9.0.4" features = [ "build", "cargo", "rustc" ] @@ -347,6 +351,11 @@ members = [ [workspace.dependencies.vbare-compiler] version = "0.0.4" + [workspace.dependencies.raw-foundationdb] + package = "foundationdb" + version = "0.9.2" + features = [ "fdb-7_3", "embedded-fdb-include" ] + [workspace.dependencies.rivet-api-builder] path = "engine/packages/api-builder" diff --git a/examples/sandbox/Dockerfile b/examples/sandbox/Dockerfile index 48152a1273..61e9f95861 100644 --- a/examples/sandbox/Dockerfile +++ b/examples/sandbox/Dockerfile @@ -1,20 +1,12 @@ FROM node:22-slim -RUN corepack enable && corepack prepare pnpm@10.13.1 --activate +RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates && rm -rf /var/lib/apt/lists/* WORKDIR /app -ENV NODE_OPTIONS=--max-old-space-size=7168 -COPY package.json pnpm-lock.yaml pnpm-workspace.yaml turbo.json tsconfig.base.json tsup.base.ts ./ -COPY examples/sandbox/ ./examples/sandbox/ -COPY rivetkit-typescript/packages/ ./rivetkit-typescript/packages/ -COPY engine/sdks/typescript/ ./engine/sdks/typescript/ -COPY shared/typescript/ ./shared/typescript/ - -RUN pnpm install --frozen-lockfile -RUN mkdir -p rivetkit-openapi rivetkit-asyncapi -RUN pnpm build --filter=sandbox -COPY examples/sandbox/public ./examples/sandbox/dist/public - -WORKDIR /app/examples/sandbox +# 
pnpm deploy produces a flat node_modules with no symlinks. +# dist/ contains the fully bundled vite output (ssr noExternal: true). +# dist/public/ contains the frontend static assets. +COPY . . +ENV PORT=8080 EXPOSE 8080 -CMD ["pnpm", "start"] +CMD ["node", "node_modules/srvx/bin/srvx.mjs", "dist/server.js"] diff --git a/examples/sandbox/scripts/bench-remote.ts b/examples/sandbox/scripts/bench-remote.ts new file mode 100644 index 0000000000..a2a0680633 --- /dev/null +++ b/examples/sandbox/scripts/bench-remote.ts @@ -0,0 +1,127 @@ +#!/usr/bin/env -S npx tsx + +/** + * Remote benchmark runner. Calls the /api/bench endpoint on Cloud Run + * so all actor calls happen within the datacenter. + * + * Also measures local baseline RTT for comparison. + * + * Usage: + * npx tsx scripts/bench-remote.ts [--filter ] + * + * Example: + * npx tsx scripts/bench-remote.ts https://kitchen-sink-staging-676044580344.us-east4.run.app + * npx tsx scripts/bench-remote.ts https://kitchen-sink-staging-676044580344.us-east4.run.app --filter wake + */ + +const args = process.argv.slice(2); +const CLOUD_RUN_URL = args.find((a) => !a.startsWith("--"))?.replace(/\/$/, ""); +const filterIdx = args.indexOf("--filter"); +const FILTER = filterIdx >= 0 ? 
args[filterIdx + 1] : undefined; + +if (!CLOUD_RUN_URL) { + console.error("Usage: npx tsx scripts/bench-remote.ts [--filter ]"); + process.exit(1); +} + +function fmt(n: number): string { + return `${n.toFixed(1)}ms`; +} + +interface BenchEntry { + group: string; + name: string; + e2eMs: number; + serverMs: number | null; + perOpMs: number | null; + failed?: boolean; + failReason?: string; +} + +function printTable(entries: BenchEntry[], baselineMs: number, localRttMs: number): void { + const nameW = Math.max(40, ...entries.map((e) => e.name.length)); + const sep = "─".repeat(nameW + 46); + + console.log(`\n┌${sep}┐`); + console.log( + `│ ${"Benchmark".padEnd(nameW)} ${"E2E".padStart(10)} ${"Server".padStart(10)} ${"Per-Op".padStart(10)} ${"RTT".padStart(8)} │`, + ); + console.log(`├${sep}┤`); + + let currentGroup = ""; + for (const e of entries) { + if (e.group !== currentGroup) { + if (currentGroup) console.log(`├${sep}┤`); + console.log(`│ ${`── ${e.group} ──`.padEnd(nameW + 44)} │`); + currentGroup = e.group; + } + if (e.failed) { + console.log(`│ ${e.name.padEnd(nameW)} ${"FAILED".padStart(10)} ${"".padStart(10)} ${"".padStart(10)} ${"".padStart(8)} │`); + } else { + const serverStr = e.serverMs != null ? fmt(e.serverMs) : fmt(Math.max(0, e.e2eMs - baselineMs)); + const perOpStr = e.perOpMs != null ? fmt(e.perOpMs) : ""; + const rtt = e.serverMs != null ? 
fmt(e.e2eMs - e.serverMs) : fmt(baselineMs); + console.log( + `│ ${e.name.padEnd(nameW)} ${fmt(e.e2eMs).padStart(10)} ${serverStr.padStart(10)} ${perOpStr.padStart(10)} ${rtt.padStart(8)} │`, + ); + } + } + console.log(`└${sep}┘`); +} + +async function main(): Promise { + console.log(`Rivet Actor Benchmark (server-side)`); + console.log(`Cloud Run: ${CLOUD_RUN_URL}`); + if (FILTER) console.log(`Filter: ${FILTER}`); + console.log(); + + // Measure local RTT to Cloud Run for reference + console.log("Measuring local RTT to Cloud Run..."); + await fetch(`${CLOUD_RUN_URL}/`); + await fetch(`${CLOUD_RUN_URL}/`); + const localTimes: number[] = []; + for (let i = 0; i < 10; i++) { + const start = performance.now(); + await fetch(`${CLOUD_RUN_URL}/`); + localTimes.push(performance.now() - start); + } + localTimes.sort((a, b) => a - b); + const localRttMs = localTimes[Math.floor(localTimes.length / 2)]; + console.log(`Local RTT to Cloud Run (median of 10): ${fmt(localRttMs)}\n`); + + // Call server-side bench endpoint + const benchUrl = FILTER + ? 
`${CLOUD_RUN_URL}/api/bench?filter=${encodeURIComponent(FILTER)}` + : `${CLOUD_RUN_URL}/api/bench`; + + console.log("Running benchmarks server-side..."); + console.log("(This may take a few minutes)\n"); + + const res = await fetch(benchUrl, { signal: AbortSignal.timeout(600_000) }); + if (!res.ok) { + const text = await res.text(); + console.error(`Bench endpoint failed: ${res.status}: ${text}`); + process.exit(1); + } + + const { baselineMs, entries } = (await res.json()) as { + baselineMs: number; + entries: BenchEntry[]; + }; + + console.log(`Server-side baseline RTT (datacenter): ${fmt(baselineMs)}`); + console.log(`Local RTT (client → Cloud Run): ${fmt(localRttMs)}`); + + printTable(entries, baselineMs, localRttMs); + + const passed = entries.filter((e: BenchEntry) => !e.failed); + const failed = entries.filter((e: BenchEntry) => e.failed); + console.log(`\nServer baseline RTT: ${fmt(baselineMs)}`); + console.log(`Local RTT: ${fmt(localRttMs)}`); + console.log(`Passed: ${passed.length}, Failed: ${failed.length}`); +} + +main().catch((err) => { + console.error("Benchmark failed:", err); + process.exit(1); +}); diff --git a/examples/sandbox/scripts/bench-sqlite.ts b/examples/sandbox/scripts/bench-sqlite.ts new file mode 100644 index 0000000000..149cf9b7b4 --- /dev/null +++ b/examples/sandbox/scripts/bench-sqlite.ts @@ -0,0 +1,313 @@ +#!/usr/bin/env -S npx tsx + +/** + * SQLite benchmark for the sandbox deployed on Rivet Cloud. + * + * Uses the Rivet gateway HTTP API with rvt-* query parameters. 
+ * + * Usage: + * npx tsx scripts/bench-sqlite.ts + * + * Example: + * npx tsx scripts/bench-sqlite.ts \ + * "https://my-ns:pk_token@api.staging.rivet.dev" + */ + +const RAW_ENDPOINT = process.argv[2]; +if (!RAW_ENDPOINT) { + console.error("Usage: npx tsx scripts/bench-sqlite.ts "); + process.exit(1); +} + +// Parse endpoint: https://namespace:token@host +const url = new URL(RAW_ENDPOINT); +const NAMESPACE = url.username; +const TOKEN = url.password; +const HOST = `${url.protocol}//${url.host}`; + +console.log(`Namespace: ${NAMESPACE}`); +console.log(`Host: ${HOST}\n`); + +// ── Helpers ──────────────────────────────────────────────────────── + +interface TimedResult { + result: T; + ms: number; +} + +async function timed(fn: () => Promise): Promise> { + const start = performance.now(); + const result = await fn(); + return { result, ms: performance.now() - start }; +} + +async function callAction( + actorName: string, + key: string[], + action: string, + args: unknown[] = [], +): Promise { + const params = new URLSearchParams({ + "rvt-method": "getOrCreate", + "rvt-key": key.join(","), + "rvt-token": TOKEN, + "rvt-namespace": NAMESPACE, + "rvt-runner": "default", + }); + const actionUrl = `${HOST}/gateway/${actorName}/action/${action}?${params}`; + const res = await fetch(actionUrl, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-rivet-encoding": "json", + }, + body: JSON.stringify({ args }), + }); + if (!res.ok) { + const text = await res.text(); + throw new Error(`${res.status}: ${text}`); + } + const body = await res.json(); + return body.output; +} + +function fmt(n: number): string { + return `${n.toFixed(1)}ms`; +} + +function perOp(total: number, count: number): string { + return `${(total / count).toFixed(3)}ms/op`; +} + +interface BenchEntry { + name: string; + ms: number; + detail?: string; +} + +function printTable(entries: BenchEntry[]): void { + const nameW = Math.max(50, ...entries.map((e) => e.name.length)); + const sep 
= "-".repeat(nameW + 35); + console.log(sep); + console.log( + `${"Benchmark".padEnd(nameW)} ${"Time".padStart(14)} ${"Detail".padEnd(20)}`, + ); + console.log(sep); + for (const e of entries) { + console.log( + `${e.name.padEnd(nameW)} ${fmt(e.ms).padStart(14)} ${(e.detail ?? "").padEnd(20)}`, + ); + } + console.log(sep); +} + +// ── Benchmarks ───────────────────────────────────────────────────── + +async function main(): Promise { + console.log(`SQLite Benchmark\n`); + + const entries: BenchEntry[] = []; + const uid = () => `${Date.now()}-${crypto.randomUUID().slice(0, 8)}`; + + // ── Warmup ── + console.log("Warming up (creating a counter actor)..."); + await callAction("testCounter", [`warmup-${uid()}`], "increment", [1]); + console.log("Warm.\n"); + + // ── 1. Cold SQLite actor creation ── + console.log("[1] Cold SQLite actor create (sqliteRawActor)..."); + try { + const key = [`bench-cold-${uid()}`]; + const { ms: coldMs } = await timed(() => + callAction("sqliteRawActor", key, "getTodos"), + ); + entries.push({ + name: "Cold actor create + migrate + getTodos", + ms: coldMs, + }); + } catch (err) { + console.log(` FAILED: ${err}`); + entries.push({ name: "Cold actor create + migrate + getTodos", ms: -1, detail: "FAILED" }); + } + + // ── 2. Sequential inserts ── + for (const n of [1, 10, 50]) { + console.log(`[2] Insert x${n}...`); + try { + const key = [`bench-ins-${n}-${uid()}`]; + // Create actor + await callAction("sqliteRawActor", key, "getTodos"); + + const { ms: insertMs } = await timed(async () => { + for (let i = 0; i < n; i++) { + await callAction("sqliteRawActor", key, "addTodo", [`todo-${i}`]); + } + }); + entries.push({ + name: `Insert x${n} (sequential actions)`, + ms: insertMs, + detail: perOp(insertMs, n), + }); + } catch (err) { + console.log(` FAILED: ${err}`); + entries.push({ name: `Insert x${n}`, ms: -1, detail: "FAILED" }); + } + } + + // ── 3. 
Read after writes ── + console.log("[3] Read after writes..."); + try { + const key = [`bench-read-${uid()}`]; + for (let i = 0; i < 50; i++) { + await callAction("sqliteRawActor", key, "addTodo", [`rd-${i}`]); + } + const { ms: readMs, result } = await timed(() => + callAction("sqliteRawActor", key, "getTodos"), + ); + const count = Array.isArray(result) ? result.length : "?"; + entries.push({ + name: `Read 50 todos (getTodos)`, + ms: readMs, + detail: `${count} rows`, + }); + } catch (err) { + console.log(` FAILED: ${err}`); + entries.push({ name: "Read 50 todos", ms: -1, detail: "FAILED" }); + } + + // ── 4. Toggle (update) ── + console.log("[4] Toggle (update) x20..."); + try { + const key = [`bench-toggle-${uid()}`]; + for (let i = 0; i < 20; i++) { + await callAction("sqliteRawActor", key, "addTodo", [`t-${i}`]); + } + const { ms: toggleMs } = await timed(async () => { + for (let i = 1; i <= 20; i++) { + await callAction("sqliteRawActor", key, "toggleTodo", [i]); + } + }); + entries.push({ + name: `Toggle x20 (update)`, + ms: toggleMs, + detail: perOp(toggleMs, 20), + }); + } catch (err) { + console.log(` FAILED: ${err}`); + entries.push({ name: "Toggle x20", ms: -1, detail: "FAILED" }); + } + + // ── 5. Delete ── + console.log("[5] Delete x20..."); + try { + const key = [`bench-del-${uid()}`]; + for (let i = 0; i < 20; i++) { + await callAction("sqliteRawActor", key, "addTodo", [`d-${i}`]); + } + const { ms: deleteMs } = await timed(async () => { + for (let i = 1; i <= 20; i++) { + await callAction("sqliteRawActor", key, "deleteTodo", [i]); + } + }); + entries.push({ + name: `Delete x20`, + ms: deleteMs, + detail: perOp(deleteMs, 20), + }); + } catch (err) { + console.log(` FAILED: ${err}`); + entries.push({ name: "Delete x20", ms: -1, detail: "FAILED" }); + } + + // ── 6. 
Complex 20-query load test ── + console.log("[6] testSqliteLoad: 20-query complex workload..."); + try { + const key = [`bench-load-${uid()}`]; + const { ms: loadMs, result } = await timed(() => + callAction("testSqliteLoad", key, "runLoadTest"), + ); + const queriesRun = + result && typeof result === "object" && "queriesRun" in result + ? (result as { queriesRun: number }).queriesRun + : "?"; + entries.push({ + name: `Complex load test (${queriesRun} queries)`, + ms: loadMs, + detail: `${queriesRun} queries`, + }); + } catch (err) { + console.log(` FAILED: ${err}`); + entries.push({ + name: "Complex load test", + ms: -1, + detail: "FAILED", + }); + } + + // ── 7. Concurrent actor creation ── + console.log("[7] Concurrent 5 actors..."); + try { + const { ms: concMs } = await timed(async () => { + await Promise.all( + Array.from({ length: 5 }, (_, i) => + callAction( + "sqliteRawActor", + [`bench-conc-${uid()}-${i}`], + "addTodo", + [`concurrent-${i}`], + ), + ), + ); + }); + entries.push({ + name: `Concurrent 5 actor create+insert`, + ms: concMs, + detail: perOp(concMs, 5), + }); + } catch (err) { + console.log(` FAILED: ${err}`); + entries.push({ + name: "Concurrent 5 actor create+insert", + ms: -1, + detail: "FAILED", + }); + } + + // ── 8. 
Rapid-fire on warm actor ── + console.log("[8] Rapid-fire x50 on warm actor..."); + try { + const key = [`bench-rapid-${uid()}`]; + await callAction("sqliteRawActor", key, "getTodos"); + + const n = 50; + const { ms: rapidMs } = await timed(async () => { + for (let i = 0; i < n; i++) { + await callAction("sqliteRawActor", key, "addTodo", [`r-${i}`]); + } + }); + entries.push({ + name: `Rapid-fire x${n} inserts (warm actor)`, + ms: rapidMs, + detail: perOp(rapidMs, n), + }); + } catch (err) { + console.log(` FAILED: ${err}`); + entries.push({ name: "Rapid-fire x50", ms: -1, detail: "FAILED" }); + } + + // ── Results ── + + console.log("\n"); + printTable(entries); + + const passed = entries.filter((e) => e.ms > 0); + const failed = entries.filter((e) => e.ms <= 0); + const totalMs = passed.reduce((sum, e) => sum + e.ms, 0); + console.log(`\nTotal: ${fmt(totalMs)}`); + console.log(`Passed: ${passed.length}, Failed: ${failed.length}`); +} + +main().catch((err) => { + console.error("Benchmark failed:", err); + process.exit(1); +}); diff --git a/examples/sandbox/scripts/bench-throughput.ts b/examples/sandbox/scripts/bench-throughput.ts new file mode 100644 index 0000000000..219ee7d14c --- /dev/null +++ b/examples/sandbox/scripts/bench-throughput.ts @@ -0,0 +1,111 @@ +#!/usr/bin/env -S npx tsx + +/** + * Throughput benchmark. Creates a fresh testThroughput actor (50-table migration) + * then calls increment in a loop, printing per-call latency. 
+ *
+ * Usage:
+ *   RIVET_NAMESPACE=... RIVET_TOKEN=... npx tsx scripts/bench-throughput.ts [count]
+ */
+
+const NAMESPACE = process.env.RIVET_NAMESPACE ?? "";
+const TOKEN = process.env.RIVET_TOKEN ?? "";
+const HOST = process.env.RIVET_HOST ?? "https://api.staging.rivet.dev";
+const COUNT = parseInt(process.argv[2] || "100", 10);
+
+async function callAction(
+	actorName: string,
+	key: string[],
+	action: string,
+	args: unknown[] = [],
+): Promise<unknown> {
+	const params = new URLSearchParams({
+		"rvt-method": "getOrCreate",
+		"rvt-key": key.join(","),
+		"rvt-token": TOKEN,
+		"rvt-namespace": NAMESPACE,
+		"rvt-runner": "default",
+	});
+	const actionUrl = `${HOST}/gateway/${actorName}/action/${action}?${params}`;
+	const res = await fetch(actionUrl, {
+		method: "POST",
+		headers: { "Content-Type": "application/json", "x-rivet-encoding": "json" },
+		body: JSON.stringify({ args }),
+	});
+	if (!res.ok) {
+		const text = await res.text();
+		throw new Error(`${res.status}: ${text}`);
+	}
+	return (await res.json()).output;
+}
+
+function fmt(n: number): string {
+	return `${n.toFixed(1)}ms`;
+}
+
+async function main(): Promise<void> {
+	const uid = `${Date.now()}-${crypto.randomUUID().slice(0, 8)}`;
+	const key = [`throughput-${uid}`];
+
+	console.log(`Throughput Benchmark`);
+	console.log(`Host: ${HOST}`);
+	console.log(`Namespace: ${NAMESPACE}`);
+	console.log(`Count: ${COUNT}`);
+	console.log();
+
+	// Cold start: create actor (triggers 50-table migration)
+	console.log("Creating actor (50-table migration)...");
+	const coldStart = performance.now();
+	const firstValue = await callAction("testThroughput", key, "increment");
+	const coldMs = performance.now() - coldStart;
+	console.log(`Cold start: ${fmt(coldMs)} (counter=${firstValue})\n`);
+
+	// Increment loop
+	console.log(`Running ${COUNT} increments...\n`);
+	const times: number[] = [];
+	const t0 = performance.now();
+
+	for (let i = 0; i < COUNT; i++) {
+		const start = performance.now();
+		const value = await callAction("testThroughput", key, "increment");
+		const ms = performance.now() - start;
+		times.push(ms);
+
+		if ((i + 1) % 10 === 0 || i === COUNT - 1) {
+			const elapsed = performance.now() - t0;
+			const rps = ((i + 1) / elapsed) * 1000;
+			process.stdout.write(
+				` [${String(i + 1).padStart(4)}/${COUNT}] ${fmt(ms).padStart(8)} avg=${fmt(elapsed / (i + 1)).padStart(8)} rps=${rps.toFixed(1).padStart(6)} counter=${value}\n`,
+			);
+		}
+	}
+
+	const totalMs = performance.now() - t0;
+	times.sort((a, b) => a - b);
+
+	const avg = times.reduce((a, b) => a + b, 0) / times.length;
+	const p50 = times[Math.floor(times.length * 0.5)];
+	const p95 = times[Math.floor(times.length * 0.95)];
+	const p99 = times[Math.floor(times.length * 0.99)];
+	const min = times[0];
+	const max = times[times.length - 1];
+	const rps = (COUNT / totalMs) * 1000;
+
+	console.log(`\n${"─".repeat(50)}`);
+	console.log(`Cold start: ${fmt(coldMs)}`);
+	console.log(`Total: ${fmt(totalMs)} (${COUNT} ops)`);
+	console.log(`Throughput: ${rps.toFixed(1)} rps`);
+	console.log(`${"─".repeat(50)}`);
+	console.log(`avg: ${fmt(avg)}`);
+	console.log(`p50: ${fmt(p50)}`);
+	console.log(`p95: ${fmt(p95)}`);
+	console.log(`p99: ${fmt(p99)}`);
+	console.log(`min: ${fmt(min)}`);
+	console.log(`max: ${fmt(max)}`);
+	console.log(`${"─".repeat(50)}`);
+}
+
+main().catch((err) => {
+	console.error("Benchmark failed:", err);
+	process.exit(1);
+});
diff --git a/examples/sandbox/scripts/bench-wake.ts b/examples/sandbox/scripts/bench-wake.ts
new file mode 100644
index 0000000000..a18b6d4e69
--- /dev/null
+++ b/examples/sandbox/scripts/bench-wake.ts
@@ -0,0 +1,143 @@
+#!/usr/bin/env -S npx tsx
+
+/**
+ * Wake latency benchmark for actors on Rivet Cloud.
+ *
+ * Measures:
+ * 1. Cold start: getOrCreate a fresh actor + call no-op action
+ * 2. 
Wake from sleep: put actor to sleep, then call no-op action again + * + * Usage: + * npx tsx scripts/bench-wake.ts + * + * Example: + * npx tsx scripts/bench-wake.ts \ + * "https://my-ns:pk_token@api.staging.rivet.dev" + */ + +const RAW_ENDPOINT = process.argv[2]; +if (!RAW_ENDPOINT) { + console.error("Usage: npx tsx scripts/bench-wake.ts "); + process.exit(1); +} + +const url = new URL(RAW_ENDPOINT); +const NAMESPACE = url.username; +const TOKEN = url.password; +const HOST = `${url.protocol}//${url.host}`; + +console.log(`Namespace: ${NAMESPACE}`); +console.log(`Host: ${HOST}\n`); + +async function callAction( + actorName: string, + key: string[], + action: string, + args: unknown[] = [], +): Promise<{ output: unknown; ms: number }> { + const params = new URLSearchParams({ + "rvt-method": "getOrCreate", + "rvt-key": key.join(","), + "rvt-token": TOKEN, + "rvt-namespace": NAMESPACE, + "rvt-runner": "default", + }); + const actionUrl = `${HOST}/gateway/${actorName}/action/${action}?${params}`; + const start = performance.now(); + const res = await fetch(actionUrl, { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-rivet-encoding": "json", + }, + body: JSON.stringify({ args }), + }); + const ms = performance.now() - start; + if (!res.ok) { + const text = await res.text(); + throw new Error(`${res.status}: ${text}`); + } + const body = await res.json(); + return { output: body.output, ms }; +} + +function fmt(n: number): string { + return `${n.toFixed(1)}ms`; +} + +async function main(): Promise { + const ITERATIONS = 5; + + console.log(`Wake Latency Benchmark (${ITERATIONS} iterations)\n`); + + // Warmup: make sure the envoy is connected and ready + console.log("Warming up..."); + const warmupKey = [`warmup-${Date.now()}`]; + await callAction("testWake", warmupKey, "noop"); + await callAction("testWake", warmupKey, "goToSleep"); + // Wait for sleep to take effect + await new Promise((r) => setTimeout(r, 2000)); + await callAction("testWake", 
warmupKey, "noop"); + console.log("Warm.\n"); + + const coldTimes: number[] = []; + const wakeTimes: number[] = []; + + for (let i = 0; i < ITERATIONS; i++) { + const uid = `${Date.now()}-${crypto.randomUUID().slice(0, 8)}`; + const key = [`bench-wake-${uid}`]; + + // 1. Cold start: fresh actor creation + no-op + const cold = await callAction("testWake", key, "noop"); + coldTimes.push(cold.ms); + + // 2. Put it to sleep + await callAction("testWake", key, "goToSleep"); + + // Wait for sleep to take effect + await new Promise((r) => setTimeout(r, 2000)); + + // 3. Wake: call no-op on sleeping actor + const wake = await callAction("testWake", key, "noop"); + wakeTimes.push(wake.ms); + + console.log( + ` [${i + 1}/${ITERATIONS}] cold=${fmt(cold.ms)} wake=${fmt(wake.ms)}`, + ); + } + + // Results + const avg = (arr: number[]) => arr.reduce((a, b) => a + b, 0) / arr.length; + const min = (arr: number[]) => Math.min(...arr); + const max = (arr: number[]) => Math.max(...arr); + const p50 = (arr: number[]) => { + const sorted = [...arr].sort((a, b) => a - b); + return sorted[Math.floor(sorted.length / 2)]; + }; + + console.log("\n"); + const sep = "-".repeat(60); + console.log(sep); + console.log( + `${"Metric".padEnd(30)} ${"Cold Start".padStart(12)} ${"Wake".padStart(12)}`, + ); + console.log(sep); + console.log( + `${"avg".padEnd(30)} ${fmt(avg(coldTimes)).padStart(12)} ${fmt(avg(wakeTimes)).padStart(12)}`, + ); + console.log( + `${"p50".padEnd(30)} ${fmt(p50(coldTimes)).padStart(12)} ${fmt(p50(wakeTimes)).padStart(12)}`, + ); + console.log( + `${"min".padEnd(30)} ${fmt(min(coldTimes)).padStart(12)} ${fmt(min(wakeTimes)).padStart(12)}`, + ); + console.log( + `${"max".padEnd(30)} ${fmt(max(coldTimes)).padStart(12)} ${fmt(max(wakeTimes)).padStart(12)}`, + ); + console.log(sep); +} + +main().catch((err) => { + console.error("Benchmark failed:", err); + process.exit(1); +}); diff --git a/examples/sandbox/scripts/bench.ts b/examples/sandbox/scripts/bench.ts new file mode 
100644 index 0000000000..3c6059498f --- /dev/null +++ b/examples/sandbox/scripts/bench.ts @@ -0,0 +1,339 @@ +#!/usr/bin/env -S npx tsx + +/** + * Unified benchmark for Rivet Cloud actors. + * + * Measures baseline RTT via no-op on a warm actor and subtracts it from + * all measurements to show server-side processing time. + * + * Usage: + * npx tsx scripts/bench.ts [--filter ] + * + * Examples: + * npx tsx scripts/bench.ts "https://ns:token@api.staging.rivet.dev" + * npx tsx scripts/bench.ts "https://ns:token@api.staging.rivet.dev" --filter wake + * npx tsx scripts/bench.ts "https://ns:token@api.staging.rivet.dev" --filter sqlite + */ + +// ── Args ────────────────────────────────────────────────────────── + +const args = process.argv.slice(2); +const RAW_ENDPOINT = args.find((a) => !a.startsWith("--")); +const filterIdx = args.indexOf("--filter"); +const FILTER = filterIdx >= 0 ? args[filterIdx + 1]?.toLowerCase() : undefined; + +if (!RAW_ENDPOINT) { + console.error("Usage: npx tsx scripts/bench.ts [--filter ]"); + process.exit(1); +} + +const url = new URL(RAW_ENDPOINT); +const NAMESPACE = url.username; +const TOKEN = url.password; +const HOST = `${url.protocol}//${url.host}`; + +// ── HTTP helpers ────────────────────────────────────────────────── + +async function callAction( + actorName: string, + key: string[], + action: string, + args: unknown[] = [], +): Promise { + const params = new URLSearchParams({ + "rvt-method": "getOrCreate", + "rvt-key": key.join(","), + "rvt-token": TOKEN, + "rvt-namespace": NAMESPACE, + "rvt-runner": "default", + }); + const actionUrl = `${HOST}/gateway/${actorName}/action/${action}?${params}`; + const res = await fetch(actionUrl, { + method: "POST", + headers: { "Content-Type": "application/json", "x-rivet-encoding": "json" }, + body: JSON.stringify({ args }), + }); + if (!res.ok) { + const text = await res.text(); + throw new Error(`${res.status}: ${text}`); + } + return (await res.json()).output; +} + +async function timed(fn: 
() => Promise): Promise<{ result: T; ms: number }> { + const start = performance.now(); + const result = await fn(); + return { result, ms: performance.now() - start }; +} + +// ── Baseline RTT ────────────────────────────────────────────────── + +async function measureBaseline(n = 20): Promise<{ baselineMs: number; baselineKey: string[] }> { + const baselineKey = [`baseline-${uid()}`]; + + // Warm up (discard first 3 for DNS/connection/cold start) + await callAction("counter", baselineKey, "noop"); + await callAction("counter", baselineKey, "noop"); + await callAction("counter", baselineKey, "noop"); + + const times: number[] = []; + for (let i = 0; i < n; i++) { + const start = performance.now(); + await callAction("counter", baselineKey, "noop"); + times.push(performance.now() - start); + } + times.sort((a, b) => a - b); + return { baselineMs: times[Math.floor(times.length / 2)], baselineKey }; +} + +// ── Result table ────────────────────────────────────────────────── + +interface BenchEntry { + group: string; + name: string; + e2eMs: number; + serverMs: number | null; + perOpMs: number | null; + failed?: boolean; + failReason?: string; +} + +function fmt(n: number): string { + return `${n.toFixed(1)}ms`; +} + +function printTable(entries: BenchEntry[], baselineMs: number): void { + const nameW = Math.max(40, ...entries.map((e) => e.name.length)); + const sep = "─".repeat(nameW + 46); + + console.log(`\n┌${sep}┐`); + console.log( + `│ ${"Benchmark".padEnd(nameW)} ${"E2E".padStart(10)} ${"Server".padStart(10)} ${"Per-Op".padStart(10)} ${"RTT".padStart(8)} │`, + ); + console.log(`├${sep}┤`); + + let currentGroup = ""; + for (const e of entries) { + if (e.group !== currentGroup) { + if (currentGroup) console.log(`├${sep}┤`); + console.log(`│ ${`── ${e.group} ──`.padEnd(nameW + 44)} │`); + currentGroup = e.group; + } + if (e.failed) { + console.log(`│ ${e.name.padEnd(nameW)} ${"FAILED".padStart(10)} ${"".padStart(10)} ${"".padStart(10)} ${"".padStart(8)} │`); + } 
else { + const serverStr = e.serverMs != null ? fmt(e.serverMs) : fmt(Math.max(0, e.e2eMs - baselineMs)); + const perOpStr = e.perOpMs != null ? fmt(e.perOpMs) : ""; + const rtt = e.serverMs != null ? fmt(e.e2eMs - e.serverMs) : fmt(baselineMs); + console.log( + `│ ${e.name.padEnd(nameW)} ${fmt(e.e2eMs).padStart(10)} ${serverStr.padStart(10)} ${perOpStr.padStart(10)} ${rtt.padStart(8)} │`, + ); + } + } + console.log(`└${sep}┘`); +} + +// ── Bench definitions ───────────────────────────────────────────── + +type BenchFn = () => Promise; + +function uid(): string { + return `${Date.now()}-${crypto.randomUUID().slice(0, 8)}`; +} + +function shouldRun(group: string, name: string): boolean { + if (!FILTER) return true; + return `${group} ${name}`.toLowerCase().includes(FILTER); +} + +function benchLatency(): BenchFn[] { + const group = "Latency"; + const benches: BenchFn[] = []; + + if (shouldRun(group, "HTTP ping (health endpoint)")) { + benches.push(async () => { + const healthUrl = `${HOST}/`; + // Warmup + await fetch(healthUrl); + await fetch(healthUrl); + const times: number[] = []; + for (let i = 0; i < 10; i++) { + const start = performance.now(); + await fetch(healthUrl); + times.push(performance.now() - start); + } + times.sort((a, b) => a - b); + const ms = times[Math.floor(times.length / 2)]; + return { group, name: "HTTP ping (health endpoint)", e2eMs: ms, serverMs: null, perOpMs: null }; + }); + } + + if (shouldRun(group, "Action ping (warm actor)")) { + benches.push(async () => { + const key = [`bench-ping-${uid()}`]; + await callAction("counter", key, "noop"); + const times: number[] = []; + for (let i = 0; i < 10; i++) { + const { ms } = await timed(() => callAction("counter", key, "noop")); + times.push(ms); + } + times.sort((a, b) => a - b); + const ms = times[Math.floor(times.length / 2)]; + return { group, name: "Action ping (warm actor)", e2eMs: ms, serverMs: null, perOpMs: null }; + }); + } + + if (shouldRun(group, "Cold start (fresh actor)")) { + 
benches.push(async () => { + const key = [`bench-cold-${uid()}`]; + const { ms } = await timed(() => callAction("counter", key, "noop")); + return { group, name: "Cold start (fresh actor)", e2eMs: ms, serverMs: null, perOpMs: null }; + }); + } + + if (shouldRun(group, "Wake from sleep")) { + benches.push(async () => { + const key = [`bench-wake-${uid()}`]; + await callAction("counter", key, "noop"); + await callAction("counter", key, "goToSleep"); + await new Promise((r) => setTimeout(r, 2000)); + const { ms } = await timed(() => callAction("counter", key, "noop")); + return { group, name: "Wake from sleep", e2eMs: ms, serverMs: null, perOpMs: null }; + }); + } + + return benches; +} + +function benchSqlite(): BenchFn[] { + const group = "SQLite"; + const benches: BenchFn[] = []; + + const sqliteBenches: { name: string; action: string; args: unknown[] }[] = [ + { name: "Insert single x10", action: "insertSingle", args: [10] }, + { name: "Insert single x100", action: "insertSingle", args: [100] }, + { name: "Insert single x1000", action: "insertSingle", args: [1000] }, + { name: "Insert single x10000", action: "insertSingle", args: [10000] }, + { name: "Insert TX x1", action: "insertTx", args: [1] }, + { name: "Insert TX x10", action: "insertTx", args: [10] }, + { name: "Insert TX x10000", action: "insertTx", args: [10000] }, + { name: "Insert batch x10", action: "insertBatch", args: [10] }, + { name: "Point read x100", action: "pointRead", args: [100] }, + { name: "Full scan (500 rows)", action: "fullScan", args: [500] }, + { name: "Range scan indexed", action: "rangeScanIndexed", args: [] }, + { name: "Range scan unindexed", action: "rangeScanUnindexed", args: [] }, + { name: "Bulk update", action: "bulkUpdate", args: [] }, + { name: "Bulk delete", action: "bulkDelete", args: [] }, + { name: "Hot row updates x100", action: "hotRowUpdates", args: [100] }, + { name: "Hot row updates x10000", action: "hotRowUpdates", args: [10000] }, + { name: "VACUUM after delete", 
action: "vacuumAfterDelete", args: [] }, + { name: "Large payload insert (32KB x20)", action: "largePayloadInsert", args: [20] }, + { name: "Mixed OLTP x1", action: "mixedOltp", args: [] }, + { name: "JSON extract query", action: "jsonInsertAndQuery", args: [] }, + { name: "JSON each aggregation", action: "jsonEachAgg", args: [] }, + { name: "Complex: aggregation", action: "complexAggregation", args: [] }, + { name: "Complex: subquery", action: "complexSubquery", args: [] }, + { name: "Complex: join (200 rows)", action: "complexJoin", args: [] }, + { name: "Complex: CTE + window functions", action: "complexCteWindow", args: [] }, + { name: "Migration (50 tables)", action: "migrationTables", args: [50] }, + ]; + + for (const b of sqliteBenches) { + if (!shouldRun(group, b.name)) continue; + benches.push(async () => { + const key = [`bench-sql-${uid()}`]; + try { + // Create actor first (cold start not measured) + await callAction("testSqliteBench", key, "noop"); + const { result, ms: e2eMs } = await timed(() => + callAction("testSqliteBench", key, b.action, b.args), + ); + const r = result as { ms?: number; ops?: number; [k: string]: unknown }; + const serverMs = r.ms ?? null; + const perOpMs = (serverMs != null && r.ops) ? 
serverMs / r.ops : null; + return { group, name: b.name, e2eMs, serverMs, perOpMs }; + } catch (err) { + return { group, name: b.name, e2eMs: -1, serverMs: null, perOpMs: null, failed: true, failReason: String(err).slice(0, 80) }; + } + }); + } + + // Concurrent actors + if (shouldRun(group, "Concurrent 5 actors")) { + benches.push(async () => { + const { ms: wallMs } = await timed(async () => { + await Promise.all( + Array.from({ length: 5 }, (_, i) => + callAction("testSqliteBench", [`bench-conc-${uid()}-${i}`], "insertSingle", [10]), + ), + ); + }); + return { group, name: "Concurrent 5 actors wall time", e2eMs: wallMs, serverMs: null, perOpMs: null }; + }); + benches.push(async () => { + const times: number[] = []; + await Promise.all( + Array.from({ length: 5 }, async (_, i) => { + const { ms } = await timed(() => + callAction("testSqliteBench", [`bench-conc2-${uid()}-${i}`], "insertSingle", [10]), + ); + times.push(ms); + }), + ); + const avg = times.reduce((a, b) => a + b, 0) / times.length; + return { group, name: "Concurrent 5 actors (per-actor)", e2eMs: avg, serverMs: null, perOpMs: null }; + }); + } + + return benches; +} + +// ── Main ────────────────────────────────────────────────────────── + +async function main(): Promise { + console.log(`Rivet Actor Benchmark`); + console.log(`Namespace: ${NAMESPACE}`); + console.log(`Host: ${HOST}`); + if (FILTER) console.log(`Filter: ${FILTER}`); + console.log(); + + // Warmup: create an actor so envoy is connected + console.log("Warming up envoy..."); + await callAction("counter", [`warmup-${uid()}`], "noop"); + console.log("Ready.\n"); + + // Measure baseline RTT via no-op on a warm actor + console.log("Measuring baseline RTT (no-op on warm actor)..."); + const { baselineMs } = await measureBaseline(20); + console.log(`Baseline RTT (median of 20): ${fmt(baselineMs)}\n`); + + // Collect all benches + const allBenches: BenchFn[] = [...benchLatency(), ...benchSqlite()]; + + if (allBenches.length === 0) { + 
console.log("No benchmarks matched the filter."); + return; + } + + console.log(`Running ${allBenches.length} benchmarks...\n`); + + const entries: BenchEntry[] = []; + for (let i = 0; i < allBenches.length; i++) { + const entry = await allBenches[i](); + entries.push(entry); + const status = entry.failed ? "FAILED" : fmt(entry.e2eMs); + process.stdout.write(` [${String(i + 1).padStart(2)}/${allBenches.length}] ${entry.name.padEnd(40)} ${status}\n`); + } + + printTable(entries, baselineMs); + + const passed = entries.filter((e) => !e.failed); + const failed = entries.filter((e) => e.failed); + console.log(`\nBaseline RTT: ${fmt(baselineMs)}`); + console.log(`Passed: ${passed.length}, Failed: ${failed.length}`); +} + +main().catch((err) => { + console.error("Benchmark failed:", err); + process.exit(1); +}); diff --git a/examples/sandbox/src/actors/counter/counter.ts b/examples/sandbox/src/actors/counter/counter.ts index 9fd224e818..0cd384f36f 100644 --- a/examples/sandbox/src/actors/counter/counter.ts +++ b/examples/sandbox/src/actors/counter/counter.ts @@ -19,5 +19,12 @@ export const counter = actor({ getCount: (c) => { return c.state.count; }, + noop: (_c) => { + return { ok: true }; + }, + goToSleep: (c) => { + c.sleep(); + return { ok: true }; + }, }, }); diff --git a/examples/sandbox/src/actors/testing/test-sqlite-bench.ts b/examples/sandbox/src/actors/testing/test-sqlite-bench.ts new file mode 100644 index 0000000000..d8e42df6e7 --- /dev/null +++ b/examples/sandbox/src/actors/testing/test-sqlite-bench.ts @@ -0,0 +1,337 @@ +import { actor } from "rivetkit"; +import { db } from "rivetkit/db"; + +export const testSqliteBench = actor({ + options: { + actionTimeout: 300_000, + }, + db: db({ + onMigrate: async (db) => { + await db.execute(`CREATE TABLE IF NOT EXISTS bench ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + key TEXT NOT NULL, + value TEXT NOT NULL, + num INTEGER NOT NULL DEFAULT 0, + payload BLOB, + created_at INTEGER NOT NULL DEFAULT 0 + )`); + await 
db.execute("CREATE INDEX IF NOT EXISTS idx_bench_key ON bench(key)"); + await db.execute("CREATE INDEX IF NOT EXISTS idx_bench_num ON bench(num)"); + + await db.execute(`CREATE TABLE IF NOT EXISTS bench_json ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + data TEXT NOT NULL DEFAULT '{}' + )`); + + await db.execute(`CREATE TABLE IF NOT EXISTS bench_secondary ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + bench_id INTEGER NOT NULL, + label TEXT NOT NULL, + score REAL NOT NULL DEFAULT 0, + FOREIGN KEY (bench_id) REFERENCES bench(id) + )`); + }, + }), + actions: { + noop: (_c) => ({ ok: true }), + + goToSleep: (c) => { + c.sleep(); + return { ok: true }; + }, + + insertSingle: async (c, n: number) => { + const t0 = performance.now(); + for (let i = 0; i < n; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + `k-${i}`, `v-${i}`, i, Date.now(), + ); + } + return { ms: performance.now() - t0, ops: n }; + }, + + insertTx: async (c, n: number) => { + const t0 = performance.now(); + await c.db.execute("BEGIN"); + for (let i = 0; i < n; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + `k-${i}`, `v-${i}`, i, Date.now(), + ); + } + await c.db.execute("COMMIT"); + return { ms: performance.now() - t0, ops: n }; + }, + + insertBatch: async (c, n: number) => { + const t0 = performance.now(); + const placeholders = Array.from({ length: n }, () => "(?, ?, ?, ?)").join(", "); + const args: unknown[] = []; + for (let i = 0; i < n; i++) { + args.push(`k-${i}`, `v-${i}`, i, Date.now()); + } + await c.db.execute(`INSERT INTO bench (key, value, num, created_at) VALUES ${placeholders}`, ...args); + return { ms: performance.now() - t0, ops: n }; + }, + + pointRead: async (c, n: number) => { + await c.db.execute("INSERT INTO bench (key, value, num, created_at) VALUES ('pr', 'pr', 0, 0)"); + const rows = await c.db.execute("SELECT id FROM bench WHERE key = 'pr' LIMIT 1"); + const id = (rows[0] 
as { id: number }).id; + const t0 = performance.now(); + for (let i = 0; i < n; i++) { + await c.db.execute("SELECT * FROM bench WHERE id = ?", id); + } + return { ms: performance.now() - t0, ops: n }; + }, + + fullScan: async (c, seedRows: number) => { + const t0Seed = performance.now(); + await c.db.execute("BEGIN"); + for (let i = 0; i < seedRows; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + `scan-${i}`, `val-${i}`, i, Date.now(), + ); + } + await c.db.execute("COMMIT"); + const seedMs = performance.now() - t0Seed; + const t0 = performance.now(); + const rows = await c.db.execute("SELECT * FROM bench"); + return { ms: performance.now() - t0, seedMs, rows: (rows as unknown[]).length }; + }, + + rangeScanIndexed: async (c) => { + const t0Seed = performance.now(); + await c.db.execute("BEGIN"); + for (let i = 0; i < 500; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + `rs-${i}`, `v-${i}`, i, Date.now(), + ); + } + await c.db.execute("COMMIT"); + const seedMs = performance.now() - t0Seed; + const t0 = performance.now(); + const rows = await c.db.execute("SELECT * FROM bench WHERE num BETWEEN 100 AND 300"); + return { ms: performance.now() - t0, seedMs, rows: (rows as unknown[]).length }; + }, + + rangeScanUnindexed: async (c) => { + const t0Seed = performance.now(); + await c.db.execute("BEGIN"); + for (let i = 0; i < 500; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + `ru-${i}`, `v-${i}`, i, Date.now(), + ); + } + await c.db.execute("COMMIT"); + const seedMs = performance.now() - t0Seed; + const t0 = performance.now(); + const rows = await c.db.execute("SELECT * FROM bench WHERE value BETWEEN 'v-100' AND 'v-300'"); + return { ms: performance.now() - t0, seedMs, rows: (rows as unknown[]).length }; + }, + + bulkUpdate: async (c) => { + const t0Seed = performance.now(); + await 
c.db.execute("BEGIN"); + for (let i = 0; i < 200; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + `bu-${i}`, `v-${i}`, i, Date.now(), + ); + } + await c.db.execute("COMMIT"); + const seedMs = performance.now() - t0Seed; + const t0 = performance.now(); + await c.db.execute("UPDATE bench SET value = 'updated', num = num + 1000 WHERE key LIKE 'bu-%'"); + return { ms: performance.now() - t0, seedMs, ops: 200 }; + }, + + bulkDelete: async (c) => { + const t0Seed = performance.now(); + await c.db.execute("BEGIN"); + for (let i = 0; i < 200; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + `bd-${i}`, `v-${i}`, i, Date.now(), + ); + } + await c.db.execute("COMMIT"); + const seedMs = performance.now() - t0Seed; + const t0 = performance.now(); + await c.db.execute("DELETE FROM bench WHERE key LIKE 'bd-%'"); + return { ms: performance.now() - t0, seedMs, ops: 200 }; + }, + + hotRowUpdates: async (c, n: number) => { + await c.db.execute("INSERT INTO bench (key, value, num, created_at) VALUES ('hot', 'v', 0, 0)"); + const rows = await c.db.execute("SELECT id FROM bench WHERE key = 'hot' LIMIT 1"); + const id = (rows[0] as { id: number }).id; + const t0 = performance.now(); + for (let i = 0; i < n; i++) { + await c.db.execute("UPDATE bench SET num = ? 
WHERE id = ?", i, id); + } + return { ms: performance.now() - t0, ops: n }; + }, + + vacuumAfterDelete: async (c) => { + const t0Seed = performance.now(); + await c.db.execute("BEGIN"); + for (let i = 0; i < 500; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + `vac-${i}`, `v-${i}`, i, Date.now(), + ); + } + await c.db.execute("COMMIT"); + await c.db.execute("DELETE FROM bench WHERE key LIKE 'vac-%'"); + const seedMs = performance.now() - t0Seed; + const t0 = performance.now(); + await c.db.execute("VACUUM"); + return { ms: performance.now() - t0, seedMs }; + }, + + largePayloadInsert: async (c, n: number) => { + const blob = "x".repeat(32 * 1024); + const t0 = performance.now(); + for (let i = 0; i < n; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, payload, created_at) VALUES (?, ?, ?, ?, ?)", + `lp-${i}`, `v-${i}`, i, blob, Date.now(), + ); + } + return { ms: performance.now() - t0, ops: n }; + }, + + mixedOltp: async (c) => { + const t0 = performance.now(); + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + "oltp", "initial", 0, Date.now(), + ); + const rows = await c.db.execute("SELECT * FROM bench WHERE key = 'oltp' LIMIT 1"); + const id = (rows[0] as { id: number }).id; + await c.db.execute("UPDATE bench SET value = 'updated', num = 1 WHERE id = ?", id); + const updated = await c.db.execute("SELECT * FROM bench WHERE id = ?", id); + return { ms: performance.now() - t0, ops: 4 }; + }, + + jsonInsertAndQuery: async (c) => { + const t0 = performance.now(); + for (let i = 0; i < 50; i++) { + await c.db.execute( + "INSERT INTO bench_json (data) VALUES (?)", + JSON.stringify({ name: `item-${i}`, tags: ["a", "b"], score: Math.random() * 100 }), + ); + } + const rows = await c.db.execute( + "SELECT id, json_extract(data, '$.name') as name, json_extract(data, '$.score') as score FROM bench_json ORDER BY json_extract(data, '$.score') DESC LIMIT 10" 
+ ); + return { ms: performance.now() - t0, ops: 51, rows: (rows as unknown[]).length }; + }, + + jsonEachAgg: async (c) => { + await c.db.execute( + "INSERT INTO bench_json (data) VALUES (?)", + JSON.stringify({ items: Array.from({ length: 100 }, (_, i) => ({ id: i, val: i * 10 })) }), + ); + const t0 = performance.now(); + const rows = await c.db.execute( + "SELECT SUM(json_extract(value, '$.val')) as total FROM bench_json, json_each(json_extract(data, '$.items')) LIMIT 1" + ); + return { ms: performance.now() - t0, total: (rows[0] as { total: number }).total }; + }, + + complexAggregation: async (c) => { + const t0Seed = performance.now(); + await c.db.execute("BEGIN"); + for (let i = 0; i < 200; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + `grp-${i % 10}`, `v-${i}`, i, Date.now(), + ); + } + await c.db.execute("COMMIT"); + const seedMs = performance.now() - t0Seed; + const t0 = performance.now(); + const rows = await c.db.execute( + "SELECT key, COUNT(*) as cnt, AVG(num) as avg_num, MIN(num) as min_num, MAX(num) as max_num FROM bench WHERE key LIKE 'grp-%' GROUP BY key ORDER BY cnt DESC" + ); + return { ms: performance.now() - t0, seedMs, groups: (rows as unknown[]).length }; + }, + + complexSubquery: async (c) => { + const t0Seed = performance.now(); + await c.db.execute("BEGIN"); + for (let i = 0; i < 200; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + `sq-${i}`, `v-${i}`, i, Date.now(), + ); + } + await c.db.execute("COMMIT"); + const seedMs = performance.now() - t0Seed; + const t0 = performance.now(); + const rows = await c.db.execute( + "SELECT * FROM bench WHERE num > (SELECT AVG(num) FROM bench) ORDER BY num DESC LIMIT 50" + ); + return { ms: performance.now() - t0, seedMs, rows: (rows as unknown[]).length }; + }, + + complexJoin: async (c) => { + const t0Seed = performance.now(); + await c.db.execute("BEGIN"); + for (let i = 0; i < 200; 
i++) { + await c.db.execute("INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", `j-${i}`, `v-${i}`, i, Date.now()); + await c.db.execute("INSERT INTO bench_secondary (bench_id, label, score) VALUES (?, ?, ?)", i + 1, `label-${i}`, Math.random() * 100); + } + await c.db.execute("COMMIT"); + const seedMs = performance.now() - t0Seed; + const t0 = performance.now(); + const rows = await c.db.execute( + "SELECT b.key, b.num, s.label, s.score FROM bench b INNER JOIN bench_secondary s ON s.bench_id = b.id WHERE b.key LIKE 'j-%' ORDER BY s.score DESC LIMIT 200" + ); + return { ms: performance.now() - t0, seedMs, rows: (rows as unknown[]).length }; + }, + + complexCteWindow: async (c) => { + const t0Seed = performance.now(); + await c.db.execute("BEGIN"); + for (let i = 0; i < 200; i++) { + await c.db.execute( + "INSERT INTO bench (key, value, num, created_at) VALUES (?, ?, ?, ?)", + `cte-${i % 10}`, `v-${i}`, i, Date.now(), + ); + } + await c.db.execute("COMMIT"); + const seedMs = performance.now() - t0Seed; + const t0 = performance.now(); + const rows = await c.db.execute(` + WITH ranked AS ( + SELECT key, num, ROW_NUMBER() OVER (PARTITION BY key ORDER BY num DESC) as rn, + AVG(num) OVER (PARTITION BY key) as avg_num + FROM bench + WHERE key LIKE 'cte-%' + ) + SELECT * FROM ranked WHERE rn <= 3 ORDER BY key, rn + `); + return { ms: performance.now() - t0, seedMs, rows: (rows as unknown[]).length }; + }, + + migrationTables: async (c, n: number) => { + const t0 = performance.now(); + await c.db.execute("BEGIN"); + for (let i = 0; i < n; i++) { + await c.db.execute(`CREATE TABLE IF NOT EXISTS mig_${i} ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + data TEXT NOT NULL DEFAULT '' + )`); + } + await c.db.execute("COMMIT"); + return { ms: performance.now() - t0, ops: n }; + }, + }, +}); diff --git a/examples/sandbox/src/actors/testing/test-throughput.ts b/examples/sandbox/src/actors/testing/test-throughput.ts new file mode 100644 index 0000000000..baf6e335ca --- 
/dev/null +++ b/examples/sandbox/src/actors/testing/test-throughput.ts @@ -0,0 +1,32 @@ +import { actor } from "rivetkit"; +import { db } from "rivetkit/db"; + +export const testThroughput = actor({ + options: { + actionTimeout: 300_000, + }, + db: db({ + onMigrate: async (database) => { + // await database.execute("BEGIN"); + for (let i = 0; i < 50; i++) { + await database.execute(`CREATE TABLE IF NOT EXISTS tbl_${i} ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + data TEXT NOT NULL DEFAULT '' + )`); + } + await database.execute(`CREATE TABLE IF NOT EXISTS counter ( + id INTEGER PRIMARY KEY CHECK (id = 1), + value INTEGER NOT NULL DEFAULT 0 + )`); + await database.execute("INSERT OR IGNORE INTO counter (id, value) VALUES (1, 0)"); + // await database.execute("COMMIT"); + }, + }), + actions: { + increment: async (c) => { + await c.db.execute("UPDATE counter SET value = value + 1 WHERE id = 1"); + const rows = await c.db.execute("SELECT value FROM counter WHERE id = 1"); + return (rows[0] as { value: number }).value; + }, + }, +}); diff --git a/examples/sandbox/src/actors/testing/test-wake.ts b/examples/sandbox/src/actors/testing/test-wake.ts new file mode 100644 index 0000000000..068f131cf9 --- /dev/null +++ b/examples/sandbox/src/actors/testing/test-wake.ts @@ -0,0 +1,14 @@ +import { actor } from "rivetkit"; + +export const testWake = actor({ + state: {}, + actions: { + noop: (_c) => { + return { ok: true }; + }, + goToSleep: (c) => { + c.sleep(); + return { ok: true }; + }, + }, +}); diff --git a/examples/sandbox/src/bench-endpoint.ts b/examples/sandbox/src/bench-endpoint.ts new file mode 100644 index 0000000000..1de5b27435 --- /dev/null +++ b/examples/sandbox/src/bench-endpoint.ts @@ -0,0 +1,197 @@ +/** + * Server-side benchmark runner. Executes all benchmarks from within the + * Cloud Run instance so network hops stay inside the datacenter. 
+ */ + +import type { Registry } from "rivetkit"; + +interface BenchEntry { + group: string; + name: string; + e2eMs: number; + serverMs: number | null; + perOpMs: number | null; + failed?: boolean; + failReason?: string; +} + +function uid(): string { + return `${Date.now()}-${crypto.randomUUID().slice(0, 8)}`; +} + +export async function runBenchmarks( + registry: Registry, + filter?: string | null, +): Promise<{ baselineMs: number; entries: BenchEntry[] }> { + const config = registry.parseConfig(); + const endpoint = config.endpoint; + const namespace = config.namespace; + const token = config.token; + + if (!endpoint || !namespace) { + throw new Error("Registry has no endpoint/namespace configured"); + } + + async function callAction( + actorName: string, + key: string[], + action: string, + args: unknown[] = [], + ): Promise { + const params = new URLSearchParams({ + "rvt-method": "getOrCreate", + "rvt-key": key.join(","), + "rvt-namespace": namespace, + "rvt-runner": "default", + }); + if (token) params.set("rvt-token", token); + const actionUrl = `${endpoint}/gateway/${actorName}/action/${action}?${params}`; + const res = await fetch(actionUrl, { + method: "POST", + headers: { "Content-Type": "application/json", "x-rivet-encoding": "json" }, + body: JSON.stringify({ args }), + }); + if (!res.ok) { + const text = await res.text(); + throw new Error(`${res.status}: ${text}`); + } + return (await res.json()).output; + } + + async function timed(fn: () => Promise): Promise<{ result: T; ms: number }> { + const start = performance.now(); + const result = await fn(); + return { result, ms: performance.now() - start }; + } + + function shouldRun(group: string, name: string): boolean { + if (!filter) return true; + return `${group} ${name}`.toLowerCase().includes(filter.toLowerCase()); + } + + // Warmup + await callAction("counter", [`warmup-${uid()}`], "noop"); + + // Baseline: no-op on warm actor + const baselineKey = [`baseline-${uid()}`]; + await 
callAction("counter", baselineKey, "noop"); + await callAction("counter", baselineKey, "noop"); + await callAction("counter", baselineKey, "noop"); + + const baselineTimes: number[] = []; + for (let i = 0; i < 20; i++) { + const start = performance.now(); + await callAction("counter", baselineKey, "noop"); + baselineTimes.push(performance.now() - start); + } + baselineTimes.sort((a, b) => a - b); + const baselineMs = baselineTimes[Math.floor(baselineTimes.length / 2)]; + + const entries: BenchEntry[] = []; + + // ── Latency ─────────────────────────────────────────────────── + + const latencyGroup = "Latency"; + + if (shouldRun(latencyGroup, "Action ping (warm actor)")) { + const key = [`bench-ping-${uid()}`]; + await callAction("counter", key, "noop"); + const times: number[] = []; + for (let i = 0; i < 10; i++) { + const { ms } = await timed(() => callAction("counter", key, "noop")); + times.push(ms); + } + times.sort((a, b) => a - b); + entries.push({ group: latencyGroup, name: "Action ping (warm actor)", e2eMs: times[Math.floor(times.length / 2)], serverMs: null, perOpMs: null }); + } + + if (shouldRun(latencyGroup, "Cold start (fresh actor)")) { + const key = [`bench-cold-${uid()}`]; + const { ms } = await timed(() => callAction("counter", key, "noop")); + entries.push({ group: latencyGroup, name: "Cold start (fresh actor)", e2eMs: ms, serverMs: null, perOpMs: null }); + } + + if (shouldRun(latencyGroup, "Wake from sleep")) { + const key = [`bench-wake-${uid()}`]; + await callAction("counter", key, "noop"); + await callAction("counter", key, "goToSleep"); + await new Promise((r) => setTimeout(r, 2000)); + const { ms } = await timed(() => callAction("counter", key, "noop")); + entries.push({ group: latencyGroup, name: "Wake from sleep", e2eMs: ms, serverMs: null, perOpMs: null }); + } + + // ── SQLite ──────────────────────────────────────────────────── + + const sqliteGroup = "SQLite"; + const sqliteBenches: { name: string; action: string; args: unknown[] }[] 
= [ + { name: "Insert single x10", action: "insertSingle", args: [10] }, + { name: "Insert single x100", action: "insertSingle", args: [100] }, + { name: "Insert single x1000", action: "insertSingle", args: [1000] }, + { name: "Insert single x10000", action: "insertSingle", args: [10000] }, + { name: "Insert TX x1", action: "insertTx", args: [1] }, + { name: "Insert TX x10", action: "insertTx", args: [10] }, + { name: "Insert TX x10000", action: "insertTx", args: [10000] }, + { name: "Insert batch x10", action: "insertBatch", args: [10] }, + { name: "Point read x100", action: "pointRead", args: [100] }, + { name: "Full scan (500 rows)", action: "fullScan", args: [500] }, + { name: "Range scan indexed", action: "rangeScanIndexed", args: [] }, + { name: "Range scan unindexed", action: "rangeScanUnindexed", args: [] }, + { name: "Bulk update", action: "bulkUpdate", args: [] }, + { name: "Bulk delete", action: "bulkDelete", args: [] }, + { name: "Hot row updates x100", action: "hotRowUpdates", args: [100] }, + { name: "Hot row updates x10000", action: "hotRowUpdates", args: [10000] }, + { name: "VACUUM after delete", action: "vacuumAfterDelete", args: [] }, + { name: "Large payload insert (32KB x20)", action: "largePayloadInsert", args: [20] }, + { name: "Mixed OLTP x1", action: "mixedOltp", args: [] }, + { name: "JSON extract query", action: "jsonInsertAndQuery", args: [] }, + { name: "JSON each aggregation", action: "jsonEachAgg", args: [] }, + { name: "Complex: aggregation", action: "complexAggregation", args: [] }, + { name: "Complex: subquery", action: "complexSubquery", args: [] }, + { name: "Complex: join (200 rows)", action: "complexJoin", args: [] }, + { name: "Complex: CTE + window functions", action: "complexCteWindow", args: [] }, + { name: "Migration (50 tables)", action: "migrationTables", args: [50] }, + ]; + + for (const b of sqliteBenches) { + if (!shouldRun(sqliteGroup, b.name)) continue; + const key = [`bench-sql-${uid()}`]; + try { + await 
callAction("testSqliteBench", key, "noop"); + const { result, ms: e2eMs } = await timed(() => + callAction("testSqliteBench", key, b.action, b.args), + ); + const r = result as { ms?: number; ops?: number; [k: string]: unknown }; + const serverMs = r.ms ?? null; + const perOpMs = (serverMs != null && r.ops) ? serverMs / r.ops : null; + entries.push({ group: sqliteGroup, name: b.name, e2eMs, serverMs, perOpMs }); + } catch (err) { + entries.push({ group: sqliteGroup, name: b.name, e2eMs: -1, serverMs: null, perOpMs: null, failed: true, failReason: String(err).slice(0, 120) }); + } + } + + // Concurrent actors + if (shouldRun(sqliteGroup, "Concurrent 5 actors")) { + const { ms: wallMs } = await timed(async () => { + await Promise.all( + Array.from({ length: 5 }, (_, i) => + callAction("testSqliteBench", [`bench-conc-${uid()}-${i}`], "insertSingle", [10]), + ), + ); + }); + entries.push({ group: sqliteGroup, name: "Concurrent 5 actors wall time", e2eMs: wallMs, serverMs: null, perOpMs: null }); + + const times: number[] = []; + await Promise.all( + Array.from({ length: 5 }, async (_, i) => { + const { ms } = await timed(() => + callAction("testSqliteBench", [`bench-conc2-${uid()}-${i}`], "insertSingle", [10]), + ); + times.push(ms); + }), + ); + const avg = times.reduce((a, b) => a + b, 0) / times.length; + entries.push({ group: sqliteGroup, name: "Concurrent 5 actors (per-actor)", e2eMs: avg, serverMs: null, perOpMs: null }); + } + + return { baselineMs, entries }; +} diff --git a/examples/sandbox/src/index.ts b/examples/sandbox/src/index.ts index b0240f1785..f370d65720 100644 --- a/examples/sandbox/src/index.ts +++ b/examples/sandbox/src/index.ts @@ -115,6 +115,9 @@ import { inlineClientActor } from "./actors/testing/inline-client.ts"; import { testCounter } from "./actors/testing/test-counter.ts"; import { testCounterSqlite } from "./actors/testing/test-counter-sqlite.ts"; import { testSqliteLoad } from "./actors/testing/test-sqlite-load.ts"; +import { testWake } 
from "./actors/testing/test-wake.ts"; +import { testSqliteBench } from "./actors/testing/test-sqlite-bench.ts"; +import { testThroughput } from "./actors/testing/test-throughput.ts"; // AI import { aiAgent } from "./actors/ai/ai-agent.ts"; @@ -210,6 +213,9 @@ export const registry = setup({ testCounter, testCounterSqlite, testSqliteLoad, + testWake, + testSqliteBench, + testThroughput, // AI aiAgent, }, diff --git a/examples/sandbox/src/server.ts b/examples/sandbox/src/server.ts index ce03591313..ae8c52ab97 100644 --- a/examples/sandbox/src/server.ts +++ b/examples/sandbox/src/server.ts @@ -1,3 +1,15 @@ +import { Hono } from "hono"; import { registry } from "./index.ts"; +import { runBenchmarks } from "./bench-endpoint.ts"; -export default registry.serve(); +const app = new Hono(); + +app.get("/api/bench", async (c) => { + const filter = c.req.query("filter"); + const results = await runBenchmarks(registry, filter); + return c.json(results); +}); + +app.all("/api/rivet/*", (c) => registry.handler(c.req.raw)); + +export default app; diff --git a/examples/sandbox/vite.config.ts b/examples/sandbox/vite.config.ts index d6411dd196..87d9c9f5bd 100644 --- a/examples/sandbox/vite.config.ts +++ b/examples/sandbox/vite.config.ts @@ -19,5 +19,7 @@ export default defineConfig({ plugins: [react(), sqlRawPlugin(), ...srvx({ entry: "src/server.ts" })], ssr: { noExternal: true, + // Native addon can't be bundled by vite - must remain a runtime require(). 
+ external: ["@rivetkit/rivetkit-native", "@rivetkit/rivetkit-native/wrapper"], }, }); diff --git a/package.json b/package.json index 4329017e75..5f7eebc9ea 100644 --- a/package.json +++ b/package.json @@ -1,18 +1,21 @@ { "name": "@rivetkit/engine-workspace", "private": true, - "packageManager": "pnpm@10.13.1", + "packageManager": "pnpm@10.15.0", "engines": { "node": ">=20" }, "scripts": { + "dev": "pnpm --filter @rivet-gg/cloud-api dev", "start": "npx turbo watch build", "build": "npx turbo build", "test": "npx turbo test", "test:watch": "npx turbo watch test", "check-types": "npx turbo check-types", + "sdk:generate": "npx turbo sdk:generate", "lint": "pnpm biome check .", - "fmt": "pnpm biome check --write --diagnostic-level=error ." + "fmt": "pnpm biome check --write --diagnostic-level=error .", + "postinstall": "lefthook install" }, "devDependencies": { "@bare-ts/tools": "0.15.0", @@ -47,7 +50,9 @@ "overrides": { "react": "19.1.0", "react-dom": "19.1.0", - "@rivet-gg/cloud": "https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@413534c", + "rivetkit": "workspace:*", + "@rivetkit/engine-api-full": "https://pkg.pr.new/rivet-dev/engine-ee/@rivetkit/engine-api-full@011e389", + "@rivet-gg/cloud": "https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@715f221", "@codemirror/state": "6.5.2", "@codemirror/view": "6.38.2", "@codemirror/autocomplete": "6.18.7", @@ -55,4 +60,4 @@ "@codemirror/lint": "6.8.5" } } -} +} \ No newline at end of file diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index be21a76d89..0cb4feadde 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -10,14 +10,14 @@ overrides: '@rivetkit/next-js': workspace:* '@rivetkit/db': workspace:* '@rivetkit/sqlite-wasm': workspace:* - '@rivetkit/engine-api-full': workspace:* + '@rivetkit/engine-api-full': https://pkg.pr.new/rivet-dev/engine-ee/@rivetkit/engine-api-full@011e389 '@rivetkit/rivetkit-native': workspace:* '@types/react': ^19 '@types/react-dom': ^19 '@clerk/shared': 3.27.1 react: 19.1.0 react-dom: 19.1.0 - 
'@rivet-gg/cloud': https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@413534c + '@rivet-gg/cloud': https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@715f221 '@codemirror/state': 6.5.2 '@codemirror/view': 6.38.2 '@codemirror/autocomplete': 6.18.7 @@ -313,7 +313,7 @@ importers: version: 0.0.260331072558 '@rivet-dev/agent-os-pi': specifier: ^0.1.1 - version: 0.1.1(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76))(ws@8.19.0)(zod@3.25.76) + version: 0.1.1(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@4.1.13))(pyodide@0.28.3)(ws@8.19.0)(zod@4.1.13) rivetkit: specifier: workspace:* version: link:../../rivetkit-typescript/packages/rivetkit @@ -438,11 +438,11 @@ importers: specifier: ^4.7.0 version: 4.7.0(monaco-editor@0.55.1)(react-dom@19.1.0(react@19.1.0))(react@19.1.0) '@rivet-gg/cloud': - specifier: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@413534c - version: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@413534c + specifier: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@715f221 + version: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@715f221 '@rivetkit/engine-api-full': - specifier: workspace:* - version: link:../../engine/sdks/typescript/api-full + specifier: https://pkg.pr.new/rivet-dev/engine-ee/@rivetkit/engine-api-full@011e389 + version: https://pkg.pr.new/rivet-dev/engine-ee/@rivetkit/engine-api-full@011e389 dotenv: specifier: ^17.2.2 version: 17.2.3 @@ -3532,14 +3532,14 @@ importers: specifier: ^1.2.3 version: 1.2.3(@types/react-dom@19.2.3(@types/react@19.2.13))(@types/react@19.2.13)(react-dom@19.1.0(react@19.1.0))(react@19.1.0) '@rivet-gg/cloud': - specifier: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@413534c - version: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@413534c + specifier: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@715f221 + version: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@715f221 '@rivet-gg/icons': specifier: workspace:* version: link:packages/icons '@rivetkit/engine-api-full': 
- specifier: workspace:* - version: link:../engine/sdks/typescript/api-full + specifier: https://pkg.pr.new/rivet-dev/engine-ee/@rivetkit/engine-api-full@011e389 + version: https://pkg.pr.new/rivet-dev/engine-ee/@rivetkit/engine-api-full@011e389 '@rivetkit/shared-data': specifier: workspace:* version: link:packages/shared-data @@ -4777,8 +4777,8 @@ importers: specifier: 25.5.3 version: 25.5.3 '@rivet-gg/cloud': - specifier: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@413534c - version: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@413534c + specifier: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@715f221 + version: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@715f221 '@rivet-gg/components': specifier: workspace:* version: link:../frontend/packages/components @@ -9584,14 +9584,18 @@ packages: '@rivet-gg/api@25.5.3': resolution: {integrity: sha512-pj8xYQ+I/aQDbThmicPxvR+TWAzGoLSE53mbJi4QZHF8VH2oMvU7CMWqy7OTFH30DIRyVzsnHHRJZKGwtmQL3g==} - '@rivet-gg/cloud@https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@413534c': - resolution: {tarball: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@413534c} + '@rivet-gg/cloud@https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@715f221': + resolution: {tarball: https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@715f221} version: 0.0.0 '@rivetkit/bare-ts@0.6.2': resolution: {integrity: sha512-3qndQUQXLdwafMEqfhz24hUtDPcsf1Bu3q52Kb8MqeH8JUh3h6R4HYW3ZJXiQsLcyYyFM68PuIwlLRlg1xDEpg==} engines: {node: ^14.18.0 || >=16.0.0} + '@rivetkit/engine-api-full@https://pkg.pr.new/rivet-dev/engine-ee/@rivetkit/engine-api-full@011e389': + resolution: {tarball: https://pkg.pr.new/rivet-dev/engine-ee/@rivetkit/engine-api-full@011e389} + version: 2.0.33 + '@rivetkit/fast-json-patch@3.1.2': resolution: {integrity: sha512-CtA50xgsSSzICQduF/NDShPRzvucnNvsW/lQO0WgMTT1XAj9Lfae4pm7r3llFwilgG+9iq76Hv1LUqNy72v6yw==} @@ -19573,10 +19577,6 @@ snapshots: '@adraffy/ens-normalize@1.11.1': {} - '@agentclientprotocol/sdk@0.16.1(zod@3.25.76)': 
- dependencies: - zod: 3.25.76 - '@agentclientprotocol/sdk@0.16.1(zod@4.1.13)': dependencies: zod: 4.1.13 @@ -19718,12 +19718,6 @@ snapshots: package-manager-detector: 1.6.0 tinyexec: 1.0.2 - '@anthropic-ai/sdk@0.73.0(zod@3.25.76)': - dependencies: - json-schema-to-ts: 3.1.1 - optionalDependencies: - zod: 3.25.76 - '@anthropic-ai/sdk@0.73.0(zod@4.1.13)': dependencies: json-schema-to-ts: 3.1.1 @@ -22649,19 +22643,6 @@ snapshots: '@fortawesome/fontawesome-svg-core': 7.1.0 react: 19.1.0 - '@google/genai@1.48.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76))': - dependencies: - google-auth-library: 10.6.2 - p-retry: 4.6.2 - protobufjs: 7.5.4 - ws: 8.19.0 - optionalDependencies: - '@modelcontextprotocol/sdk': 1.25.3(hono@4.11.9)(zod@3.25.76) - transitivePeerDependencies: - - bufferutil - - supports-color - - utf-8-validate - '@google/genai@1.48.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@4.1.13))': dependencies: google-auth-library: 10.6.2 @@ -23329,18 +23310,6 @@ snapshots: std-env: 3.10.0 yoctocolors: 2.1.2 - '@mariozechner/pi-agent-core@0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76))(ws@8.19.0)(zod@3.25.76)': - dependencies: - '@mariozechner/pi-ai': 0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76))(ws@8.19.0)(zod@3.25.76) - transitivePeerDependencies: - - '@modelcontextprotocol/sdk' - - aws-crt - - bufferutil - - supports-color - - utf-8-validate - - ws - - zod - '@mariozechner/pi-agent-core@0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@4.1.13))(ws@8.19.0)(zod@4.1.13)': dependencies: '@mariozechner/pi-ai': 0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@4.1.13))(ws@8.19.0)(zod@4.1.13) @@ -23353,30 +23322,6 @@ snapshots: - ws - zod - '@mariozechner/pi-ai@0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76))(ws@8.19.0)(zod@3.25.76)': - dependencies: - '@anthropic-ai/sdk': 0.73.0(zod@3.25.76) - '@aws-sdk/client-bedrock-runtime': 3.1024.0 - '@google/genai': 
1.48.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76)) - '@mistralai/mistralai': 1.14.1 - '@sinclair/typebox': 0.34.41 - ajv: 8.17.1 - ajv-formats: 3.0.1(ajv@8.17.1) - chalk: 5.6.2 - openai: 6.26.0(ws@8.19.0)(zod@3.25.76) - partial-json: 0.1.7 - proxy-agent: 6.5.0 - undici: 7.24.7 - zod-to-json-schema: 3.25.1(zod@3.25.76) - transitivePeerDependencies: - - '@modelcontextprotocol/sdk' - - aws-crt - - bufferutil - - supports-color - - utf-8-validate - - ws - - zod - '@mariozechner/pi-ai@0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@4.1.13))(ws@8.19.0)(zod@4.1.13)': dependencies: '@anthropic-ai/sdk': 0.73.0(zod@4.1.13) @@ -23401,38 +23346,6 @@ snapshots: - ws - zod - '@mariozechner/pi-coding-agent@0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76))(ws@8.19.0)(zod@3.25.76)': - dependencies: - '@mariozechner/jiti': 2.6.5 - '@mariozechner/pi-agent-core': 0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76))(ws@8.19.0)(zod@3.25.76) - '@mariozechner/pi-ai': 0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76))(ws@8.19.0)(zod@3.25.76) - '@mariozechner/pi-tui': 0.60.0 - '@silvia-odwyer/photon-node': 0.3.4 - chalk: 5.6.2 - cli-highlight: 2.1.11 - diff: 8.0.3 - extract-zip: 2.0.1 - file-type: 21.3.4 - glob: 13.0.6 - hosted-git-info: 9.0.2 - ignore: 7.0.5 - marked: 15.0.12 - minimatch: 10.2.5 - proper-lockfile: 4.1.2 - strip-ansi: 7.1.2 - undici: 7.24.7 - yaml: 2.8.2 - optionalDependencies: - '@mariozechner/clipboard': 0.3.2 - transitivePeerDependencies: - - '@modelcontextprotocol/sdk' - - aws-crt - - bufferutil - - supports-color - - utf-8-validate - - ws - - zod - '@mariozechner/pi-coding-agent@0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@4.1.13))(ws@8.19.0)(zod@4.1.13)': dependencies: '@mariozechner/jiti': 2.6.5 @@ -25462,22 +25375,6 @@ snapshots: '@rivet-dev/agent-os-gzip@0.0.260331072558': {} - 
'@rivet-dev/agent-os-pi@0.1.1(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76))(ws@8.19.0)(zod@3.25.76)': - dependencies: - '@agentclientprotocol/sdk': 0.16.1(zod@3.25.76) - '@mariozechner/pi-ai': 0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76))(ws@8.19.0)(zod@3.25.76) - '@mariozechner/pi-coding-agent': 0.60.0(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@3.25.76))(ws@8.19.0)(zod@3.25.76) - '@rivet-dev/agent-os-core': 0.1.1(pyodide@0.28.3) - transitivePeerDependencies: - - '@modelcontextprotocol/sdk' - - aws-crt - - bufferutil - - pyodide - - supports-color - - utf-8-validate - - ws - - zod - '@rivet-dev/agent-os-pi@0.1.1(@modelcontextprotocol/sdk@1.25.3(hono@4.11.9)(zod@4.1.13))(pyodide@0.28.3)(ws@8.19.0)(zod@4.1.13)': dependencies: '@agentclientprotocol/sdk': 0.16.1(zod@4.1.13) @@ -25519,7 +25416,7 @@ snapshots: transitivePeerDependencies: - encoding - '@rivet-gg/cloud@https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@413534c': + '@rivet-gg/cloud@https://pkg.pr.new/rivet-dev/cloud/@rivet-gg/cloud@715f221': dependencies: cross-fetch: 4.1.0 form-data: 4.0.5 @@ -25534,6 +25431,17 @@ snapshots: '@rivetkit/bare-ts@0.6.2': {} + '@rivetkit/engine-api-full@https://pkg.pr.new/rivet-dev/engine-ee/@rivetkit/engine-api-full@011e389': + dependencies: + form-data: 4.0.5 + js-base64: 3.7.8 + node-fetch: 2.7.0 + qs: 6.14.1 + readable-stream: 4.7.0 + url-join: 5.0.0 + transitivePeerDependencies: + - encoding + '@rivetkit/fast-json-patch@3.1.2': {} '@rivetkit/on-change@6.0.2-rc.1': {} @@ -33988,11 +33896,6 @@ snapshots: transitivePeerDependencies: - encoding - openai@6.26.0(ws@8.19.0)(zod@3.25.76): - optionalDependencies: - ws: 8.19.0 - zod: 3.25.76 - openai@6.26.0(ws@8.19.0)(zod@4.1.13): optionalDependencies: ws: 8.19.0 diff --git a/rivetkit-typescript/packages/rivetkit-native/index.d.ts b/rivetkit-typescript/packages/rivetkit-native/index.d.ts index 2153132bcc..8dd3ad8de6 100644 --- a/rivetkit-typescript/packages/rivetkit-native/index.d.ts +++ 
b/rivetkit-typescript/packages/rivetkit-native/index.d.ts @@ -27,6 +27,7 @@ export interface JsEnvoyConfig { poolName: string version: number metadata?: any + notGlobal: boolean /** * Log level for the Rust tracing subscriber (e.g. "trace", "debug", "info", "warn", "error"). * Falls back to RIVET_LOG_LEVEL, then LOG_LEVEL, then RUST_LOG env vars. Defaults to "warn". diff --git a/rivetkit-typescript/packages/rivetkit-native/src/database.rs b/rivetkit-typescript/packages/rivetkit-native/src/database.rs index e5fe5167ce..be98463611 100644 --- a/rivetkit-typescript/packages/rivetkit-native/src/database.rs +++ b/rivetkit-typescript/packages/rivetkit-native/src/database.rs @@ -1,15 +1,15 @@ -use std::ffi::{c_char, CStr, CString}; +use std::ffi::{CStr, CString, c_char}; use std::ptr; use std::sync::{Arc, Mutex}; use async_trait::async_trait; use libsqlite3_sys::{ - sqlite3, sqlite3_bind_blob, sqlite3_bind_double, sqlite3_bind_int64, sqlite3_bind_null, - sqlite3_bind_text, sqlite3_changes, sqlite3_column_blob, sqlite3_column_bytes, - sqlite3_column_count, sqlite3_column_double, sqlite3_column_int64, sqlite3_column_name, - sqlite3_column_text, sqlite3_column_type, sqlite3_errmsg, sqlite3_finalize, - sqlite3_prepare_v2, sqlite3_step, SQLITE_BLOB, SQLITE_DONE, SQLITE_FLOAT, SQLITE_INTEGER, - SQLITE_NULL, SQLITE_OK, SQLITE_ROW, SQLITE_TEXT, SQLITE_TRANSIENT, + SQLITE_BLOB, SQLITE_DONE, SQLITE_FLOAT, SQLITE_INTEGER, SQLITE_NULL, SQLITE_OK, SQLITE_ROW, + SQLITE_TEXT, SQLITE_TRANSIENT, sqlite3, sqlite3_bind_blob, sqlite3_bind_double, + sqlite3_bind_int64, sqlite3_bind_null, sqlite3_bind_text, sqlite3_changes, sqlite3_column_blob, + sqlite3_column_bytes, sqlite3_column_count, sqlite3_column_double, sqlite3_column_int64, + sqlite3_column_name, sqlite3_column_text, sqlite3_column_type, sqlite3_errmsg, + sqlite3_finalize, sqlite3_prepare_v2, sqlite3_step, }; use napi::bindgen_prelude::Buffer; use napi_derive::napi; @@ -109,8 +109,7 @@ pub struct JsNativeDatabase { impl 
JsNativeDatabase { pub fn as_ptr(&self) -> *mut libsqlite3_sys::sqlite3 { - self - .db + self.db .lock() .ok() .and_then(|guard| guard.as_ref().map(NativeDatabase::as_ptr)) @@ -243,13 +242,7 @@ fn bind_params( let text = CString::new(param.text_value.clone().unwrap_or_default()) .map_err(|err| napi::Error::from_reason(err.to_string()))?; unsafe { - sqlite3_bind_text( - stmt, - bind_index, - text.as_ptr(), - -1, - SQLITE_TRANSIENT(), - ) + sqlite3_bind_text(stmt, bind_index, text.as_ptr(), -1, SQLITE_TRANSIENT()) } } "blob" => { @@ -291,26 +284,17 @@ fn collect_columns(stmt: *mut libsqlite3_sys::sqlite3_stmt) -> Vec { if name_ptr.is_null() { String::new() } else { - CStr::from_ptr(name_ptr) - .to_string_lossy() - .into_owned() + CStr::from_ptr(name_ptr).to_string_lossy().into_owned() } }) .collect() } -fn column_value( - stmt: *mut libsqlite3_sys::sqlite3_stmt, - index: i32, -) -> serde_json::Value { +fn column_value(stmt: *mut libsqlite3_sys::sqlite3_stmt, index: i32) -> serde_json::Value { match unsafe { sqlite3_column_type(stmt, index) } { SQLITE_NULL => serde_json::Value::Null, - SQLITE_INTEGER => { - serde_json::Value::from(unsafe { sqlite3_column_int64(stmt, index) }) - } - SQLITE_FLOAT => { - serde_json::Value::from(unsafe { sqlite3_column_double(stmt, index) }) - } + SQLITE_INTEGER => serde_json::Value::from(unsafe { sqlite3_column_int64(stmt, index) }), + SQLITE_FLOAT => serde_json::Value::from(unsafe { sqlite3_column_double(stmt, index) }), SQLITE_TEXT => { let text_ptr = unsafe { sqlite3_column_text(stmt, index) }; if text_ptr.is_null() { @@ -328,9 +312,7 @@ fn column_value( serde_json::Value::Null } else { let blob_len = unsafe { sqlite3_column_bytes(stmt, index) } as usize; - let blob = unsafe { - std::slice::from_raw_parts(blob_ptr as *const u8, blob_len) - }; + let blob = unsafe { std::slice::from_raw_parts(blob_ptr as *const u8, blob_len) }; serde_json::Value::Array( blob.iter() .map(|byte| serde_json::Value::from(*byte)) @@ -349,9 +331,7 @@ fn 
execute_statement( ) -> napi::Result { let c_sql = CString::new(sql).map_err(|err| napi::Error::from_reason(err.to_string()))?; let mut stmt = ptr::null_mut(); - let rc = unsafe { - sqlite3_prepare_v2(db, c_sql.as_ptr(), -1, &mut stmt, ptr::null_mut()) - }; + let rc = unsafe { sqlite3_prepare_v2(db, c_sql.as_ptr(), -1, &mut stmt, ptr::null_mut()) }; if rc != SQLITE_OK { return Err(sqlite_error(db, "failed to prepare sqlite statement")); } @@ -393,9 +373,7 @@ fn query_statement( ) -> napi::Result { let c_sql = CString::new(sql).map_err(|err| napi::Error::from_reason(err.to_string()))?; let mut stmt = ptr::null_mut(); - let rc = unsafe { - sqlite3_prepare_v2(db, c_sql.as_ptr(), -1, &mut stmt, ptr::null_mut()) - }; + let rc = unsafe { sqlite3_prepare_v2(db, c_sql.as_ptr(), -1, &mut stmt, ptr::null_mut()) }; if rc != SQLITE_OK { return Err(sqlite_error(db, "failed to prepare sqlite query")); } diff --git a/rivetkit-typescript/packages/rivetkit-native/wrapper.js b/rivetkit-typescript/packages/rivetkit-native/wrapper.js index b65c635ef2..8a01d4ed97 100644 --- a/rivetkit-typescript/packages/rivetkit-native/wrapper.js +++ b/rivetkit-typescript/packages/rivetkit-native/wrapper.js @@ -111,8 +111,9 @@ function wrapHandle(jsHandle) { Buffer.from(requestId), clientMessageIndex, ), - startServerlessActor: async (payload) => - await jsHandle.startServerless(Buffer.from(payload)), + startServerlessActor: (payload) => { + jsHandle.startServerless(Buffer.from(payload)); + }, // Internal: expose raw handle for openDatabaseFromEnvoy _raw: jsHandle, }; @@ -134,7 +135,7 @@ function startEnvoySync(config) { poolName: config.poolName, version: config.version, metadata: config.metadata || null, - notGlobal: config.notGlobal, + notGlobal: config.notGlobal ?? 
false, }, (event) => { handleEvent(event, config, wrappedHandle); @@ -163,152 +164,6 @@ async function openDatabaseFromEnvoy(handle, actorId) { return native.openDatabaseFromEnvoy(rawHandle, actorId); } -function isPlainObject(value) { - return ( - !!value && - typeof value === "object" && - !Array.isArray(value) && - Object.getPrototypeOf(value) === Object.prototype - ); -} - -function toNativeBinding(value) { - if (value === null || value === undefined) { - return { kind: "null" }; - } - if (typeof value === "bigint") { - return { kind: "int", intValue: Number(value) }; - } - if (typeof value === "number") { - return Number.isInteger(value) - ? { kind: "int", intValue: value } - : { kind: "float", floatValue: value }; - } - if (typeof value === "string") { - return { kind: "text", textValue: value }; - } - if (value instanceof ArrayBuffer) { - return { kind: "blob", blobValue: Buffer.from(value) }; - } - if (ArrayBuffer.isView(value)) { - return { - kind: "blob", - blobValue: Buffer.from(value.buffer, value.byteOffset, value.byteLength), - }; - } - - throw new Error(`unsupported sqlite binding type: ${typeof value}`); -} - -function extractNamedSqliteParameters(sql) { - return [...sql.matchAll(/([:@$][A-Za-z_][A-Za-z0-9_]*)/g)].map( - (match) => match[1], - ); -} - -function getNamedSqliteBinding(bindings, name) { - if (name in bindings) { - return bindings[name]; - } - - const bareName = name.slice(1); - if (bareName in bindings) { - return bindings[bareName]; - } - - for (const prefix of [":", "@", "$"]) { - const candidate = `${prefix}${bareName}`; - if (candidate in bindings) { - return bindings[candidate]; - } - } - - return undefined; -} - -function normalizeBindings(sql, args) { - if (!args || args.length === 0) { - return []; - } - - if ( - args.length === 1 && - isPlainObject(args[0]) && - !(args[0] instanceof Uint8Array) - ) { - const names = extractNamedSqliteParameters(sql); - if (names.length === 0) { - throw new Error( - "native sqlite object 
bindings require named placeholders in the SQL statement", - ); - } - return names.map((name) => { - const value = getNamedSqliteBinding(args[0], name); - if (value === undefined) { - throw new Error(`missing bind parameter: ${name}`); - } - return toNativeBinding(value); - }); - } - - return args.map(toNativeBinding); -} - -function mapRows(rows, columns) { - return rows.map((row) => { - const rowObject = {}; - for (let i = 0; i < columns.length; i++) { - rowObject[columns[i]] = row[i]; - } - return rowObject; - }); -} - -async function openRawDatabaseFromEnvoy(handle, actorId) { - const nativeDb = await openDatabaseFromEnvoy(handle, actorId); - let closed = false; - - const ensureOpen = () => { - if (closed) { - throw new Error("database is closed"); - } - }; - - return { - execute: async (query, ...args) => { - ensureOpen(); - - if (args.length > 0) { - const bindings = normalizeBindings(query, args); - const token = query.trimStart().slice(0, 16).toUpperCase(); - const returnsRows = - token.startsWith("SELECT") || - token.startsWith("PRAGMA") || - token.startsWith("WITH") || - /\bRETURNING\b/i.test(query); - - if (returnsRows) { - const result = await nativeDb.query(query, bindings); - return mapRows(result.rows, result.columns); - } - - await nativeDb.run(query, bindings); - return []; - } - - const result = await nativeDb.exec(query); - return mapRows(result.rows, result.columns); - }, - close: async () => { - if (closed) { - return; - } - closed = true; - await nativeDb.close(); - }, - }; -} - /** * Route callback envelopes from the native addon to EnvoyConfig callbacks. 
*/ @@ -333,15 +188,15 @@ function handleEvent(event, config, wrappedHandle) { null, // preloadedKv ), ).then( - async () => { + () => { if (handle._raw) { - await handle._raw.respondCallback(event.responseId, {}); + handle._raw.respondCallback(event.responseId, {}); } }, - async (err) => { + (err) => { console.error("onActorStart error:", err); if (handle._raw) { - await handle._raw.respondCallback(event.responseId, { + handle._raw.respondCallback(event.responseId, { error: String(err), }); } @@ -358,15 +213,15 @@ function handleEvent(event, config, wrappedHandle) { event.reason || "stopped", ), ).then( - async () => { + () => { if (handle._raw) { - await handle._raw.respondCallback(event.responseId, {}); + handle._raw.respondCallback(event.responseId, {}); } }, - async (err) => { + (err) => { console.error("onActorStop error:", err); if (handle._raw) { - await handle._raw.respondCallback(event.responseId, { + handle._raw.respondCallback(event.responseId, { error: String(err), }); } @@ -403,17 +258,17 @@ function handleEvent(event, config, wrappedHandle) { const respBody = response.body ? 
Buffer.from(await response.arrayBuffer()).toString("base64") : undefined; - await handle._raw.respondCallback(event.responseId, { + handle._raw.respondCallback(event.responseId, { status: response.status || 200, headers: respHeaders, body: respBody, }); } }, - async (err) => { + (err) => { console.error("fetch callback error:", err); if (handle._raw) { - await handle._raw.respondCallback(event.responseId, { + handle._raw.respondCallback(event.responseId, { status: 500, headers: { "content-type": "text/plain" }, body: Buffer.from(String(err)).toString("base64"), @@ -492,6 +347,7 @@ function handleEvent(event, config, wrappedHandle) { ) : false; + console.log("[wrapper] websocket_open actorId:", event.actorId?.slice(0, 12), "path:", event.path); Promise.resolve( config.websocket( handle, @@ -506,7 +362,9 @@ function handleEvent(event, config, wrappedHandle) { false, ), ).then(() => { + console.log("[wrapper] websocket callback resolved, dispatching open event"); ws.dispatchEvent(new Event("open")); + console.log("[wrapper] open event dispatched"); }).catch((err) => { console.error("[wrapper] websocket callback error:", err); }); @@ -574,4 +432,3 @@ function handleEvent(event, config, wrappedHandle) { module.exports.startEnvoy = startEnvoy; module.exports.startEnvoySync = startEnvoySync; module.exports.openDatabaseFromEnvoy = openDatabaseFromEnvoy; -module.exports.openRawDatabaseFromEnvoy = openRawDatabaseFromEnvoy; diff --git a/scripts/docker/build-push-sandbox.sh b/scripts/docker/build-push-sandbox.sh index 40e67fc8f4..346e726334 100755 --- a/scripts/docker/build-push-sandbox.sh +++ b/scripts/docker/build-push-sandbox.sh @@ -1,28 +1,3 @@ #!/usr/bin/env bash set -euo pipefail - -AR_HOSTNAME=${AR_HOSTNAME:-us-east4-docker.pkg.dev} -AR_PROJECT_ID=${AR_PROJECT_ID:-dev-projects-491221} -AR_REPOSITORY=${AR_REPOSITORY:-cloud-run-source-deploy} -IMAGE_NAMESPACE=${IMAGE_NAMESPACE:-rivet-dev-rivet} -IMAGE_NAME=${IMAGE_NAME:-rivet-kitchen-sink} 
-IMAGE_REPO="${AR_HOSTNAME}/${AR_PROJECT_ID}/${AR_REPOSITORY}/${IMAGE_NAMESPACE}/${IMAGE_NAME}" - -COMMIT_SHA=${COMMIT_SHA:-$(git rev-parse HEAD)} -DOCKERFILE=${DOCKERFILE:-examples/sandbox/Dockerfile} -CONTEXT=${CONTEXT:-.} - -echo "Building ${IMAGE_REPO}:${COMMIT_SHA} and ${IMAGE_REPO}:latest" -docker build \ - -f "${DOCKERFILE}" \ - -t "${IMAGE_REPO}:${COMMIT_SHA}" \ - -t "${IMAGE_REPO}:latest" \ - "${CONTEXT}" - -echo "Pushing ${IMAGE_REPO}:${COMMIT_SHA}" -docker push "${IMAGE_REPO}:${COMMIT_SHA}" - -echo "Pushing ${IMAGE_REPO}:latest" -docker push "${IMAGE_REPO}:latest" - -echo "Done" +exec "$(dirname "$0")/build-sandbox.sh" --push "$@" diff --git a/scripts/docker/build-sandbox.sh b/scripts/docker/build-sandbox.sh new file mode 100755 index 0000000000..082b577979 --- /dev/null +++ b/scripts/docker/build-sandbox.sh @@ -0,0 +1,119 @@ +#!/usr/bin/env bash +# +# Build the sandbox Docker image using pnpm deploy for a flat, symlink-free node_modules. +# rivetkit-native is built on the host and injected into the deploy bundle. +# +# Usage: +# ./scripts/docker/build-sandbox.sh # build image only +# ./scripts/docker/build-sandbox.sh --push # build + push to Artifact Registry +# +set -euo pipefail + +REPO_ROOT="$(cd "$(dirname "$0")/../.." 
&& pwd)" +DEPLOY_DIR="$REPO_ROOT/.sandbox-deploy" + +AR_HOSTNAME="${AR_HOSTNAME:-us-east4-docker.pkg.dev}" +AR_PROJECT_ID="${AR_PROJECT_ID:-dev-projects-491221}" +AR_REPOSITORY="${AR_REPOSITORY:-cloud-run-source-deploy}" +IMAGE_NAMESPACE="${IMAGE_NAMESPACE:-rivet-dev-rivet}" +IMAGE_NAME="${IMAGE_NAME:-rivet-kitchen-sink}" +IMAGE_REPO="${AR_HOSTNAME}/${AR_PROJECT_ID}/${AR_REPOSITORY}/${IMAGE_NAMESPACE}/${IMAGE_NAME}" +COMMIT_SHA="${COMMIT_SHA:-$(git -C "$REPO_ROOT" rev-parse HEAD)}" + +PUSH=false +if [[ "${1:-}" == "--push" ]]; then + PUSH=true +fi + +cd "$REPO_ROOT" + +# --- Host build --- + +echo "==> pnpm install" +pnpm install + +echo "==> Building rivetkit-native" +(cd rivetkit-typescript/packages/rivetkit-native && pnpm build) + +echo "==> Building sandbox (turbo resolves workspace dep graph)" +pnpm build --filter=sandbox + +# --- Create flat deploy bundle --- + +echo "==> pnpm deploy --prod" +rm -rf "$DEPLOY_DIR" +pnpm --filter=sandbox deploy --prod "$DEPLOY_DIR" + +# pnpm deploy respects .gitignore, so build artifacts may be missing. +if [ ! -d "$DEPLOY_DIR/dist" ]; then + echo "==> Copying dist/ into deploy dir" + cp -r examples/sandbox/dist "$DEPLOY_DIR/dist" +fi +# srvx looks for public/ relative to the server entry (dist/public/). +if [ -d "examples/sandbox/public" ]; then + echo "==> Copying public/ (frontend assets) into deploy dir" + cp -r examples/sandbox/public "$DEPLOY_DIR/dist/public" +fi + +# Remove .dockerignore from deploy dir since it excludes node_modules/dist +rm -f "$DEPLOY_DIR/.dockerignore" + +# Inject @rivetkit/rivetkit-native (JS + .node binary) so the engine driver +# uses native SQLite instead of falling back to WASM. 
+NATIVE_PKG="rivetkit-typescript/packages/rivetkit-native" +NATIVE_DST="$DEPLOY_DIR/node_modules/@rivetkit/rivetkit-native" +echo "==> Injecting @rivetkit/rivetkit-native package" +mkdir -p "$NATIVE_DST" +cp "$NATIVE_PKG/package.json" "$NATIVE_DST/" +cp "$NATIVE_PKG/index.js" "$NATIVE_DST/" +cp "$NATIVE_PKG/index.d.ts" "$NATIVE_DST/" +cp "$NATIVE_PKG/wrapper.js" "$NATIVE_DST/" +cp "$NATIVE_PKG/wrapper.d.ts" "$NATIVE_DST/" +# Copy the .node binary for the Docker target platform. +NATIVE_NODE="$NATIVE_PKG/rivetkit-native.linux-x64-gnu.node" +if [ -f "$NATIVE_NODE" ]; then + cp "$NATIVE_NODE" "$NATIVE_DST/" +else + echo "WARNING: $NATIVE_NODE not found, native SQLite will not work" +fi + +# Also inject @rivetkit/engine-envoy-protocol (dependency of rivetkit-native/wrapper) +ENVOY_PROTO_PKG="engine/sdks/typescript/envoy-protocol" +ENVOY_PROTO_DST="$DEPLOY_DIR/node_modules/@rivetkit/engine-envoy-protocol" +if [ -d "$ENVOY_PROTO_PKG/dist" ]; then + echo "==> Injecting @rivetkit/engine-envoy-protocol" + mkdir -p "$ENVOY_PROTO_DST" + cp "$ENVOY_PROTO_PKG/package.json" "$ENVOY_PROTO_DST/" + cp -r "$ENVOY_PROTO_PKG/dist" "$ENVOY_PROTO_DST/dist" +fi + +# Inject @rivetkit/sqlite-wasm (the workspace rivetkit dynamically imports this name). +# The source package dir is sqlite-vfs but the import specifier is @rivetkit/sqlite-wasm. 
+SQLITE_VFS_PKG="rivetkit-typescript/packages/sqlite-vfs" +SQLITE_WASM_DST="$DEPLOY_DIR/node_modules/@rivetkit/sqlite-wasm" +if [ -d "$SQLITE_VFS_PKG/dist" ]; then + echo "==> Injecting @rivetkit/sqlite-wasm" + mkdir -p "$SQLITE_WASM_DST" + cp "$SQLITE_VFS_PKG/package.json" "$SQLITE_WASM_DST/" + cp -r "$SQLITE_VFS_PKG/dist" "$SQLITE_WASM_DST/dist" +fi + +# --- Docker build --- + +echo "==> docker build" +docker build \ + -f examples/sandbox/Dockerfile \ + -t "${IMAGE_REPO}:${COMMIT_SHA}" \ + -t "${IMAGE_REPO}:latest" \ + "$DEPLOY_DIR" + +echo "Built ${IMAGE_REPO}:${COMMIT_SHA}" + +if $PUSH; then + echo "==> Pushing ${IMAGE_REPO}:${COMMIT_SHA}" + docker push "${IMAGE_REPO}:${COMMIT_SHA}" + echo "==> Pushing ${IMAGE_REPO}:latest" + docker push "${IMAGE_REPO}:latest" +fi + +echo "Done"