diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..357011e09 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,7 @@ +* text=auto eol=lf +*.ts text eol=lf +*.tsx text eol=lf +*.js text eol=lf +*.jsx text eol=lf +*.json text eol=lf +*.md text eol=lf diff --git a/apps/dev-playground/.gitignore b/apps/dev-playground/.gitignore index a399b75c4..bc5b8f669 100644 --- a/apps/dev-playground/.gitignore +++ b/apps/dev-playground/.gitignore @@ -7,4 +7,7 @@ shared/appkit-types/serving.d.ts # Database plugin playground artifacts generated by `appkit db introspect` config/database/schema.ts -config/database/migrations/ \ No newline at end of file +config/database/migrations/ + +# Auto-generated from config/database/schema.ts by the database vite plugin +shared/appkit-types/database.d.ts \ No newline at end of file diff --git a/apps/dev-playground/client/src/routeTree.gen.ts b/apps/dev-playground/client/src/routeTree.gen.ts index 45e280700..174ec11d2 100644 --- a/apps/dev-playground/client/src/routeTree.gen.ts +++ b/apps/dev-playground/client/src/routeTree.gen.ts @@ -20,6 +20,7 @@ import { Route as LakebaseRouteRouteImport } from './routes/lakebase.route' import { Route as JobsRouteRouteImport } from './routes/jobs.route' import { Route as GenieRouteRouteImport } from './routes/genie.route' import { Route as FilesRouteRouteImport } from './routes/files.route' +import { Route as DatabaseRouteRouteImport } from './routes/database.route' import { Route as DataVisualizationRouteRouteImport } from './routes/data-visualization.route' import { Route as ChartInferenceRouteRouteImport } from './routes/chart-inference.route' import { Route as ArrowAnalyticsRouteRouteImport } from './routes/arrow-analytics.route' @@ -81,6 +82,11 @@ const FilesRouteRoute = FilesRouteRouteImport.update({ path: '/files', getParentRoute: () => rootRouteImport, } as any) +const DatabaseRouteRoute = DatabaseRouteRouteImport.update({ + id: '/database', + path: '/database', + 
getParentRoute: () => rootRouteImport, +} as any) const DataVisualizationRouteRoute = DataVisualizationRouteRouteImport.update({ id: '/data-visualization', path: '/data-visualization', @@ -113,6 +119,7 @@ export interface FileRoutesByFullPath { '/arrow-analytics': typeof ArrowAnalyticsRouteRoute '/chart-inference': typeof ChartInferenceRouteRoute '/data-visualization': typeof DataVisualizationRouteRoute + '/database': typeof DatabaseRouteRoute '/files': typeof FilesRouteRoute '/genie': typeof GenieRouteRoute '/jobs': typeof JobsRouteRoute @@ -131,6 +138,7 @@ export interface FileRoutesByTo { '/arrow-analytics': typeof ArrowAnalyticsRouteRoute '/chart-inference': typeof ChartInferenceRouteRoute '/data-visualization': typeof DataVisualizationRouteRoute + '/database': typeof DatabaseRouteRoute '/files': typeof FilesRouteRoute '/genie': typeof GenieRouteRoute '/jobs': typeof JobsRouteRoute @@ -150,6 +158,7 @@ export interface FileRoutesById { '/arrow-analytics': typeof ArrowAnalyticsRouteRoute '/chart-inference': typeof ChartInferenceRouteRoute '/data-visualization': typeof DataVisualizationRouteRoute + '/database': typeof DatabaseRouteRoute '/files': typeof FilesRouteRoute '/genie': typeof GenieRouteRoute '/jobs': typeof JobsRouteRoute @@ -170,6 +179,7 @@ export interface FileRouteTypes { | '/arrow-analytics' | '/chart-inference' | '/data-visualization' + | '/database' | '/files' | '/genie' | '/jobs' @@ -188,6 +198,7 @@ export interface FileRouteTypes { | '/arrow-analytics' | '/chart-inference' | '/data-visualization' + | '/database' | '/files' | '/genie' | '/jobs' @@ -206,6 +217,7 @@ export interface FileRouteTypes { | '/arrow-analytics' | '/chart-inference' | '/data-visualization' + | '/database' | '/files' | '/genie' | '/jobs' @@ -225,6 +237,7 @@ export interface RootRouteChildren { ArrowAnalyticsRouteRoute: typeof ArrowAnalyticsRouteRoute ChartInferenceRouteRoute: typeof ChartInferenceRouteRoute DataVisualizationRouteRoute: typeof DataVisualizationRouteRoute + 
DatabaseRouteRoute: typeof DatabaseRouteRoute FilesRouteRoute: typeof FilesRouteRoute GenieRouteRoute: typeof GenieRouteRoute JobsRouteRoute: typeof JobsRouteRoute @@ -317,6 +330,13 @@ declare module '@tanstack/react-router' { preLoaderRoute: typeof FilesRouteRouteImport parentRoute: typeof rootRouteImport } + '/database': { + id: '/database' + path: '/database' + fullPath: '/database' + preLoaderRoute: typeof DatabaseRouteRouteImport + parentRoute: typeof rootRouteImport + } '/data-visualization': { id: '/data-visualization' path: '/data-visualization' @@ -361,6 +381,7 @@ const rootRouteChildren: RootRouteChildren = { ArrowAnalyticsRouteRoute: ArrowAnalyticsRouteRoute, ChartInferenceRouteRoute: ChartInferenceRouteRoute, DataVisualizationRouteRoute: DataVisualizationRouteRoute, + DatabaseRouteRoute: DatabaseRouteRoute, FilesRouteRoute: FilesRouteRoute, GenieRouteRoute: GenieRouteRoute, JobsRouteRoute: JobsRouteRoute, diff --git a/apps/dev-playground/client/src/routes/__root.tsx b/apps/dev-playground/client/src/routes/__root.tsx index db42fdafb..f32ec7228 100644 --- a/apps/dev-playground/client/src/routes/__root.tsx +++ b/apps/dev-playground/client/src/routes/__root.tsx @@ -48,6 +48,14 @@ function RootComponent() { Arrow Analytics + + + + + + + {error && ( +
+ {error} +
+ )} + +
+ + + + Case + Entity + Risk + Status + Assigned to + Actions + + + + {data === null && loading && ( + + + Loading cases… + + + )} + {data !== null && data.length === 0 && !loading && ( + + + No cases match the current filter. + + + )} + {data?.map((row) => ( + + ))} + +
+
+ + ); +} + +function CaseRowItem({ + row, + onChanged, +}: { + row: Awaited>[number]; + onChanged: () => void; +}) { + const [busy, setBusy] = useState(false); + const [error, setError] = useState(null); + + const updateStatus = async (next: string) => { + if (next === row.status) return; + setBusy(true); + setError(null); + try { + await db.cases.update(row.case_id, { + status: next, + updated_at: new Date().toISOString(), + }); + onChanged(); + } catch (err) { + setError(describeError(err)); + } finally { + setBusy(false); + } + }; + + const remove = async () => { + if (!window.confirm(`Delete case ${row.case_id}?`)) return; + setBusy(true); + setError(null); + try { + await db.cases.delete(row.case_id); + onChanged(); + } catch (err) { + setError(describeError(err)); + setBusy(false); + } + }; + + return ( + + +
{row.case_id}
+
{row.case_type ?? "—"}
+
+ +
{row.entity_name ?? row.entity_id}
+
{row.entity_id}
+
+ + {row.risk_level ? ( + + {row.risk_level} {row.risk_score != null && `(${row.risk_score})`} + + ) : ( + + )} + + + + {error &&
{error}
} +
+ {row.assigned_to ?? "—"} + + + +
+ ); +} + +function CreateCase() { + const [caseId, setCaseId] = useState(""); + const [entityId, setEntityId] = useState(""); + const [entityName, setEntityName] = useState(""); + const [riskLevel, setRiskLevel] = useState("Medium"); + const [status, setStatus] = useState("New"); + const [busy, setBusy] = useState(false); + const [message, setMessage] = useState<{ + kind: "ok" | "err"; + text: string; + } | null>(null); + + const caseIdField = useId(); + const entityIdField = useId(); + const entityNameField = useId(); + const riskField = useId(); + const statusField = useId(); + + const disabled = busy || caseId.trim() === "" || entityId.trim() === ""; + + const submit = async (e: React.FormEvent) => { + e.preventDefault(); + if (disabled) return; + setBusy(true); + setMessage(null); + try { + await db.cases.create({ + case_id: caseId.trim(), + entity_id: entityId.trim(), + entity_name: entityName.trim() || null, + risk_level: riskLevel, + status, + }); + setMessage({ kind: "ok", text: `Created ${caseId.trim()}` }); + setCaseId(""); + setEntityId(""); + setEntityName(""); + } catch (err) { + setMessage({ kind: "err", text: describeError(err) }); + } finally { + setBusy(false); + } + }; + + return ( + +

New Case

+

+ Exercises db.cases.create(...). The server validates the + body against the Zod schema generated from schema.ts. +

+ +
+
+ + setCaseId(e.target.value)} + disabled={busy} + required + /> +
+
+ + setEntityId(e.target.value)} + disabled={busy} + required + /> +
+
+ + setEntityName(e.target.value)} + disabled={busy} + /> +
+
+ + +
+
+ + +
+ + + + {message && ( +
+ {message.text} +
+ )} +
+
+ ); +} + +function describeError(err: unknown): string { + if (err instanceof DatabaseHTTPError) { + const body = err.body as { error?: string; message?: string } | undefined; + return `HTTP ${err.statusCode} — ${body?.error ?? body?.message ?? err.message}`; + } + if (err instanceof Error) return err.message; + return String(err); +} diff --git a/apps/dev-playground/client/src/routes/index.tsx b/apps/dev-playground/client/src/routes/index.tsx index ec2d9a50a..e96e7e92a 100644 --- a/apps/dev-playground/client/src/routes/index.tsx +++ b/apps/dev-playground/client/src/routes/index.tsx @@ -200,6 +200,26 @@ function IndexRoute() { + +
+

+ Database Plugin +

+

+ Schema-driven CRUD end-to-end: declare tables in{" "} + config/database/schema.ts, get typed{" "} + db.cases.where(...) on the browser, routes + auto-mounted on the server. No glue code. +

+ +
+
+

diff --git a/apps/dev-playground/client/vite.config.ts b/apps/dev-playground/client/vite.config.ts index f892c62f9..3b0d18769 100644 --- a/apps/dev-playground/client/vite.config.ts +++ b/apps/dev-playground/client/vite.config.ts @@ -1,4 +1,5 @@ import path from "node:path"; +import { appKitDatabaseTypesPlugin } from "@databricks/appkit"; import { tanstackRouter } from "@tanstack/router-plugin/vite"; import react from "@vitejs/plugin-react"; import { defineConfig } from "vite"; @@ -11,6 +12,7 @@ export default defineConfig({ target: "react", autoCodeSplitting: process.env.NODE_ENV !== "development", }), + appKitDatabaseTypesPlugin(), ], server: { hmr: { diff --git a/apps/dev-playground/server/index.ts b/apps/dev-playground/server/index.ts index 91179dacd..98ca8a8a7 100644 --- a/apps/dev-playground/server/index.ts +++ b/apps/dev-playground/server/index.ts @@ -11,6 +11,7 @@ import { serving, WRITE_ACTIONS, } from "@databricks/appkit"; +import { database } from "@databricks/appkit/beta"; import { WorkspaceClient } from "@databricks/sdk-experimental"; // TODO: re-enable once vector-search is exported from @databricks/appkit // import { vectorSearch } from "@databricks/appkit"; @@ -55,6 +56,7 @@ createApp({ reconnect(), telemetryExamples(), analytics({}), + database(), genie({ spaces: { demo: process.env.DATABRICKS_GENIE_SPACE_ID ?? "placeholder" }, }), diff --git a/docs/docs/api/appkit/Function.appKitDatabaseTypesPlugin.md b/docs/docs/api/appkit/Function.appKitDatabaseTypesPlugin.md new file mode 100644 index 000000000..c80485093 --- /dev/null +++ b/docs/docs/api/appkit/Function.appKitDatabaseTypesPlugin.md @@ -0,0 +1,32 @@ +# Function: appKitDatabaseTypesPlugin() + +```ts +function appKitDatabaseTypesPlugin(options: AppKitDatabaseTypesPluginOptions): Plugin$1; +``` + +Vite plugin — regenerates `shared/appkit-types/database.d.ts` whenever +`config/database/schema.ts` changes during dev. In production (`vite build`) +it runs once at `buildStart`. 
+ +**Activation gate:** only when `config/database/schema.ts` exists, either at +the Vite root or its parent. Apps without a database plugin pay nothing. + +**Dev path (decision #25):** while the dev server is running, the schema is +loaded via `server.ssrLoadModule` — Vite evaluates it in-process, same Node +runtime. No child spawn, no `tsx` cold start. Before a change triggers +regeneration, the module cache is invalidated so the next load sees fresh +source. + +**Production path:** `buildStart` runs before `configureServer`, so the +loader falls through to the default dynamic `import()` — relying on the +parent process's tsx loader for TS support. + +## Parameters + +| Parameter | Type | +| ------ | ------ | +| `options` | `AppKitDatabaseTypesPluginOptions` | + +## Returns + +`Plugin$1` diff --git a/docs/docs/api/appkit/Function.bigid.md b/docs/docs/api/appkit/Function.bigid.md new file mode 100644 index 000000000..29b20e814 --- /dev/null +++ b/docs/docs/api/appkit/Function.bigid.md @@ -0,0 +1,15 @@ +# Function: bigid() + +```ts +function bigid(): AppKitColumnChain; +``` + +Create an int8 (bigserial) primary-key column. + +Maps to Postgres `bigserial` (8-byte integer with an attached sequence). +`appkit db introspect` emits this for live `bigserial`/`int8 + nextval()` +primary keys so the round-trip stays drift-free. + +## Returns + +[`AppKitColumnChain`](Interface.AppKitColumnChain.md) diff --git a/docs/docs/api/appkit/Function.createDrizzleDataPath.md b/docs/docs/api/appkit/Function.createDrizzleDataPath.md new file mode 100644 index 000000000..2c495d7b3 --- /dev/null +++ b/docs/docs/api/appkit/Function.createDrizzleDataPath.md @@ -0,0 +1,23 @@ +# Function: createDrizzleDataPath() + +```ts +function createDrizzleDataPath(pool: Pool, schema: Schema): DataPath; +``` + +Build a `DataPath` backed by `drizzle-orm/node-postgres`. + +Sole `drizzle-orm` import site (decision #30) — swapping query builders +means rewriting only this file. 
`schema` resolves eager-loading relations +via a two-query pattern (parent + IN(ids)), avoiding N+1 without needing +Drizzle's `relations()` API. + +## Parameters + +| Parameter | Type | +| ------ | ------ | +| `pool` | `Pool` | +| `schema` | [`Schema`](TypeAlias.Schema.md) | + +## Returns + +[`DataPath`](Interface.DataPath.md) diff --git a/docs/docs/api/appkit/Function.createUserScopedDataPath.md b/docs/docs/api/appkit/Function.createUserScopedDataPath.md new file mode 100644 index 000000000..c16b62f77 --- /dev/null +++ b/docs/docs/api/appkit/Function.createUserScopedDataPath.md @@ -0,0 +1,32 @@ +# Function: createUserScopedDataPath() + +```ts +function createUserScopedDataPath( + pool: Pool, + schema: Schema, + context: { + userId: string; +}): DataPath; +``` + +User-scoped `DataPath`: each op runs in a txn with `SET LOCAL app.user_id`. + +The txn is the security boundary — the GUC is txn-scoped, so a connection +returned to the pool can't leak identity to the next checkout. RLS policies +reading `current_setting('app.user_id')` resolve to the OBO user. + +One SP pool services everyone (no per-user pools, OAuth refresh, or LRU). +Cost: one BEGIN+COMMIT per op; amortize via `transaction(fn)` for multi-step. + +## Parameters + +| Parameter | Type | +| ------ | ------ | +| `pool` | `Pool` | +| `schema` | [`Schema`](TypeAlias.Schema.md) | +| `context` | \{ `userId`: `string`; \} | +| `context.userId` | `string` | + +## Returns + +[`DataPath`](Interface.DataPath.md) diff --git a/docs/docs/api/appkit/Function.fk.md b/docs/docs/api/appkit/Function.fk.md index 5a7f7fbe0..41d9aa384 100644 --- a/docs/docs/api/appkit/Function.fk.md +++ b/docs/docs/api/appkit/Function.fk.md @@ -6,7 +6,9 @@ function fk(target: AppKitColumn): FkColumnChain; Create a foreign key column. The reference target is captured live and resolved at `buildTable()` time, so forward references (e.g. `fk(other.id)` -declared before `table("other", ...)`) work. +declared before `table("other", ...)`) work. 
When the target was already +built, `toTable`/`toColumn` are populated immediately so the introspector +doesn't depend on define-schema's deferred resolver running first. The FK column type is currently fixed to `integer`. If the target is a `bigid()` (`bigserial`) or `uuid()` PK, declare the FK column with the diff --git a/docs/docs/api/appkit/Function.generateDatabaseTypes.md b/docs/docs/api/appkit/Function.generateDatabaseTypes.md new file mode 100644 index 000000000..1be0fece7 --- /dev/null +++ b/docs/docs/api/appkit/Function.generateDatabaseTypes.md @@ -0,0 +1,29 @@ +# Function: generateDatabaseTypes() + +```ts +function generateDatabaseTypes(options: GenerateDatabaseTypesOptions): Promise; +``` + +Read `config/database/schema.ts`, walk it, and emit the registry +augmentation to the configured output file. Silently returns when the +schema file does not exist — apps that don't use the database plugin pay +nothing. + +The algorithm: + +1. Read the schema source from disk (plain text — we hash it, not the AST). +2. On cache hit, re-emit the cached output and return early. +3. Otherwise call the module loader to get the live `Schema` object. +4. Walk the schema into flat `RegistryEntry`s (row/insert/update/filters/includes). +5. Render the `declare module` block and write it. +6. Update the cache with the new hash+output. + +## Parameters + +| Parameter | Type | +| ------ | ------ | +| `options` | `GenerateDatabaseTypesOptions` | + +## Returns + +`Promise`\<`void`\> diff --git a/docs/docs/api/appkit/Function.id.md b/docs/docs/api/appkit/Function.id.md index ac0f85c51..f849cd8c5 100644 --- a/docs/docs/api/appkit/Function.id.md +++ b/docs/docs/api/appkit/Function.id.md @@ -4,10 +4,12 @@ function id(): AppKitColumnChain; ``` -Create a primary key column with a serial type. +Create an int4 (serial) primary-key column. + +Maps to Postgres `serial` (4-byte integer with an attached sequence). 
Use +`bigid()` for tables that need more than ~2 billion rows or that mirror an +existing `bigserial` column from a brownfield database. ## Returns [`AppKitColumnChain`](Interface.AppKitColumnChain.md) - -The wrapped column chain. diff --git a/docs/docs/api/appkit/Function.timestamp.md b/docs/docs/api/appkit/Function.timestamp.md index af3e26dbd..da9c2955d 100644 --- a/docs/docs/api/appkit/Function.timestamp.md +++ b/docs/docs/api/appkit/Function.timestamp.md @@ -1,11 +1,22 @@ # Function: timestamp() ```ts -function timestamp(): AppKitColumnChain; +function timestamp(options: { + timezone?: boolean; + withTimezone?: boolean; +}): AppKitColumnChain; ``` Create a timestamp column. +## Parameters + +| Parameter | Type | +| ------ | ------ | +| `options` | \{ `timezone?`: `boolean`; `withTimezone?`: `boolean`; \} | +| `options.timezone?` | `boolean` | +| `options.withTimezone?` | `boolean` | + ## Returns [`AppKitColumnChain`](Interface.AppKitColumnChain.md) diff --git a/docs/docs/api/appkit/Interface.CountOptions.md b/docs/docs/api/appkit/Interface.CountOptions.md new file mode 100644 index 000000000..4fd7b8e37 --- /dev/null +++ b/docs/docs/api/appkit/Interface.CountOptions.md @@ -0,0 +1,19 @@ +# Interface: CountOptions + +Options accepted by `DataPath.count`. + +## Properties + +### signal? + +```ts +optional signal: AbortSignal; +``` + +*** + +### where? + +```ts +optional where: WhereSpec; +``` diff --git a/docs/docs/api/appkit/Interface.DataPath.md b/docs/docs/api/appkit/Interface.DataPath.md new file mode 100644 index 000000000..cba52e63c --- /dev/null +++ b/docs/docs/api/appkit/Interface.DataPath.md @@ -0,0 +1,250 @@ +# Interface: DataPath + +AppKit-shaped abstraction over the runtime data path. + +The entity proxy and route layer talk to this interface only. The +implementation in `drizzle-runtime.ts` is the *only* AppKit file that +imports `drizzle-orm` for query execution. Swapping Drizzle for Kysely, +Knex, or raw SQL means rewriting one file. 
+ +Identity, OBO, telemetry, hook dispatch, and validation all live above this +interface — `DataPath` is plain "execute these reads/writes against this +pool". Pool selection (SP vs per-user) happens in `entity-wiring.ts`. + +## Methods + +### count() + +```ts +count(table: AppKitTable, opts: CountOptions): Promise; +``` + +Count rows matching `where`. + +#### Parameters + +| Parameter | Type | +| ------ | ------ | +| `table` | [`AppKitTable`](Interface.AppKitTable.md) | +| `opts` | [`CountOptions`](Interface.CountOptions.md) | + +#### Returns + +`Promise`\<`number`\> + +*** + +### delete() + +```ts +delete( + table: AppKitTable, + pkColumn: string, + id: string | number, +signal?: AbortSignal): Promise; +``` + +DELETE one row by primary key. No-op when no row matches. + +#### Parameters + +| Parameter | Type | +| ------ | ------ | +| `table` | [`AppKitTable`](Interface.AppKitTable.md) | +| `pkColumn` | `string` | +| `id` | `string` \| `number` | +| `signal?` | `AbortSignal` | + +#### Returns + +`Promise`\<`void`\> + +*** + +### findOne() + +```ts +findOne( + table: AppKitTable, + pkColumn: string, + id: string | number, +opts?: FindOneOptions): Promise; +``` + +Find one row by primary key, or `null` when no row matches. + +#### Parameters + +| Parameter | Type | +| ------ | ------ | +| `table` | [`AppKitTable`](Interface.AppKitTable.md) | +| `pkColumn` | `string` | +| `id` | `string` \| `number` | +| `opts?` | `FindOneOptions` | + +#### Returns + +`Promise`\<`Row` \| `null`\> + +*** + +### insert() + +```ts +insert( + table: AppKitTable, + data: Row, +signal?: AbortSignal): Promise; +``` + +INSERT one row and return the inserted row (with server-generated columns). 
+ +#### Parameters + +| Parameter | Type | +| ------ | ------ | +| `table` | [`AppKitTable`](Interface.AppKitTable.md) | +| `data` | `Row` | +| `signal?` | `AbortSignal` | + +#### Returns + +`Promise`\<`Row`\> + +*** + +### raw() + +```ts +raw(strings: TemplateStringsArray, ...values: unknown[]): Promise; +``` + +Tagged-template SQL escape hatch. Values are bound as parameters; column +and identifier interpolation is intentionally not supported here — use +`getDrizzle()` from the plugin's exports for that case. + +#### Type Parameters + +| Type Parameter | Default type | +| ------ | ------ | +| `T` | `Row` | + +#### Parameters + +| Parameter | Type | +| ------ | ------ | +| `strings` | `TemplateStringsArray` | +| ...`values` | `unknown`[] | + +#### Returns + +`Promise`\<`T`[]\> + +*** + +### select() + +```ts +select(table: AppKitTable, opts: SelectOptions): Promise; +``` + +Run a SELECT and return rows (with optional eager joins). + +#### Parameters + +| Parameter | Type | +| ------ | ------ | +| `table` | [`AppKitTable`](Interface.AppKitTable.md) | +| `opts` | [`SelectOptions`](Interface.SelectOptions.md) | + +#### Returns + +`Promise`\<`Row`[]\> + +*** + +### transaction() + +```ts +transaction(fn: (tx: DataPath) => Promise): Promise; +``` + +Run `fn` inside a database transaction. The nested `DataPath` shares the +same surface; rollbacks happen on throw, commits on resolution. + +#### Type Parameters + +| Type Parameter | +| ------ | +| `T` | + +#### Parameters + +| Parameter | Type | +| ------ | ------ | +| `fn` | (`tx`: `DataPath`) => `Promise`\<`T`\> | + +#### Returns + +`Promise`\<`T`\> + +*** + +### update() + +```ts +update( + table: AppKitTable, + pkColumn: string, + id: string | number, + patch: Row, +signal?: AbortSignal): Promise; +``` + +UPDATE one row by primary key. Returns the updated row, or `null` when +no row matches. Hook dispatch and Zod validation happen above this layer. 
+ +#### Parameters + +| Parameter | Type | +| ------ | ------ | +| `table` | [`AppKitTable`](Interface.AppKitTable.md) | +| `pkColumn` | `string` | +| `id` | `string` \| `number` | +| `patch` | `Row` | +| `signal?` | `AbortSignal` | + +#### Returns + +`Promise`\<`Row` \| `null`\> + +*** + +### upsert() + +```ts +upsert( + table: AppKitTable, + data: Row, + options: { + onConflict: string; +}, +signal?: AbortSignal): Promise; +``` + +INSERT … ON CONFLICT (`onConflict`) DO UPDATE. Returns the resulting row. +`onConflict` is a column name in the table (single-column unique constraint). + +#### Parameters + +| Parameter | Type | +| ------ | ------ | +| `table` | [`AppKitTable`](Interface.AppKitTable.md) | +| `data` | `Row` | +| `options` | \{ `onConflict`: `string`; \} | +| `options.onConflict` | `string` | +| `signal?` | `AbortSignal` | + +#### Returns + +`Promise`\<`Row`\> diff --git a/docs/docs/api/appkit/Interface.SelectOptions.md b/docs/docs/api/appkit/Interface.SelectOptions.md new file mode 100644 index 000000000..54c595393 --- /dev/null +++ b/docs/docs/api/appkit/Interface.SelectOptions.md @@ -0,0 +1,68 @@ +# Interface: SelectOptions + +Options accepted by `DataPath.select`. + +## Properties + +### columns? + +```ts +optional columns: readonly string[]; +``` + +Project specific columns. Defaults to `*`. + +*** + +### include? + +```ts +optional include: IncludeSpec; +``` + +Eager-load related entities. + +*** + +### limit? + +```ts +optional limit: number; +``` + +*** + +### offset? + +```ts +optional offset: number; +``` + +*** + +### order? + +```ts +optional order: OrderSpec; +``` + +*** + +### signal? + +```ts +optional signal: AbortSignal; +``` + +Reserved. `node-postgres` does not honor `AbortSignal` at the query level +today — runaway queries are bounded server-side by Postgres +`statement_timeout` (set by the plugin on every pool connection). The +AppKit timeout interceptor still rejects the JS promise when fired. + +*** + +### where? 
+ +```ts +optional where: WhereSpec; +``` diff --git a/docs/docs/api/appkit/TypeAlias.IncludeSpec.md b/docs/docs/api/appkit/TypeAlias.IncludeSpec.md new file mode 100644 index 000000000..f13aca142 --- /dev/null +++ b/docs/docs/api/appkit/TypeAlias.IncludeSpec.md @@ -0,0 +1,9 @@ +# Type Alias: IncludeSpec + +```ts +type IncludeSpec = Record; +``` + +Eager-load shape: relation name → either `true` (all default) or an options +bag. The runtime resolves relation names against the parent table's +`$relations` metadata; unknown names throw at query time. diff --git a/docs/docs/api/appkit/TypeAlias.OrderSpec.md b/docs/docs/api/appkit/TypeAlias.OrderSpec.md new file mode 100644 index 000000000..dde004c90 --- /dev/null +++ b/docs/docs/api/appkit/TypeAlias.OrderSpec.md @@ -0,0 +1,7 @@ +# Type Alias: OrderSpec + +```ts +type OrderSpec = Record; +``` + +Order map: column name → direction (default: `asc`). diff --git a/docs/docs/api/appkit/TypeAlias.Schema.md b/docs/docs/api/appkit/TypeAlias.Schema.md index 1f937e235..fce78934e 100644 --- a/docs/docs/api/appkit/TypeAlias.Schema.md +++ b/docs/docs/api/appkit/TypeAlias.Schema.md @@ -6,6 +6,7 @@ type Schema = T & { $migrations: { snapshotHints: unknown; }; + $schemaName: string; $tables: Record; }; ``` @@ -34,6 +35,17 @@ readonly $migrations: { snapshotHints: unknown; ``` +### $schemaName + +```ts +readonly $schemaName: string; +``` + +Postgres schema namespace declared via `defineSchema(..., { schemaName })`. +Consumed by the database plugin (route/postgrest layer) and the +introspector so downstream code never has to re-configure what the schema +already knows about itself. + ### $tables ```ts diff --git a/docs/docs/api/appkit/TypeAlias.WhereSpec.md b/docs/docs/api/appkit/TypeAlias.WhereSpec.md new file mode 100644 index 000000000..aa480eeeb --- /dev/null +++ b/docs/docs/api/appkit/TypeAlias.WhereSpec.md @@ -0,0 +1,7 @@ +# Type Alias: WhereSpec + +```ts +type WhereSpec = Record; +``` + +Filter map: column name → predicate. 
diff --git a/docs/docs/api/appkit/index.md b/docs/docs/api/appkit/index.md index 1a14efb18..880ff9323 100644 --- a/docs/docs/api/appkit/index.md +++ b/docs/docs/api/appkit/index.md @@ -37,7 +37,9 @@ plugin architecture, and React integration. | [BasePluginConfig](Interface.BasePluginConfig.md) | Base configuration interface for AppKit plugins | | [CacheConfig](Interface.CacheConfig.md) | Configuration for the CacheInterceptor. Controls TTL, size limits, storage backend, and probabilistic cleanup. | | [ColumnMeta](Interface.ColumnMeta.md) | Metadata for an AppKit column. This is used to store the column metadata in the schema. | +| [CountOptions](Interface.CountOptions.md) | Options accepted by `DataPath.count`. | | [DatabaseCredential](Interface.DatabaseCredential.md) | Database credentials with OAuth token for Postgres connection | +| [DataPath](Interface.DataPath.md) | AppKit-shaped abstraction over the runtime data path. | | [DefineSchemaOptions](Interface.DefineSchemaOptions.md) | Options for defining a schema. | | [EndpointConfig](Interface.EndpointConfig.md) | - | | [FilePolicyUser](Interface.FilePolicyUser.md) | Minimal user identity passed to the policy function. | @@ -58,6 +60,7 @@ plugin architecture, and React integration. | [ResourceFieldEntry](Interface.ResourceFieldEntry.md) | Defines a single field for a resource. Each field has its own environment variable and optional description. Single-value types use one key (e.g. id); multi-value types (database, secret) use multiple (e.g. instance_name, database_name or scope, key). | | [ResourceRequirement](Interface.ResourceRequirement.md) | Declares a resource requirement for a plugin. Can be defined statically in a manifest or dynamically via getResourceRequirements(). Narrows the generated base: type → ResourceType enum, permission → ResourcePermission union. | | [SchemaBuilderContext](Interface.SchemaBuilderContext.md) | A context for the schema builder. This is used to build the schema. 
| +| [SelectOptions](Interface.SelectOptions.md) | Options accepted by `DataPath.select`. | | [ServingEndpointEntry](Interface.ServingEndpointEntry.md) | Shape of a single registry entry. | | [ServingEndpointRegistry](Interface.ServingEndpointRegistry.md) | Registry interface for serving endpoint type generation. Empty by default — augmented by the Vite type generator's `.d.ts` output via module augmentation. When populated, provides autocomplete for alias names and typed request/response/chunk per endpoint. | | [StreamExecutionSettings](Interface.StreamExecutionSettings.md) | Execution settings for streaming endpoints. Extends PluginExecutionSettings with SSE stream configuration. | @@ -73,13 +76,16 @@ plugin architecture, and React integration. | [FileAction](TypeAlias.FileAction.md) | Every action the files plugin can perform. | | [FilePolicy](TypeAlias.FilePolicy.md) | A policy function that decides whether `user` may perform `action` on `resource`. Return `true` to allow, `false` to deny. | | [IAppRouter](TypeAlias.IAppRouter.md) | Express router type for plugin route registration | +| [IncludeSpec](TypeAlias.IncludeSpec.md) | Eager-load shape: relation name → either `true` (all default) or an options bag. The runtime resolves relation names against the parent table's `$relations` metadata; unknown names throw at query time. | | [JobHandle](TypeAlias.JobHandle.md) | Job handle returned by `appkit.jobs("etl")`. Supports OBO access via `.asUser(req)`. | | [JobsExport](TypeAlias.JobsExport.md) | Public API shape of the jobs plugin. Callable to select a job by key. | +| [OrderSpec](TypeAlias.OrderSpec.md) | Order map: column name → direction (default: `asc`). | | [PluginData](TypeAlias.PluginData.md) | Tuple of plugin class, config, and name. Created by `toPlugin()` and passed to `createApp()`. | | [ResourcePermission](TypeAlias.ResourcePermission.md) | Union of all possible permission levels across all resource types. | | [Schema](TypeAlias.Schema.md) | A schema. 
This is used to define the schema for the database. | | [ServingFactory](TypeAlias.ServingFactory.md) | Factory function returned by `AppKit.serving`. | | [ToPlugin](TypeAlias.ToPlugin.md) | Factory function type returned by `toPlugin()`. Accepts optional config and returns a PluginData tuple. | +| [WhereSpec](TypeAlias.WhereSpec.md) | Filter map: column name → predicate. | ## Variables @@ -94,18 +100,23 @@ plugin architecture, and React integration. | Function | Description | | ------ | ------ | +| [appKitDatabaseTypesPlugin](Function.appKitDatabaseTypesPlugin.md) | Vite plugin — regenerates `shared/appkit-types/database.d.ts` whenever `config/database/schema.ts` changes during dev. In production (`vite build`) it runs once at `buildStart`. | | [appKitServingTypesPlugin](Function.appKitServingTypesPlugin.md) | Vite plugin to generate TypeScript types for AppKit serving endpoints. Fetches OpenAPI schemas from Databricks and generates a .d.ts with ServingEndpointRegistry module augmentation. | | [appKitTypesPlugin](Function.appKitTypesPlugin.md) | Vite plugin to generate types for AppKit queries. Calls generateFromEntryPoint under the hood. | +| [bigid](Function.bigid.md) | Create an int8 (bigserial) primary-key column. | | [bigint](Function.bigint.md) | Create a bigint column. | | [boolean](Function.boolean.md) | Create a boolean column. | | [createApp](Function.createApp.md) | Bootstraps AppKit with the provided configuration. | +| [createDrizzleDataPath](Function.createDrizzleDataPath.md) | Build a `DataPath` backed by `drizzle-orm/node-postgres`. | | [createLakebasePool](Function.createLakebasePool.md) | Create a Lakebase pool with appkit's logger integration. Telemetry automatically uses appkit's OpenTelemetry configuration via global registry. | +| [createUserScopedDataPath](Function.createUserScopedDataPath.md) | User-scoped `DataPath`: each op runs in a txn with `SET LOCAL app.user_id`. | | [defineSchema](Function.defineSchema.md) | Define a schema. 
This is used to build the schema for the database. | | [enumColumn](Function.enumColumn.md) | Create an enum column. | | [extractServingEndpoints](Function.extractServingEndpoints.md) | Extract serving endpoint config from a server file by AST-parsing it. Looks for `serving({ endpoints: { alias: { env: "..." }, ... } })` calls and extracts the endpoint alias names and their environment variable mappings. | | [findServerFile](Function.findServerFile.md) | Find the server entry file by checking candidate paths in order. | -| [fk](Function.fk.md) | Create a foreign key column. The reference target is captured live and resolved at `buildTable()` time, so forward references (e.g. `fk(other.id)` declared before `table("other", ...)`) work. | +| [fk](Function.fk.md) | Create a foreign key column. The reference target is captured live and resolved at `buildTable()` time, so forward references (e.g. `fk(other.id)` declared before `table("other", ...)`) work. When the target was already built, `toTable`/`toColumn` are populated immediately so the introspector doesn't depend on define-schema's deferred resolver running first. | | [generateDatabaseCredential](Function.generateDatabaseCredential.md) | Generate OAuth credentials for Postgres database connection using the proper Postgres API. | +| [generateDatabaseTypes](Function.generateDatabaseTypes.md) | Read `config/database/schema.ts`, walk it, and emit the registry augmentation to the configured output file. Silently returns when the schema file does not exist — apps that don't use the database plugin pay nothing. | | [getExecutionContext](Function.getExecutionContext.md) | Get the current execution context. | | [getLakebaseOrmConfig](Function.getLakebaseOrmConfig.md) | Get Lakebase connection configuration for ORMs that don't accept pg.Pool directly. | | [getLakebasePgConfig](Function.getLakebasePgConfig.md) | Get Lakebase connection configuration for PostgreSQL clients. 
| @@ -113,7 +124,7 @@ plugin architecture, and React integration. | [getResourceRequirements](Function.getResourceRequirements.md) | Gets the resource requirements from a plugin's manifest. | | [getUsernameWithApiLookup](Function.getUsernameWithApiLookup.md) | Resolves the PostgreSQL username for a Lakebase connection. | | [getWorkspaceClient](Function.getWorkspaceClient.md) | Get workspace client from config or SDK default auth chain | -| [id](Function.id.md) | Create a primary key column with a serial type. | +| [id](Function.id.md) | Create an int4 (serial) primary-key column. | | [integer](Function.integer.md) | Create an integer column. | | [isPrivateColumn](Function.isPrivateColumn.md) | Returns true if `columnName` is marked `.private()` on `table`. | | [isSQLTypeMarker](Function.isSQLTypeMarker.md) | Type guard to check if a value is a SQL type marker | diff --git a/docs/docs/api/appkit/typedoc-sidebar.ts b/docs/docs/api/appkit/typedoc-sidebar.ts index f93461640..751c9f024 100644 --- a/docs/docs/api/appkit/typedoc-sidebar.ts +++ b/docs/docs/api/appkit/typedoc-sidebar.ts @@ -117,11 +117,21 @@ const typedocSidebar: SidebarsConfig = { id: "api/appkit/Interface.ColumnMeta", label: "ColumnMeta" }, + { + type: "doc", + id: "api/appkit/Interface.CountOptions", + label: "CountOptions" + }, { type: "doc", id: "api/appkit/Interface.DatabaseCredential", label: "DatabaseCredential" }, + { + type: "doc", + id: "api/appkit/Interface.DataPath", + label: "DataPath" + }, { type: "doc", id: "api/appkit/Interface.DefineSchemaOptions", @@ -222,6 +232,11 @@ const typedocSidebar: SidebarsConfig = { id: "api/appkit/Interface.SchemaBuilderContext", label: "SchemaBuilderContext" }, + { + type: "doc", + id: "api/appkit/Interface.SelectOptions", + label: "SelectOptions" + }, { type: "doc", id: "api/appkit/Interface.ServingEndpointEntry", @@ -278,6 +293,11 @@ const typedocSidebar: SidebarsConfig = { id: "api/appkit/TypeAlias.IAppRouter", label: "IAppRouter" }, + { + type: "doc", + id: 
"api/appkit/TypeAlias.IncludeSpec", + label: "IncludeSpec" + }, { type: "doc", id: "api/appkit/TypeAlias.JobHandle", @@ -288,6 +308,11 @@ const typedocSidebar: SidebarsConfig = { id: "api/appkit/TypeAlias.JobsExport", label: "JobsExport" }, + { + type: "doc", + id: "api/appkit/TypeAlias.OrderSpec", + label: "OrderSpec" + }, { type: "doc", id: "api/appkit/TypeAlias.PluginData", @@ -312,6 +337,11 @@ const typedocSidebar: SidebarsConfig = { type: "doc", id: "api/appkit/TypeAlias.ToPlugin", label: "ToPlugin" + }, + { + type: "doc", + id: "api/appkit/TypeAlias.WhereSpec", + label: "WhereSpec" } ] }, @@ -345,6 +375,11 @@ const typedocSidebar: SidebarsConfig = { type: "category", label: "Functions", items: [ + { + type: "doc", + id: "api/appkit/Function.appKitDatabaseTypesPlugin", + label: "appKitDatabaseTypesPlugin" + }, { type: "doc", id: "api/appkit/Function.appKitServingTypesPlugin", @@ -355,6 +390,11 @@ const typedocSidebar: SidebarsConfig = { id: "api/appkit/Function.appKitTypesPlugin", label: "appKitTypesPlugin" }, + { + type: "doc", + id: "api/appkit/Function.bigid", + label: "bigid" + }, { type: "doc", id: "api/appkit/Function.bigint", @@ -370,11 +410,21 @@ const typedocSidebar: SidebarsConfig = { id: "api/appkit/Function.createApp", label: "createApp" }, + { + type: "doc", + id: "api/appkit/Function.createDrizzleDataPath", + label: "createDrizzleDataPath" + }, { type: "doc", id: "api/appkit/Function.createLakebasePool", label: "createLakebasePool" }, + { + type: "doc", + id: "api/appkit/Function.createUserScopedDataPath", + label: "createUserScopedDataPath" + }, { type: "doc", id: "api/appkit/Function.defineSchema", @@ -405,6 +455,11 @@ const typedocSidebar: SidebarsConfig = { id: "api/appkit/Function.generateDatabaseCredential", label: "generateDatabaseCredential" }, + { + type: "doc", + id: "api/appkit/Function.generateDatabaseTypes", + label: "generateDatabaseTypes" + }, { type: "doc", id: "api/appkit/Function.getExecutionContext", diff --git 
+type UserClient = {
+  where: (input: Partial<UserRow>) => UserClient;
+  order: (input: Partial<Record<keyof UserRow, "asc" | "desc">>) => UserClient;
+  limit: (n: number) => UserClient;
+  offset: (n: number) => UserClient;
+  include: (input: Record<string, unknown>) => UserClient;
+  toArray: (signal?: AbortSignal) => Promise<unknown[]>;
+  first: () => Promise<unknown>;
+  find: (id: string | number) => Promise<unknown>;
+  count: () => Promise<number>;
+  create: (data: Partial<UserRow>) => Promise<unknown>;
+  update: (id: string | number, patch: Partial<UserRow>) => Promise<unknown>;
+  upsert: (
+    data: Partial<UserRow>,
+    options: { onConflict: string },
+  ) => Promise<unknown>;
+  delete: (id: string | number) => Promise<void>;
+};
[]; + expect(url).toBe("/api/database/user?role=eq.admin&limit=10"); + expect(init?.signal).toBeUndefined(); + }); + + test(".include({ posts: true }) emits ?include= without touching select", async () => { + const { db, fetchSpy } = setup([jsonResponse([])]); + const users = db as unknown as { user: UserClient }; + + await users.user.include({ posts: true }).toArray(); + + const [url] = fetchSpy.mock.calls[0] ?? []; + expect(url).toContain("include=posts"); + expect(url).not.toContain("select="); + }); + + test("first() returns the first row or null", async () => { + const { db, fetchSpy } = setup([jsonResponse([{ id: 7 }])]); + const users = db as unknown as { user: UserClient }; + + const row = await users.user.where({ role: "admin" }).first(); + expect(row).toEqual({ id: 7 }); + + const [url] = fetchSpy.mock.calls[0] ?? []; + expect(url).toContain("limit=1"); + }); + + test("first() returns null on empty list", async () => { + const { db } = setup([jsonResponse([])]); + const users = db as unknown as { user: UserClient }; + + expect(await users.user.first()).toBeNull(); + }); + + test("passes AbortSignal through to fetch", async () => { + const { db, fetchSpy } = setup([jsonResponse([])]); + const users = db as unknown as { user: UserClient }; + const ctrl = new AbortController(); + + await users.user.toArray(ctrl.signal); + + const [, init] = fetchSpy.mock.calls[0] ?? []; + expect(init?.signal).toBe(ctrl.signal); + }); +}); + +describe("createDatabaseClient — find/count", () => { + test("find() hits /entity/:id and returns parsed body", async () => { + const { db, fetchSpy } = setup([jsonResponse({ id: 42, role: "admin" })]); + const users = db as unknown as { user: UserClient }; + + const row = await users.user.find(42); + + expect(row).toEqual({ id: 42, role: "admin" }); + const [url] = fetchSpy.mock.calls[0] ?? 
[]; + expect(url).toBe("/api/database/user/42"); + }); + + test("find() returns null on 404", async () => { + const { db } = setup([new Response(null, { status: 404 })]); + const users = db as unknown as { user: UserClient }; + + expect(await users.user.find(999)).toBeNull(); + }); + + test("count() reads { count } from /entity/count", async () => { + const { db, fetchSpy } = setup([jsonResponse({ count: 17 })]); + const users = db as unknown as { user: UserClient }; + + const total = await users.user.where({ role: "admin" }).count(); + + expect(total).toBe(17); + const [url] = fetchSpy.mock.calls[0] ?? []; + expect(url).toBe("/api/database/user/count?role=eq.admin"); + }); +}); + +describe("createDatabaseClient — mutations", () => { + test("create() POSTs JSON body", async () => { + const { db, fetchSpy } = setup([ + jsonResponse({ id: 1, email: "a@x", role: "member" }, { status: 201 }), + ]); + const users = db as unknown as { user: UserClient }; + + const created = await users.user.create({ email: "a@x" }); + expect(created).toEqual({ id: 1, email: "a@x", role: "member" }); + + const [url, init] = fetchSpy.mock.calls[0] ?? []; + expect(url).toBe("/api/database/user"); + expect(init?.method).toBe("POST"); + expect(init?.headers).toMatchObject({ "Content-Type": "application/json" }); + expect(init?.body).toBe(JSON.stringify({ email: "a@x" })); + }); + + test("update() PATCHes /entity/:id", async () => { + const { db, fetchSpy } = setup([jsonResponse({ id: 5, role: "admin" })]); + const users = db as unknown as { user: UserClient }; + + const updated = await users.user.update(5, { role: "admin" }); + expect(updated).toEqual({ id: 5, role: "admin" }); + + const [url, init] = fetchSpy.mock.calls[0] ?? 
[]; + expect(url).toBe("/api/database/user/5"); + expect(init?.method).toBe("PATCH"); + expect(init?.body).toBe(JSON.stringify({ role: "admin" })); + }); + + test("upsert() sends merge-duplicates Prefer header", async () => { + const { db, fetchSpy } = setup([jsonResponse({ id: 1 })]); + const users = db as unknown as { user: UserClient }; + + await users.user.upsert({ id: 1, email: "a@x" }, { onConflict: "id" }); + + const [url, init] = fetchSpy.mock.calls[0] ?? []; + expect(url).toBe("/api/database/user?on_conflict=id"); + expect(init?.method).toBe("POST"); + expect(init?.headers).toMatchObject({ + Prefer: "resolution=merge-duplicates", + }); + }); + + test("delete() resolves void on 204", async () => { + const { db, fetchSpy } = setup([new Response(null, { status: 204 })]); + const users = db as unknown as { user: UserClient }; + + await expect(users.user.delete(3)).resolves.toBeUndefined(); + + const [url, init] = fetchSpy.mock.calls[0] ?? []; + expect(url).toBe("/api/database/user/3"); + expect(init?.method).toBe("DELETE"); + }); +}); + +describe("createDatabaseClient — errors", () => { + test("throws DatabaseHTTPError on non-2xx with parsed body", async () => { + const { db } = setup([ + new Response(JSON.stringify({ error: "bad input" }), { + status: 400, + headers: { "content-type": "application/json" }, + }), + ]); + const users = db as unknown as { user: UserClient }; + + await expect(users.user.create({})).rejects.toMatchObject({ + name: "DatabaseHTTPError", + status: 400, + body: { error: "bad input" }, + message: "bad input", + }); + }); + + test("falls back to statusText when body is empty", async () => { + const { db } = setup([ + new Response(null, { status: 500, statusText: "Server Error" }), + ]); + const users = db as unknown as { user: UserClient }; + + await expect(users.user.toArray()).rejects.toBeInstanceOf( + DatabaseHTTPError, + ); + }); + + test("throws on 500 even for delete()", async () => { + const { db } = setup([new Response(null, { 
status: 500 })]); + const users = db as unknown as { user: UserClient }; + + await expect(users.user.delete(1)).rejects.toBeInstanceOf( + DatabaseHTTPError, + ); + }); +}); + +describe("createDatabaseClient — baseUrl", () => { + test("strips trailing slash from baseUrl", async () => { + const fetchSpy = vi.fn(async () => jsonResponse([])); + const db = createDatabaseClient({ + baseUrl: "/api/database/", + fetch: fetchSpy, + }); + const users = db as unknown as { user: UserClient }; + + await users.user.toArray(); + + const [url] = fetchSpy.mock.calls[0] ?? []; + expect(url).toBe("/api/database/user"); + }); + + test("fresh chain per entity access", async () => { + const fetchSpy = vi.fn(async () => jsonResponse([])); + const db = createDatabaseClient({ fetch: fetchSpy }); + const typed = db as unknown as { user: UserClient; team: UserClient }; + + await typed.user.where({ role: "admin" }).toArray(); + await typed.team.toArray(); + + expect(fetchSpy.mock.calls[0]?.[0]).toContain("/api/database/user"); + expect(fetchSpy.mock.calls[1]?.[0]).toBe("/api/database/team"); + }); +}); diff --git a/packages/appkit-ui/src/js/database/client.ts b/packages/appkit-ui/src/js/database/client.ts new file mode 100644 index 000000000..a69281809 --- /dev/null +++ b/packages/appkit-ui/src/js/database/client.ts @@ -0,0 +1,229 @@ +import { DatabaseHTTPError } from "./errors"; +import type { + ApplyIncludes, + DatabaseClient, + DatabaseClientConfig, + EntityClient, + IncludeInput, + OrderInput, + WhereInput, +} from "./types"; +import { + buildUrl, + EMPTY_STATE, + pushFilter, + pushInclude, + pushOrder, + pushSelect, + type RequestState, +} from "./url-builder"; + +/** + * Browser `DatabaseClient`: `Proxy` over `/` chains that end in `fetch`. 
+  const makeChain = <TRow, TInsert, TUpdate, TIncludes>(
+    entity: string,
+    state: RequestState,
+  ): EntityClient<TRow, TInsert, TUpdate, TIncludes> => {
null; + }, + find: async (id, signal) => { + const url = `${baseUrl}/${entity}/${encodeURIComponent(String(id))}`; + const res = await fetchImpl(url, { signal }); + if (res.status === 404 || res.status === 204) return null; + return readJson(res); + }, + count: async (signal) => { + const countState: RequestState = { + ...state, + order: undefined, + limit: undefined, + offset: undefined, + select: undefined, + include: undefined, + }; + const url = buildUrl(baseUrl, entity, countState, "count"); + const res = await fetchImpl(url, { signal }); + const json = await readJson<{ count: number }>(res); + return json.count; + }, + + create: async (data, signal) => { + const res = await fetchImpl(`${baseUrl}/${entity}`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(data), + signal, + }); + return readJson(res); + }, + update: async (id, patch, signal) => { + const url = `${baseUrl}/${entity}/${encodeURIComponent(String(id))}`; + const res = await fetchImpl(url, { + method: "PATCH", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(patch), + signal, + }); + // 404 → null like `find()` so callers distinguish missing rows from bad responses. 
+async function readJson<T>(res: Response): Promise<T> {
+async function buildError(res: Response): Promise<DatabaseHTTPError> {
+type AugmentedKeys<T> = keyof {
+  [K in keyof T as string extends K ? never : K]: T[K];
+};
+export type DatabaseEntityKey = AugmentedKeys<DatabaseRegistry> extends never
+  ? string
+  : AugmentedKeys<DatabaseRegistry>;
+
+/** Row shape for entity `E`. Unknown before module augmentation. */
+export type DatabaseRow<E extends string> =
+  DatabaseRegistry extends {
+    [K in E]: { row: infer R };
+  }
+    ? R
+    : unknown;
+
+/** Insert shape for entity `E`. Falls back to `Record<string, unknown>`. */
+export type DatabaseInsert<E extends string> =
+  DatabaseRegistry extends { [K in E]: { insert: infer I } }
+    ? I
+    : Record<string, unknown>;
+
+/** Update shape for entity `E`. Falls back to `Record<string, unknown>`. */
+export type DatabaseUpdate<E extends string> =
+  DatabaseRegistry extends { [K in E]: { update: infer U } }
+    ? U
+    : Record<string, unknown>;
+
+/** Includes map for entity `E`. Empty object when no relations exist. */
+export type DatabaseIncludes<E extends string> =
+  DatabaseRegistry extends { [K in E]: { includes: infer I } }
+    ? I
+    : Record<string, never>;
+
+/**
+ * `.where(...)` predicate: bare values are `eq`; objects pick operators per column.
+ */
+export type WhereInput<TRow> = {
+  [K in keyof TRow]?:
+    | TRow[K]
+    | {
+        eq?: TRow[K];
+        neq?: TRow[K];
+        gt?: TRow[K];
+        gte?: TRow[K];
+        lt?: TRow[K];
+        lte?: TRow[K];
+        like?: string;
+        ilike?: string;
+        in?: TRow[K][];
+        is?: TRow[K] | null;
+      };
+};
+
+/** Sort directive for `.order(...)`. */
+export type OrderInput<TRow> = { [K in keyof TRow]?: "asc" | "desc" };
+export type DatabaseClient = {
+  [E in DatabaseEntityKey]: EntityClient<
+    DatabaseRow<E>,
+    DatabaseInsert<E>,
+    DatabaseUpdate<E>,
+    DatabaseIncludes<E>
+  >;
+};
*/ + fetch?: typeof fetch; +} diff --git a/packages/appkit-ui/src/js/database/url-builder.test.ts b/packages/appkit-ui/src/js/database/url-builder.test.ts new file mode 100644 index 000000000..07eae9ea7 --- /dev/null +++ b/packages/appkit-ui/src/js/database/url-builder.test.ts @@ -0,0 +1,204 @@ +import { describe, expect, test } from "vitest"; +import { + buildUrl, + EMPTY_STATE, + pushFilter, + pushInclude, + pushOrder, + pushSelect, + type RequestState, +} from "./url-builder"; + +describe("pushFilter", () => { + test("treats bare values as equality", () => { + const next = pushFilter(EMPTY_STATE, { role: "admin", age: 30 }); + + expect(next.filters).toEqual([ + { col: "role", expr: "eq.admin" }, + { col: "age", expr: "eq.30" }, + ]); + }); + + test("expands operator objects", () => { + const next = pushFilter(EMPTY_STATE, { + age: { gte: 18, lt: 65 }, + }); + + expect(next.filters).toEqual([ + { col: "age", expr: "gte.18" }, + { col: "age", expr: "lt.65" }, + ]); + }); + + test("serializes `in` with a parenthesized list", () => { + const next = pushFilter(EMPTY_STATE, { + role: { in: ["admin", "owner"] }, + }); + + expect(next.filters).toEqual([{ col: "role", expr: "in.(admin,owner)" }]); + }); + + test("serializes bare arrays as `in`", () => { + const next = pushFilter(EMPTY_STATE, { + status: ["new", "in_progress"], + }); + + expect(next.filters).toEqual([ + { col: "status", expr: "in.(new,in_progress)" }, + ]); + }); + + test("uses `is` for explicit null checks", () => { + const next = pushFilter(EMPTY_STATE, { + deletedAt: { is: null }, + }); + + expect(next.filters).toEqual([{ col: "deletedAt", expr: "is.null" }]); + }); + + test("quotes string values with commas, spaces, or parens", () => { + const next = pushFilter(EMPTY_STATE, { + name: "Doe, Jane", + }); + + expect(next.filters).toEqual([{ col: "name", expr: 'eq."Doe, Jane"' }]); + }); + + test("skips undefined operator values", () => { + const next = pushFilter(EMPTY_STATE, { + age: { gte: undefined, 
lt: 65 }, + }); + + expect(next.filters).toEqual([{ col: "age", expr: "lt.65" }]); + }); + + test("returns a new state object (immutable)", () => { + const state = pushFilter(EMPTY_STATE, { role: "admin" }); + const next = pushFilter(state, { team: "ops" }); + + expect(state.filters).toEqual([{ col: "role", expr: "eq.admin" }]); + expect(next.filters).toHaveLength(2); + expect(next).not.toBe(state); + }); +}); + +describe("pushOrder", () => { + test("formats single-column order with default asc", () => { + const next = pushOrder(EMPTY_STATE, { createdAt: "asc" }); + + expect(next.order).toBe("createdAt.asc"); + }); + + test("merges successive order calls", () => { + const first = pushOrder(EMPTY_STATE, { createdAt: "desc" }); + const next = pushOrder(first, { id: "asc" }); + + expect(next.order).toBe("createdAt.desc,id.asc"); + }); + + test("is a no-op for empty input", () => { + const next = pushOrder(EMPTY_STATE, {}); + + expect(next).toBe(EMPTY_STATE); + }); +}); + +describe("pushSelect", () => { + test("joins columns with commas", () => { + const next = pushSelect(EMPTY_STATE, ["id", "email"]); + + expect(next.select).toBe("id,email"); + }); + + test("is a no-op for empty projection", () => { + const next = pushSelect(EMPTY_STATE, []); + + expect(next).toBe(EMPTY_STATE); + }); +}); + +describe("pushInclude", () => { + test("serializes `{ posts: true }` as a bare relation name", () => { + const next = pushInclude(EMPTY_STATE, { posts: true }); + + expect(next.include).toBe("posts"); + expect(next.select).toBeUndefined(); + }); + + test("keeps select and include independent", () => { + const withSelect = pushSelect(EMPTY_STATE, ["id", "email"]); + const next = pushInclude(withSelect, { posts: true }); + + expect(next.select).toBe("id,email"); + expect(next.include).toBe("posts"); + }); + + test("renders per-relation column list with parens", () => { + const next = pushInclude(EMPTY_STATE, { + posts: { select: ["id", "title"] }, + }); + + 
expect(next.include).toBe("posts(id,title)"); + }); + + test("combines multiple relations", () => { + const next = pushInclude(EMPTY_STATE, { + posts: true, + author: { select: ["id"] }, + }); + + expect(next.include).toBe("posts,author(id)"); + }); + + test("is a no-op when input is empty", () => { + const next = pushInclude(EMPTY_STATE, {}); + + expect(next).toBe(EMPTY_STATE); + }); +}); + +describe("buildUrl", () => { + test("emits path only when state is empty", () => { + expect(buildUrl("/api/database", "user", EMPTY_STATE)).toBe( + "/api/database/user", + ); + }); + + test("appends filters and limit in a stable order", () => { + const state: RequestState = { + filters: [ + { col: "role", expr: "eq.admin" }, + { col: "age", expr: "gte.18" }, + ], + limit: 10, + }; + + expect(buildUrl("/api/database", "user", state)).toBe( + "/api/database/user?role=eq.admin&age=gte.18&limit=10", + ); + }); + + test("includes order, offset, select, and include when present", () => { + const state: RequestState = { + filters: [], + order: "id.desc", + offset: 20, + select: "id,email", + include: "posts(title),author", + }; + + expect(buildUrl("/api/database", "user", state)).toBe( + "/api/database/user?order=id.desc&offset=20&select=id%2Cemail&include=posts%28title%29%2Cauthor", + ); + }); + + test("trims trailing slash from base url via caller contract", () => { + // buildUrl itself does not normalize; the client wrapper is responsible. + // This test documents that assumption. 
+ const state: RequestState = { filters: [] }; + + expect(buildUrl("/api/database/", "user", state)).toBe( + "/api/database//user", + ); + }); +}); diff --git a/packages/appkit-ui/src/js/database/url-builder.ts b/packages/appkit-ui/src/js/database/url-builder.ts new file mode 100644 index 000000000..3e06df28c --- /dev/null +++ b/packages/appkit-ui/src/js/database/url-builder.ts @@ -0,0 +1,193 @@ +import type { OrderInput, WhereInput } from "./types"; + +/** + * Accumulated query state for `EntityClient` chains — fields are literal strings + * that `buildUrl` joins into `URLSearchParams`. + */ +export interface RequestState { + filters: Array<{ col: string; expr: string }>; + order?: string; + limit?: number; + offset?: number; + /** Comma-separated column projection (e.g. `"id,email"`). */ + select?: string; + /** Include spec (e.g. `"posts(id,title),author"`) — separate from select. */ + include?: string; +} + +/** + * Starting state for `db.` — frozen so callers can't mutate the shared empty filters. + */ +export const EMPTY_STATE: RequestState = Object.freeze({ + filters: Object.freeze([]) as unknown as RequestState["filters"], +}) as RequestState; + +// Mirror route allowlist — runtime JSON can bypass TS. +const ALLOWED_OPS = new Set([ + "eq", + "neq", + "gt", + "gte", + "lt", + "lte", + "like", + "ilike", + "in", + "is", +]); + +// Cap `in` size — long URLs hit proxy/browser limits (414 / truncation). +const MAX_IN_LIST = 100; + +/** + * Merge `.where(...)`: scalars → `eq`; objects expand ops; bare `null` → `is.null`. 
+ */ +export function pushFilter( + state: RequestState, + input: WhereInput, +): RequestState { + const next: RequestState = { + ...state, + filters: [...state.filters], + }; + for (const [col, value] of Object.entries(input)) { + if (value === null) { + next.filters.push({ col, expr: "is.null" }); + continue; + } + if (typeof value !== "object") { + next.filters.push({ col, expr: `eq.${encodeScalar(value)}` }); + continue; + } + if (Array.isArray(value)) { + assertInListSize(value, col); + next.filters.push({ col, expr: `in.${encodeList(value)}` }); + continue; + } + for (const [op, raw] of Object.entries(value as Record)) { + if (raw === undefined) continue; + if (!ALLOWED_OPS.has(op)) { + throw new Error( + `Unsupported where operator "${op}" on column "${col}"`, + ); + } + if (op === "in" && Array.isArray(raw)) assertInListSize(raw, col); + next.filters.push({ col, expr: `${op}.${encodeOperand(op, raw)}` }); + } + } + return next; +} + +function assertInListSize(values: readonly unknown[], col: string): void { + if (values.length > MAX_IN_LIST) { + throw new Error( + `where(${col}.in) accepts at most ${MAX_IN_LIST} values; got ${values.length}. ` + + `Page the parent query or batch the IN list.`, + ); + } +} + +/** Merge an `.order(...)` input into the state, preserving prior directives. */ +export function pushOrder( + state: RequestState, + input: OrderInput, +): RequestState { + const parts = Object.entries(input).map( + ([col, dir]) => `${col}.${dir ?? "asc"}`, + ); + if (parts.length === 0) return state; + const next = state.order + ? `${state.order},${parts.join(",")}` + : parts.join(","); + return { ...state, order: next }; +} + +/** Merge a typed `.select(...)` projection into the state. 
*/ +export function pushSelect( + state: RequestState, + cols: readonly string[], +): RequestState { + if (cols.length === 0) return state; + return { ...state, select: cols.join(",") }; +} + +/** + * Serialize `.include(...)` to `?include=` — independent of column `select`. + */ +export function pushInclude( + state: RequestState, + input: Record< + string, + | true + | { + select?: readonly string[]; + } + >, +): RequestState { + const parts: string[] = []; + for (const [rel, spec] of Object.entries(input)) { + if (spec === undefined) continue; + if (spec === true) { + parts.push(rel); + continue; + } + if (spec.select?.length) { + parts.push(`${rel}(${spec.select.join(",")})`); + } else { + parts.push(rel); + } + } + if (parts.length === 0) return state; + const next = state.include + ? `${state.include},${parts.join(",")}` + : parts.join(","); + return { ...state, include: next }; +} + +/** + * Final URL for `entity` + `state`. Optional `subpath` (e.g. `count`) is allowlisted + * so dynamic entity keys can't escape the mount. + */ +export function buildUrl( + baseUrl: string, + entity: string, + state: RequestState, + subpath?: string, +): string { + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(entity)) { + throw new Error( + `Invalid entity name "${entity}". Must match /^[A-Za-z_][A-Za-z0-9_]*$/.`, + ); + } + if (subpath !== undefined && !/^[A-Za-z_][A-Za-z0-9_]*$/.test(subpath)) { + throw new Error(`Invalid subpath "${subpath}".`); + } + const params = new URLSearchParams(); + for (const f of state.filters) params.append(f.col, f.expr); + if (state.order) params.set("order", state.order); + if (state.limit !== undefined) params.set("limit", String(state.limit)); + if (state.offset !== undefined) params.set("offset", String(state.offset)); + if (state.select) params.set("select", state.select); + if (state.include) params.set("include", state.include); + const qs = params.toString(); + const tail = subpath ? 
`/${encodeURIComponent(subpath)}` : ""; + return `${baseUrl}/${encodeURIComponent(entity)}${tail}${qs ? `?${qs}` : ""}`; +} + +function encodeOperand(op: string, value: unknown): string { + if (op === "in") return encodeList(value as unknown[]); + if (value === null) return "null"; + return encodeScalar(value); +} + +function encodeList(values: unknown[]): string { + return `(${values.map(encodeScalar).join(",")})`; +} + +function encodeScalar(value: unknown): string { + if (value === null || value === undefined) return "null"; + if (typeof value === "string" && /[,()"\s]/.test(value)) { + return `"${value.replace(/"/g, '\\"')}"`; + } + return String(value); +} diff --git a/packages/appkit-ui/src/js/index.ts b/packages/appkit-ui/src/js/index.ts index f49cde96e..353b55ace 100644 --- a/packages/appkit-ui/src/js/index.ts +++ b/packages/appkit-ui/src/js/index.ts @@ -12,4 +12,5 @@ export { export * from "./arrow"; export * from "./config"; export * from "./constants"; +export * from "./database"; export * from "./sse"; diff --git a/packages/appkit/src/database/schema-builder/define-schema.ts b/packages/appkit/src/database/schema-builder/define-schema.ts index 78178e9f2..02887b81b 100644 --- a/packages/appkit/src/database/schema-builder/define-schema.ts +++ b/packages/appkit/src/database/schema-builder/define-schema.ts @@ -76,5 +76,6 @@ export function defineSchema>( $drizzle: schemaInstance, $tables: tableMap, $migrations: { snapshotHints: undefined }, + $schemaName: schemaName, } as Schema; } diff --git a/packages/appkit/src/database/schema-builder/types.ts b/packages/appkit/src/database/schema-builder/types.ts index 56e392234..4101f354d 100644 --- a/packages/appkit/src/database/schema-builder/types.ts +++ b/packages/appkit/src/database/schema-builder/types.ts @@ -105,6 +105,13 @@ export type Schema< readonly $drizzle: unknown; readonly $tables: Record; readonly $migrations: { snapshotHints: unknown }; + /** + * Postgres schema namespace declared via `defineSchema(..., 
{ schemaName })`. + * Consumed by the database plugin (route/postgrest layer) and the + * introspector so downstream code never has to re-configure what the schema + * already knows about itself. + */ + readonly $schemaName: string; }; /** diff --git a/packages/appkit/src/index.ts b/packages/appkit/src/index.ts index 607336ea5..eb251302a 100644 --- a/packages/appkit/src/index.ts +++ b/packages/appkit/src/index.ts @@ -108,6 +108,8 @@ export { SpanStatusCode, type TelemetryConfig, } from "./telemetry"; +export { generateDatabaseTypes } from "./type-generator/database/generator"; +export { appKitDatabaseTypesPlugin } from "./type-generator/database/vite-plugin"; export { extractServingEndpoints, findServerFile, diff --git a/packages/appkit/src/type-generator/database/cache.ts b/packages/appkit/src/type-generator/database/cache.ts new file mode 100644 index 000000000..e12a801bf --- /dev/null +++ b/packages/appkit/src/type-generator/database/cache.ts @@ -0,0 +1,134 @@ +import { createHash } from "node:crypto"; +import fs from "node:fs/promises"; +import path from "node:path"; + +/** + * Incremented when the generator's output shape changes in a way that would + * invalidate previously-cached `.d.ts` files. Bump and the next run ignores + * stale caches and re-emits. + * + * v2: hash now folds in transitively-imported relative modules too, so + * splitting `schema.ts` into `./tables/*.ts` no longer hides edits from the + * cache. + */ +export const CACHE_VERSION = 2; + +/** A single cached generation result. */ +export interface DatabaseCacheEntry { + /** sha256 of the schema graph. */ + hash: string; + /** Generated `.d.ts` output last produced from this hash. */ + output: string; +} + +/** Root shape persisted under `node_modules/.databricks/appkit/database/cache.json`. 
*/ +export interface DatabaseCache { + version: number; + entry?: DatabaseCacheEntry; +} + +const CACHE_RELATIVE = "node_modules/.databricks/appkit/database/cache.json"; + +/** Stable sha256 hash of a single source string. */ +export function hashSchemaSource(source: string): string { + return createHash("sha256").update(source).digest("hex"); +} + +/** + * Hash the schema and every relative module it imports, recursively. F41: + * caching only `schema.ts` missed edits to `./tables/*.ts` helpers, so the + * generator returned stale `.d.ts` output until the cache was manually busted. + * + * Implementation is intentionally lightweight (regex over import specifiers) + * rather than a full TS parser: we only care about specifiers, which appear + * on one line and do not require type-aware resolution. Non-relative + * specifiers (npm packages) are ignored. + */ +export async function hashSchemaSourceWithDeps( + schemaPath: string, +): Promise { + const visited = new Map(); + await collectSource(path.resolve(schemaPath), visited); + const combined = Array.from(visited.entries()) + .sort(([a], [b]) => (a < b ? -1 : a > b ? 
1 : 0)) + .map(([file, source]) => file + " " + source) + .join(""); + return createHash("sha256").update(combined).digest("hex"); +} + +const RELATIVE_IMPORT = /(?:from|import)\s+["']((?:\.\.?\/)[^"']+)["']/g; +const SOURCE_EXT = [".ts", ".tsx", ".js", ".mjs"]; + +async function collectSource( + filePath: string, + visited: Map, +): Promise { + if (visited.has(filePath)) return; + let source: string; + try { + source = await fs.readFile(filePath, "utf8"); + } catch { + return; + } + visited.set(filePath, source); + + const dir = path.dirname(filePath); + RELATIVE_IMPORT.lastIndex = 0; + let match: RegExpExecArray | null = RELATIVE_IMPORT.exec(source); + while (match !== null) { + const resolved = await resolveRelative(dir, match[1]); + if (resolved) await collectSource(resolved, visited); + match = RELATIVE_IMPORT.exec(source); + } +} + +async function resolveRelative( + fromDir: string, + specifier: string, +): Promise { + const base = path.resolve(fromDir, specifier); + const candidates = [ + base, + ...SOURCE_EXT.map((ext) => base + ext), + ...SOURCE_EXT.map((ext) => path.join(base, "index" + ext)), + ]; + for (const candidate of candidates) { + try { + const stat = await fs.stat(candidate); + if (stat.isFile()) return candidate; + } catch { + // continue + } + } + return null; +} + +/** + * Load the on-disk cache for the given project root. Missing or malformed + * caches return a fresh empty state; they never throw. + */ +export async function loadDatabaseCache( + projectRoot: string, +): Promise { + try { + const raw = await fs.readFile( + path.join(projectRoot, CACHE_RELATIVE), + "utf8", + ); + const parsed = JSON.parse(raw) as DatabaseCache; + if (parsed.version !== CACHE_VERSION) return { version: CACHE_VERSION }; + return parsed; + } catch { + return { version: CACHE_VERSION }; + } +} + +/** Persist the cache under `node_modules/.databricks/appkit/database/cache.json`. 
*/ +export async function saveDatabaseCache( + projectRoot: string, + cache: DatabaseCache, +): Promise { + const target = path.join(projectRoot, CACHE_RELATIVE); + await fs.mkdir(path.dirname(target), { recursive: true }); + await fs.writeFile(target, JSON.stringify(cache, null, 2), "utf8"); +} diff --git a/packages/appkit/src/type-generator/database/generator.ts b/packages/appkit/src/type-generator/database/generator.ts new file mode 100644 index 000000000..3327d2ed7 --- /dev/null +++ b/packages/appkit/src/type-generator/database/generator.ts @@ -0,0 +1,193 @@ +import fs from "node:fs/promises"; +import path from "node:path"; +import { pathToFileURL } from "node:url"; +import pc from "picocolors"; +import { createLogger } from "../../logging/logger"; +import { + CACHE_VERSION, + type DatabaseCache, + hashSchemaSourceWithDeps, + loadDatabaseCache, + saveDatabaseCache, +} from "./cache"; +import { type RegistryEntry, walkSchema } from "./walk-schema"; + +const logger = createLogger("type-generator:database"); + +/** Path (relative to the project root) of the user's declarative schema file. */ +export const SCHEMA_REL = "config/database/schema.ts"; +/** Default emit target — matches the analytics/serving convention. */ +export const DATABASE_TYPES_FILE = "shared/appkit-types/database.d.ts"; + +/** + * Pluggable schema loader. The Vite plugin wires `server.ssrLoadModule` so the + * schema is evaluated in Vite's own Node context (no child spawn, no tsx cold + * start). Standalone CLIs fall through to the default `import()` loader, which + * picks up TS source via the parent process's `tsx` loader. Tests inject a + * fake to avoid touching the filesystem or a live Vite server. 
+ */ +export type SchemaLoader = ( + schemaPath: string, +) => Promise<{ default: unknown }>; + +const defaultLoader: SchemaLoader = (schemaPath) => + import(pathToFileURL(schemaPath).href) as Promise<{ default: unknown }>; + +export interface GenerateDatabaseTypesOptions { + /** Absolute path to the `.d.ts` output file. */ + outFile: string; + /** Project root — the directory that contains `config/database/schema.ts`. */ + projectRoot: string; + /** + * Override the schema module loader. When omitted, uses `import()` with a + * `file://` URL; Vite passes `ssrLoadModule`; tests pass a fake. + */ + loadModule?: SchemaLoader; + /** When true, bypass the cache in both directions (read and write). */ + noCache?: boolean; +} + +/** + * Read `config/database/schema.ts`, walk it, and emit the registry + * augmentation to the configured output file. Silently returns when the + * schema file does not exist — apps that don't use the database plugin pay + * nothing. + * + * The algorithm: + * + * 1. Read the schema source from disk (plain text — we hash it, not the AST). + * 2. On cache hit, re-emit the cached output and return early. + * 3. Otherwise call the module loader to get the live `Schema` object. + * 4. Walk the schema into flat `RegistryEntry`s (row/insert/update/filters/includes). + * 5. Render the `declare module` block and write it. + * 6. Update the cache with the new hash+output. + */ +export async function generateDatabaseTypes( + options: GenerateDatabaseTypesOptions, +): Promise { + const schemaPath = path.join(options.projectRoot, SCHEMA_REL); + const loadModule = options.loadModule ?? defaultLoader; + const start = performance.now(); + + try { + await fs.access(schemaPath); + } catch { + logger.debug( + "No schema.ts at %s; skipping database type generation", + schemaPath, + ); + return; + } + + // Hash the entire reachable graph (schema.ts + every relative import). 
A + // refactor that splits the schema into `./tables/*.ts` no longer fools the + // cache into returning stale `.d.ts` output. + const hash = await hashSchemaSourceWithDeps(schemaPath); + const cache = options.noCache + ? { version: CACHE_VERSION } + : await loadDatabaseCache(options.projectRoot); + + if (!options.noCache && cache.entry?.hash === hash) { + await writeOutput(options.outFile, cache.entry.output); + printLog("HIT", start, []); + return; + } + + const mod = await loadModule(schemaPath); + if (!mod || typeof mod !== "object" || !("default" in mod)) { + throw new Error( + `schema.ts at ${schemaPath} has no default export. Use 'export default defineSchema(...)'.`, + ); + } + + const entries = walkSchema(mod.default); + const output = renderDeclaration(entries); + await writeOutput(options.outFile, output); + + if (!options.noCache) { + const next: DatabaseCache = { + version: CACHE_VERSION, + entry: { hash, output }, + }; + await saveDatabaseCache(options.projectRoot, next); + } + + printLog("MISS", start, entries); +} + +/** + * Compose the full `.d.ts` output. When there are no entities we still emit a + * valid module so downstream `tsc --noEmit` runs stay happy; the file acts as + * a stable anchor so future regenerations can replace it atomically. 
+ */ +function renderDeclaration(entries: RegistryEntry[]): string { + const header = [ + "// Auto-generated by AppKit — DO NOT EDIT", + "// Generated from config/database/schema.ts", + "", + ].join("\n"); + + if (entries.length === 0) { + return `${header}export {};\n`; + } + + const body = entries.map(renderEntry).join("\n"); + return [ + header, + 'import "@databricks/appkit-ui/js";', + "", + 'declare module "@databricks/appkit-ui/js" {', + " interface DatabaseRegistry {", + body, + " }", + "}", + "", + ].join("\n"); +} + +function renderEntry(entry: RegistryEntry): string { + return ` ${entry.entity}: { + row: ${entry.row}; + insert: ${entry.insert}; + update: ${entry.update}; + filters: ${entry.filters}; + includes: ${entry.includes}; + };`; +} + +async function writeOutput(outFile: string, content: string): Promise { + await fs.mkdir(path.dirname(outFile), { recursive: true }); + await fs.writeFile(outFile, content, "utf8"); +} + +/** + * Human-readable log — matches the serving generator's cadence so both + * typegen passes look the same in a dev terminal. + */ +function printLog( + status: "HIT" | "MISS", + start: number, + entries: RegistryEntry[], +): void { + const tag = + status === "HIT" + ? `cache ${pc.bold(pc.green("HIT "))}` + : `cache ${pc.bold(pc.yellow("MISS"))}`; + const elapsed = ((performance.now() - start) / 1000).toFixed(2); + const lines: string[] = [ + "", + ` ${pc.bold("Typegen Database")} ${pc.dim(`(${entries.length})`)}`, + ` ${pc.dim("─".repeat(50))}`, + ]; + if (entries.length === 0) { + lines.push( + ` ${tag} ${pc.dim(status === "HIT" ? 
"(no change)" : "(no entities)")}`, + ); + } else { + for (const e of entries) lines.push(` ${tag} ${e.entity}`); + } + lines.push(` ${pc.dim("─".repeat(50))}`); + lines.push(` ${pc.dim(`${elapsed}s`)}`); + lines.push(""); + for (const line of lines) console.log(line); +} diff --git a/packages/appkit/src/type-generator/database/index.ts b/packages/appkit/src/type-generator/database/index.ts new file mode 100644 index 000000000..c8fbb9a6e --- /dev/null +++ b/packages/appkit/src/type-generator/database/index.ts @@ -0,0 +1,21 @@ +export { + CACHE_VERSION, + type DatabaseCache, + type DatabaseCacheEntry, + hashSchemaSource, + hashSchemaSourceWithDeps, + loadDatabaseCache, + saveDatabaseCache, +} from "./cache"; +export { + DATABASE_TYPES_FILE, + type GenerateDatabaseTypesOptions, + generateDatabaseTypes, + SCHEMA_REL, + type SchemaLoader, +} from "./generator"; +export { + type AppKitDatabaseTypesPluginOptions, + appKitDatabaseTypesPlugin, +} from "./vite-plugin"; +export { type RegistryEntry, walkSchema } from "./walk-schema"; diff --git a/packages/appkit/src/type-generator/database/tests/generator.test.ts b/packages/appkit/src/type-generator/database/tests/generator.test.ts new file mode 100644 index 000000000..01749645f --- /dev/null +++ b/packages/appkit/src/type-generator/database/tests/generator.test.ts @@ -0,0 +1,221 @@ +import fs from "node:fs/promises"; +import os from "node:os"; +import path from "node:path"; +import { afterEach, describe, expect, test, vi } from "vitest"; +import { defineSchema, id, text } from "../../../database/schema-builder"; +import { generateDatabaseTypes, SCHEMA_REL } from "../generator"; + +const fakeSchema = defineSchema(({ table }) => ({ + user: table("user", { + id: id(), + email: text().notNull(), + }), +})); + +async function mkApp(source: string): Promise<{ + projectRoot: string; + outFile: string; + cleanup: () => Promise; +}> { + const projectRoot = await fs.mkdtemp( + path.join(os.tmpdir(), "appkit-typegen-"), + ); + const 
schemaPath = path.join(projectRoot, SCHEMA_REL); + await fs.mkdir(path.dirname(schemaPath), { recursive: true }); + await fs.writeFile(schemaPath, source, "utf8"); + const outFile = path.join(projectRoot, "shared/appkit-types/database.d.ts"); + return { + projectRoot, + outFile, + cleanup: () => fs.rm(projectRoot, { recursive: true, force: true }), + }; +} + +let pendingCleanups: Array<() => Promise> = []; +afterEach(async () => { + for (const fn of pendingCleanups) await fn(); + pendingCleanups = []; +}); + +async function track( + result: Awaited>, +): Promise { + pendingCleanups.push(result.cleanup); + return result; +} + +describe("generateDatabaseTypes — output", () => { + test("skips silently when schema.ts is missing", async () => { + const projectRoot = await fs.mkdtemp( + path.join(os.tmpdir(), "appkit-typegen-empty-"), + ); + pendingCleanups.push(() => + fs.rm(projectRoot, { recursive: true, force: true }), + ); + const outFile = path.join(projectRoot, "shared/appkit-types/database.d.ts"); + + await expect( + generateDatabaseTypes({ + outFile, + projectRoot, + loadModule: () => Promise.reject(new Error("should not be called")), + noCache: true, + }), + ).resolves.toBeUndefined(); + + await expect(fs.access(outFile)).rejects.toThrow(); + }); + + test("writes a registry-augmenting .d.ts for the injected schema", async () => { + const app = await track(await mkApp("// fake schema source\n")); + + await generateDatabaseTypes({ + outFile: app.outFile, + projectRoot: app.projectRoot, + loadModule: async () => ({ default: fakeSchema }), + noCache: true, + }); + + const content = await fs.readFile(app.outFile, "utf8"); + expect(content).toContain("Auto-generated by AppKit"); + expect(content).toContain('declare module "@databricks/appkit-ui/js"'); + expect(content).toContain("interface DatabaseRegistry"); + expect(content).toContain("user: {"); + expect(content).toContain("row: { id: number; email: string; };"); + expect(content).toContain("insert: { id?: number; 
email: string; };"); + expect(content).toContain("update: { id?: number; email?: string; };"); + expect(content).toContain("includes: {};"); + }); + + test("emits an empty module when the schema has no tables", async () => { + const app = await track(await mkApp("// empty\n")); + + await generateDatabaseTypes({ + outFile: app.outFile, + projectRoot: app.projectRoot, + loadModule: async () => ({ default: { $tables: {} } }), + noCache: true, + }); + + const content = await fs.readFile(app.outFile, "utf8"); + expect(content).toContain("export {};"); + expect(content).not.toContain("declare module"); + }); + + test("throws when the module has no default export", async () => { + const app = await track(await mkApp("// fake\n")); + + await expect( + generateDatabaseTypes({ + outFile: app.outFile, + projectRoot: app.projectRoot, + loadModule: async () => ({}) as never, + noCache: true, + }), + ).rejects.toThrow(/no default export/); + }); +}); + +describe("generateDatabaseTypes — cache", () => { + test("reuses cached output on matching source hash", async () => { + const app = await track(await mkApp("// v1\n")); + const loader = vi.fn(async () => ({ default: fakeSchema })); + + await generateDatabaseTypes({ + outFile: app.outFile, + projectRoot: app.projectRoot, + loadModule: loader, + }); + const first = await fs.readFile(app.outFile, "utf8"); + + // Second run with identical source should hit the cache and skip the loader. 
+ await generateDatabaseTypes({ + outFile: app.outFile, + projectRoot: app.projectRoot, + loadModule: loader, + }); + const second = await fs.readFile(app.outFile, "utf8"); + + expect(loader).toHaveBeenCalledTimes(1); + expect(second).toBe(first); + }); + + test("invalidates when the source hash changes", async () => { + const app = await track(await mkApp("// v1\n")); + const loader = vi.fn(async () => ({ default: fakeSchema })); + + await generateDatabaseTypes({ + outFile: app.outFile, + projectRoot: app.projectRoot, + loadModule: loader, + }); + + // Bump the schema source — cache should miss and the loader should run again. + await fs.writeFile( + path.join(app.projectRoot, SCHEMA_REL), + "// v2\n", + "utf8", + ); + + await generateDatabaseTypes({ + outFile: app.outFile, + projectRoot: app.projectRoot, + loadModule: loader, + }); + + expect(loader).toHaveBeenCalledTimes(2); + }); + + test("noCache forces the loader to run even on matching hashes", async () => { + const app = await track(await mkApp("// v1\n")); + const loader = vi.fn(async () => ({ default: fakeSchema })); + + await generateDatabaseTypes({ + outFile: app.outFile, + projectRoot: app.projectRoot, + loadModule: loader, + noCache: true, + }); + await generateDatabaseTypes({ + outFile: app.outFile, + projectRoot: app.projectRoot, + loadModule: loader, + noCache: true, + }); + + expect(loader).toHaveBeenCalledTimes(2); + }); + + test("invalidates when a relative import target changes", async () => { + // Reproduces F41: split-schema imports were not folded into the cache key, + // so editing `./tables/user.ts` left the generator returning stale `.d.ts`. 
+ const app = await track( + await mkApp( + `import { user } from "./tables/user";\nexport default user;\n`, + ), + ); + const tablesDir = path.join(app.projectRoot, "config/database/tables"); + const userTable = path.join(tablesDir, "user.ts"); + await fs.mkdir(tablesDir, { recursive: true }); + await fs.writeFile(userTable, "export const user = 'v1';\n", "utf8"); + + const loader = vi.fn(async () => ({ default: fakeSchema })); + + await generateDatabaseTypes({ + outFile: app.outFile, + projectRoot: app.projectRoot, + loadModule: loader, + }); + + // Edit the imported file (not schema.ts itself). Cache must miss. + await fs.writeFile(userTable, "export const user = 'v2';\n", "utf8"); + + await generateDatabaseTypes({ + outFile: app.outFile, + projectRoot: app.projectRoot, + loadModule: loader, + }); + + expect(loader).toHaveBeenCalledTimes(2); + }); +}); diff --git a/packages/appkit/src/type-generator/database/tests/walk-schema.test.ts b/packages/appkit/src/type-generator/database/tests/walk-schema.test.ts new file mode 100644 index 000000000..d1ff23c65 --- /dev/null +++ b/packages/appkit/src/type-generator/database/tests/walk-schema.test.ts @@ -0,0 +1,209 @@ +import { describe, expect, test } from "vitest"; +import { + boolean, + defineSchema, + fk, + id, + integer, + text, + timestamp, +} from "../../../database/schema-builder"; +import { walkSchema } from "../walk-schema"; + +describe("walkSchema — shape", () => { + test("returns one entry per table with row/insert/update/filters/includes", () => { + const schema = defineSchema(({ table }) => ({ + user: table("user", { + id: id(), + email: text().notNull(), + active: boolean().default(true), + }), + })); + + const entries = walkSchema(schema); + + expect(entries).toHaveLength(1); + const user = entries[0]; + expect(user?.entity).toBe("user"); + expect(user?.row).toContain("id: number;"); + expect(user?.row).toContain("email: string;"); + expect(user?.row).toContain("active: boolean | null;"); + 
expect(user?.includes).toBe("{}"); + }); + + test("emits empty array for non-schema input", () => { + expect(walkSchema(undefined)).toEqual([]); + expect(walkSchema(null)).toEqual([]); + expect(walkSchema({})).toEqual([]); + expect(walkSchema({ $tables: "nope" })).toEqual([]); + }); +}); + +describe("walkSchema — row / insert / update", () => { + test("row marks nullable columns with `| null`", () => { + const schema = defineSchema(({ table }) => ({ + user: table("user", { + id: id(), + email: text().notNull(), + bio: text(), + }), + })); + + const user = walkSchema(schema)[0]; + expect(user?.row).toContain("bio: string | null;"); + expect(user?.row).toContain("email: string;"); // not nullable + }); + + test("insert marks defaulted + server-generated + nullable columns optional", () => { + const schema = defineSchema(({ table }) => ({ + user: table("user", { + id: id(), + email: text().notNull(), + bio: text(), + createdAt: timestamp().defaultNow(), + }), + })); + + const user = walkSchema(schema)[0]; + // Required (not nullable, no default, not server-generated): + expect(user?.insert).toContain("email: string;"); + // Server-generated: + expect(user?.insert).toContain("id?: number;"); + // Has default: + expect(user?.insert).toContain("createdAt?: string | null;"); + // Nullable: + expect(user?.insert).toContain("bio?: string | null;"); + }); + + test("update marks every column optional", () => { + const schema = defineSchema(({ table }) => ({ + user: table("user", { + id: id(), + email: text().notNull(), + }), + })); + + const user = walkSchema(schema)[0]; + expect(user?.update).toContain("id?: number;"); + expect(user?.update).toContain("email?: string;"); + }); +}); + +describe("walkSchema — filters", () => { + test("classifies common pg types into filter kinds", () => { + const schema = defineSchema(({ table }) => ({ + event: table("event", { + id: id(), + name: text().notNull(), + count: integer(), + isFinal: boolean(), + occurredAt: timestamp(), + }), + 
})); + + const event = walkSchema(schema)[0]; + expect(event?.filters).toContain('id: "number"'); + expect(event?.filters).toContain('name: "string"'); + expect(event?.filters).toContain('count: "number"'); + expect(event?.filters).toContain('isFinal: "boolean"'); + expect(event?.filters).toContain('occurredAt: "date"'); + }); +}); + +describe("walkSchema — includes: forward + reverse", () => { + test("single FK: forward on child + reverse on parent", () => { + const schema = defineSchema(({ table }) => { + const userCols = { + id: id(), + email: text().notNull(), + }; + const user = table("user", userCols); + const post = table("post", { + id: id(), + title: text().notNull(), + authorId: fk(userCols.id), + }); + return { user, post }; + }); + + const entries = walkSchema(schema); + const user = entries.find((e) => e.entity === "user"); + const post = entries.find((e) => e.entity === "post"); + + // Forward (many-to-one): post → user, keyed by target entity. + expect(post?.includes).toContain( + 'user: { row: DatabaseRegistry["user"]["row"] };', + ); + // Reverse (one-to-many): user ← post, keyed by source entity, array-shaped. + expect(user?.includes).toContain( + 'post: Array<{ row: DatabaseRegistry["post"]["row"] }>;', + ); + }); + + test("two FKs from the same source disambiguate by column name on both sides", () => { + const schema = defineSchema(({ table }) => { + const userCols = { + id: id(), + email: text().notNull(), + }; + const user = table("user", userCols); + const post = table("post", { + id: id(), + title: text().notNull(), + authorId: fk(userCols.id), + editorId: fk(userCols.id), + }); + return { user, post }; + }); + + const entries = walkSchema(schema); + const user = entries.find((e) => e.entity === "user"); + const post = entries.find((e) => e.entity === "post"); + + // Forward collision: post.includes exposes both FKs by column name. 
+ expect(post?.includes).toContain( + 'authorId: { row: DatabaseRegistry["user"]["row"] };', + ); + expect(post?.includes).toContain( + 'editorId: { row: DatabaseRegistry["user"]["row"] };', + ); + expect(post?.includes).not.toContain("user: { row:"); + + // Reverse collision: user.includes also splits by column. + expect(user?.includes).toContain( + 'authorId: Array<{ row: DatabaseRegistry["post"]["row"] }>;', + ); + expect(user?.includes).toContain( + 'editorId: Array<{ row: DatabaseRegistry["post"]["row"] }>;', + ); + expect(user?.includes).not.toContain("post: Array<"); + }); + + test("distinct source tables do not collide", () => { + const schema = defineSchema(({ table }) => { + const userCols = { + id: id(), + }; + const user = table("user", userCols); + const post = table("post", { + id: id(), + authorId: fk(userCols.id), + }); + const comment = table("comment", { + id: id(), + authorId: fk(userCols.id), + }); + return { user, post, comment }; + }); + + const entries = walkSchema(schema); + const user = entries.find((e) => e.entity === "user"); + + expect(user?.includes).toContain( + 'post: Array<{ row: DatabaseRegistry["post"]["row"] }>;', + ); + expect(user?.includes).toContain( + 'comment: Array<{ row: DatabaseRegistry["comment"]["row"] }>;', + ); + }); +}); diff --git a/packages/appkit/src/type-generator/database/vite-plugin.ts b/packages/appkit/src/type-generator/database/vite-plugin.ts new file mode 100644 index 000000000..1d27373f3 --- /dev/null +++ b/packages/appkit/src/type-generator/database/vite-plugin.ts @@ -0,0 +1,109 @@ +import fs from "node:fs"; +import path from "node:path"; +import type { Plugin, ViteDevServer } from "vite"; +import { createLogger } from "../../logging/logger"; +import { + DATABASE_TYPES_FILE, + generateDatabaseTypes, + SCHEMA_REL, + type SchemaLoader, +} from "./generator"; + +const logger = createLogger("type-generator:database:vite-plugin"); + +export interface AppKitDatabaseTypesPluginOptions { + /** Output `.d.ts` path 
relative to project root. Defaults to `shared/appkit-types/database.d.ts`. */ + outFile?: string; +} + +/** + * Vite plugin — regenerates `shared/appkit-types/database.d.ts` whenever + * `config/database/schema.ts` changes during dev. In production (`vite build`) + * it runs once at `buildStart`. + * + * **Activation gate:** only when `config/database/schema.ts` exists, either at + * the Vite root or its parent. Apps without a database plugin pay nothing. + * + * **Dev path (decision #25):** while the dev server is running, the schema is + * loaded via `server.ssrLoadModule` — Vite evaluates it in-process, same Node + * runtime. No child spawn, no `tsx` cold start. Before a change triggers + * regeneration, the module cache is invalidated so the next load sees fresh + * source. + * + * **Production path:** `buildStart` runs before `configureServer`, so the + * loader falls through to the default dynamic `import()` — relying on the + * parent process's tsx loader for TS support. + */ +export function appKitDatabaseTypesPlugin( + options: AppKitDatabaseTypesPluginOptions = {}, +): Plugin { + let projectRoot = process.cwd(); + let outFile = path.resolve( + projectRoot, + options.outFile ?? DATABASE_TYPES_FILE, + ); + let schemaFile = path.resolve(projectRoot, SCHEMA_REL); + let viteServer: ViteDevServer | undefined; + + async function regenerate(): Promise { + try { + const loadModule: SchemaLoader | undefined = viteServer + ? (schemaPath) => + viteServer!.ssrLoadModule(schemaPath) as Promise<{ + default: unknown; + }> + : undefined; + + await generateDatabaseTypes({ + outFile, + projectRoot, + loadModule, + }); + } catch (error) { + if (process.env.NODE_ENV === "production") throw error; + logger.error("Database type generation failed: %O", error); + } + } + + return { + name: "appkit-database-types", + + apply() { + // Activation gate is intentionally filesystem-based — reading the schema + // would force a tsx load before Vite is ready. 
+ const cwd = process.cwd(); + const probe = path.resolve(cwd, SCHEMA_REL); + const probeParent = path.resolve(cwd, "..", SCHEMA_REL); + return fs.existsSync(probe) || fs.existsSync(probeParent); + }, + + configResolved(config) { + // When Vite runs from client/ (cd client && vite build), the project + // root is the parent directory; when Vite runs from the app root the + // client/ is a subdir. Resolving from config.root handles both shapes. + projectRoot = path.resolve(config.root, ".."); + outFile = path.resolve( + projectRoot, + options.outFile ?? DATABASE_TYPES_FILE, + ); + schemaFile = path.resolve(projectRoot, SCHEMA_REL); + }, + + async buildStart() { + await regenerate(); + }, + + configureServer(server) { + viteServer = server; + server.watcher.add(schemaFile); + server.watcher.on("change", async (file) => { + if (path.resolve(file) !== schemaFile) return; + logger.info("schema.ts changed; regenerating database types"); + // Invalidate Vite's cache so ssrLoadModule re-evaluates fresh source. + const mod = server.moduleGraph.getModuleById(schemaFile); + if (mod) server.moduleGraph.invalidateModule(mod); + await regenerate(); + }); + }, + }; +} diff --git a/packages/appkit/src/type-generator/database/walk-schema.ts b/packages/appkit/src/type-generator/database/walk-schema.ts new file mode 100644 index 000000000..5548802c4 --- /dev/null +++ b/packages/appkit/src/type-generator/database/walk-schema.ts @@ -0,0 +1,249 @@ +import { adaptDrizzleTable } from "../../database/introspector/drizzle-adapter"; +import type { IntrospectedColumn } from "../../database/introspector/types"; +import type { Schema } from "../../database/schema-builder/types"; + +/** + * One registry entry — string fields are ready-to-splice TS type literals. + */ +export interface RegistryEntry { + /** JS entity key (the property name in `$tables`, e.g. `"activityLog"`). */ + entity: string; + /** Type literal for `row: ...`. */ + row: string; + /** Type literal for `insert: ...`. 
*/ + insert: string; + /** Type literal for `update: ...`. */ + update: string; + /** Type literal for `filters: ...`. */ + filters: string; + /** Type literal for `includes: ...`. `"{}"` when no relations exist. */ + includes: string; +} + +/** FK edge `table → target` from the forward pass. */ +interface ForwardEdge { + fromColumn: string; + target: string; +} + +/** Incoming FK `target ← source` for one-to-many rendering. */ +interface ReverseEdge { + fromEntity: string; + fromColumn: string; +} + +/** + * Walk `Schema` → flat registry entries (pure, no I/O). + * + * Include inference: record forward + reverse FK edges per table, then render + * `includes` — duplicate FK pairs use column keys like PostgREST (`posts!author_id`). + */ +export function walkSchema(schema: unknown): RegistryEntry[] { + if (!schema || typeof schema !== "object") return []; + const s = schema as Schema; + const tables = s.$tables; + if (!tables || typeof tables !== "object") return []; + + // SQL table name → JS entity key (`Relation.toTable` is SQL, not the registry key). + const sqlNameToEntity = new Map(); + for (const [entity, table] of Object.entries(tables)) { + sqlNameToEntity.set(table.name, entity); + } + + const forwardByEntity = new Map(); + const reverseByEntity = new Map(); + + for (const [entity, table] of Object.entries(tables)) { + const forward: ForwardEdge[] = []; + for (const rel of table.$relations ?? []) { + const target = sqlNameToEntity.get(rel.toTable); + if (!target) continue; + forward.push({ fromColumn: rel.fromColumn, target }); + const rev = reverseByEntity.get(target) ?? []; + rev.push({ fromEntity: entity, fromColumn: rel.fromColumn }); + reverseByEntity.set(target, rev); + } + forwardByEntity.set(entity, forward); + } + + const entries: RegistryEntry[] = []; + for (const [entity, table] of Object.entries(tables)) { + // Strip `column.private()` before emit — avoids leaking credential-ish fields in `.d.ts`. 
+ const columns = adaptDrizzleTable(table).columns.filter( + (c) => table.$columns[c.name]?.private !== true, + ); + entries.push({ + entity, + row: renderRow(columns), + insert: renderInsert(columns), + update: renderUpdate(columns), + filters: renderFilters(columns), + includes: renderIncludes( + forwardByEntity.get(entity) ?? [], + reverseByEntity.get(entity) ?? [], + ), + }); + } + + return entries; +} + +/** Render `{ col: TS | null; ... }` for all columns. */ +function renderRow(columns: IntrospectedColumn[]): string { + if (columns.length === 0) return "{}"; + const fields = columns.map( + (c) => + `${safeProp(c.name)}: ${withNull(pgTypeToTs(c.pgType), c.nullable)};`, + ); + return `{ ${fields.join(" ")} }`; +} + +/** Insert shape — optional when nullable, defaulted, or server-generated. */ +function renderInsert(columns: IntrospectedColumn[]): string { + if (columns.length === 0) return "{}"; + const fields = columns.map((c) => { + const optional = c.nullable || c.hasDefault || c.serverGenerated === true; + const q = optional ? "?" : ""; + return `${safeProp(c.name)}${q}: ${withNull(pgTypeToTs(c.pgType), c.nullable)};`; + }); + return `{ ${fields.join(" ")} }`; +} + +/** Render update shape — every column is optional. */ +function renderUpdate(columns: IntrospectedColumn[]): string { + if (columns.length === 0) return "{}"; + const fields = columns.map( + (c) => + `${safeProp(c.name)}?: ${withNull(pgTypeToTs(c.pgType), c.nullable)};`, + ); + return `{ ${fields.join(" ")} }`; +} + +/** Render the `filters` map used by the type generator to classify columns. 
*/ +function renderFilters(columns: IntrospectedColumn[]): string { + if (columns.length === 0) return "{}"; + const fields = columns.map( + (c) => + `${safeProp(c.name)}: ${JSON.stringify(pgTypeToFilterKind(c.pgType))};`, + ); + return `{ ${fields.join(" ")} }`; +} + +/** + * `includes` literal — forward edges as `{ row }`, reverse as `Array<{ row }>`; + * multiple FKs to the same target key by column (PostgREST-style). + */ +function renderIncludes( + forward: ForwardEdge[], + reverse: ReverseEdge[], +): string { + const parts: string[] = []; + + const forwardByTarget = groupBy(forward, (f) => f.target); + for (const [target, edges] of forwardByTarget) { + if (edges.length === 1) { + parts.push( + `${safeProp(target)}: { row: DatabaseRegistry[${JSON.stringify(target)}]["row"] };`, + ); + } else { + for (const edge of edges) { + parts.push( + `${safeProp(edge.fromColumn)}: { row: DatabaseRegistry[${JSON.stringify(target)}]["row"] };`, + ); + } + } + } + + const reverseBySource = groupBy(reverse, (r) => r.fromEntity); + for (const [source, edges] of reverseBySource) { + if (edges.length === 1) { + parts.push( + `${safeProp(source)}: Array<{ row: DatabaseRegistry[${JSON.stringify(source)}]["row"] }>;`, + ); + } else { + for (const edge of edges) { + parts.push( + `${safeProp(edge.fromColumn)}: Array<{ row: DatabaseRegistry[${JSON.stringify(source)}]["row"] }>;`, + ); + } + } + } + + if (parts.length === 0) return "{}"; + return `{ ${parts.join(" ")} }`; +} + +/** Map pg types to TS — timestamps stay `string` (JSON always delivers text). 
*/ +function pgTypeToTs(pgType: string): string { + switch (pgType) { + case "int2": + case "int4": + case "int8": + case "numeric": + case "float4": + case "float8": + return "number"; + case "bool": + return "boolean"; + case "jsonb": + case "json": + return "unknown"; + case "text": + case "varchar": + case "char": + case "uuid": + case "timestamp": + case "timestamptz": + case "date": + case "time": + case "timetz": + return "string"; + default: + return "unknown"; + } +} + +/** Coarse filter kind for stable generator output when new pg types appear. */ +function pgTypeToFilterKind( + pgType: string, +): "string" | "number" | "boolean" | "date" { + switch (pgType) { + case "int2": + case "int4": + case "int8": + case "numeric": + case "float4": + case "float8": + return "number"; + case "bool": + return "boolean"; + case "timestamp": + case "timestamptz": + case "date": + case "time": + case "timetz": + return "date"; + default: + return "string"; + } +} + +function withNull(ts: string, nullable: boolean): string { + return nullable ? `${ts} | null` : ts; +} + +/** Quote object keys only when they wouldn't be a valid JS identifier bare. */ +function safeProp(name: string): string { + return /^[A-Za-z_$][A-Za-z0-9_$]*$/.test(name) ? name : JSON.stringify(name); +} + +function groupBy(list: T[], key: (item: T) => K): Map { + const out = new Map(); + for (const item of list) { + const k = key(item); + const bucket = out.get(k) ?? []; + bucket.push(item); + out.set(k, bucket); + } + return out; +} diff --git a/packages/appkit/src/type-generator/index.ts b/packages/appkit/src/type-generator/index.ts index c9a528fe7..88a49b12d 100644 --- a/packages/appkit/src/type-generator/index.ts +++ b/packages/appkit/src/type-generator/index.ts @@ -111,3 +111,5 @@ export const TYPES_DIR = "appkit-types"; export const ANALYTICS_TYPES_FILE = "analytics.d.ts"; /** Default filename for serving endpoint type declarations. 
*/ export const SERVING_TYPES_FILE = "serving.d.ts"; + +export * from "./database"; diff --git a/template/appkit.plugins.json b/template/appkit.plugins.json index be0b61f8b..cfdebfe31 100644 --- a/template/appkit.plugins.json +++ b/template/appkit.plugins.json @@ -29,7 +29,7 @@ "database": { "name": "database", "displayName": "Database", - "description": "Lakebase Postgres pool + schema declaration via defineSchema. CRUD/OBO/RLS surface ships incrementally in subsequent stack layers; this layer provides the pool, schema convention loader, and column metadata.", + "description": "Application database with schema-driven CRUD, type generation, OBO, and RLS", "package": "@databricks/appkit", "resources": { "required": [ @@ -84,7 +84,7 @@ ], "optional": [] }, - "onSetupMessage": "Database plugin installed. Configure your schema in config/database/schema.ts via defineSchema(). The plugin currently exposes pool access (appkit.database.getPool()); CRUD, OBO, and RLS surfaces ship in subsequent stack layers.", + "onSetupMessage": "Database plugin installed. Next: npx appkit db init\n - For a new database: define entities in config/database/schema.ts (and optional demo rows in config/database/seed.sql) before running init.\n - For an existing Lakebase: just run init; it will introspect the schema for you.\nThe init command picks the Databricks profile and Lakebase project, creates a per-user dev branch, writes .env, and runs the right setup or introspection.", "stability": "beta" }, "files": {