From 48272146038de0318ce119326c8f36cc274bac03 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 16:06:28 -0700 Subject: [PATCH 001/108] Add Effect lint skills and diagnostics --- .../wrdn-effect-atom-reactivity-keys/SKILL.md | 25 ++ .../skills/wrdn-effect-promise-exit/SKILL.md | 115 ++++++ .../wrdn-effect-schema-boundaries/SKILL.md | 30 ++ .../SKILL.md | 107 ++++++ .../skills/wrdn-effect-typed-errors/SKILL.md | 329 ++++++++++++++++++ .../wrdn-effect-value-inferred-types/SKILL.md | 95 +++++ .../skills/wrdn-effect-vitest-tests/SKILL.md | 29 ++ .../skills/wrdn-package-boundaries/SKILL.md | 26 ++ .../wrdn-typescript-type-safety/SKILL.md | 14 + .oxlintrc.jsonc | 12 + scripts/oxlint-plugin-executor.js | 18 + .../rules/no-conditional-tests.js | 3 +- .../no-cross-package-relative-imports.js | 2 +- .../rules/no-double-cast.js | 2 +- .../rules/no-effect-internal-tags.js | 2 +- .../rules/no-error-constructor.js | 40 +++ .../rules/no-inline-object-type-assertion.js | 2 +- .../rules/no-instanceof-error.js | 22 ++ .../rules/no-instanceof-tagged-error.js | 2 +- .../rules/no-manual-tag-check.js | 14 +- .../rules/no-promise-catch.js | 30 ++ .../rules/no-promise-client-surface.js | 2 +- .../rules/no-promise-reject.js | 74 ++++ .../rules/no-raw-error-throw.js | 2 +- .../rules/no-redundant-error-factory.js | 54 ++- .../rules/no-try-catch-or-throw.js | 23 ++ .../rules/no-ts-nocheck.js | 2 +- .../rules/no-unknown-error-message.js | 49 +++ .../rules/no-unknown-shape-probing.js | 2 +- .../rules/no-vitest-import.js | 2 +- .../rules/prefer-schema-inferred-types.js | 66 ++++ .../prefer-value-inferred-extension-types.js | 81 +++++ .../rules/prefer-yield-tagged-error.js | 40 +++ .../rules/require-reactivity-keys.js | 2 +- 34 files changed, 1297 insertions(+), 21 deletions(-) create mode 100644 .agents/skills/wrdn-effect-atom-reactivity-keys/SKILL.md create mode 100644 .agents/skills/wrdn-effect-promise-exit/SKILL.md create mode 
100644 .agents/skills/wrdn-effect-schema-boundaries/SKILL.md create mode 100644 .agents/skills/wrdn-effect-schema-inferred-types/SKILL.md create mode 100644 .agents/skills/wrdn-effect-typed-errors/SKILL.md create mode 100644 .agents/skills/wrdn-effect-value-inferred-types/SKILL.md create mode 100644 .agents/skills/wrdn-effect-vitest-tests/SKILL.md create mode 100644 .agents/skills/wrdn-package-boundaries/SKILL.md create mode 100644 .agents/skills/wrdn-typescript-type-safety/SKILL.md create mode 100644 scripts/oxlint-plugin-executor/rules/no-error-constructor.js create mode 100644 scripts/oxlint-plugin-executor/rules/no-instanceof-error.js create mode 100644 scripts/oxlint-plugin-executor/rules/no-promise-catch.js create mode 100644 scripts/oxlint-plugin-executor/rules/no-promise-reject.js create mode 100644 scripts/oxlint-plugin-executor/rules/no-try-catch-or-throw.js create mode 100644 scripts/oxlint-plugin-executor/rules/no-unknown-error-message.js create mode 100644 scripts/oxlint-plugin-executor/rules/prefer-schema-inferred-types.js create mode 100644 scripts/oxlint-plugin-executor/rules/prefer-value-inferred-extension-types.js create mode 100644 scripts/oxlint-plugin-executor/rules/prefer-yield-tagged-error.js diff --git a/.agents/skills/wrdn-effect-atom-reactivity-keys/SKILL.md b/.agents/skills/wrdn-effect-atom-reactivity-keys/SKILL.md new file mode 100644 index 000000000..d480f3652 --- /dev/null +++ b/.agents/skills/wrdn-effect-atom-reactivity-keys/SKILL.md @@ -0,0 +1,25 @@ +--- +name: wrdn-effect-atom-reactivity-keys +description: Add reactivityKeys to effect-atom write mutation calls. Use when lint flags a useAtomSet mutation call that mutates data without invalidation keys. +allowed-tools: Read Grep Glob Bash +--- + +Effect-atom write mutations must say which reads they invalidate. + +## Fix Shape + +- Find the `useAtomSet(...)` write mutation call. +- Add `reactivityKeys` to the mutation payload at the call site. 
+- Use the narrowest keys that cover the rows/lists affected by the write. +- Keep read-only probe/preview OAuth flows out of this pattern. +- If the mutation should update UI immediately, check whether `wrdn-effect-atom-optimistic` also applies. + +## Good + +```ts +await updateSource({ + params: { scopeId, sourceId }, + payload, + reactivityKeys: [["sources", scopeId]], +}); +``` diff --git a/.agents/skills/wrdn-effect-promise-exit/SKILL.md b/.agents/skills/wrdn-effect-promise-exit/SKILL.md new file mode 100644 index 000000000..a6d7b864a --- /dev/null +++ b/.agents/skills/wrdn-effect-promise-exit/SKILL.md @@ -0,0 +1,115 @@ +--- +name: wrdn-effect-promise-exit +description: Replace React/effect-atom mutation handlers that use promise-mode plus try/catch with promiseExit and explicit Exit handling. Use when lint or review flags try/catch around useAtomSet mutation calls, especially UI handlers that set error/busy state after a failed mutation. +allowed-tools: Read Grep Glob Bash +--- + +You fix one pattern: a React handler awaits an effect-atom mutation in `mode: "promise"` and catches failures with `try/catch`. + +The preferred UI boundary is `mode: "promiseExit"` plus `Exit.isFailure`. This keeps mutation failures as values, matches Effect's error model, and prevents optimistic mutation cleanup from depending on thrown exceptions. + +## Trace before changing + +1. **Find the mutation setter.** Look for `const doX = useAtomSet(, { mode: "promise" })`. +2. **Confirm it is an effect-atom mutation boundary.** The setter should come from `@effect/atom-react` and a mutation atom from `./atoms`, `../api/atoms`, or plugin React atoms. +3. **Find thrown-control handling.** The same handler has `try { await doX(...) } catch (e) { ... }`, usually setting error text, resetting `adding`/`saving`, or showing a toast. +4. 
**Check for non-mutation async work in the same block.** If the block also awaits follow-up mutations, convert those to `promiseExit` too or keep a narrow boundary only around truly non-effect APIs. +5. **Do not rewrite unrelated local async code.** Probe requests, OAuth popup helpers, `fetch`, and browser APIs may need a different skill unless the lint finding specifically points at the mutation call. + +## Fix shape + +- Change the setter to `{ mode: "promiseExit" }`. +- Import `* as Exit from "effect/Exit"` if missing. +- Import `* as Option from "effect/Option"` only when extracting an optional error. +- Replace `try/catch` around the mutation with: + - `const exit = await doX(args);` + - `if (Exit.isFailure(exit)) { ...; return; }` + - success work after the failure branch. +- Use `Exit.findErrorOption(exit)` when preserving an existing error message or typed error branch. +- Keep existing typed error handling when present, e.g. `SecretInUseError`, `ConnectionInUseError`. + +## Bad + +```tsx +const doAdd = useAtomSet(addGraphqlSource, { mode: "promise" }); + +const handleAdd = async () => { + setAdding(true); + setAddError(null); + try { + await doAdd({ + params: { scopeId }, + payload, + reactivityKeys: sourceWriteKeys, + }); + props.onComplete(); + } catch (e) { + setAddError(e instanceof Error ? e.message : "Failed to add source"); + setAdding(false); + } +}; +``` + +## Good + +```tsx +import * as Exit from "effect/Exit"; +import * as Option from "effect/Option"; + +const doAdd = useAtomSet(addGraphqlSource, { mode: "promiseExit" }); + +const handleAdd = async () => { + setAdding(true); + setAddError(null); + const exit = await doAdd({ + params: { scopeId }, + payload, + reactivityKeys: sourceWriteKeys, + }); + if (Exit.isFailure(exit)) { + const error = Exit.findErrorOption(exit); + setAddError( + Option.isSome(error) && error.value instanceof Error + ? 
error.value.message + : "Failed to add source", + ); + setAdding(false); + return; + } + props.onComplete(); +}; +``` + +## Follow-up mutation chains + +If success work depends on the mutation result, read it after the failure branch: + +```tsx +const exit = await doAdd(args); +if (Exit.isFailure(exit)) { + setAdding(false); + return; +} + +const sourceId = exit.value.namespace; +``` + +If a follow-up effect-atom mutation can fail and the UI treats that as add failure, make that setter `promiseExit` too and branch the same way. Do not put the follow-up mutation in `try/catch` just because the first mutation now returns `Exit`. + +## What not to report + +- `try/catch` around non-effect APIs such as `new URL`, `JSON.parse`, raw `fetch`, or browser popup code. Those may be real lint findings, but they need a different remediation skill. +- `useAtomSet(..., { mode: "promise" })` with no local failure handling and no lint finding. Some call sites intentionally let callers decide the boundary. +- Tests or SDK/server Effect code. This skill is for React/effect-atom UI mutation handlers. +- Manual optimistic placeholder cleanup. Use `wrdn-effect-atom-optimistic` for that; if both patterns appear together, fix optimistic plumbing first, then use `promiseExit` for the remaining mutation boundary. + +## Output requirements + +When reviewing, report: + +- **File and line** of the `useAtomSet(..., { mode: "promise" })` or `try/catch`. +- **Mutation** being called. +- **Why** it should return `Exit` at this UI boundary. +- **Fix**: the exact setter mode and the failure branch to add. + +When editing, keep changes local to the handler and imports unless a follow-up mutation in the same success path must also become `promiseExit`. 
diff --git a/.agents/skills/wrdn-effect-schema-boundaries/SKILL.md b/.agents/skills/wrdn-effect-schema-boundaries/SKILL.md new file mode 100644 index 000000000..1e192deec --- /dev/null +++ b/.agents/skills/wrdn-effect-schema-boundaries/SKILL.md @@ -0,0 +1,30 @@ +--- +name: wrdn-effect-schema-boundaries +description: Normalize unknown or loosely typed data at boundaries with Effect Schema, named guards, or typed adapters. Use when lint flags double casts, inline object assertions, unknown shape probing, or ad hoc property checks on unknown values. +allowed-tools: Read Grep Glob Bash +--- + +You fix one pattern: domain code is asserting or probing an unknown shape instead of parsing it once at the boundary. + +## Fix Shape + +- Prefer `Schema.decodeUnknownEffect(MySchema)(value)` for untrusted input. +- Keep domain code typed after the decode; do not keep `unknown` and probe it repeatedly. +- Replace `as unknown as X`, `as Record<string, unknown>`, inline object assertions, `"field" in value`, and `Reflect.get` with a schema, typed adapter, or named guard. +- A named guard is acceptable only when parsing is not the right abstraction and the guard has a precise return type. + +## Good + +```ts +const ParsedConfig = Schema.Struct({ + endpoint: Schema.String, +}); + +const config = yield* Schema.decodeUnknownEffect(ParsedConfig)(raw); +``` + +## Bad + +```ts +const config = raw as unknown as { endpoint: string }; +``` diff --git a/.agents/skills/wrdn-effect-schema-inferred-types/SKILL.md b/.agents/skills/wrdn-effect-schema-inferred-types/SKILL.md new file mode 100644 index 000000000..5dbbbaf73 --- /dev/null +++ b/.agents/skills/wrdn-effect-schema-inferred-types/SKILL.md @@ -0,0 +1,107 @@ +--- +name: wrdn-effect-schema-inferred-types +description: Replace duplicated TypeScript shape declarations next to Effect Schema definitions with schema-derived types. 
Use when lint or review flags an interface/type alias that repeats fields already described by a nearby Schema.Struct, Schema.Union, Schema.TaggedStruct, or other Effect Schema model. +allowed-tools: Read Grep Glob Bash +--- + +You fix one pattern: a runtime `Schema` and a manual TypeScript type describe the same shape. + +The preferred boundary is schema-first. Define the schema once, export `type X = typeof XSchema.Type` or `type X = Schema.Schema.Type<typeof XSchema>`, and make domain code consume the inferred type. This prevents drift between parsing and static types. + +## Trace before changing + +1. **Find the runtime schema.** Look for `Schema.Struct`, `Schema.Union`, `Schema.TaggedStruct`, `Schema.Record`, `Schema.Array`, or `Schema.decodeTo`. +2. **Find the duplicate static shape.** A nearby `interface X` or `type X = { ... }` repeats the same fields, nullability, optionality, or literals. +3. **Check export consumers.** If callers import the type, keep the exported type name stable and change only its definition. +4. **Confirm the schema is the source of truth.** If the manual type is wider/narrower than runtime parsing, decide whether the schema or consumers are wrong before replacing it. +5. **Handle recursion narrowly.** Recursive schemas may need one private recursive helper type to annotate `Schema.suspend`; keep exported domain types inferred from the schema. + +## Fix shape + +- Move the schema before the exported type alias when needed. +- Replace duplicated exported interfaces with aliases derived from the schema: + +```ts +export const SourceSchema = Schema.Struct({ + id: SourceId, + name: Schema.String, + enabled: Schema.Boolean, +}); + +export type Source = typeof SourceSchema.Type; +``` + +- Use `Schema.Schema.Type` when it reads better for non-exported or generic schemas: + +```ts +type IntrospectionResult = Schema.Schema.Type<typeof IntrospectionResultSchema>; +``` + +- If using `Schema.decodeTo`, infer the domain type from the decoded/domain schema, not from the raw transport schema. 
+- Do not keep a manual interface solely for documentation. Add schema annotations or comments only when they clarify behavior the schema cannot express. + +## Bad + +```ts +export interface StoredSource { + readonly id: string; + readonly url: string; + readonly headers: readonly Header[]; +} + +export const StoredSourceSchema = Schema.Struct({ + id: Schema.String, + url: Schema.String, + headers: Schema.Array(HeaderSchema), +}); +``` + +## Good + +```ts +export const StoredSourceSchema = Schema.Struct({ + id: Schema.String, + url: Schema.String, + headers: Schema.Array(HeaderSchema), +}); + +export type StoredSource = typeof StoredSourceSchema.Type; +``` + +## Recursive schemas + +Use a private helper only where TypeScript needs an annotation for self-reference: + +```ts +interface TypeRefRecursive { + readonly kind: string; + readonly ofType: TypeRefRecursive | null; +} + +const TypeRefSchema: Schema.Codec = Schema.Struct({ + kind: Schema.String, + ofType: Schema.NullOr(Schema.suspend(() => TypeRefSchema)), +}); + +export type TypeRef = typeof TypeRefSchema.Type; +``` + +The exported domain type is still schema-derived. The private helper exists only to satisfy the recursive schema definition. + +## What not to report + +- Domain types that intentionally do not have a runtime schema. +- Input builder types where the schema parses a different transport representation. +- Branded IDs or opaque aliases that are used by schemas but are not themselves duplicate object shapes. +- Private recursive helper types used only to type `Schema.suspend`, as long as exported consumer-facing types are inferred. + +## Output requirements + +When reviewing, report: + +- **File and line** of the duplicated manual type. +- **Schema** that already owns the shape. +- **Why** the manual type can drift. +- **Fix**: the exact inferred alias to use. + +When editing, keep exported type names stable unless every caller is updated in the same change. 
diff --git a/.agents/skills/wrdn-effect-typed-errors/SKILL.md b/.agents/skills/wrdn-effect-typed-errors/SKILL.md new file mode 100644 index 000000000..15290171f --- /dev/null +++ b/.agents/skills/wrdn-effect-typed-errors/SKILL.md @@ -0,0 +1,329 @@ +--- +name: wrdn-effect-typed-errors +description: Fix lint findings that use untyped JavaScript error handling instead of Effect typed failures. Use when lint flags new Error, throw, try/catch, Promise.catch, Promise.reject, instanceof Error, unknown error message/stringification, or redundant helpers that only construct tagged errors. +allowed-tools: Read Grep Glob Bash +--- + +You fix one family of patterns: untyped JavaScript error handling in Effect code. + +The preferred boundary is typed `Schema.TaggedError` / `Data.TaggedError` values in the Effect error channel. Construct the tagged error directly at the failure site unless a helper performs real classification or normalization. + +## Trace before changing + +1. **Identify the boundary.** Is this Effect domain code, React UI code, a third-party callback, or plain test/tooling code? +2. **Find the existing domain errors.** Check nearby `errors.ts`, `Schema.TaggedError`, `Data.TaggedError`, and API `.addError(...)` declarations before adding a new class. +3. **Decide whether a new error is needed.** Add a new tagged error only if callers have a distinct recovery path, HTTP status, UI affordance, retry policy, or telemetry classification. +4. **Preserve failure semantics.** If the old code failed, the new code should fail in the Effect error channel. Do not replace thrown failures with fallback values like `false`, `null`, `undefined`, `[]`, or `"unknown"` unless the existing contract already treats that condition as non-fatal. +5. **Preserve the typed channel.** Do not convert typed failures into `Error`, thrown exceptions, `String(error)`, or `.message` reads from unknown values. +6. 
**Recognize real boundaries.** Runtime workers, Vite/CLI tooling, callback APIs, and third-party interfaces may have to throw, catch, or reject at the boundary. Do not contort those files into fake Effect shapes. Keep the boundary idiom when it is contained and immediately wrapped into an Effect error channel, stable IPC envelope, or test/tooling result. +7. **Do not hide construction behind trivial helpers.** Inline `new DomainError(...)` unless the helper branches on input or maps an external error format into a domain error. + +## Preserve behavior first + +The lint rule is about **where the failure lives**, not whether the operation should still fail. + +Bad fix: this removes the lint finding by silently changing invalid input into a non-match. + +```ts +case "in": + if (!Array.isArray(value)) return false; + return value.some((v) => cmp(lhs, v)); +``` + +Good fix: keep the invalid input as a failure, but make it typed. + +```ts +case "in": + if (!Array.isArray(value)) { + return Effect.fail( + new StorageError({ message: "Value must be an array", cause: clause }), + ); + } + return Effect.succeed(value.some((v) => cmp(lhs, v))); +``` + +When the containing helper was synchronous, make the helper return `Effect.Effect` and thread that through callers. Do not collapse the error into a success value to avoid changing call sites. + +## Boundary exceptions + +The lint rule is not a mandate to make every file Effect-shaped. 
It is acceptable to keep `try/catch`, `throw`, `new Error`, `.catch`, or `String(error)` at a true adapter boundary when all of these are true: + +- the surrounding API is inherently throwing, callback-based, Promise-based, process/IPC-based, or plain JS tooling +- the untyped behavior is contained to the boundary function or module +- control is immediately translated into a typed Effect failure, stable IPC payload, stable test assertion, or deliberately best-effort cleanup +- the suppression is narrow and explains the boundary + +Good boundary suppression: + +```ts +// oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: JSON.parse feeds stable IPC failure envelope +try { + const message = JSON.parse(line); + handleHostMessage(message); +} catch (error) { + writeIpcMessage({ type: "failed", error: formatBoundaryError(error) }); +} +``` + +Bad boundary fix: do not replace natural boundary code with fake thenables, fake error objects, promise chains that emulate `try/catch`, or broad helper machinery solely to make lint pass. + +```ts +return makeRejectedThenable(makeErrorLike("Tool path missing")); +``` + +For Effect domain code, fix the code. For boundary code, either wrap once with `Effect.try` / `Effect.tryPromise` at the entry point or use a narrow suppression with a reason. + +## Fix shapes + +### Throw / new Error + +Bad: + +```ts +throw new Error("Missing source"); +``` + +Good in `Effect.gen`: + +```ts +return yield* new SourceNotFoundError({ sourceId }); +``` + +Good in combinators: + +```ts +Effect.fail(new SourceNotFoundError({ sourceId })); +``` + +If a third-party interface requires throwing, keep the throw at the adapter edge only and convert back into a typed failure as soon as control returns to Effect. Prefer a narrow `oxlint-disable-next-line` with a `boundary:` reason over code contortions. 
+ +### Effect.fail inside generators + +Prefer yielding the error directly in generator code: + +```ts +return yield* new SourceNotFoundError({ sourceId }); +``` + +Do not write: + +```ts +return yield* Effect.fail(new SourceNotFoundError({ sourceId })); +``` + +Use `Effect.fail(...)` in non-generator combinator code: + +```ts +Effect.flatMap( + source, + Option.match({ + onNone: () => Effect.fail(new SourceNotFoundError({ sourceId })), + onSome: Effect.succeed, + }), +); +``` + +### Promise.catch / Promise.reject + +Bad: + +```ts +await client.close().catch(() => {}); +return Promise.reject(new Error("failed")); +``` + +Good: + +```ts +Effect.tryPromise({ + try: () => client.close(), + catch: (cause) => new ClientCloseError({ cause }), +}); +``` + +If the failure is intentionally ignored: + +```ts +Effect.ignore( + Effect.tryPromise({ + try: () => client.close(), + catch: (cause) => new ClientCloseError({ cause }), + }), +); +``` + +### try/catch + +Bad: + +```ts +try { + return JSON.parse(text); +} catch (cause) { + return new ParseError({ message: String(cause) }); +} +``` + +Good for schema-backed input: + +```ts +Schema.decodeUnknownEffect(Schema.fromJsonString(InputSchema))(text).pipe( + Effect.mapError(() => new ParseError({ message: "Failed to parse input" })), +); +``` + +Good for non-schema throwing APIs: + +```ts +Effect.try({ + try: () => new URL(value), + catch: (cause) => new UrlParseError({ value, cause }), +}); +``` + +### Unknown error message / instanceof Error + +Bad: + +```ts +err instanceof Error ? err.message : String(err); +``` + +Also bad: destructuring `message` only hides the same unknown-state problem from a shallow property-access lint. 
+ +```ts +const { message } = err; +return message; +``` + +Prefer one of: + +```ts +Effect.mapError((err) => new DomainError({ cause: err })); +``` + +```ts +Effect.catchTag("KnownError", (err) => Effect.fail(new DomainError({ message: err.message }))); +``` + +Only read `.message` from a typed error union when that field is explicitly part of the user-facing contract. Most boundary errors should instead use a stable product message and keep the original value in a separate `cause`, trace, log, or telemetry channel. Do not inspect unknown thrown values for domain behavior or customer copy. + +If the lint rule overfires inside a branch that has already narrowed to a specific typed error, keep the direct typed read and use a narrow suppression with a reason. Do not rewrite to destructuring just to avoid the lint selector. + +Bad: leaks internal provider/native details to users. + +```ts +Effect.tryPromise({ + try: () => client.call(), + catch: (cause) => + new SourceError({ + message: cause instanceof Error ? cause.message : String(cause), + }), +}); +``` + +Good: user-facing message is stable; internal detail goes into `cause` only if the error type has an internal channel. + +```ts +Effect.tryPromise({ + try: () => client.call(), + catch: (cause) => + new SourceError({ + message: "Failed to connect to source", + cause, + }), +}); +``` + +If the error schema is serialized to customers and only has `message`, do not put internal details there. Prefer adding a non-serialized/internal `cause` field or logging/telemetry over suppressing the lint rule. + +### Manual tags and broad error laundering + +Bad: manually probing `_tag` to recover from typed Effect failures. + +```ts +Effect.mapError((err) => + "_tag" in err && err._tag === "SecretOwnedByConnectionError" + ? new SourceError({ message: "Failed to resolve secret" }) + : err, +); +``` + +Good: catch the one typed case you intentionally translate. 
+ +```ts +effect.pipe( + Effect.catchTag("SecretOwnedByConnectionError", () => + Effect.fail(new SourceError({ message: "Failed to resolve secret" })), + ), +); +``` + +Do not wrap a typed error union into one local error only to satisfy a narrower helper signature. Widen the helper/cache/invocation error channel when callers can still use the original typed failure. Wrap only when the new error adds product meaning, such as turning a connection-owned secret into a source configuration problem. + +For Effect data types, use public helpers instead of `_tag` checks: + +```ts +if (Option.isNone(parsed)) return null; +if (Exit.isFailure(exit)) return ... +``` + +### Redundant error helpers + +Bad: + +```ts +const connectionError = (message: string) => + new McpConnectionError({ transport: "remote", message }); + +return yield* connectionError("Endpoint URL is required"); +``` + +Good: + +```ts +return yield* new McpConnectionError({ + transport: "remote", + message: "Endpoint URL is required", +}); +``` + +Helpers are allowed only when they do real work, such as: + +- choosing between different tagged errors +- decoding/parsing an external error shape +- preserving protocol-specific fields +- normalizing third-party SDK failures into one domain error + +## New error or existing error? + +Reuse an existing tagged error when only the message changes. + +Create a new tagged error when a caller can reasonably branch differently: + +- different HTTP status +- retry vs no retry +- auth/sign-in affordance +- not-found vs conflict vs validation +- user-actionable vs internal failure +- different telemetry grouping that should not depend on message text + +Do not create one tagged error per sentence of prose. + +## What not to report + +- Test assertions that intentionally construct errors as fixture values. 
+- Runtime adapter edges that must satisfy a third-party throwing API, IPC contract, process worker contract, or tooling contract, as long as the untyped behavior is contained and converted to typed Effect failure or a stable boundary envelope. +- Real normalization helpers like `toOAuth2Error(cause)` that inspect protocol fields and preserve structured semantics. +- React/effect-atom mutation handlers using `try/catch`; use `wrdn-effect-promise-exit` for that UI-specific boundary. + +## Output requirements + +When reviewing, report: + +- **File and line** of the untyped error pattern. +- **Rule** being violated. +- **Existing domain error** to use, or the new tagged error that should exist. +- **Fix** in the relevant shape: direct `yield* new ErrorType(...)`, `Effect.tryPromise`, schema decode, or direct constructor inline. + +When editing, keep the error type precise and avoid broad message parsing. diff --git a/.agents/skills/wrdn-effect-value-inferred-types/SKILL.md b/.agents/skills/wrdn-effect-value-inferred-types/SKILL.md new file mode 100644 index 000000000..11a7ab443 --- /dev/null +++ b/.agents/skills/wrdn-effect-value-inferred-types/SKILL.md @@ -0,0 +1,95 @@ +--- +name: wrdn-effect-value-inferred-types +description: Replace duplicated object API types with types inferred from the runtime value or factory that owns the shape. Use when lint or review flags an interface/type alias that mirrors a returned object such as a plugin extension, client surface, route map, or handler table. +allowed-tools: Read Grep Glob Bash +--- + +You fix one pattern: a TypeScript object type manually mirrors a runtime object that already owns the shape. + +Prefer value-first APIs. Build the object in a named factory, then export `type X = ReturnType`. Consumers keep importing the stable type name, but the type cannot drift from the implementation. + +## Trace before changing + +1. **Find the source value.** Look for an object returned from a named factory, `extension: (...) 
=> ({ ... })`, a client object, route map, or handler table. +2. **Find the duplicate type.** A nearby `interface X` or `type X = { ... }` repeats the object methods/properties. +3. **Check whether the value is the source of truth.** If the interface is a contract with multiple implementations, keep the interface. +4. **Preserve the exported type name.** Replace its definition with `ReturnType<typeof makeX>` and update callers only if needed. +5. **Use `satisfies` only at boundaries.** Do not make the implementation satisfy a duplicate shape that could drift. + +## Fix shape + +```ts +const makePluginExtension = (ctx: PluginCtx) => { + const addSource = ... + const removeSource = ... + + return { + addSource, + removeSource, + }; +}; + +export type PluginExtension = ReturnType<typeof makePluginExtension>; +``` + +For factories that need options: + +```ts +const makePluginExtension = + (options: PluginOptions) => + (ctx: PluginCtx) => ({ + addSource: ..., + }); + +export type PluginExtension = ReturnType<ReturnType<typeof makePluginExtension>>; +``` + +## Bad + +```ts +export interface McpPluginExtension { + readonly addSource: (config: McpSourceConfig) => Effect.Effect<void>; + readonly removeSource: (namespace: string, scope: string) => Effect.Effect<void>; +} + +extension: (ctx) => { + return { + addSource, + removeSource, + } satisfies McpPluginExtension; +}; +``` + +## Good + +```ts +const makeMcpPluginExtension = (ctx: PluginCtx) => { + return { + addSource, + removeSource, + }; +}; + +export type McpPluginExtension = ReturnType<typeof makeMcpPluginExtension>; + +extension: makeMcpPluginExtension; +``` + +## What not to report + +- Service/dependency interfaces with multiple implementations. +- Public config input types that are intentionally a stable authored API. +- Branded IDs, discriminated unions, or small aliases that do not mirror one object value. +- Test fakes typed against an existing exported contract. +- Schema-owned data shapes; use `wrdn-effect-schema-inferred-types` for those. 
+ +## Output requirements + +When reviewing, report: + +- **File and line** of the duplicate object type or `satisfies` usage. +- **Value/factory** that owns the shape. +- **Why** the manual type can drift. +- **Fix**: the exact `ReturnType` alias to introduce. + +When editing, name the factory after the exported type, e.g. `makeMcpPluginExtension` for `McpPluginExtension`. diff --git a/.agents/skills/wrdn-effect-vitest-tests/SKILL.md b/.agents/skills/wrdn-effect-vitest-tests/SKILL.md new file mode 100644 index 000000000..bceb412af --- /dev/null +++ b/.agents/skills/wrdn-effect-vitest-tests/SKILL.md @@ -0,0 +1,29 @@ +--- +name: wrdn-effect-vitest-tests +description: Keep tests deterministic and Effect-aware. Use when lint flags direct vitest imports or conditional assertions inside tests. +allowed-tools: Read Grep Glob Bash +--- + +Use `@effect/vitest` for tests in this repo. + +## Fix Shape + +- Import `describe`, `it`, `expect`, and helpers from `@effect/vitest`. +- Import utility helpers from `@effect/vitest/utils` when needed. +- Do not import from raw `vitest` except in config or tooling files. +- Do not put `expect(...)` behind `if`, ternary, logical, or switch branches. +- Split conditional behavior into separate tests, or assert the branch condition and expected value explicitly. + +## Bad + +```ts +if (result.ok) { + expect(result.value).toBe("x"); +} +``` + +## Good + +```ts +expect(result).toEqual({ ok: true, value: "x" }); +``` diff --git a/.agents/skills/wrdn-package-boundaries/SKILL.md b/.agents/skills/wrdn-package-boundaries/SKILL.md new file mode 100644 index 000000000..34dfdc92f --- /dev/null +++ b/.agents/skills/wrdn-package-boundaries/SKILL.md @@ -0,0 +1,26 @@ +--- +name: wrdn-package-boundaries +description: Preserve workspace package boundaries. Use when lint flags relative imports that cross package roots. +allowed-tools: Read Grep Glob Bash +--- + +Workspace packages should import each other through package exports, not relative paths. 
+ +## Fix Shape + +- Replace cross-package relative imports with the target package name. +- If the needed module is not exported, add the smallest package export that matches the package's public surface. +- Keep relative imports only within the same package root. +- Do not reach into another package's private source tree from an app or package. + +## Good + +```ts +import { createExecutor } from "@executor-js/sdk"; +``` + +## Bad + +```ts +import { createExecutor } from "../../../core/sdk/src"; +``` diff --git a/.agents/skills/wrdn-typescript-type-safety/SKILL.md b/.agents/skills/wrdn-typescript-type-safety/SKILL.md new file mode 100644 index 000000000..452ceccc7 --- /dev/null +++ b/.agents/skills/wrdn-typescript-type-safety/SKILL.md @@ -0,0 +1,14 @@ +--- +name: wrdn-typescript-type-safety +description: Remove TypeScript escape hatches. Use when lint flags @ts-nocheck or similar broad type bypasses. +allowed-tools: Read Grep Glob Bash +--- + +Fix the type boundary instead of disabling TypeScript. + +## Fix Shape + +- Remove `@ts-nocheck`. +- Narrow the failing expression, add a schema/guard at an unknown boundary, or improve the local type. +- If a cast is unavoidable, keep it narrow and document the invariant at the cast site. +- Do not silence an entire file for a localized mismatch. 
diff --git a/.oxlintrc.jsonc b/.oxlintrc.jsonc index b6a2517f4..846cb9fb6 100644 --- a/.oxlintrc.jsonc +++ b/.oxlintrc.jsonc @@ -9,7 +9,19 @@ "executor/no-cross-package-relative-imports": "error", "executor/require-reactivity-keys": "error", "executor/no-effect-internal-tags": "error", + "executor/no-error-constructor": "error", + "executor/no-instanceof-error": "error", + "executor/no-instanceof-tagged-error": "error", + "executor/no-manual-tag-check": "error", + "executor/no-promise-catch": "error", + "executor/no-promise-reject": "error", + "executor/no-redundant-error-factory": "error", "executor/no-ts-nocheck": "error", + "executor/no-try-catch-or-throw": "error", + "executor/no-unknown-error-message": "error", + "executor/prefer-schema-inferred-types": "error", + "executor/prefer-value-inferred-extension-types": "error", + "executor/prefer-yield-tagged-error": "error", "react/forbid-elements": [ "error", { diff --git a/scripts/oxlint-plugin-executor.js b/scripts/oxlint-plugin-executor.js index 94ea9e2ae..755b122ec 100644 --- a/scripts/oxlint-plugin-executor.js +++ b/scripts/oxlint-plugin-executor.js @@ -2,15 +2,24 @@ import noConditionalTests from "./oxlint-plugin-executor/rules/no-conditional-te import noCrossPackageRelativeImports from "./oxlint-plugin-executor/rules/no-cross-package-relative-imports.js"; import noDoubleCast from "./oxlint-plugin-executor/rules/no-double-cast.js"; import noEffectInternalTags from "./oxlint-plugin-executor/rules/no-effect-internal-tags.js"; +import noErrorConstructor from "./oxlint-plugin-executor/rules/no-error-constructor.js"; import noInlineObjectTypeAssertion from "./oxlint-plugin-executor/rules/no-inline-object-type-assertion.js"; +import noInstanceofError from "./oxlint-plugin-executor/rules/no-instanceof-error.js"; import noInstanceofTaggedError from "./oxlint-plugin-executor/rules/no-instanceof-tagged-error.js"; import noManualTagCheck from "./oxlint-plugin-executor/rules/no-manual-tag-check.js"; +import 
noPromiseCatch from "./oxlint-plugin-executor/rules/no-promise-catch.js"; import noPromiseClientSurface from "./oxlint-plugin-executor/rules/no-promise-client-surface.js"; +import noPromiseReject from "./oxlint-plugin-executor/rules/no-promise-reject.js"; import noRawErrorThrow from "./oxlint-plugin-executor/rules/no-raw-error-throw.js"; import noRedundantErrorFactory from "./oxlint-plugin-executor/rules/no-redundant-error-factory.js"; import noTsNocheck from "./oxlint-plugin-executor/rules/no-ts-nocheck.js"; +import noTryCatchOrThrow from "./oxlint-plugin-executor/rules/no-try-catch-or-throw.js"; +import noUnknownErrorMessage from "./oxlint-plugin-executor/rules/no-unknown-error-message.js"; import noUnknownShapeProbing from "./oxlint-plugin-executor/rules/no-unknown-shape-probing.js"; import noVitestImport from "./oxlint-plugin-executor/rules/no-vitest-import.js"; +import preferSchemaInferredTypes from "./oxlint-plugin-executor/rules/prefer-schema-inferred-types.js"; +import preferYieldTaggedError from "./oxlint-plugin-executor/rules/prefer-yield-tagged-error.js"; +import preferValueInferredExtensionTypes from "./oxlint-plugin-executor/rules/prefer-value-inferred-extension-types.js"; import requireReactivityKeys from "./oxlint-plugin-executor/rules/require-reactivity-keys.js"; export default { @@ -24,13 +33,22 @@ export default { "no-cross-package-relative-imports": noCrossPackageRelativeImports, "require-reactivity-keys": requireReactivityKeys, "no-effect-internal-tags": noEffectInternalTags, + "no-error-constructor": noErrorConstructor, "no-ts-nocheck": noTsNocheck, "no-inline-object-type-assertion": noInlineObjectTypeAssertion, + "no-instanceof-error": noInstanceofError, "no-instanceof-tagged-error": noInstanceofTaggedError, "no-manual-tag-check": noManualTagCheck, + "no-promise-catch": noPromiseCatch, "no-promise-client-surface": noPromiseClientSurface, + "no-promise-reject": noPromiseReject, "no-raw-error-throw": noRawErrorThrow, 
"no-redundant-error-factory": noRedundantErrorFactory, + "no-try-catch-or-throw": noTryCatchOrThrow, + "no-unknown-error-message": noUnknownErrorMessage, "no-unknown-shape-probing": noUnknownShapeProbing, + "prefer-schema-inferred-types": preferSchemaInferredTypes, + "prefer-value-inferred-extension-types": preferValueInferredExtensionTypes, + "prefer-yield-tagged-error": preferYieldTaggedError, }, }; diff --git a/scripts/oxlint-plugin-executor/rules/no-conditional-tests.js b/scripts/oxlint-plugin-executor/rules/no-conditional-tests.js index 52eba8e09..fd724764b 100644 --- a/scripts/oxlint-plugin-executor/rules/no-conditional-tests.js +++ b/scripts/oxlint-plugin-executor/rules/no-conditional-tests.js @@ -57,7 +57,8 @@ export default { if (name !== "expect") return; context.report({ node, - message: "Avoid conditional expect calls; split the test or assert both branches explicitly.", + message: + "Avoid conditional expect calls; split the test or assert both branches explicitly. Skill: wrdn-effect-vitest-tests.", }); }, FunctionDeclaration: enterFunction, diff --git a/scripts/oxlint-plugin-executor/rules/no-cross-package-relative-imports.js b/scripts/oxlint-plugin-executor/rules/no-cross-package-relative-imports.js index 13e54d7aa..0c34fe6be 100644 --- a/scripts/oxlint-plugin-executor/rules/no-cross-package-relative-imports.js +++ b/scripts/oxlint-plugin-executor/rules/no-cross-package-relative-imports.js @@ -23,7 +23,7 @@ export default { context.report({ node: node.source, - message: `Import ${target.name} via its package export instead of a relative path.`, + message: `Import ${target.name} via its package export instead of a relative path. 
Skill: wrdn-package-boundaries.`, }); }, }; diff --git a/scripts/oxlint-plugin-executor/rules/no-double-cast.js b/scripts/oxlint-plugin-executor/rules/no-double-cast.js index 012462cce..03fd91df5 100644 --- a/scripts/oxlint-plugin-executor/rules/no-double-cast.js +++ b/scripts/oxlint-plugin-executor/rules/no-double-cast.js @@ -19,7 +19,7 @@ export default { context.report({ node, message: - "Avoid double casts through unknown/any; use a typed boundary, schema decode, or a narrow allow comment with a reason.", + "Avoid double casts through unknown/any; use a typed boundary, schema decode, or a narrow allow comment with a reason. Skill: wrdn-effect-schema-boundaries.", }); }, }; diff --git a/scripts/oxlint-plugin-executor/rules/no-effect-internal-tags.js b/scripts/oxlint-plugin-executor/rules/no-effect-internal-tags.js index 9b65aa293..18dff2773 100644 --- a/scripts/oxlint-plugin-executor/rules/no-effect-internal-tags.js +++ b/scripts/oxlint-plugin-executor/rules/no-effect-internal-tags.js @@ -71,7 +71,7 @@ function reportIfEffectTagComparison( context.report({ node: access, - message: `Use Effect's public helpers instead of checking internal _tag "${tag}".`, + message: `Use Effect's public helpers instead of checking internal _tag "${tag}". Skill: wrdn-effect-typed-errors.`, }); } diff --git a/scripts/oxlint-plugin-executor/rules/no-error-constructor.js b/scripts/oxlint-plugin-executor/rules/no-error-constructor.js new file mode 100644 index 000000000..6a587443a --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/no-error-constructor.js @@ -0,0 +1,40 @@ +import { nodeName } from "../utils.js"; + +const errorConstructors = new Set([ + "AggregateError", + "Error", + "EvalError", + "RangeError", + "ReferenceError", + "SyntaxError", + "TypeError", + "URIError", +]); + +const message = + "Do not construct built-in Error objects in Effect domain code. 
Use typed domain errors and Effect.fail instead; at true adapter boundaries use a narrow suppression with a boundary reason. Skill: wrdn-effect-typed-errors."; + +const isErrorConstructor = (node) => errorConstructors.has(nodeName(node)); + +export default { + meta: { + type: "problem", + docs: { + description: "Disallow built-in Error constructors.", + }, + }, + create(context) { + return { + NewExpression(node) { + if (isErrorConstructor(node.callee)) { + context.report({ node, message }); + } + }, + CallExpression(node) { + if (isErrorConstructor(node.callee)) { + context.report({ node, message }); + } + }, + }; + }, +}; diff --git a/scripts/oxlint-plugin-executor/rules/no-inline-object-type-assertion.js b/scripts/oxlint-plugin-executor/rules/no-inline-object-type-assertion.js index a741f199d..9f03db925 100644 --- a/scripts/oxlint-plugin-executor/rules/no-inline-object-type-assertion.js +++ b/scripts/oxlint-plugin-executor/rules/no-inline-object-type-assertion.js @@ -1,7 +1,7 @@ import { isIdentifier } from "../utils.js"; const message = - "Do not assert against inline object-shaped types. Use a named type, Schema, or a proper type guard."; + "Do not assert against inline object-shaped types. Use a named type, Schema, or a proper type guard. Skill: wrdn-effect-schema-boundaries."; const isUnknownKeyword = (node) => node?.type === "TSUnknownKeyword"; diff --git a/scripts/oxlint-plugin-executor/rules/no-instanceof-error.js b/scripts/oxlint-plugin-executor/rules/no-instanceof-error.js new file mode 100644 index 000000000..661bd32b2 --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/no-instanceof-error.js @@ -0,0 +1,22 @@ +import { nodeName } from "../utils.js"; + +const message = + "Do not use instanceof Error. Preserve typed failures with Effect tagged-error handling. 
Skill: wrdn-effect-typed-errors."; + +export default { + meta: { + type: "problem", + docs: { + description: "Disallow instanceof Error checks.", + }, + }, + create(context) { + return { + BinaryExpression(node) { + if (node.operator === "instanceof" && nodeName(node.right) === "Error") { + context.report({ node, message }); + } + }, + }; + }, +}; diff --git a/scripts/oxlint-plugin-executor/rules/no-instanceof-tagged-error.js b/scripts/oxlint-plugin-executor/rules/no-instanceof-tagged-error.js index d8a566b14..a0ac94866 100644 --- a/scripts/oxlint-plugin-executor/rules/no-instanceof-tagged-error.js +++ b/scripts/oxlint-plugin-executor/rules/no-instanceof-tagged-error.js @@ -1,7 +1,7 @@ import { isIdentifier, nodeName } from "../utils.js"; const message = - "Do not use instanceof for tagged errors. Use Effect.catchTag, Effect.catchTags, or a _tag-based guard."; + "Do not use instanceof for tagged errors. Use Effect.catchTag, Effect.catchTags, or a _tag-based guard. Skill: wrdn-effect-typed-errors."; const looksLikeTaggedErrorName = (name) => typeof name === "string" && name !== "Error" && name.endsWith("Error"); diff --git a/scripts/oxlint-plugin-executor/rules/no-manual-tag-check.js b/scripts/oxlint-plugin-executor/rules/no-manual-tag-check.js index f6ebe68da..45f39bbbe 100644 --- a/scripts/oxlint-plugin-executor/rules/no-manual-tag-check.js +++ b/scripts/oxlint-plugin-executor/rules/no-manual-tag-check.js @@ -1,7 +1,7 @@ import { isIdentifier, isStringLiteral } from "../utils.js"; const message = - "Do not inspect _tag manually. Use Effect.catchTag, Effect.catchTags, Predicate.isTagged, or another Effect tagged-error API."; + "Do not inspect _tag manually. Use Effect.catchTag/catchTags for error handling, Predicate.isTagged for guards, or public Effect helpers for Effect data. 
Skill: wrdn-effect-typed-errors."; const isTagProperty = (node) => isIdentifier(node, "_tag") || (isStringLiteral(node) && node.value === "_tag"); @@ -15,6 +15,16 @@ export default { }, create(context) { return { + BinaryExpression(node) { + if (node.operator === "in" && isTagProperty(node.left)) { + context.report({ node, message }); + return; + } + if (!["===", "!==", "==", "!="].includes(node.operator)) return; + if (isTagAccess(node.left) || isTagAccess(node.right)) { + context.report({ node, message }); + } + }, MemberExpression(node) { if (isTagProperty(node.property)) { context.report({ node, message }); @@ -23,3 +33,5 @@ export default { }; }, }; + +const isTagAccess = (node) => node?.type === "MemberExpression" && isTagProperty(node.property); diff --git a/scripts/oxlint-plugin-executor/rules/no-promise-catch.js b/scripts/oxlint-plugin-executor/rules/no-promise-catch.js new file mode 100644 index 000000000..daa07271e --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/no-promise-catch.js @@ -0,0 +1,30 @@ +import { getPropertyName, isIdentifier, unwrapExpression } from "../utils.js"; + +const message = + "Do not use Promise .catch(). Model async failures with Effect.tryPromise and typed Effect error handling. 
Skill: wrdn-effect-typed-errors."; + +const isCatchMember = (node) => { + const expression = unwrapExpression(node); + if (isIdentifier(unwrapExpression(expression?.object), "Effect")) return false; + return ( + expression?.type === "MemberExpression" && getPropertyName(expression.property) === "catch" + ); +}; + +export default { + meta: { + type: "problem", + docs: { + description: "Disallow Promise-style .catch() error handling.", + }, + }, + create(context) { + return { + CallExpression(node) { + if (isCatchMember(node.callee)) { + context.report({ node, message }); + } + }, + }; + }, +}; diff --git a/scripts/oxlint-plugin-executor/rules/no-promise-client-surface.js b/scripts/oxlint-plugin-executor/rules/no-promise-client-surface.js index 3ae5916f6..047ec94f4 100644 --- a/scripts/oxlint-plugin-executor/rules/no-promise-client-surface.js +++ b/scripts/oxlint-plugin-executor/rules/no-promise-client-surface.js @@ -1,7 +1,7 @@ import { containsPromiseType, nodeName } from "../utils.js"; const message = - "Do not expose Promise-shaped client surfaces. Wrap third-party SDK promises at the adapter boundary and expose Effect methods."; + "Do not expose Promise-shaped client surfaces. Wrap third-party SDK promises at the adapter boundary and expose Effect methods. Skill: effect-client-wrapper."; const isExported = (node) => node?.parent?.type === "ExportNamedDeclaration"; diff --git a/scripts/oxlint-plugin-executor/rules/no-promise-reject.js b/scripts/oxlint-plugin-executor/rules/no-promise-reject.js new file mode 100644 index 000000000..2e996cc7c --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/no-promise-reject.js @@ -0,0 +1,74 @@ +import { getPropertyName, isIdentifier, unwrapExpression } from "../utils.js"; + +const promiseRejectMessage = + "Do not use Promise.reject(). Model async failures with Effect.fail or Effect.tryPromise. Skill: wrdn-effect-typed-errors."; +const rejectCallbackMessage = + "Do not call Promise executor reject(). 
Model async failures with Effect.fail or Effect.tryPromise. Skill: wrdn-effect-typed-errors."; + +const isPromiseReject = (node) => { + const expression = unwrapExpression(node); + return ( + expression?.type === "MemberExpression" && + isIdentifier(unwrapExpression(expression.object), "Promise") && + getPropertyName(expression.property) === "reject" + ); +}; + +const isPromiseConstructor = (node) => + node?.type === "NewExpression" && isIdentifier(unwrapExpression(node.callee), "Promise"); + +const isFunction = (node) => + node?.type === "ArrowFunctionExpression" || + node?.type === "FunctionExpression" || + node?.type === "FunctionDeclaration"; + +export default { + meta: { + type: "problem", + docs: { + description: "Disallow Promise rejection APIs.", + }, + }, + create(context) { + const promiseExecutors = new WeakSet(); + const rejectNames = []; + + const enterFunction = (node) => { + if (!promiseExecutors.has(node)) return; + const rejectParam = node.params?.[1]; + if (isIdentifier(rejectParam)) { + rejectNames.push(rejectParam.name); + } else { + rejectNames.push(undefined); + } + }; + + const exitFunction = (node) => { + if (promiseExecutors.has(node)) rejectNames.pop(); + }; + + return { + NewExpression(node) { + if (!isPromiseConstructor(node)) return; + const executor = node.arguments?.[0]; + if (isFunction(executor)) promiseExecutors.add(executor); + }, + CallExpression(node) { + if (isPromiseReject(node.callee)) { + context.report({ node, message: promiseRejectMessage }); + return; + } + + if (isIdentifier(node.callee) && rejectNames.includes(node.callee.name)) { + context.report({ node, message: rejectCallbackMessage }); + } + }, + FunctionDeclaration: enterFunction, + "FunctionDeclaration:exit": exitFunction, + FunctionExpression: enterFunction, + "FunctionExpression:exit": exitFunction, + ArrowFunctionExpression: enterFunction, + "ArrowFunctionExpression:exit": exitFunction, + }; + }, +}; diff --git 
a/scripts/oxlint-plugin-executor/rules/no-raw-error-throw.js b/scripts/oxlint-plugin-executor/rules/no-raw-error-throw.js index 92d9bc8d4..773b105fe 100644 --- a/scripts/oxlint-plugin-executor/rules/no-raw-error-throw.js +++ b/scripts/oxlint-plugin-executor/rules/no-raw-error-throw.js @@ -1,7 +1,7 @@ import { isIdentifier } from "../utils.js"; const message = - "Do not throw raw Error objects in Effect code. Return Effect.fail with a tagged error or assert directly in tests."; + "Do not throw raw Error objects in Effect code. Return Effect.fail with a tagged error or assert directly in tests. Skill: wrdn-effect-typed-errors."; const isNewError = (node) => node?.type === "NewExpression" && isIdentifier(node.callee, "Error"); diff --git a/scripts/oxlint-plugin-executor/rules/no-redundant-error-factory.js b/scripts/oxlint-plugin-executor/rules/no-redundant-error-factory.js index 1b1426712..2e3fccca3 100644 --- a/scripts/oxlint-plugin-executor/rules/no-redundant-error-factory.js +++ b/scripts/oxlint-plugin-executor/rules/no-redundant-error-factory.js @@ -1,27 +1,65 @@ import { isIdentifier } from "../utils.js"; const message = - "Do not add redundant make*Error wrappers that only construct a tagged error. Construct the tagged error directly."; + "Do not add redundant helpers that only construct a tagged error. Construct the tagged error directly. Skill: wrdn-effect-typed-errors."; const isErrorFactoryName = (name) => /^make[A-Z].*Error$/.test(name); +const isErrorHelperName = (name) => + isErrorFactoryName(name) || String(name ?? 
"").endsWith("Error"); + +const parameterName = (param) => { + if (isIdentifier(param)) return param.name; + if (param?.type === "AssignmentPattern" && isIdentifier(param.left)) return param.left.name; + if (param?.type === "RestElement" && isIdentifier(param.argument)) return param.argument.name; + return undefined; +}; + const isNewErrorExpression = (node) => node?.type === "NewExpression" && isIdentifier(node.callee) && node.callee.name.endsWith("Error"); +const isForwardedValue = (node, parameterNames) => { + if (node?.type === "Literal" || node?.type === "StringLiteral") return true; + if (node?.type === "Identifier") return parameterNames.has(node.name); + return ( + node?.type === "MemberExpression" && + isIdentifier(node.object) && + parameterNames.has(node.object.name) + ); +}; + +const isObjectWithOnlyForwardedFields = (node, parameterNames) => { + if (node?.type !== "ObjectExpression") return true; + return (node.properties ?? []).every((property) => { + if (property.type === "SpreadElement") return false; + return isForwardedValue(property.value, parameterNames); + }); +}; + +const isRedundantNewErrorExpression = (node, parameterNames) => { + if (!isNewErrorExpression(node)) return false; + if ((node.arguments ?? []).length === 0) return true; + if (node.arguments.length > 1) return false; + const argument = node.arguments[0]; + if (argument?.type === "Identifier") return parameterNames.has(argument.name); + return isObjectWithOnlyForwardedFields(argument, parameterNames); +}; + const returnsOnlyNewError = (node) => { - if (isNewErrorExpression(node)) return true; + const parameterNames = new Set((node?.params ?? []).map(parameterName).filter(Boolean)); + if (isRedundantNewErrorExpression(node?.body ?? node, parameterNames)) return true; if (node?.type !== "BlockStatement") return false; const statements = node.body ?? 
[]; return ( statements.length === 1 && statements[0]?.type === "ReturnStatement" && - isNewErrorExpression(statements[0].argument) + isRedundantNewErrorExpression(statements[0].argument, parameterNames) ); }; -const reportIfRedundantFactory = (context, name, body, node) => { - if (isErrorFactoryName(name) && returnsOnlyNewError(body)) { - context.report({ node, message }); +const reportIfRedundantFactory = (context, name, fnNode, reportNode) => { + if (isErrorHelperName(name) && returnsOnlyNewError(fnNode)) { + context.report({ node: reportNode, message }); } }; @@ -35,7 +73,7 @@ export default { create(context) { return { FunctionDeclaration(node) { - reportIfRedundantFactory(context, node.id?.name, node.body, node); + reportIfRedundantFactory(context, node.id?.name, node, node); }, VariableDeclarator(node) { if (!isIdentifier(node.id)) return; @@ -45,7 +83,7 @@ export default { ) { return; } - reportIfRedundantFactory(context, node.id.name, node.init.body, node); + reportIfRedundantFactory(context, node.id.name, node.init, node); }, }; }, diff --git a/scripts/oxlint-plugin-executor/rules/no-try-catch-or-throw.js b/scripts/oxlint-plugin-executor/rules/no-try-catch-or-throw.js new file mode 100644 index 000000000..4b876efd9 --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/no-try-catch-or-throw.js @@ -0,0 +1,23 @@ +const tryCatchMessage = + "Do not use try/catch blocks in Effect domain code. Model failures with Effect instead; at true adapter boundaries use a narrow suppression with a boundary reason. Skill: wrdn-effect-typed-errors; React useAtomSet mutation handlers use wrdn-effect-promise-exit."; +const throwMessage = + "Do not throw errors in Effect domain code. Model failures with Effect.fail or typed error values instead; at true adapter boundaries use a narrow suppression with a boundary reason. 
Skill: wrdn-effect-typed-errors."; + +export default { + meta: { + type: "problem", + docs: { + description: "Disallow try/catch blocks and throw statements.", + }, + }, + create(context) { + return { + TryStatement(node) { + context.report({ node, message: tryCatchMessage }); + }, + ThrowStatement(node) { + context.report({ node, message: throwMessage }); + }, + }; + }, +}; diff --git a/scripts/oxlint-plugin-executor/rules/no-ts-nocheck.js b/scripts/oxlint-plugin-executor/rules/no-ts-nocheck.js index fc0b10aef..1ce03adf3 100644 --- a/scripts/oxlint-plugin-executor/rules/no-ts-nocheck.js +++ b/scripts/oxlint-plugin-executor/rules/no-ts-nocheck.js @@ -18,7 +18,7 @@ export default { context.report({ node, - message: `Do not use ${directiveName}; fix the types or narrow the file scope.`, + message: `Do not use ${directiveName}; fix the types or narrow the file scope. Skill: wrdn-typescript-type-safety.`, }); }, }; diff --git a/scripts/oxlint-plugin-executor/rules/no-unknown-error-message.js b/scripts/oxlint-plugin-executor/rules/no-unknown-error-message.js new file mode 100644 index 000000000..87987bf1b --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/no-unknown-error-message.js @@ -0,0 +1,49 @@ +import { getPropertyName, isIdentifier, nodeName, unwrapExpression } from "../utils.js"; + +const stringMessage = + "Do not stringify unknown errors. Keep typed failures in Effect or normalize at a typed boundary. Skill: wrdn-effect-typed-errors."; +const messagePropertyMessage = + "Do not read .message from unknown errors. Preserve typed failures with Effect tagged-error handling. Skill: wrdn-effect-typed-errors."; +const destructuredMessage = + "Do not destructure .message from unknown errors. Preserve typed failures with Effect tagged-error handling. 
Skill: wrdn-effect-typed-errors."; + +const errorLikeNames = new Set(["cause", "e", "err", "error", "reason", "unknownError"]); + +const isErrorLikeIdentifier = (node) => { + const name = nodeName(unwrapExpression(node)); + return errorLikeNames.has(name); +}; + +export default { + meta: { + type: "problem", + docs: { + description: "Disallow common unknown-error string and message normalization patterns.", + }, + }, + create(context) { + return { + CallExpression(node) { + if (!isIdentifier(unwrapExpression(node.callee), "String")) return; + if (node.arguments.some(isErrorLikeIdentifier)) { + context.report({ node, message: stringMessage }); + } + }, + MemberExpression(node) { + if (getPropertyName(node.property) !== "message") return; + if (isErrorLikeIdentifier(node.object)) { + context.report({ node, message: messagePropertyMessage }); + } + }, + VariableDeclarator(node) { + if (node.id?.type !== "ObjectPattern" || !isErrorLikeIdentifier(node.init)) return; + for (const property of node.id.properties ?? []) { + if (property.type !== "Property") continue; + if (getPropertyName(property.key) === "message") { + context.report({ node: property, message: destructuredMessage }); + } + } + }, + }; + }, +}; diff --git a/scripts/oxlint-plugin-executor/rules/no-unknown-shape-probing.js b/scripts/oxlint-plugin-executor/rules/no-unknown-shape-probing.js index f409aef2a..4a6f6b51d 100644 --- a/scripts/oxlint-plugin-executor/rules/no-unknown-shape-probing.js +++ b/scripts/oxlint-plugin-executor/rules/no-unknown-shape-probing.js @@ -1,7 +1,7 @@ import { isIdentifier, isStringLiteral } from "../utils.js"; const message = - "Do not probe unknown object shapes in domain code. Normalize at a boundary with Schema, a typed adapter, or a named guard."; + "Do not probe unknown object shapes in domain code. Normalize at a boundary with Schema, a typed adapter, or a named guard. 
Skill: wrdn-effect-schema-boundaries."; const isReflectGet = (node) => node?.type === "MemberExpression" && diff --git a/scripts/oxlint-plugin-executor/rules/no-vitest-import.js b/scripts/oxlint-plugin-executor/rules/no-vitest-import.js index f420b8f2a..fe4b15ee9 100644 --- a/scripts/oxlint-plugin-executor/rules/no-vitest-import.js +++ b/scripts/oxlint-plugin-executor/rules/no-vitest-import.js @@ -15,7 +15,7 @@ export default { context.report({ node: node.source, message: - "Import test helpers from @effect/vitest or @effect/vitest/utils instead of vitest.", + "Import test helpers from @effect/vitest or @effect/vitest/utils instead of vitest. Skill: wrdn-effect-vitest-tests.", }); }, }; diff --git a/scripts/oxlint-plugin-executor/rules/prefer-schema-inferred-types.js b/scripts/oxlint-plugin-executor/rules/prefer-schema-inferred-types.js new file mode 100644 index 000000000..cab0d6bfe --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/prefer-schema-inferred-types.js @@ -0,0 +1,66 @@ +import { getCallName, isIdentifier, typeReferenceName } from "../utils.js"; + +const message = + "This object type duplicates a nearby Effect Schema. Export an inferred type from the schema instead. Skill: wrdn-effect-schema-inferred-types."; + +const schemaSuffixPattern = /(Schema|Model|Struct)$/; + +const schemaBaseName = (name) => { + const base = name.replace(schemaSuffixPattern, ""); + return base.length > 0 && base !== name ? 
base : undefined; +}; + +const isSchemaMemberCall = (node) => + node?.type === "CallExpression" && + node.callee?.type === "MemberExpression" && + isIdentifier(node.callee.object, "Schema"); + +const isSchemaModelExpression = (node) => { + if (isSchemaMemberCall(node)) return true; + if (node?.type === "CallExpression" && getCallName(node.callee) === "pipe") { + return isSchemaModelExpression(node.callee.object); + } + return false; +}; + +const isObjectTypeAlias = (node) => node.typeAnnotation?.type === "TSTypeLiteral"; + +const isInferredSchemaType = (node) => { + if (node.typeAnnotation?.type !== "TSTypeReference") return false; + const name = typeReferenceName(node.typeAnnotation); + return name === "Schema.Schema.Type"; +}; + +export default { + meta: { + type: "problem", + docs: { + description: message, + }, + }, + create(context) { + const schemaBases = new Set(); + const candidates = []; + + return { + VariableDeclarator(node) { + if (!isIdentifier(node.id) || !isSchemaModelExpression(node.init)) return; + const base = schemaBaseName(node.id.name); + if (base) schemaBases.add(base); + }, + TSInterfaceDeclaration(node) { + candidates.push({ node, name: node.id?.name }); + }, + TSTypeAliasDeclaration(node) { + if (!isObjectTypeAlias(node) || isInferredSchemaType(node)) return; + candidates.push({ node, name: node.id?.name }); + }, + "Program:exit"() { + for (const candidate of candidates) { + if (!candidate.name || !schemaBases.has(candidate.name)) continue; + context.report({ node: candidate.node, message }); + } + }, + }; + }, +}; diff --git a/scripts/oxlint-plugin-executor/rules/prefer-value-inferred-extension-types.js b/scripts/oxlint-plugin-executor/rules/prefer-value-inferred-extension-types.js new file mode 100644 index 000000000..59b9076ec --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/prefer-value-inferred-extension-types.js @@ -0,0 +1,81 @@ +import { isIdentifier } from "../utils.js"; + +const message = + "Do not duplicate plugin 
extension object shapes. Derive the extension type from the extension factory return value. Skill: wrdn-effect-value-inferred-types."; + +const extensionNamePattern = /(?:Plugin)?Extension$/; + +const isExtensionTypeName = (name) => typeof name === "string" && extensionNamePattern.test(name); + +const isExtensionProperty = (node) => + node?.type === "Property" && + !node.computed && + ((node.key?.type === "Identifier" && node.key.name === "extension") || + ((node.key?.type === "Literal" || node.key?.type === "StringLiteral") && + node.key.value === "extension")); + +const isSatisfiesExtension = (node, extensionTypeNames) => + node?.type === "TSSatisfiesExpression" && + node.typeAnnotation?.type === "TSTypeReference" && + isIdentifier(node.typeAnnotation.typeName) && + extensionTypeNames.has(node.typeAnnotation.typeName.name); + +const returnsSatisfiesExtension = (node, extensionTypeNames) => { + if (!node) return false; + if (isSatisfiesExtension(node, extensionTypeNames)) return true; + if (node.type === "BlockStatement") { + return (node.body ?? 
[]).some( + (statement) => + statement.type === "ReturnStatement" && + isSatisfiesExtension(statement.argument, extensionTypeNames), + ); + } + return false; +}; + +const isAnnotatedExtensionFunction = (node, extensionTypeNames) => + (node?.type === "ArrowFunctionExpression" || node?.type === "FunctionExpression") && + node.returnType?.typeAnnotation?.type === "TSTypeReference" && + isIdentifier(node.returnType.typeAnnotation.typeName) && + extensionTypeNames.has(node.returnType.typeAnnotation.typeName.name); + +export default { + meta: { + type: "problem", + docs: { + description: message, + }, + }, + create(context) { + const extensionTypeNames = new Set(); + const extensionProperties = []; + + return { + TSInterfaceDeclaration(node) { + if (isExtensionTypeName(node.id?.name)) { + extensionTypeNames.add(node.id.name); + } + }, + TSTypeAliasDeclaration(node) { + if (isExtensionTypeName(node.id?.name) && node.typeAnnotation?.type === "TSTypeLiteral") { + extensionTypeNames.add(node.id.name); + } + }, + Property(node) { + if (!isExtensionProperty(node)) return; + extensionProperties.push(node); + }, + "Program:exit"() { + for (const node of extensionProperties) { + const value = node.value; + if ( + isAnnotatedExtensionFunction(value, extensionTypeNames) || + returnsSatisfiesExtension(value?.body, extensionTypeNames) + ) { + context.report({ node, message }); + } + } + }, + }; + }, +}; diff --git a/scripts/oxlint-plugin-executor/rules/prefer-yield-tagged-error.js b/scripts/oxlint-plugin-executor/rules/prefer-yield-tagged-error.js new file mode 100644 index 000000000..182983982 --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/prefer-yield-tagged-error.js @@ -0,0 +1,40 @@ +import { getPropertyName, isIdentifier } from "../utils.js"; + +const message = + "Yield tagged errors directly in Effect.gen instead of yielding Effect.fail(new ErrorType(...)). 
Skill: wrdn-effect-typed-errors."; + +const isEffectFail = (node) => + node?.type === "MemberExpression" && + isIdentifier(node.object, "Effect") && + getPropertyName(node.property) === "fail"; + +const isTaggedErrorConstruction = (node) => + node?.type === "NewExpression" && + isIdentifier(node.callee) && + node.callee.name !== "Error" && + node.callee.name.endsWith("Error"); + +const isYieldedEffectFailOfTaggedError = (node) => + node?.type === "YieldExpression" && + node.delegate === true && + node.argument?.type === "CallExpression" && + isEffectFail(node.argument.callee) && + isTaggedErrorConstruction(node.argument.arguments?.[0]); + +export default { + meta: { + type: "problem", + docs: { + description: message, + }, + }, + create(context) { + return { + YieldExpression(node) { + if (isYieldedEffectFailOfTaggedError(node)) { + context.report({ node, message }); + } + }, + }; + }, +}; diff --git a/scripts/oxlint-plugin-executor/rules/require-reactivity-keys.js b/scripts/oxlint-plugin-executor/rules/require-reactivity-keys.js index f31557923..a3245dd2d 100644 --- a/scripts/oxlint-plugin-executor/rules/require-reactivity-keys.js +++ b/scripts/oxlint-plugin-executor/rules/require-reactivity-keys.js @@ -56,7 +56,7 @@ export default { context.report({ node: call, - message: `Mutation ${mutation.mutationName} must pass reactivityKeys at the call site.`, + message: `Mutation ${mutation.mutationName} must pass reactivityKeys at the call site. 
Skill: wrdn-effect-atom-reactivity-keys.`, }); }, }; From 9a2afe88f65980dbd932a8150406cf9609f812a3 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 16:13:26 -0700 Subject: [PATCH 002/108] Use Effect resource cleanup in schema test --- apps/cloud/src/services/db.schema.test.ts | 35 ++++++++++++++--------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/apps/cloud/src/services/db.schema.test.ts b/apps/cloud/src/services/db.schema.test.ts index e5fb2378a..d6cdd9de9 100644 --- a/apps/cloud/src/services/db.schema.test.ts +++ b/apps/cloud/src/services/db.schema.test.ts @@ -15,6 +15,7 @@ import { describe, expect, it } from "@effect/vitest"; import { drizzle } from "drizzle-orm/postgres-js"; +import { Effect } from "effect"; import postgres from "postgres"; import * as cloudSchema from "./schema"; @@ -44,20 +45,26 @@ describe("combinedSchema", () => { // getters could theoretically drop tables if evaluated before their // declarations. Construct a drizzle instance and walk its fullSchema // to catch that class of bug too. - it("drizzle(combinedSchema) exposes every table under _.fullSchema", () => { + it.effect("drizzle(combinedSchema) exposes every table under _.fullSchema", () => // postgres() lazily connects — safe to build with a dummy url, we // never .query() so no socket is opened. 
- const sql = postgres("postgres://u:p@127.0.0.1:1/x", { max: 1 }); - try { - const db = drizzle(sql, { schema: combinedSchema }); - const drizzleInternals = (value: unknown): { _: { fullSchema: Record } } => - value as { _: { fullSchema: Record } }; - const fullSchema = drizzleInternals(db)._.fullSchema; - for (const key of Object.keys(executorSchema)) { - expect(fullSchema, `fullSchema missing "${key}"`).toHaveProperty(key); - } - } finally { - sql.end({ timeout: 0 }).catch(() => undefined); - } - }); + Effect.acquireRelease( + Effect.sync(() => postgres("postgres://u:p@127.0.0.1:1/x", { max: 1 })), + (sql) => Effect.promise(() => sql.end({ timeout: 0 })), + ).pipe( + Effect.flatMap((sql) => + Effect.sync(() => { + const db = drizzle(sql, { schema: combinedSchema }); + const drizzleInternals = ( + value: unknown, + ): { _: { fullSchema: Record } } => + value as { _: { fullSchema: Record } }; + const fullSchema = drizzleInternals(db)._.fullSchema; + for (const key of Object.keys(executorSchema)) { + expect(fullSchema, `fullSchema missing "${key}"`).toHaveProperty(key); + } + }), + ), + ), + ); }); From 62de4e00452337790a0ee9836a202f86a767e0c1 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 16:57:45 -0700 Subject: [PATCH 003/108] Scope Effect lint rules away from boundaries --- .oxlintrc.jsonc | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/.oxlintrc.jsonc b/.oxlintrc.jsonc index 846cb9fb6..2948eb2fb 100644 --- a/.oxlintrc.jsonc +++ b/.oxlintrc.jsonc @@ -37,6 +37,44 @@ ], }, "overrides": [ + { + "files": [ + "apps/cli/src/**/*.{ts,tsx}", + "apps/desktop/src/main.ts", + "scripts/**/*.{ts,js}", + "packages/kernel/runtime-*/src/**/*.{ts,tsx,js,mjs}", + ], + "rules": { + "executor/no-error-constructor": "off", + "executor/no-instanceof-error": "off", + "executor/no-promise-catch": "off", + "executor/no-promise-reject": "off", + "executor/no-try-catch-or-throw": 
"off", + "executor/no-unknown-error-message": "off", + }, + }, + { + "files": ["apps/marketing/src/**/*.astro"], + "rules": { + "executor/no-error-constructor": "off", + "executor/no-instanceof-error": "off", + "executor/no-try-catch-or-throw": "off", + "executor/no-unknown-error-message": "off", + }, + }, + { + "files": ["packages/core/vite-plugin/src/**/*.{ts,tsx}"], + "rules": { + "executor/no-try-catch-or-throw": "off", + }, + }, + { + "files": ["packages/react/src/components/**/*.{ts,tsx}"], + "rules": { + "executor/no-error-constructor": "off", + "executor/no-try-catch-or-throw": "off", + }, + }, { "files": ["packages/plugins/workos-vault/src/**/*.{ts,tsx}"], "rules": { From 269f02971fbe69ff3ab65b868cadc6693b00212a Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 17:11:28 -0700 Subject: [PATCH 004/108] Add JSON parse schema boundary lint --- .../wrdn-effect-schema-boundaries/SKILL.md | 12 ++++++++- .oxlintrc.jsonc | 3 +++ scripts/oxlint-plugin-executor.js | 2 ++ .../rules/no-json-parse.js | 27 +++++++++++++++++++ 4 files changed, 43 insertions(+), 1 deletion(-) create mode 100644 scripts/oxlint-plugin-executor/rules/no-json-parse.js diff --git a/.agents/skills/wrdn-effect-schema-boundaries/SKILL.md b/.agents/skills/wrdn-effect-schema-boundaries/SKILL.md index 1e192deec..74a6b6d95 100644 --- a/.agents/skills/wrdn-effect-schema-boundaries/SKILL.md +++ b/.agents/skills/wrdn-effect-schema-boundaries/SKILL.md @@ -9,8 +9,10 @@ You fix one pattern: domain code is asserting or probing an unknown shape instea ## Fix Shape - Prefer `Schema.decodeUnknownEffect(MySchema)(value)` for untrusted input. +- Prefer `Schema.decodeUnknownEffect(Schema.fromJsonString(MySchema))(text)` or + `Schema.decodeUnknownOption(Schema.parseJson())(text)` for JSON strings. - Keep domain code typed after the decode; do not keep `unknown` and probe it repeatedly. 
-- Replace `as unknown as X`, `as Record`, inline object assertions, `"field" in value`, and `Reflect.get` with a schema, typed adapter, or named guard. +- Replace `JSON.parse`, `as unknown as X`, `as Record`, inline object assertions, `"field" in value`, and `Reflect.get` with a schema, typed adapter, or named guard. - A named guard is acceptable only when parsing is not the right abstraction and the guard has a precise return type. ## Good @@ -23,8 +25,16 @@ const ParsedConfig = Schema.Struct({ const config = yield * Schema.decodeUnknownEffect(ParsedConfig)(raw); ``` +```ts +const config = yield * Schema.decodeUnknownEffect(Schema.fromJsonString(ParsedConfig))(rawText); +``` + ## Bad ```ts const config = raw as unknown as { endpoint: string }; ``` + +```ts +const config = JSON.parse(rawText) as { endpoint: string }; +``` diff --git a/.oxlintrc.jsonc b/.oxlintrc.jsonc index 2948eb2fb..35faac42c 100644 --- a/.oxlintrc.jsonc +++ b/.oxlintrc.jsonc @@ -12,6 +12,7 @@ "executor/no-error-constructor": "error", "executor/no-instanceof-error": "error", "executor/no-instanceof-tagged-error": "error", + "executor/no-json-parse": "error", "executor/no-manual-tag-check": "error", "executor/no-promise-catch": "error", "executor/no-promise-reject": "error", @@ -47,6 +48,7 @@ "rules": { "executor/no-error-constructor": "off", "executor/no-instanceof-error": "off", + "executor/no-json-parse": "off", "executor/no-promise-catch": "off", "executor/no-promise-reject": "off", "executor/no-try-catch-or-throw": "off", @@ -58,6 +60,7 @@ "rules": { "executor/no-error-constructor": "off", "executor/no-instanceof-error": "off", + "executor/no-json-parse": "off", "executor/no-try-catch-or-throw": "off", "executor/no-unknown-error-message": "off", }, diff --git a/scripts/oxlint-plugin-executor.js b/scripts/oxlint-plugin-executor.js index 755b122ec..62edc8c17 100644 --- a/scripts/oxlint-plugin-executor.js +++ b/scripts/oxlint-plugin-executor.js @@ -6,6 +6,7 @@ import noErrorConstructor from 
"./oxlint-plugin-executor/rules/no-error-construc import noInlineObjectTypeAssertion from "./oxlint-plugin-executor/rules/no-inline-object-type-assertion.js"; import noInstanceofError from "./oxlint-plugin-executor/rules/no-instanceof-error.js"; import noInstanceofTaggedError from "./oxlint-plugin-executor/rules/no-instanceof-tagged-error.js"; +import noJsonParse from "./oxlint-plugin-executor/rules/no-json-parse.js"; import noManualTagCheck from "./oxlint-plugin-executor/rules/no-manual-tag-check.js"; import noPromiseCatch from "./oxlint-plugin-executor/rules/no-promise-catch.js"; import noPromiseClientSurface from "./oxlint-plugin-executor/rules/no-promise-client-surface.js"; @@ -38,6 +39,7 @@ export default { "no-inline-object-type-assertion": noInlineObjectTypeAssertion, "no-instanceof-error": noInstanceofError, "no-instanceof-tagged-error": noInstanceofTaggedError, + "no-json-parse": noJsonParse, "no-manual-tag-check": noManualTagCheck, "no-promise-catch": noPromiseCatch, "no-promise-client-surface": noPromiseClientSurface, diff --git a/scripts/oxlint-plugin-executor/rules/no-json-parse.js b/scripts/oxlint-plugin-executor/rules/no-json-parse.js new file mode 100644 index 000000000..654f23fa8 --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/no-json-parse.js @@ -0,0 +1,27 @@ +import { isIdentifier } from "../utils.js"; + +const message = + "Do not use JSON.parse in domain code. Parse JSON with Effect Schema, for example Schema.parseJson or Schema.fromJsonString(...). 
Skill: wrdn-effect-schema-boundaries."; + +const isJsonParse = (node) => + node?.type === "MemberExpression" && + isIdentifier(node.object, "JSON") && + isIdentifier(node.property, "parse"); + +export default { + meta: { + type: "problem", + docs: { + description: message, + }, + }, + create(context) { + return { + CallExpression(node) { + if (isJsonParse(node.callee)) { + context.report({ node, message }); + } + }, + }; + }, +}; From 10d34f804696bf9641c6c4575b2c7b10fdc04525 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 17:16:45 -0700 Subject: [PATCH 005/108] Add redundant primitive cast lint --- .../wrdn-effect-schema-boundaries/SKILL.md | 6 ++- .oxlintrc.jsonc | 1 + scripts/oxlint-plugin-executor.js | 2 + .../rules/no-redundant-primitive-cast.js | 47 +++++++++++++++++++ 4 files changed, 55 insertions(+), 1 deletion(-) create mode 100644 scripts/oxlint-plugin-executor/rules/no-redundant-primitive-cast.js diff --git a/.agents/skills/wrdn-effect-schema-boundaries/SKILL.md b/.agents/skills/wrdn-effect-schema-boundaries/SKILL.md index 74a6b6d95..4692bbc25 100644 --- a/.agents/skills/wrdn-effect-schema-boundaries/SKILL.md +++ b/.agents/skills/wrdn-effect-schema-boundaries/SKILL.md @@ -12,7 +12,7 @@ You fix one pattern: domain code is asserting or probing an unknown shape instea - Prefer `Schema.decodeUnknownEffect(Schema.fromJsonString(MySchema))(text)` or `Schema.decodeUnknownOption(Schema.parseJson())(text)` for JSON strings. - Keep domain code typed after the decode; do not keep `unknown` and probe it repeatedly. -- Replace `JSON.parse`, `as unknown as X`, `as Record`, inline object assertions, `"field" in value`, and `Reflect.get` with a schema, typed adapter, or named guard. +- Replace `JSON.parse`, `value as string`, `as unknown as X`, `as Record`, inline object assertions, `"field" in value`, and `Reflect.get` with a schema, typed adapter, or named guard. 
- A named guard is acceptable only when parsing is not the right abstraction and the guard has a precise return type. ## Good @@ -38,3 +38,7 @@ const config = raw as unknown as { endpoint: string }; ```ts const config = JSON.parse(rawText) as { endpoint: string }; ``` + +```ts +const pattern = updated.pattern as string; +``` diff --git a/.oxlintrc.jsonc b/.oxlintrc.jsonc index 35faac42c..a5164d145 100644 --- a/.oxlintrc.jsonc +++ b/.oxlintrc.jsonc @@ -16,6 +16,7 @@ "executor/no-manual-tag-check": "error", "executor/no-promise-catch": "error", "executor/no-promise-reject": "error", + "executor/no-redundant-primitive-cast": "error", "executor/no-redundant-error-factory": "error", "executor/no-ts-nocheck": "error", "executor/no-try-catch-or-throw": "error", diff --git a/scripts/oxlint-plugin-executor.js b/scripts/oxlint-plugin-executor.js index 62edc8c17..e54cef4ac 100644 --- a/scripts/oxlint-plugin-executor.js +++ b/scripts/oxlint-plugin-executor.js @@ -12,6 +12,7 @@ import noPromiseCatch from "./oxlint-plugin-executor/rules/no-promise-catch.js"; import noPromiseClientSurface from "./oxlint-plugin-executor/rules/no-promise-client-surface.js"; import noPromiseReject from "./oxlint-plugin-executor/rules/no-promise-reject.js"; import noRawErrorThrow from "./oxlint-plugin-executor/rules/no-raw-error-throw.js"; +import noRedundantPrimitiveCast from "./oxlint-plugin-executor/rules/no-redundant-primitive-cast.js"; import noRedundantErrorFactory from "./oxlint-plugin-executor/rules/no-redundant-error-factory.js"; import noTsNocheck from "./oxlint-plugin-executor/rules/no-ts-nocheck.js"; import noTryCatchOrThrow from "./oxlint-plugin-executor/rules/no-try-catch-or-throw.js"; @@ -45,6 +46,7 @@ export default { "no-promise-client-surface": noPromiseClientSurface, "no-promise-reject": noPromiseReject, "no-raw-error-throw": noRawErrorThrow, + "no-redundant-primitive-cast": noRedundantPrimitiveCast, "no-redundant-error-factory": noRedundantErrorFactory, "no-try-catch-or-throw": 
noTryCatchOrThrow, "no-unknown-error-message": noUnknownErrorMessage, diff --git a/scripts/oxlint-plugin-executor/rules/no-redundant-primitive-cast.js b/scripts/oxlint-plugin-executor/rules/no-redundant-primitive-cast.js new file mode 100644 index 000000000..853307bc2 --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/no-redundant-primitive-cast.js @@ -0,0 +1,47 @@ +import { isConfigOrTooling, unwrapExpression } from "../utils.js"; + +const message = + "Avoid primitive casts like value as string. Remove redundant casts, or normalize unknown data with Schema/a typed adapter before use. Skill: wrdn-effect-schema-boundaries."; + +const primitiveTypes = new Set([ + "TSStringKeyword", + "TSNumberKeyword", + "TSBooleanKeyword", +]); + +const isPrimitiveType = (node) => primitiveTypes.has(node?.type); + +const isPossiblyRedundantExpression = (node) => { + const expression = unwrapExpression(node); + return ( + expression?.type === "Identifier" || + expression?.type === "MemberExpression" || + expression?.type === "ChainExpression" + ); +}; + +export default { + meta: { + type: "problem", + docs: { + description: message, + }, + }, + create(context) { + if (isConfigOrTooling(context.filename)) return {}; + + const check = (node) => { + if ( + isPrimitiveType(node.typeAnnotation) && + isPossiblyRedundantExpression(node.expression) + ) { + context.report({ node, message }); + } + }; + + return { + TSAsExpression: check, + TSTypeAssertion: check, + }; + }, +}; From 0bbd75655f194ea88aaed076a505c82c33f797a9 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 17:04:03 -0700 Subject: [PATCH 006/108] Clean up core OAuth discovery errors --- packages/core/sdk/src/oauth-discovery.test.ts | 23 ++- packages/core/sdk/src/oauth-discovery.ts | 154 +++++++----------- packages/core/sdk/src/oauth-helpers.test.ts | 14 +- packages/core/sdk/src/oauth-helpers.ts | 35 ++-- 4 files changed, 103 insertions(+), 123 deletions(-) 
diff --git a/packages/core/sdk/src/oauth-discovery.test.ts b/packages/core/sdk/src/oauth-discovery.test.ts index 2d0cd9462..88d17b13c 100644 --- a/packages/core/sdk/src/oauth-discovery.test.ts +++ b/packages/core/sdk/src/oauth-discovery.test.ts @@ -1,5 +1,5 @@ import { afterEach, describe, expect, it, vi } from "@effect/vitest"; -import { Cause, Effect, Exit } from "effect"; +import { Cause, Effect, Exit, Schema } from "effect"; import { OAuthDiscoveryError, @@ -11,6 +11,14 @@ import { type Handler = (url: string, init: RequestInit) => Response | Promise; +const DcrRequestBody = Schema.Struct({ + redirect_uris: Schema.Array(Schema.String), + token_endpoint_auth_method: Schema.String, +}); +const decodeDcrRequestBody = Schema.decodeUnknownSync( + Schema.fromJsonString(DcrRequestBody), +); + const installFetchRouter = ( handlers: readonly { match: (url: string) => boolean; handle: Handler }[], ): { calls: Array<{ url: string; init: RequestInit }> } => { @@ -196,7 +204,7 @@ describe("registerDynamicClient", () => { const call = calls[0]!; expect(call.init.method).toBe("POST"); - const body = JSON.parse(call.init.body as string); + const body = decodeDcrRequestBody(call.init.body); expect(body.redirect_uris).toEqual(["https://app.example.com/cb"]); expect(body.token_endpoint_auth_method).toBe("none"); }); @@ -251,11 +259,12 @@ describe("registerDynamicClient", () => { expect(Exit.isFailure(exit)).toBe(true); if (!Exit.isFailure(exit)) return; const reason = exit.cause.reasons.find(Cause.isFailReason); - if (!(reason?.error instanceof OAuthDiscoveryError)) { - throw new Error("expected OAuthDiscoveryError"); - } - expect(reason.error.status).toBe(400); - expect(reason.error.message).toMatch(/invalid_client_metadata/); + const error = reason?.error; + expect(error).toEqual(expect.objectContaining({ + _tag: "OAuthDiscoveryError", + status: 400, + message: expect.stringMatching(/invalid_client_metadata/), + })); }); }); diff --git a/packages/core/sdk/src/oauth-discovery.ts 
b/packages/core/sdk/src/oauth-discovery.ts index cd33e9b87..682eecea1 100644 --- a/packages/core/sdk/src/oauth-discovery.ts +++ b/packages/core/sdk/src/oauth-discovery.ts @@ -18,7 +18,7 @@ // callers actually need. // --------------------------------------------------------------------------- -import { Data, Effect, Result, Schema } from "effect"; +import { Data, Effect, Option, Result, Schema } from "effect"; import * as oauth from "oauth4webapi"; import { @@ -45,22 +45,15 @@ export class OAuthDiscoveryError extends Data.TaggedError( readonly cause?: unknown; }> {} -const discoveryError = ( - message: string, - options: { status?: number; cause?: unknown } = {}, -): OAuthDiscoveryError => - new OAuthDiscoveryError({ - message, - status: options.status, - cause: options.cause, - }); - // --------------------------------------------------------------------------- // Schemas (narrow structural parsing — the RFCs leave many fields // optional; we validate only the subset consumers read) // --------------------------------------------------------------------------- const StringArray = Schema.Array(Schema.String); +const JsonUnknownFromString = Schema.fromJsonString(Schema.Unknown); +const decodeJsonUnknownSync = Schema.decodeUnknownSync(JsonUnknownFromString); +const decodeJsonUnknownOption = Schema.decodeUnknownOption(JsonUnknownFromString); export const OAuthProtectedResourceMetadataSchema = Schema.Struct({ resource: Schema.optional(Schema.String), @@ -155,20 +148,17 @@ export interface DiscoveryRequestOptions { const MCP_PROTOCOL_VERSION_HEADER = "mcp-protocol-version"; const isLoopbackHttpUrl = (value: string): boolean => { - try { - const url = new URL(value); - if (url.protocol !== "http:") return false; - const hostname = url.hostname.toLowerCase(); - return ( - hostname === "localhost" || - hostname === "0.0.0.0" || - hostname === "::1" || - hostname === "[::1]" || - hostname.startsWith("127.") - ); - } catch { - return false; - } + if (!URL.canParse(value)) 
return false; + const url = new URL(value); + if (url.protocol !== "http:") return false; + const hostname = url.hostname.toLowerCase(); + return ( + hostname === "localhost" || + hostname === "0.0.0.0" || + hostname === "::1" || + hostname === "[::1]" || + hostname.startsWith("127.") + ); }; const oauth4webapiOptions = ( @@ -260,30 +250,26 @@ export const discoverProtectedResourceMetadata = ( } const text = await response.text(); if (text.length === 0) return "skip" as const; - return { status: response.status, body: JSON.parse(text) } as const; + return { status: response.status, body: decodeJsonUnknownSync(text) } as const; }, catch: (cause) => - discoveryError( - `Failed to fetch ${url}: ${cause instanceof Error ? cause.message : String(cause)}`, - { cause }, - ), + new OAuthDiscoveryError({ + message: `Failed to fetch protected resource metadata at ${url}`, + cause, + }), }); if (result === "skip") continue; if (!("body" in result)) { - return yield* Effect.fail( - discoveryError( - `Protected resource metadata returned status ${result.status}`, - { status: result.status }, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `Protected resource metadata returned status ${result.status}`, + status: result.status, + }); } const metadata = yield* decodeResourceMetadata(result.body).pipe( Effect.mapError( (err) => new OAuthDiscoveryError({ - message: `Protected resource metadata is malformed: ${ - Schema.isSchemaError(err) ? err.message : String(err) - }`, + message: "Protected resource metadata is malformed", cause: err, }), ), @@ -348,15 +334,11 @@ export const discoverAuthorizationServerMetadata = ( raw: as, }; }, - catch: (cause) => { - if (cause instanceof OAuthDiscoveryError) return cause; - return discoveryError( - `Discovery (${algorithm}) failed for ${issuer}: ${ - cause instanceof Error ? 
cause.message : String(cause) - }`, - { cause }, - ); - }, + catch: (cause) => + new OAuthDiscoveryError({ + message: `Discovery (${algorithm}) failed for ${issuer}`, + cause, + }), }).pipe( // If one algorithm fails mid-roundtrip (network, parse, issuer // mismatch) we still want to try the other before giving up. @@ -370,9 +352,7 @@ export const discoverAuthorizationServerMetadata = ( Effect.mapError( (err) => new OAuthDiscoveryError({ - message: `Authorization server metadata is malformed: ${ - Schema.isSchemaError(err) ? err.message : String(err) - }`, + message: "Authorization server metadata is malformed", cause: err, }), ), @@ -437,11 +417,7 @@ const interpretDcrFailure = ( ): DcrErrorBody | DcrTransport => { // RFC 6749 error envelope: `{error, error_description?}` with 4xx. if (status >= 400 && status < 500) { - const parsed = Result.try({ - try: () => (text ? (JSON.parse(text) as unknown) : null), - catch: () => null, - }); - const body = Result.isSuccess(parsed) ? parsed.success : null; + const body = text ? Option.getOrNull(decodeJsonUnknownOption(text)) : null; if ( body && typeof body === "object" && @@ -497,7 +473,7 @@ export const registerDynamicClient = ( }), catch: (cause) => new DcrTransport({ - message: `Dynamic Client Registration request failed: ${cause instanceof Error ? cause.message : String(cause)}`, + message: "Dynamic Client Registration request failed", cause, }), }); @@ -505,9 +481,10 @@ export const registerDynamicClient = ( // Accept both 200 and 201 as success — RFC 7591 mandates 201, but // Todoist (and others) return 200 OK with the client information body. 
if (response.status !== 200 && response.status !== 201) { - const text = yield* Effect.promise(() => - response.text().catch(() => ""), - ); + const text = yield* Effect.tryPromise({ + try: () => response.text(), + catch: () => "", + }); return yield* interpretDcrFailure(response.status, text); } @@ -520,22 +497,20 @@ export const registerDynamicClient = ( cause, }), }); - const json = yield* Effect.try({ - try: () => JSON.parse(text) as unknown, - catch: (cause) => + const json = yield* Schema.decodeUnknownEffect(JsonUnknownFromString)(text).pipe( + Effect.mapError((cause) => new DcrTransport({ message: "Dynamic Client Registration response was not valid JSON", status: response.status, cause, }), - }); + ), + ); return yield* decodeClientInformation(json).pipe( Effect.mapError( (err) => new OAuthDiscoveryError({ - message: `Dynamic Client Registration response is malformed: ${ - Schema.isSchemaError(err) ? err.message : String(err) - }`, + message: "Dynamic Client Registration response is malformed", cause: err, }), ), @@ -544,16 +519,18 @@ export const registerDynamicClient = ( Effect.catchTags({ DcrErrorBody: (err) => Effect.fail( - discoveryError( - `Dynamic Client Registration failed: ${err.error}${ - err.error_description ? ` — ${err.error_description}` : "" + new OAuthDiscoveryError({ + message: `Dynamic Client Registration failed: ${err.error}${ + err.error_description ? ` - ${err.error_description}` : "" }`, - { status: err.status, cause: err }, - ), + status: err.status, + cause: err, + }), ), DcrTransport: (err) => Effect.fail( - discoveryError(`Dynamic Client Registration failed: ${err.message}`, { + new OAuthDiscoveryError({ + message: "Dynamic Client Registration failed", status: err.status, cause: err.cause ?? 
err, }), @@ -649,29 +626,23 @@ export const beginDynamicAuthorization = ( ); if (!authServer) { - return yield* Effect.fail( - discoveryError( - `No OAuth authorization server metadata at ${authorizationServerUrl}`, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `No OAuth authorization server metadata at ${authorizationServerUrl}`, + }); } const pkceMethods = authServer.metadata.code_challenge_methods_supported ?? []; if (pkceMethods.length > 0 && !pkceMethods.includes("S256")) { - return yield* Effect.fail( - discoveryError( - `Authorization server does not support PKCE S256 (advertised: ${pkceMethods.join(", ")})`, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `Authorization server does not support PKCE S256 (advertised: ${pkceMethods.join(", ")})`, + }); } const responseTypes = authServer.metadata.response_types_supported ?? []; if (responseTypes.length > 0 && !responseTypes.includes("code")) { - return yield* Effect.fail( - discoveryError( - `Authorization server does not support response_type=code (advertised: ${responseTypes.join(", ")})`, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `Authorization server does not support response_type=code (advertised: ${responseTypes.join(", ")})`, + }); } const baseClientMetadata: DynamicClientMetadata = { @@ -689,9 +660,10 @@ export const beginDynamicAuthorization = ( const reg = authServer.metadata.registration_endpoint; if (!reg) { return Effect.fail( - discoveryError( - "Authorization server does not advertise registration_endpoint — cannot auto-register a client", - ), + new OAuthDiscoveryError({ + message: + "Authorization server does not advertise registration_endpoint - cannot auto-register a client", + }), ); } return registerDynamicClient( diff --git a/packages/core/sdk/src/oauth-helpers.test.ts b/packages/core/sdk/src/oauth-helpers.test.ts index 93d9fd7f9..7700d42a6 100644 --- a/packages/core/sdk/src/oauth-helpers.test.ts +++ 
b/packages/core/sdk/src/oauth-helpers.test.ts @@ -365,7 +365,7 @@ describe("exchangeAuthorizationCode", () => { }); it("returns a typed OAuth2Error on transport failure", async () => { - globalThis.fetch = vi.fn().mockRejectedValue(new Error("boom")) as typeof fetch; + globalThis.fetch = vi.fn().mockRejectedValue({ message: "boom" }) as typeof fetch; const exit = await Effect.runPromiseExit( exchangeAuthorizationCode({ tokenUrl: "https://example.com/token", @@ -380,7 +380,7 @@ describe("exchangeAuthorizationCode", () => { const err = exit.cause; const failure = JSON.stringify(err); expect(failure).toContain("OAuth2Error"); - expect(failure).toContain("boom"); + expect(failure).toContain("OAuth token exchange failed"); }); it("propagates RFC 6749 error_description text in the OAuth2Error", async () => { @@ -563,7 +563,7 @@ describe("shouldRefreshToken", () => { describe("OAuth2Error tagging", () => { beforeEach(() => { - globalThis.fetch = vi.fn().mockRejectedValue(new Error("network down")) as typeof fetch; + globalThis.fetch = vi.fn().mockRejectedValue({ message: "network down" }) as typeof fetch; }); afterEach(() => { globalThis.fetch = originalFetch; @@ -585,8 +585,10 @@ describe("OAuth2Error tagging", () => { it("OAuth2Error is constructable directly with message and cause", () => { const err = new OAuth2Error({ message: "test", cause: { foo: 1 } }); - expect(err._tag).toBe("OAuth2Error"); - expect(err.message).toBe("test"); - expect(err.cause).toEqual({ foo: 1 }); + expect(err).toMatchObject({ + _tag: "OAuth2Error", + message: "test", + cause: { foo: 1 }, + }); }); }); diff --git a/packages/core/sdk/src/oauth-helpers.ts b/packages/core/sdk/src/oauth-helpers.ts index ec616973b..c62abc03b 100644 --- a/packages/core/sdk/src/oauth-helpers.ts +++ b/packages/core/sdk/src/oauth-helpers.ts @@ -134,7 +134,7 @@ const toOAuth2Error = (cause: unknown): OAuth2Error => { }); } return new OAuth2Error({ - message: `OAuth token exchange failed: ${String(cause)}`, + message: 
"OAuth token exchange failed", cause, }); }; @@ -173,20 +173,17 @@ const asFromTokenUrlAndIssuer = ( }; const isLoopbackHttpUrl = (value: string): boolean => { - try { - const url = new URL(value); - if (url.protocol !== "http:") return false; - const hostname = url.hostname.toLowerCase(); - return ( - hostname === "localhost" || - hostname === "0.0.0.0" || - hostname === "::1" || - hostname === "[::1]" || - hostname.startsWith("127.") - ); - } catch { - return false; - } + if (!URL.canParse(value)) return false; + const url = new URL(value); + if (url.protocol !== "http:") return false; + const hostname = url.hostname.toLowerCase(); + return ( + hostname === "localhost" || + hostname === "0.0.0.0" || + hostname === "::1" || + hostname === "[::1]" || + hostname.startsWith("127.") + ); }; const oauth4webapiRequestOptions = ( @@ -230,10 +227,10 @@ const tokenResponseFrom = ( // its claims against the AS metadata and rejects mismatches we don't care // about. Strip the field before delegation. const stripIdToken = async (response: Response): Promise => { - const body = await response - .clone() - .json() - .catch(() => null); + const body = await response.clone().json().then( + (value: unknown) => value, + () => null, + ); if ( !body || typeof body !== "object" || From 573ed37507c4489e7cf4e111acdcd47ee018e5e1 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 18:02:50 -0700 Subject: [PATCH 007/108] Clean up MCP invocation boundaries --- packages/plugins/mcp/src/sdk/invoke.ts | 115 +++++++++---------------- 1 file changed, 42 insertions(+), 73 deletions(-) diff --git a/packages/plugins/mcp/src/sdk/invoke.ts b/packages/plugins/mcp/src/sdk/invoke.ts index 6cd7e9dbd..39f1f0e88 100644 --- a/packages/plugins/mcp/src/sdk/invoke.ts +++ b/packages/plugins/mcp/src/sdk/invoke.ts @@ -10,7 +10,7 @@ // 4. Retrying once on connection failure (invalidate + reconnect). 
// --------------------------------------------------------------------------- -import { Cause, Effect, Exit, Schema, ScopedCache } from "effect"; +import { Cause, Effect, Exit, Option, Predicate, Schema, ScopedCache } from "effect"; import { ElicitRequestSchema } from "@modelcontextprotocol/sdk/types.js"; @@ -29,15 +29,13 @@ import type { McpStoredSourceData } from "./types"; // Helpers // --------------------------------------------------------------------------- -const asRecord = (value: unknown): Record => - typeof value === "object" && value !== null && !Array.isArray(value) - ? (value as Record) - : {}; +const ArgsRecord = Schema.Record(Schema.String, Schema.Unknown); +const decodeArgsRecord = Schema.decodeUnknownOption(ArgsRecord); -const connectionCacheKey = ( - sd: McpStoredSourceData, - invokerScope: string, -): string => +const argsRecord = (value: unknown): Record => + Option.getOrElse(decodeArgsRecord(value), () => ({})); + +const connectionCacheKey = (sd: McpStoredSourceData, invokerScope: string): string => sd.transport === "stdio" ? `stdio:${sd.command}` : // Remote sources may resolve per-user secrets (OAuth tokens, header @@ -83,42 +81,34 @@ const toElicitationRequest = (params: McpElicitParams): ElicitationRequest => requestedSchema: params.requestedSchema, }); -const installElicitationHandler = ( - client: McpConnection["client"], - elicit: Elicit, -): void => { - client.setRequestHandler( - ElicitRequestSchema, - async (request: { params: unknown }) => { - const params = decodeElicitParams(request.params); - const req = toElicitationRequest(params); - // Use runPromiseExit so we can inspect typed failures — `elicit` - // fails with `ElicitationDeclinedError` on decline/cancel, which - // we translate into the equivalent MCP elicit response instead of - // surfacing as a JSON-RPC error. 
- const exit = await Effect.runPromiseExit(elicit(req)); - if (Exit.isSuccess(exit)) { - const response = exit.value; - return { - action: response.action, - ...(response.action === "accept" && response.content - ? { content: response.content } - : {}), - }; - } - const failure = exit.cause.reasons.find(Cause.isFailReason); - if (failure) { - const err = failure.error as { - readonly _tag?: string; - readonly action?: "decline" | "cancel"; - }; - if (err._tag === "ElicitationDeclinedError") { - return { action: err.action ?? "decline" }; - } +const installElicitationHandler = (client: McpConnection["client"], elicit: Elicit): void => { + client.setRequestHandler(ElicitRequestSchema, async (request: { params: unknown }) => { + const params = decodeElicitParams(request.params); + const req = toElicitationRequest(params); + // Use runPromiseExit so we can inspect typed failures — `elicit` + // fails with `ElicitationDeclinedError` on decline/cancel, which + // we translate into the equivalent MCP elicit response instead of + // surfacing as a JSON-RPC error. + const exit = await Effect.runPromiseExit(elicit(req)); + if (Exit.isSuccess(exit)) { + const response = exit.value; + return { + action: response.action, + ...(response.action === "accept" && response.content ? { content: response.content } : {}), + }; + } + const failure = exit.cause.reasons.find(Cause.isFailReason); + if (failure) { + const err = failure.error; + if (Predicate.isTagged(err, "ElicitationDeclinedError")) { + const action = + Predicate.hasProperty(err, "action") && err.action === "cancel" ? 
"cancel" : "decline"; + return { action }; } - throw Cause.squash(exit.cause); - }, - ); + } + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: MCP SDK async request handlers signal unexpected failures by rejecting + throw Cause.squash(exit.cause); + }); }; // --------------------------------------------------------------------------- @@ -135,12 +125,10 @@ const useConnection = ( installElicitationHandler(connection.client, elicit); return yield* Effect.tryPromise({ try: () => connection.client.callTool({ name: toolName, arguments: args }), - catch: (cause) => + catch: () => new McpInvocationError({ toolName, - message: `MCP tool call failed for ${toolName}: ${ - cause instanceof Error ? cause.message : String(cause) - }`, + message: `MCP tool call failed for ${toolName}`, }), }).pipe( Effect.withSpan("plugin.mcp.client.call_tool", { @@ -163,15 +151,8 @@ export interface InvokeMcpToolInput { * collapse multiple users onto one shared connection. */ readonly invokerScope: string; readonly resolveConnector: () => Effect.Effect; - readonly connectionCache: ScopedCache.ScopedCache< - string, - McpConnection, - McpConnectionError - >; - readonly pendingConnectors: Map< - string, - Effect.Effect - >; + readonly connectionCache: ScopedCache.ScopedCache; + readonly pendingConnectors: Map>; readonly elicit: Elicit; } @@ -179,12 +160,10 @@ export const invokeMcpTool = ( input: InvokeMcpToolInput, ): Effect.Effect => { const transport: string = - input.sourceData.transport === "stdio" - ? "stdio" - : (input.sourceData.remoteTransport ?? "auto"); + input.sourceData.transport === "stdio" ? "stdio" : (input.sourceData.remoteTransport ?? 
"auto"); return Effect.gen(function* () { const cacheKey = connectionCacheKey(input.sourceData, input.invokerScope); - const args = asRecord(input.args); + const args = argsRecord(input.args); // Register the connector for the cache lookup (side-channel pattern // — the ScopedCache lookup closure reads from `pendingConnectors`). @@ -209,12 +188,7 @@ export const invokeMcpTool = ( }), ); - return yield* useConnection( - firstConnection, - input.toolName, - args, - input.elicit, - ).pipe( + return yield* useConnection(firstConnection, input.toolName, args, input.elicit).pipe( // On failure, invalidate the cache and retry once with a fresh // connection. Matches the old invoker's retry-once semantics. Effect.catch(() => @@ -222,12 +196,7 @@ export const invokeMcpTool = ( yield* ScopedCache.invalidate(input.connectionCache, cacheKey); input.pendingConnectors.set(cacheKey, connector); const fresh = yield* ScopedCache.get(input.connectionCache, cacheKey); - return yield* useConnection( - fresh, - input.toolName, - args, - input.elicit, - ); + return yield* useConnection(fresh, input.toolName, args, input.elicit); }).pipe( Effect.withSpan("plugin.mcp.invoke.retry", { attributes: { From 0fab45725ad7a36df30f4d7873c13487d6c466e9 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 18:07:21 -0700 Subject: [PATCH 008/108] Clean up MCP manifest option handling --- packages/plugins/mcp/src/sdk/manifest.ts | 28 +++++++++++++----------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/packages/plugins/mcp/src/sdk/manifest.ts b/packages/plugins/mcp/src/sdk/manifest.ts index 6b92f1b8a..ddf3d2229 100644 --- a/packages/plugins/mcp/src/sdk/manifest.ts +++ b/packages/plugins/mcp/src/sdk/manifest.ts @@ -1,4 +1,4 @@ -import { Schema } from "effect"; +import { Option, Schema } from "effect"; import { McpToolAnnotations } from "./types"; @@ -51,7 +51,7 @@ const decodeListToolsResult = 
Schema.decodeUnknownOption(ListToolsResult); const decodeServerInfo = Schema.decodeUnknownOption(ServerInfo); export const isListToolsResult = (value: unknown): boolean => - decodeListToolsResult(value)._tag === "Some"; + Option.isSome(decodeListToolsResult(value)); // --------------------------------------------------------------------------- // Tool ID sanitization @@ -86,14 +86,19 @@ export const extractManifestFromListToolsResult = ( ): McpToolManifest => { const seen = new Map(); - const listed = decodeListToolsResult(listToolsResult).pipe((opt) => - opt._tag === "Some" ? opt.value.tools : [], + const listed = decodeListToolsResult(listToolsResult).pipe( + Option.map((result) => result.tools), + Option.getOrElse(() => []), ); - const server = decodeServerInfo(metadata?.serverInfo).pipe((opt): McpServerMetadata | null => - opt._tag === "Some" - ? { name: opt.value.name ?? null, version: opt.value.version ?? null } - : null, + const server = decodeServerInfo(metadata?.serverInfo).pipe( + Option.map( + (info): McpServerMetadata => ({ + name: info.name ?? null, + version: info.version ?? null, + }), + ), + Option.getOrNull, ); const tools = listed.flatMap((tool): McpToolManifestEntry[] => { @@ -126,11 +131,8 @@ const slugify = (value: string): string => .replace(/^_+|_+$/g, ""); const hostnameOf = (url: string): string | null => { - try { - return new URL(url).hostname; - } catch { - return null; - } + if (!URL.canParse(url)) return null; + return new URL(url).hostname; }; const basenameOf = (path: string): string => path.trim().split(/[\\/]/).pop() ?? 
path.trim(); From ea3d68218953bcaffcb8d088450d82e3371c5477 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 18:06:45 -0700 Subject: [PATCH 009/108] Clean up MCP connection error messages --- packages/plugins/mcp/src/sdk/connection.ts | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/packages/plugins/mcp/src/sdk/connection.ts b/packages/plugins/mcp/src/sdk/connection.ts index b8b4368da..7f588328a 100644 --- a/packages/plugins/mcp/src/sdk/connection.ts +++ b/packages/plugins/mcp/src/sdk/connection.ts @@ -87,12 +87,10 @@ const connectClient = (input: { yield* Effect.tryPromise({ try: () => client.connect(transportInstance), - catch: (cause) => + catch: () => new McpConnectionError({ transport: input.transport, - message: `Failed connecting via ${input.transport}: ${ - cause instanceof Error ? cause.message : String(cause) - }`, + message: `Failed connecting via ${input.transport}`, }), }).pipe( Effect.withSpan("plugin.mcp.connection.handshake", { @@ -124,12 +122,10 @@ export const createMcpConnector = (input: ConnectorInput): McpConnector => { // `node:child_process`) is only loaded when stdio is actually used. const { createStdioTransport } = yield* Effect.tryPromise({ try: () => import("./stdio-connector"), - catch: (cause) => + catch: () => new McpConnectionError({ transport: "stdio", - message: `Failed to load stdio transport module: ${ - cause instanceof Error ? 
cause.message : String(cause) - }`, + message: "Failed to load stdio transport module", }), }); From 253b01146fbb87befb8bd88355d9d5cb775c0320 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 18:13:32 -0700 Subject: [PATCH 010/108] Clean up execution engine tagged checks --- packages/core/execution/src/engine.ts | 32 ++++++++++++--------------- 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/packages/core/execution/src/engine.ts b/packages/core/execution/src/engine.ts index b902c93d3..793a1ded4 100644 --- a/packages/core/execution/src/engine.ts +++ b/packages/core/execution/src/engine.ts @@ -1,4 +1,4 @@ -import { Deferred, Effect, Fiber, Ref } from "effect"; +import { Deferred, Effect, Fiber, Predicate, Ref } from "effect"; import type * as Cause from "effect/Cause"; import type { @@ -24,9 +24,7 @@ import { buildExecuteDescription } from "./description"; // Types // --------------------------------------------------------------------------- -export type ExecutionEngineConfig< - E extends Cause.YieldableError = CodeExecutionError, -> = { +export type ExecutionEngineConfig = { readonly executor: Executor; readonly codeExecutor: CodeExecutor; }; @@ -107,8 +105,10 @@ export const formatPausedExecution = ( } => { const req = paused.elicitationContext.request; const lines: string[] = [`Execution paused: ${req.message}`]; + const isUrlElicitation = Predicate.isTagged(req, "UrlElicitation"); + const isFormElicitation = Predicate.isTagged(req, "FormElicitation"); - if (req._tag === "UrlElicitation") { + if (isUrlElicitation) { lines.push(`\nOpen this URL in a browser:\n${req.url}`); lines.push("\nAfter the browser flow, resume with the executionId below:"); } else { @@ -127,10 +127,10 @@ export const formatPausedExecution = ( status: "waiting_for_interaction", executionId: paused.id, interaction: { - kind: req._tag === "UrlElicitation" ? "url" : "form", + kind: isUrlElicitation ? 
"url" : "form", message: req.message, - ...(req._tag === "UrlElicitation" ? { url: req.url } : {}), - ...(req._tag === "FormElicitation" ? { requestedSchema: req.requestedSchema } : {}), + ...(isUrlElicitation ? { url: req.url } : {}), + ...(isFormElicitation ? { requestedSchema: req.requestedSchema } : {}), }, }, }; @@ -202,12 +202,12 @@ const makeFullInvoker = (executor: Executor, invokeOptions: InvokeOptions): Sand } const limit = readOptionalLimit(args.limit, "tools.search"); - if (limit instanceof ExecutionToolError) { + if (Predicate.isTagged(limit, "ExecutionToolError")) { return Effect.fail(limit); } const offset = readOptionalOffset(args.offset, "tools.search"); - if (offset instanceof ExecutionToolError) { + if (Predicate.isTagged(offset, "ExecutionToolError")) { return Effect.fail(offset); } @@ -242,7 +242,7 @@ const makeFullInvoker = (executor: Executor, invokeOptions: InvokeOptions): Sand isRecord(args) ? args.limit : undefined, "tools.executor.sources.list", ); - if (limit instanceof ExecutionToolError) { + if (Predicate.isTagged(limit, "ExecutionToolError")) { return Effect.fail(limit); } @@ -250,7 +250,7 @@ const makeFullInvoker = (executor: Executor, invokeOptions: InvokeOptions): Sand isRecord(args) ? 
args.offset : undefined, "tools.executor.sources.list", ); - if (offset instanceof ExecutionToolError) { + if (Predicate.isTagged(offset, "ExecutionToolError")) { return Effect.fail(offset); } @@ -340,9 +340,7 @@ export type ExecutionEngine readonly getDescription: Effect.Effect; }; -export const createExecutionEngine = < - E extends Cause.YieldableError = CodeExecutionError, ->( +export const createExecutionEngine = ( config: ExecutionEngineConfig, ): ExecutionEngine => { const { executor, codeExecutor } = config; @@ -470,9 +468,7 @@ export const createExecutionEngine = < const invoker = makeFullInvoker(executor, { onElicitation: options.onElicitation, }); - return yield* codeExecutor - .execute(code, invoker) - .pipe(Effect.withSpan("executor.code.exec")); + return yield* codeExecutor.execute(code, invoker).pipe(Effect.withSpan("executor.code.exec")); }); return { From 427d4e2dbc076bc261fad60bbacf64af0f4207ad Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 18:18:21 -0700 Subject: [PATCH 011/108] Ban Effect escape hatches --- .../skills/wrdn-effect-typed-errors/SKILL.md | 19 +++++++++++ .oxlintrc.jsonc | 3 ++ scripts/oxlint-plugin-executor.js | 2 ++ .../rules/no-effect-escape-hatch.js | 33 +++++++++++++++++++ 4 files changed, 57 insertions(+) create mode 100644 scripts/oxlint-plugin-executor/rules/no-effect-escape-hatch.js diff --git a/.agents/skills/wrdn-effect-typed-errors/SKILL.md b/.agents/skills/wrdn-effect-typed-errors/SKILL.md index 15290171f..a0517190f 100644 --- a/.agents/skills/wrdn-effect-typed-errors/SKILL.md +++ b/.agents/skills/wrdn-effect-typed-errors/SKILL.md @@ -152,6 +152,25 @@ Effect.ignore( ); ``` +### Effect die / orDie escape hatches + +Bad in domain code: + +```ts +program.pipe(Effect.orDie) +Effect.die(error) +``` + +Good: + +```ts +program.pipe( + Effect.mapError((cause) => new DomainError({ message: "Operation failed", cause })), +) +``` + +`Effect.die`, 
`Effect.dieMessage`, `Effect.orDie`, and `Effect.orDieWith` turn typed failures into defects. Use them only at a true runtime boundary where the host cannot represent typed failures, and keep that usage behind a narrow lint suppression with a `boundary:` reason. Do not use `orDie` to avoid threading an error type through normal Effect code. + ### try/catch Bad: diff --git a/.oxlintrc.jsonc b/.oxlintrc.jsonc index a5164d145..2f8f0a6d8 100644 --- a/.oxlintrc.jsonc +++ b/.oxlintrc.jsonc @@ -8,6 +8,7 @@ "executor/no-double-cast": "error", "executor/no-cross-package-relative-imports": "error", "executor/require-reactivity-keys": "error", + "executor/no-effect-escape-hatch": "error", "executor/no-effect-internal-tags": "error", "executor/no-error-constructor": "error", "executor/no-instanceof-error": "error", @@ -47,6 +48,7 @@ "packages/kernel/runtime-*/src/**/*.{ts,tsx,js,mjs}", ], "rules": { + "executor/no-effect-escape-hatch": "off", "executor/no-error-constructor": "off", "executor/no-instanceof-error": "off", "executor/no-json-parse": "off", @@ -59,6 +61,7 @@ { "files": ["apps/marketing/src/**/*.astro"], "rules": { + "executor/no-effect-escape-hatch": "off", "executor/no-error-constructor": "off", "executor/no-instanceof-error": "off", "executor/no-json-parse": "off", diff --git a/scripts/oxlint-plugin-executor.js b/scripts/oxlint-plugin-executor.js index e54cef4ac..e09c0367a 100644 --- a/scripts/oxlint-plugin-executor.js +++ b/scripts/oxlint-plugin-executor.js @@ -1,6 +1,7 @@ import noConditionalTests from "./oxlint-plugin-executor/rules/no-conditional-tests.js"; import noCrossPackageRelativeImports from "./oxlint-plugin-executor/rules/no-cross-package-relative-imports.js"; import noDoubleCast from "./oxlint-plugin-executor/rules/no-double-cast.js"; +import noEffectEscapeHatch from "./oxlint-plugin-executor/rules/no-effect-escape-hatch.js"; import noEffectInternalTags from "./oxlint-plugin-executor/rules/no-effect-internal-tags.js"; import noErrorConstructor from 
"./oxlint-plugin-executor/rules/no-error-constructor.js"; import noInlineObjectTypeAssertion from "./oxlint-plugin-executor/rules/no-inline-object-type-assertion.js"; @@ -34,6 +35,7 @@ export default { "no-double-cast": noDoubleCast, "no-cross-package-relative-imports": noCrossPackageRelativeImports, "require-reactivity-keys": requireReactivityKeys, + "no-effect-escape-hatch": noEffectEscapeHatch, "no-effect-internal-tags": noEffectInternalTags, "no-error-constructor": noErrorConstructor, "no-ts-nocheck": noTsNocheck, diff --git a/scripts/oxlint-plugin-executor/rules/no-effect-escape-hatch.js b/scripts/oxlint-plugin-executor/rules/no-effect-escape-hatch.js new file mode 100644 index 000000000..6682860f9 --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/no-effect-escape-hatch.js @@ -0,0 +1,33 @@ +import { getPropertyName, isTestLike, unwrapExpression } from "../utils.js"; + +const escapeHatches = new Set(["die", "dieMessage", "orDie", "orDieWith"]); + +const message = + "Do not collapse Effect failures with die/orDie escape hatches. Keep typed errors in the Effect error channel; at true runtime edges use a narrow boundary suppression. 
Skill: wrdn-effect-typed-errors."; + +const isEffectEscapeHatch = (node) => { + const expression = unwrapExpression(node); + if (expression?.type !== "MemberExpression") return false; + const property = getPropertyName(expression.property); + return escapeHatches.has(property); +}; + +export default { + meta: { + type: "problem", + docs: { + description: "Disallow Effect die/orDie escape hatches outside test code.", + }, + }, + create(context) { + if (isTestLike(context.filename)) return {}; + + return { + MemberExpression(node) { + if (isEffectEscapeHatch(node)) { + context.report({ node, message }); + } + }, + }; + }, +}; From fe37cf1ee9ec2b34b29b98b4606906963987e49e Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:10:47 -0700 Subject: [PATCH 012/108] Use Effect assertions in connection tests --- packages/core/sdk/src/connections.test.ts | 46 ++++++++++++----------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/packages/core/sdk/src/connections.test.ts b/packages/core/sdk/src/connections.test.ts index 391d7892d..10e64d329 100644 --- a/packages/core/sdk/src/connections.test.ts +++ b/packages/core/sdk/src/connections.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "@effect/vitest"; -import { Deferred, Effect, Fiber } from "effect"; +import { Deferred, Effect, Exit, Fiber, Predicate } from "effect"; import { makeMemoryAdapter } from "@executor-js/storage-core/testing/memory"; @@ -162,7 +162,9 @@ describe("connections", () => { }), ) .pipe(Effect.flip); - expect(err._tag).toBe("ConnectionProviderNotRegisteredError"); + expect( + Predicate.isTagged(err, "ConnectionProviderNotRegisteredError"), + ).toBe(true); }), ); @@ -194,7 +196,7 @@ describe("connections", () => { }), ), ); - expect(result._tag).toBe("Failure"); + expect(Exit.isFailure(result)).toBe(true); }), ); @@ -280,8 +282,8 @@ describe("connections", () => { const err = yield* executor.secrets 
.remove("conn-1.access") .pipe(Effect.flip); - expect((err as { _tag: string })._tag).toBe( - "SecretOwnedByConnectionError", + expect(Predicate.isTagged(err, "SecretOwnedByConnectionError")).toBe( + true, ); }), ); @@ -553,10 +555,10 @@ describe("connections", () => { { concurrency: "unbounded" }, ); expect(leaderResult).toBe("rotated-1"); - for (const r of followerResults) { - expect(r._tag).toBe("Success"); - if (r._tag !== "Success") continue; - expect(r.value).toBe("rotated-1"); + for (const result of followerResults) { + expect(Exit.isSuccess(result)).toBe(true); + if (!Exit.isSuccess(result)) continue; + expect(result.value).toBe("rotated-1"); } expect(calls).toHaveLength(1); }), @@ -610,8 +612,12 @@ describe("connections", () => { const flipped = yield* executor.connections .accessToken("conn-1") .pipe(Effect.flip); - expect(flipped._tag).toBe("ConnectionReauthRequiredError"); - if (flipped._tag !== "ConnectionReauthRequiredError") return; + expect( + Predicate.isTagged(flipped, "ConnectionReauthRequiredError"), + ).toBe(true); + if (!Predicate.isTagged(flipped, "ConnectionReauthRequiredError")) { + return; + } expect(flipped.provider).toBe("spotify"); expect(flipped.message).toMatch(/invalid_grant/); }), @@ -666,7 +672,7 @@ describe("connections", () => { const err = yield* executor.connections .accessToken("conn-1") .pipe(Effect.flip); - expect((err as { _tag: string })._tag).toBe("ConnectionRefreshError"); + expect(Predicate.isTagged(err, "ConnectionRefreshError")).toBe(true); }), ); @@ -702,9 +708,9 @@ describe("connections", () => { const err = yield* executor.connections .accessToken("conn-1") .pipe(Effect.flip); - expect((err as { _tag: string })._tag).toBe( - "ConnectionRefreshNotSupportedError", - ); + expect( + Predicate.isTagged(err, "ConnectionRefreshNotSupportedError"), + ).toBe(true); }), ); @@ -721,9 +727,7 @@ describe("connections", () => { const err = yield* executor.connections .accessToken("does-not-exist") .pipe(Effect.flip); - 
expect((err as { _tag: string })._tag).toBe( - "ConnectionNotFoundError", - ); + expect(Predicate.isTagged(err, "ConnectionNotFoundError")).toBe(true); }), ); @@ -809,7 +813,7 @@ describe("connections", () => { }), ) .pipe(Effect.flip); - expect((err as { _tag: string })._tag).toBe("ConnectionNotFoundError"); + expect(Predicate.isTagged(err, "ConnectionNotFoundError")).toBe(true); }), ); @@ -859,9 +863,7 @@ describe("connections", () => { const err = yield* executor.connections .setIdentityLabel("does-not-exist", "x") .pipe(Effect.flip); - expect((err as { _tag: string })._tag).toBe( - "ConnectionNotFoundError", - ); + expect(Predicate.isTagged(err, "ConnectionNotFoundError")).toBe(true); }), ); From b039b6e726ee82976783b976f37595d5bc5fcf58 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:13:35 -0700 Subject: [PATCH 013/108] Ban unsupported Effect APIs --- .../skills/wrdn-effect-typed-errors/SKILL.md | 10 +++++ .oxlintrc.jsonc | 1 + scripts/oxlint-plugin-executor.js | 2 + .../rules/no-unsupported-effect-api.js | 38 +++++++++++++++++++ 4 files changed, 51 insertions(+) create mode 100644 scripts/oxlint-plugin-executor/rules/no-unsupported-effect-api.js diff --git a/.agents/skills/wrdn-effect-typed-errors/SKILL.md b/.agents/skills/wrdn-effect-typed-errors/SKILL.md index a0517190f..710ff378f 100644 --- a/.agents/skills/wrdn-effect-typed-errors/SKILL.md +++ b/.agents/skills/wrdn-effect-typed-errors/SKILL.md @@ -53,6 +53,16 @@ The lint rule is not a mandate to make every file Effect-shaped. It is acceptabl - control is immediately translated into a typed Effect failure, stable IPC payload, stable test assertion, or deliberately best-effort cleanup - the suppression is narrow and explains the boundary +## Repo Effect API compatibility + +Use the APIs that exist in this repo's pinned Effect runtime: + +- Use `Effect.callback` for callback adapters. Do not use `Effect.async`. 
+- Use `Effect.andThen` or `Effect.gen` sequencing. Do not use `Effect.zipRight`. +- Use `Effect.timeoutOrElse` or `Effect.timeoutOption`. Do not use `Effect.timeoutFail`. + +These are not style preferences; the unavailable APIs fail at typecheck or runtime. + Good boundary suppression: ```ts diff --git a/.oxlintrc.jsonc b/.oxlintrc.jsonc index 2f8f0a6d8..7bbba7275 100644 --- a/.oxlintrc.jsonc +++ b/.oxlintrc.jsonc @@ -22,6 +22,7 @@ "executor/no-ts-nocheck": "error", "executor/no-try-catch-or-throw": "error", "executor/no-unknown-error-message": "error", + "executor/no-unsupported-effect-api": "error", "executor/prefer-schema-inferred-types": "error", "executor/prefer-value-inferred-extension-types": "error", "executor/prefer-yield-tagged-error": "error", diff --git a/scripts/oxlint-plugin-executor.js b/scripts/oxlint-plugin-executor.js index e09c0367a..81e12bf1e 100644 --- a/scripts/oxlint-plugin-executor.js +++ b/scripts/oxlint-plugin-executor.js @@ -19,6 +19,7 @@ import noTsNocheck from "./oxlint-plugin-executor/rules/no-ts-nocheck.js"; import noTryCatchOrThrow from "./oxlint-plugin-executor/rules/no-try-catch-or-throw.js"; import noUnknownErrorMessage from "./oxlint-plugin-executor/rules/no-unknown-error-message.js"; import noUnknownShapeProbing from "./oxlint-plugin-executor/rules/no-unknown-shape-probing.js"; +import noUnsupportedEffectApi from "./oxlint-plugin-executor/rules/no-unsupported-effect-api.js"; import noVitestImport from "./oxlint-plugin-executor/rules/no-vitest-import.js"; import preferSchemaInferredTypes from "./oxlint-plugin-executor/rules/prefer-schema-inferred-types.js"; import preferYieldTaggedError from "./oxlint-plugin-executor/rules/prefer-yield-tagged-error.js"; @@ -53,6 +54,7 @@ export default { "no-try-catch-or-throw": noTryCatchOrThrow, "no-unknown-error-message": noUnknownErrorMessage, "no-unknown-shape-probing": noUnknownShapeProbing, + "no-unsupported-effect-api": noUnsupportedEffectApi, "prefer-schema-inferred-types": 
preferSchemaInferredTypes, "prefer-value-inferred-extension-types": preferValueInferredExtensionTypes, "prefer-yield-tagged-error": preferYieldTaggedError, diff --git a/scripts/oxlint-plugin-executor/rules/no-unsupported-effect-api.js b/scripts/oxlint-plugin-executor/rules/no-unsupported-effect-api.js new file mode 100644 index 000000000..4933f7752 --- /dev/null +++ b/scripts/oxlint-plugin-executor/rules/no-unsupported-effect-api.js @@ -0,0 +1,38 @@ +import { getPropertyName, isIdentifier } from "../utils.js"; + +const unsupported = new Map([ + [ + "async", + "Effect.async is not available in this repo's Effect runtime. Use Effect.callback for callback adapters.", + ], + [ + "zipRight", + "Effect.zipRight is not available in this repo's Effect runtime. Use Effect.andThen or Effect.gen sequencing.", + ], + [ + "timeoutFail", + "Effect.timeoutFail is not available in this repo's Effect runtime. Use Effect.timeoutOrElse or Effect.timeoutOption.", + ], +]); + +const message = (name) => + `${unsupported.get(name)} Skill: wrdn-effect-typed-errors.`; + +export default { + meta: { + type: "problem", + docs: { + description: "Disallow Effect APIs that are not available in this repo's Effect runtime.", + }, + }, + create(context) { + return { + MemberExpression(node) { + if (!isIdentifier(node.object, "Effect")) return; + const property = getPropertyName(node.property); + if (!unsupported.has(property)) return; + context.report({ node, message: message(property) }); + }, + }; + }, +}; From 53939596b46631f12f8f1dc01e7a22c8d7798f6a Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:21:18 -0700 Subject: [PATCH 014/108] Use typed assertions in policy code --- packages/core/sdk/src/policies.test.ts | 21 ++++++++-------- packages/core/sdk/src/policies.ts | 34 +++++++++++++------------- 2 files changed, 28 insertions(+), 27 deletions(-) diff --git a/packages/core/sdk/src/policies.test.ts 
b/packages/core/sdk/src/policies.test.ts index d233d3da0..83cbdf099 100644 --- a/packages/core/sdk/src/policies.test.ts +++ b/packages/core/sdk/src/policies.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "@effect/vitest"; -import { Effect, Result } from "effect"; +import { Effect, Predicate, Result } from "effect"; import { generateKeyBetween } from "fractional-indexing"; import type { ToolPolicyRow } from "./core-schema"; @@ -149,7 +149,7 @@ describe("resolveToolPolicy", () => { ROW("outer", "vercel.*", "block", "a0", "org"), ROW("inner", "vercel.dns.create", "approve", "a0", "user"), ]; - const rank = (row: { scope_id: unknown }) => + const rank = (row: Pick) => row.scope_id === "user" ? 0 : 1; const result = resolveToolPolicy( "vercel.dns.create", @@ -274,8 +274,8 @@ const policyTestPlugin = definePlugin(() => ({ for (const row of toolRows) { // Make tools whose name contains "delete" require approval by // default — mirrors openapi's HTTP-method heuristic in spirit. - out[row.id as string] = { - requiresApproval: (row.name as string).toLowerCase().includes("delete"), + out[row.id] = { + requiresApproval: row.name.toLowerCase().includes("delete"), }; } return Effect.succeed(out); @@ -526,10 +526,11 @@ describe("blocked tools", () => { ); expect(Result.isFailure(result)).toBe(true); if (!Result.isFailure(result)) return; - expect((result.failure as { _tag?: string })._tag).toBe( - "ToolBlockedError", + expect(Predicate.isTagged("ToolBlockedError")(result.failure)).toBe( + true, ); - expect((result.failure as { pattern?: string }).pattern).toBe("vercel.*"); + if (!Predicate.isTagged("ToolBlockedError")(result.failure)) return; + expect(result.failure.pattern).toBe("vercel.*"); }), ); }); @@ -583,9 +584,9 @@ describe("approve / require_approval interaction with annotations", () => { ); expect(Result.isFailure(result)).toBe(true); if (!Result.isFailure(result)) return; - expect((result.failure as { _tag?: string })._tag).toBe( - 
"ElicitationDeclinedError", - ); + expect( + Predicate.isTagged("ElicitationDeclinedError")(result.failure), + ).toBe(true); }), ); diff --git a/packages/core/sdk/src/policies.ts b/packages/core/sdk/src/policies.ts index bc1e8fe14..78fa2bafc 100644 --- a/packages/core/sdk/src/policies.ts +++ b/packages/core/sdk/src/policies.ts @@ -134,22 +134,22 @@ export const isValidPattern = (pattern: string): boolean => { // `generateKeyBetween(null, min)` from independent clients) would otherwise // flip on every refetch. export const comparePolicyRow = ( - a: { position: unknown; id: unknown }, - b: { position: unknown; id: unknown }, + a: Pick, + b: Pick, ): number => { - const pa = a.position as string; - const pb = b.position as string; + const pa = a.position; + const pb = b.position; if (pa < pb) return -1; if (pa > pb) return 1; - const ia = a.id as string; - const ib = b.id as string; + const ia = a.id; + const ib = b.id; return ia < ib ? -1 : ia > ib ? 1 : 0; }; export const resolveToolPolicy = ( toolId: string, policies: readonly ToolPolicyRow[], - scopeRank: (row: { scope_id: unknown }) => number, + scopeRank: (row: Pick) => number, ): PolicyMatch | undefined => { if (policies.length === 0) return undefined; const sorted = [...policies].sort((a, b) => { @@ -159,11 +159,11 @@ export const resolveToolPolicy = ( return comparePolicyRow(a, b); }); for (const row of sorted) { - if (matchPattern(row.pattern as string, toolId)) { + if (matchPattern(row.pattern, toolId)) { return { action: row.action as ToolPolicyAction, - pattern: row.pattern as string, - policyId: row.id as string, + pattern: row.pattern, + policyId: row.id, }; } } @@ -201,7 +201,7 @@ const liftUser = (match: PolicyMatch): EffectivePolicy => ({ export const resolveEffectivePolicy = ( toolId: string, policies: readonly ToolPolicyRow[], - scopeRank: (row: { scope_id: unknown }) => number, + scopeRank: (row: Pick) => number, defaultRequiresApproval?: boolean, ): EffectivePolicy => { const match = 
resolveToolPolicy(toolId, policies, scopeRank); @@ -231,13 +231,13 @@ export const effectivePolicyFromSorted = ( // --------------------------------------------------------------------------- export const rowToToolPolicy = (row: ToolPolicyRow): ToolPolicy => ({ - id: PolicyId.make(row.id as string), - scopeId: ScopeId.make(row.scope_id as string), - pattern: row.pattern as string, + id: PolicyId.make(row.id), + scopeId: ScopeId.make(row.scope_id), + pattern: row.pattern, action: row.action as ToolPolicyAction, - position: row.position as string, - createdAt: row.created_at as Date, - updatedAt: row.updated_at as Date, + position: row.position, + createdAt: row.created_at, + updatedAt: row.updated_at, }); // --------------------------------------------------------------------------- From 74119c1354b221b18aff5e500920b95df063a0b3 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:21:52 -0700 Subject: [PATCH 015/108] Use typed failures in OpenAPI multi-scope tests --- .../src/sdk/multi-scope-bearer.test.ts | 35 ++++++++++--------- .../openapi/src/sdk/multi-scope-oauth.test.ts | 32 ++++++++++------- 2 files changed, 39 insertions(+), 28 deletions(-) diff --git a/packages/plugins/openapi/src/sdk/multi-scope-bearer.test.ts b/packages/plugins/openapi/src/sdk/multi-scope-bearer.test.ts index 65ac05f12..ebf965728 100644 --- a/packages/plugins/openapi/src/sdk/multi-scope-bearer.test.ts +++ b/packages/plugins/openapi/src/sdk/multi-scope-bearer.test.ts @@ -32,6 +32,7 @@ import { ScopeId, SecretId, SetSecretInput, + ToolInvocationError, type InvokeOptions, type SecretProvider, } from "@executor-js/sdk"; @@ -170,7 +171,7 @@ layer(TestLayer)("OpenAPI multi-scope bearer (Vercel-style)", (it) => { // ------------------------------------------------------------- yield* adminExec.openapi.addSpec({ spec: specJson, - scope: orgScope.id as string, + scope: String(orgScope.id), namespace: "vercel", baseUrl, headers: { @@ 
-344,7 +345,7 @@ layer(TestLayer)("OpenAPI multi-scope bearer (Vercel-style)", (it) => { yield* adminExec.openapi.addSpec({ spec: specJson, - scope: orgScope.id as string, + scope: String(orgScope.id), namespace: "vercel", baseUrl, headers: { @@ -481,7 +482,7 @@ layer(TestLayer)("OpenAPI multi-scope bearer (Vercel-style)", (it) => { yield* adminExec.openapi.addSpec({ spec: specJson, - scope: orgScope.id as string, + scope: String(orgScope.id), namespace: "vercel", baseUrl, headers: { @@ -553,9 +554,9 @@ layer(TestLayer)("OpenAPI multi-scope bearer (Vercel-style)", (it) => { yield* aliceExec.openapi.removeSourceBinding( "vercel", - orgScope.id as string, + String(orgScope.id), "auth:token", - aliceScope.id as string, + String(aliceScope.id), ); const fallbackResult = (yield* aliceExec.tools.invoke( @@ -579,10 +580,10 @@ layer(TestLayer)("OpenAPI multi-scope bearer (Vercel-style)", (it) => { }), ); - yield* adminExec.openapi.removeSpec("vercel", orgScope.id as string); + yield* adminExec.openapi.removeSpec("vercel", String(orgScope.id)); yield* adminExec.openapi.addSpec({ spec: specJson, - scope: orgScope.id as string, + scope: String(orgScope.id), namespace: "vercel", baseUrl, headers: { @@ -596,16 +597,18 @@ layer(TestLayer)("OpenAPI multi-scope bearer (Vercel-style)", (it) => { const bindingsAfterReadd = yield* aliceExec.openapi.listSourceBindings( "vercel", - orgScope.id as string, + String(orgScope.id), ); expect(bindingsAfterReadd).toEqual([]); const error = yield* Effect.flip( aliceExec.tools.invoke("vercel.projects.list", {}, autoApprove), ); - expect((error as { _tag: string })._tag).toBe("ToolInvocationError"); - expect((error as { message: string }).message).toContain( - 'Missing binding for header "Authorization"', + expect(error).toBeInstanceOf(ToolInvocationError); + expect(error).toEqual( + expect.objectContaining({ + message: expect.stringContaining('Missing binding for header "Authorization"'), + }), ); }), ); @@ -657,20 +660,20 @@ 
layer(TestLayer)("OpenAPI multi-scope bearer (Vercel-style)", (it) => { yield* adminExec.openapi.addSpec({ spec: specJson, - scope: orgScope.id as string, + scope: String(orgScope.id), namespace: "vercel", baseUrl: "https://api.vercel.example", }); yield* aliceExec.openapi.addSpec({ spec: specJson, - scope: aliceScope.id as string, + scope: String(aliceScope.id), namespace: "vercel", }); const source = yield* aliceExec.openapi.getSource( "vercel", - aliceScope.id as string, + String(aliceScope.id), ); expect(source?.scope).toBe(aliceScope.id); expect(source?.config.baseUrl).toBe("https://api.vercel.example"); @@ -739,7 +742,7 @@ layer(TestLayer)("OpenAPI multi-scope bearer (Vercel-style)", (it) => { yield* adminExec.openapi.addSpec({ spec: specJson, - scope: orgScope.id as string, + scope: String(orgScope.id), namespace: "vercel", baseUrl, headers: { @@ -752,7 +755,7 @@ layer(TestLayer)("OpenAPI multi-scope bearer (Vercel-style)", (it) => { }); yield* aliceExec.openapi.addSpec({ spec: specJson, - scope: aliceScope.id as string, + scope: String(aliceScope.id), namespace: "vercel", }); diff --git a/packages/plugins/openapi/src/sdk/multi-scope-oauth.test.ts b/packages/plugins/openapi/src/sdk/multi-scope-oauth.test.ts index 345a571ce..0dc460a75 100644 --- a/packages/plugins/openapi/src/sdk/multi-scope-oauth.test.ts +++ b/packages/plugins/openapi/src/sdk/multi-scope-oauth.test.ts @@ -9,7 +9,7 @@ // --------------------------------------------------------------------------- import { afterEach, expect, layer } from "@effect/vitest"; -import { Effect, Layer, Schema } from "effect"; +import { Data, Effect, Layer, Predicate, Schema } from "effect"; import { HttpApi, HttpApiBuilder, HttpApiEndpoint, HttpApiGroup, OpenApi } from "effect/unstable/httpapi"; import { FetchHttpClient, HttpRouter, HttpServer, HttpServerRequest } from "effect/unstable/http"; import * as NodeHttpServer from "@effect/platform-node/NodeHttpServer"; @@ -33,6 +33,10 @@ import { OAuth2Auth } from 
"./types"; const autoApprove: InvokeOptions = { onElicitation: "accept-all" }; +class TestInvariantError extends Data.TaggedError("TestInvariantError")<{ + readonly message: string; +}> {} + // --------------------------------------------------------------------------- // Test API — a single endpoint that echoes the Authorization header so the // test can assert which user's token got injected. @@ -137,7 +141,9 @@ layer(TestLayer)("OpenAPI multi-scope OAuth", (it) => { const clientLayer = FetchHttpClient.layer; const server = yield* HttpServer.HttpServer; const address = server.address; - if (address._tag !== "TcpAddress") return yield* Effect.die("test server must bind to TCP"); + if (!Predicate.isTagged(address, "TcpAddress")) { + return yield* new TestInvariantError({ message: "test server must bind to TCP" }); + } const baseUrl = `http://127.0.0.1:${address.port}`; const plugins = [ openApiPlugin({ httpClientLayer: clientLayer }), @@ -257,10 +263,10 @@ layer(TestLayer)("OpenAPI multi-scope OAuth", (it) => { ); const bobStart = yield* startAuthorizationCode(bobExec, startInputFor("bob", bobScope.id)); if (aliceStart.authorizationUrl === null) { - throw new Error("expected authorizationCode flow for alice"); + return yield* new TestInvariantError({ message: "expected authorizationCode flow for alice" }); } if (bobStart.authorizationUrl === null) { - throw new Error("expected authorizationCode flow for bob"); + return yield* new TestInvariantError({ message: "expected authorizationCode flow for bob" }); } const aliceAuth = yield* aliceExec.oauth.complete({ @@ -306,14 +312,14 @@ layer(TestLayer)("OpenAPI multi-scope OAuth", (it) => { // ------------------------------------------------------------- yield* aliceExec.openapi.addSpec({ spec: specJson, - scope: aliceScope.id as string, + scope: String(aliceScope.id), namespace: "petstore", baseUrl, oauth2: aliceOAuth2Auth, }); yield* bobExec.openapi.addSpec({ spec: specJson, - scope: bobScope.id as string, + scope: 
String(bobScope.id), namespace: "petstore", baseUrl, oauth2: bobOAuth2Auth, @@ -351,7 +357,7 @@ layer(TestLayer)("OpenAPI multi-scope OAuth", (it) => { expect(String(bobConn?.scopeId)).toBe("user-bob"); const adminConnectionIds = new Set( - (yield* adminExec.connections.list()).map((c) => c.id as string), + (yield* adminExec.connections.list()).map((c) => String(c.id)), ); expect(adminConnectionIds).not.toContain(String(aliceAuth.connectionId)); expect(adminConnectionIds).not.toContain(String(bobAuth.connectionId)); @@ -406,7 +412,9 @@ layer(TestLayer)("OpenAPI multi-scope OAuth", (it) => { const clientLayer = FetchHttpClient.layer; const server = yield* HttpServer.HttpServer; const address = server.address; - if (address._tag !== "TcpAddress") return yield* Effect.die("test server must bind to TCP"); + if (!Predicate.isTagged(address, "TcpAddress")) { + return yield* new TestInvariantError({ message: "test server must bind to TCP" }); + } const baseUrl = `http://127.0.0.1:${address.port}`; const plugins = [ openApiPlugin({ httpClientLayer: clientLayer }), @@ -541,7 +549,7 @@ layer(TestLayer)("OpenAPI multi-scope OAuth", (it) => { endpoint: input.tokenUrl, redirectUrl: input.tokenUrl, connectionId: input.connectionId, - tokenScope: tokenScope as string, + tokenScope: String(tokenScope), pluginId: "openapi", identityLabel: `${input.displayName} OAuth`, strategy: { @@ -553,7 +561,7 @@ layer(TestLayer)("OpenAPI multi-scope OAuth", (it) => { }, }); if (!started.completedConnection) { - throw new Error("expected clientCredentials flow"); + return yield* new TestInvariantError({ message: "expected clientCredentials flow" }); } return new OAuth2Auth({ kind: "oauth2", @@ -575,7 +583,7 @@ layer(TestLayer)("OpenAPI multi-scope OAuth", (it) => { const adminAuth = yield* startClientCredentials(adminExec, orgScope.id, startInput); yield* adminExec.openapi.addSpec({ spec: specJson, - scope: orgScope.id as string, + scope: String(orgScope.id), namespace: "petstore", baseUrl, 
oauth2: adminAuth, @@ -624,7 +632,7 @@ layer(TestLayer)("OpenAPI multi-scope OAuth", (it) => { // (4) Each user's invocation resolves their OWN row and gets // their OWN token — not whatever the last signer happened to // mint. This is the core multi-user regression. - yield* aliceExec.openapi.updateSource("petstore", orgScope.id as string, { + yield* aliceExec.openapi.updateSource("petstore", String(orgScope.id), { oauth2: aliceAuth, }); const aliceResult = (yield* aliceExec.tools.invoke( From f9da4f3a90a4159051260b76190d1ab101751c14 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:23:40 -0700 Subject: [PATCH 016/108] Use typed boundaries in cloud MCP e2e --- apps/cloud/src/mcp-miniflare.e2e.node.test.ts | 226 ++++++++++++------ 1 file changed, 148 insertions(+), 78 deletions(-) diff --git a/apps/cloud/src/mcp-miniflare.e2e.node.test.ts b/apps/cloud/src/mcp-miniflare.e2e.node.test.ts index 63b0e33d4..45b23cbc5 100644 --- a/apps/cloud/src/mcp-miniflare.e2e.node.test.ts +++ b/apps/cloud/src/mcp-miniflare.e2e.node.test.ts @@ -24,10 +24,16 @@ import { resolve } from "node:path"; import { createServer } from "node:http"; import type { AddressInfo } from "node:net"; -import { HttpApi, HttpApiBuilder, HttpApiEndpoint, HttpApiGroup, OpenApi } from "effect/unstable/httpapi"; +import { + HttpApi, + HttpApiBuilder, + HttpApiEndpoint, + HttpApiGroup, + OpenApi, +} from "effect/unstable/httpapi"; import { HttpRouter, HttpServer } from "effect/unstable/http"; import * as NodeHttpServer from "@effect/platform-node/NodeHttpServer"; -import { Context, Effect, Layer, Schema } from "effect"; +import { Context, Data, Effect, Layer, Option, Predicate, Schema } from "effect"; import { Client } from "@modelcontextprotocol/sdk/client/index.js"; import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js"; @@ -69,7 +75,9 @@ const UpstreamServeLayer = 
HttpRouter.serve(UpstreamApiLive).pipe( // Services // --------------------------------------------------------------------------- -class Upstream extends Context.Service()("MiniflareE2E/Upstream") {} class Worker extends Context.Service< @@ -100,13 +108,21 @@ class TelemetryReceiver extends Context.Service< } >()("MiniflareE2E/TelemetryReceiver") {} +class MiniflareE2ETestError extends Data.TaggedError("MiniflareE2ETestError")<{ + readonly message: string; + readonly cause?: unknown; +}> {} + const UpstreamLive = Layer.effect( Upstream, Effect.gen(function* () { const server = yield* HttpServer.HttpServer; const addr = server.address; - if (addr._tag !== "TcpAddress") { - return yield* Effect.die(`upstream server bound to non-TCP: ${addr._tag}`); + if (!Predicate.isTagged("TcpAddress")(addr)) { + return yield* new MiniflareE2ETestError({ + message: "upstream server bound to non-TCP address", + cause: addr, + }); } const url = `http://127.0.0.1:${addr.port}`; const specJson = JSON.stringify({ @@ -127,25 +143,48 @@ const UpstreamLive = Layer.effect( // reported the expected spans, not just that the exporter was called. 
// --------------------------------------------------------------------------- -type OtlpAttributeValue = { - readonly stringValue?: string; - readonly intValue?: string | number; - readonly doubleValue?: number; - readonly boolValue?: boolean; -}; -type OtlpAttribute = { readonly key: string; readonly value?: OtlpAttributeValue }; -type OtlpSpan = { - readonly name: string; - readonly traceId?: string; - readonly spanId?: string; - readonly parentSpanId?: string; - readonly attributes?: ReadonlyArray; -}; -type OtlpPayload = { - readonly resourceSpans?: ReadonlyArray<{ - readonly scopeSpans?: ReadonlyArray<{ readonly spans?: ReadonlyArray }>; - }>; -}; +const OtlpAttributeValue = Schema.Struct({ + stringValue: Schema.optional(Schema.String), + intValue: Schema.optional(Schema.Union([Schema.String, Schema.Number])), + doubleValue: Schema.optional(Schema.Number), + boolValue: Schema.optional(Schema.Boolean), +}); +type OtlpAttributeValue = typeof OtlpAttributeValue.Type; + +const OtlpPayloadFromJson = Schema.fromJsonString( + Schema.Struct({ + resourceSpans: Schema.optional( + Schema.Array( + Schema.Struct({ + scopeSpans: Schema.optional( + Schema.Array( + Schema.Struct({ + spans: Schema.optional( + Schema.Array( + Schema.Struct({ + name: Schema.String, + traceId: Schema.optional(Schema.String), + spanId: Schema.optional(Schema.String), + parentSpanId: Schema.optional(Schema.String), + attributes: Schema.optional( + Schema.Array( + Schema.Struct({ + key: Schema.String, + value: Schema.optional(OtlpAttributeValue), + }), + ), + ), + }), + ), + ), + }), + ), + ), + }), + ), + ), + }), +); const unwrapAttrValue = (v?: OtlpAttributeValue): unknown => { if (!v) return undefined; @@ -156,7 +195,8 @@ const unwrapAttrValue = (v?: OtlpAttributeValue): unknown => { return undefined; }; -const TelemetryReceiverLive = Layer.effect(TelemetryReceiver)(Effect.acquireRelease( +const TelemetryReceiverLive = Layer.effect(TelemetryReceiver)( + Effect.acquireRelease( Effect.callback< { 
readonly tracesUrl: string; @@ -178,8 +218,9 @@ const TelemetryReceiverLive = Layer.effect(TelemetryReceiver)(Effect.acquireRele body += chunk; }); req.on("end", () => { - try { - const payload = JSON.parse(body) as OtlpPayload; + const maybePayload = Schema.decodeUnknownOption(OtlpPayloadFromJson)(body); + if (Option.isSome(maybePayload)) { + const payload = maybePayload.value; for (const rs of payload.resourceSpans ?? []) { for (const ss of rs.scopeSpans ?? []) { for (const sp of ss.spans ?? []) { @@ -197,8 +238,6 @@ const TelemetryReceiverLive = Layer.effect(TelemetryReceiver)(Effect.acquireRele } } } - } catch { - // ignore malformed payloads } res.writeHead(200, { "content-type": "application/json" }); res.end("{}"); @@ -221,24 +260,33 @@ const TelemetryReceiverLive = Layer.effect(TelemetryReceiver)(Effect.acquireRele Effect.map((t) => ({ tracesUrl: t.tracesUrl, spans: () => [...t.store], - waitForSpan: async (predicate: (s: CapturedSpan) => boolean, timeoutMs = 5_000) => { - const deadline = Date.now() + timeoutMs; - for (;;) { - const hit = t.store.find(predicate); - if (hit) return hit; - if (Date.now() > deadline) { - throw new Error( - `Timed out waiting for span. Captured ${t.store.length}: ${t.store.map((s) => s.name).join(", ") || ""}`, - ); - } - await new Promise((r) => setTimeout(r, 50)); - } - }, + waitForSpan: (predicate: (s: CapturedSpan) => boolean, timeoutMs = 5_000) => + Effect.gen(function* () { + const poll = Effect.gen(function* () { + for (;;) { + const hit = t.store.find(predicate); + if (hit) return hit; + yield* Effect.sleep("50 millis"); + } + }); + return yield* poll.pipe( + Effect.timeoutOrElse({ + duration: `${timeoutMs} millis`, + orElse: () => + Effect.fail( + new MiniflareE2ETestError({ + message: `Timed out waiting for span. 
Captured ${t.store.length}: ${t.store.map((s) => s.name).join(", ") || ""}`, + }), + ), + }), + ); + }).pipe(Effect.runPromise), })), ), ); -const WorkerLive = Layer.effect(Worker)(Effect.gen(function* () { +const WorkerLive = Layer.effect(Worker)( + Effect.gen(function* () { const receiver = yield* TelemetryReceiver; // AXIOM_TOKEN activates DoTelemetryLive inside the worker; AXIOM_TRACES_URL // redirects the exporter at our in-process OTLP/JSON receiver so spans @@ -267,7 +315,13 @@ const WorkerLive = Layer.effect(Worker)(Effect.gen(function* () { body: JSON.stringify({ id, name }), }); if (res.status !== 204) { - throw new Error(`seed-org failed: ${res.status} ${await res.text()}`); + return Effect.runPromise( + Effect.fail( + new MiniflareE2ETestError({ + message: `seed-org failed: ${res.status} ${await res.text()}`, + }), + ), + ); } }, })), @@ -306,6 +360,41 @@ const connectClient = async ( return client; }; +const ignoreCancelBody = (body: ReadableStream | null): Effect.Effect => + body + ? Effect.ignore( + Effect.tryPromise({ + try: () => body.cancel(), + catch: (cause) => + new MiniflareE2ETestError({ message: "Failed to cancel response body", cause }), + }), + ) + : Effect.void; + +const ignoreCancelReader = ( + reader: ReadableStreamDefaultReader | undefined, +): Effect.Effect => + reader + ? 
Effect.ignore( + Effect.tryPromise({ + try: () => reader.cancel(), + catch: (cause) => + new MiniflareE2ETestError({ message: "Failed to cancel response reader", cause }), + }), + ) + : Effect.void; + +const withTestTimeout = ( + self: Effect.Effect, + message: string, +): Effect.Effect => + self.pipe( + Effect.timeoutOrElse({ + duration: "5 seconds", + orElse: () => Effect.fail(new MiniflareE2ETestError({ message })), + }), + ); + const initializeSession = async (baseUrl: URL, bearer: string): Promise => { const response = await fetch(new URL("/mcp", baseUrl), { method: "POST", @@ -473,12 +562,8 @@ layer(TestEnv, { timeout: 60_000 })("cloud MCP over real HTTP (miniflare)", (it) expect(second.status).toBe(200); expect(second.headers.get("content-type") ?? "").toContain("text/event-stream"); - yield* Effect.promise(async () => { - await first.body?.cancel().catch(() => undefined); - }); - yield* Effect.promise(async () => { - await second.body?.cancel().catch(() => undefined); - }); + yield* ignoreCancelBody(first.body); + yield* ignoreCancelBody(second.body); }), 30_000, ); @@ -523,9 +608,7 @@ layer(TestEnv, { timeout: 60_000 })("cloud MCP over real HTTP (miniflare)", (it) ); expect(firstRead).toBe("open"); - yield* Effect.promise(async () => { - await firstReader?.cancel().catch(() => undefined); - }); + yield* ignoreCancelReader(firstReader); }), 30_000, ); @@ -586,16 +669,9 @@ layer(TestEnv, { timeout: 60_000 })("cloud MCP over real HTTP (miniflare)", (it) return responses; }); - const response = yield* Effect.promise(() => - Promise.race([ - postResult, - new Promise((_, reject) => - setTimeout( - () => reject(new Error("tools/call did not return during SSE churn")), - 5_000, - ), - ), - ]), + const response = yield* withTestTimeout( + Effect.promise(() => postResult), + "tools/call did not return during SSE churn", ); expect(response.status).toBe(200); const body = (yield* Effect.promise(() => response.json())) as { @@ -608,11 +684,12 @@ layer(TestEnv, { 
timeout: 60_000 })("cloud MCP over real HTTP (miniflare)", (it) expect(body.error).toBeUndefined(); expect(body.result).toBeDefined(); - yield* Effect.promise(async () => { - await Promise.all( - reconnects.map((response) => response.body?.cancel().catch(() => undefined)), - ); - }); + yield* Effect.all( + reconnects.map((response) => ignoreCancelBody(response.body)), + { + concurrency: "unbounded", + }, + ); }), 30_000, ); @@ -653,16 +730,9 @@ layer(TestEnv, { timeout: 60_000 })("cloud MCP over real HTTP (miniflare)", (it) yield* Effect.promise(() => new Promise((resolve) => setTimeout(resolve, 50))); const second = postExecute('return "second";'); - const responses = yield* Effect.promise(() => - Promise.race([ - Promise.all([first, second]), - new Promise((_, reject) => - setTimeout( - () => reject(new Error("overlapping tools/call requests did not both return")), - 5_000, - ), - ), - ]), + const responses = yield* withTestTimeout( + Effect.promise(() => Promise.all([first, second])), + "overlapping tools/call requests did not both return", ); expect(responses.map((response) => response.status)).toEqual([200, 200]); From 1d616f61e5d4630d3c7b9416aec60f4314e27631 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:28:26 -0700 Subject: [PATCH 017/108] Use typed keychain failures --- packages/plugins/keychain/src/keyring.ts | 35 +++++++++++++----------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/packages/plugins/keychain/src/keyring.ts b/packages/plugins/keychain/src/keyring.ts index 838cb7e6d..aeb8ad855 100644 --- a/packages/plugins/keychain/src/keyring.ts +++ b/packages/plugins/keychain/src/keyring.ts @@ -54,20 +54,23 @@ const loadEntryCtor = async (): Promise => { }; const loadEntry = (): Effect.Effect => - Effect.tryPromise({ - try: async () => { - if (!isSupportedPlatform()) { - throw new Error(`unsupported platform '${process.platform}'`); - } - entryCtorPromise ??= 
loadEntryCtor(); - return await entryCtorPromise; - }, - catch: (cause) => - new KeychainError({ - message: `Failed loading native keyring: ${cause instanceof Error ? cause.message : String(cause)}`, - cause, - }), - }); + isSupportedPlatform() + ? Effect.tryPromise({ + try: async () => { + entryCtorPromise ??= loadEntryCtor(); + return await entryCtorPromise; + }, + catch: (cause) => + new KeychainError({ + message: "Failed loading native keyring", + cause, + }), + }) + : Effect.fail( + new KeychainError({ + message: `Failed loading native keyring: unsupported platform '${process.platform}'`, + }), + ); const createEntry = (serviceName: string, account: string) => Effect.flatMap(loadEntry(), (Entry) => @@ -75,7 +78,7 @@ const createEntry = (serviceName: string, account: string) => try: () => new Entry(serviceName, account), catch: (cause) => new KeychainError({ - message: `Failed creating keyring entry: ${cause instanceof Error ? cause.message : String(cause)}`, + message: "Failed creating keyring entry", cause, }), }), @@ -106,7 +109,7 @@ export const setPassword = ( try: () => entry.setPassword(value), catch: (cause) => new KeychainError({ - message: `Failed writing secret: ${cause instanceof Error ? 
cause.message : String(cause)}`, + message: "Failed writing secret", cause, }), }).pipe(Effect.asVoid), From 9c6a5c05534146ce2ffd51d313799e9486ea8048 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:28:45 -0700 Subject: [PATCH 018/108] Use Effect assertions in executor tests --- packages/core/sdk/src/executor.test.ts | 44 ++++++++++++++------------ 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/packages/core/sdk/src/executor.test.ts b/packages/core/sdk/src/executor.test.ts index f9fc5b54d..2d286e7d4 100644 --- a/packages/core/sdk/src/executor.test.ts +++ b/packages/core/sdk/src/executor.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "@effect/vitest"; -import { Effect, Result } from "effect"; +import { Data, Effect, Exit, Predicate, Result } from "effect"; import { makeMemoryAdapter } from "@executor-js/storage-core/testing/memory"; import type { DBAdapter, Where } from "@executor-js/storage-core"; @@ -25,6 +25,10 @@ type FindManyCall = { readonly where?: readonly Where[]; }; +class TestPluginError extends Data.TaggedError("TestPluginError")<{ + readonly message: string; +}> {} + const recordFindMany = ( adapter: DBAdapter, calls: FindManyCall[], @@ -138,7 +142,9 @@ const testPlugin = definePlugin(() => ({ yield* ctx.storage.writeThing(thingId, value); return { ok: true }; } - return yield* Effect.fail(new Error(`unknown tool ${toolRow.id}`)); + return yield* new TestPluginError({ + message: `unknown tool ${toolRow.id}`, + }); }), // Derived annotations: `write` gates on approval, `read` doesn't. 
@@ -361,9 +367,7 @@ describe("createExecutor", () => { }, ) .pipe(Effect.flip); - expect((declined as { _tag: string })._tag).toBe( - "ElicitationDeclinedError", - ); + expect(Predicate.isTagged(declined, "ElicitationDeclinedError")).toBe(true); // auto-accept → succeeds const accepted = yield* executor.tools.invoke( @@ -446,9 +450,7 @@ describe("createExecutor", () => { const err = yield* executor.sources .remove("test.control") .pipe(Effect.flip); - expect((err as { _tag: string })._tag).toBe( - "SourceRemovalNotAllowedError", - ); + expect(Predicate.isTagged(err, "SourceRemovalNotAllowedError")).toBe(true); }), ); @@ -538,7 +540,7 @@ describe("createExecutor", () => { // translating it to the opaque `InternalError({ traceId })` when // crossing the wire; here, at the SDK layer, we expect the raw tag. const err = yield* executor.collide.tryRegister().pipe(Effect.flip); - expect(err._tag).toBe("StorageError"); + expect(Predicate.isTagged(err, "StorageError")).toBe(true); }), ); @@ -599,7 +601,7 @@ describe("createExecutor", () => { canRemove: true, tools: [{ name: "t", description: "t" }], }); - return yield* Effect.fail(new Error("boom")); + return yield* new TestPluginError({ message: "boom" }); }), ), countThings: () => ctx.storage.countThings(), @@ -684,11 +686,9 @@ describe("createExecutor", () => { const leaked = yield* executor.secrets .get("conn-owned.access_token") - .pipe(Effect.result); - expect(Result.isFailure(leaked)).toBe(true); - if (!Result.isFailure(leaked)) return; - expect((leaked.failure as { _tag?: string })._tag).toBe( - "SecretOwnedByConnectionError", + .pipe(Effect.flip); + expect(Predicate.isTagged(leaked, "SecretOwnedByConnectionError")).toBe( + true, ); const status = yield* executor.secrets.status("conn-owned.access_token"); @@ -709,7 +709,7 @@ describe("createExecutor", () => { const err = yield* executor.tools .invoke("does.not.exist", {}, { onElicitation: "accept-all" }) .pipe(Effect.flip); - expect((err as { _tag: string 
})._tag).toBe("ToolNotFoundError"); + expect(Predicate.isTagged(err, "ToolNotFoundError")).toBe(true); }), ); @@ -821,7 +821,9 @@ describe("createExecutor", () => { {}, { onElicitation: (ctx) => { - expect(ctx.request._tag).toBe("FormElicitation"); + expect(Predicate.isTagged(ctx.request, "FormElicitation")).toBe( + true, + ); return Effect.succeed( new ElicitationResponse({ action: "accept", @@ -879,7 +881,9 @@ describe("createExecutor", () => { {}, { onElicitation: (ctx) => { - expect(ctx.request._tag).toBe("UrlElicitation"); + expect(Predicate.isTagged(ctx.request, "UrlElicitation")).toBe( + true, + ); return Effect.succeed( new ElicitationResponse({ action: "accept", @@ -1178,7 +1182,7 @@ describe("tenant isolation (SDK)", () => { }), ), ); - expect(result._tag).toBe("Failure"); + expect(Exit.isFailure(result)).toBe(true); }), ); @@ -1484,7 +1488,7 @@ const invokeMarkerPlugin = definePlugin(() => ({ invokeTool: ({ toolRow }) => Effect.succeed({ marker: toolRow.description, - scope: toolRow.scope_id as string, + scope: toolRow.scope_id, }), })); From 83554b77e155861347cb41ab93606a0afdc49783 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:28:57 -0700 Subject: [PATCH 019/108] Keep tool invoker failures typed --- packages/core/execution/src/tool-invoker.ts | 105 +++++++++++++------- 1 file changed, 71 insertions(+), 34 deletions(-) diff --git a/packages/core/execution/src/tool-invoker.ts b/packages/core/execution/src/tool-invoker.ts index 2abc15266..5c172ade7 100644 --- a/packages/core/execution/src/tool-invoker.ts +++ b/packages/core/execution/src/tool-invoker.ts @@ -1,4 +1,4 @@ -import { Effect } from "effect"; +import { Effect, Predicate } from "effect"; import * as Cause from "effect/Cause"; import type { Executor, @@ -23,14 +23,6 @@ const extractSourceNamespace = (path: string): string => { return idx === -1 ? 
path : path.slice(0, idx); }; -const stringifyUnknown = (value: unknown): string => { - try { - return JSON.stringify(value) ?? String(value); - } catch { - return String(value); - } -}; - const hasStringMessage = (value: unknown): value is { readonly message: string } => value !== null && typeof value === "object" && @@ -38,7 +30,7 @@ const hasStringMessage = (value: unknown): value is { readonly message: string } typeof value.message === "string"; const messageFromErrorLike = (value: unknown): string | undefined => { - if (value instanceof Error || hasStringMessage(value)) { + if (hasStringMessage(value)) { return value.message; } return undefined; @@ -46,7 +38,36 @@ const messageFromErrorLike = (value: unknown): string | undefined => { const renderToolErrorMessage = (error: unknown): string => messageFromErrorLike(error) ?? - (typeof error === "undefined" ? "Tool execution failed" : stringifyUnknown(error)); + (typeof error === "undefined" ? "Tool execution failed" : renderUnknownPrimitive(error)); + +const renderUnknownPrimitive = (value: unknown): string => { + switch (typeof value) { + case "string": + return value; + case "number": + case "boolean": + case "bigint": + case "symbol": + return value.toString(); + default: + return "Tool execution failed"; + } +}; + +type ToolResultEnvelope = { + readonly error?: unknown; + readonly data?: unknown; +}; + +const isToolResultEnvelope = (value: unknown): value is ToolResultEnvelope => + value !== null && + typeof value === "object" && + ("error" in value || "data" in value); + +const hasToolResultError = ( + value: ToolResultEnvelope, +): value is ToolResultEnvelope & { readonly error: unknown } => + value.error !== null && value.error !== undefined; /** * Bridges QuickJS `tools.someSource.someOp(args)` calls into @@ -90,36 +111,28 @@ export const makeExecutorToolInvoker = ( ); }), ); - const r = result as { readonly error?: unknown; readonly data?: unknown } | unknown; - if ( - r !== null && - typeof r === 
"object" && - "error" in r && - (r as { error?: unknown }).error !== null && - (r as { error?: unknown }).error !== undefined - ) { - const error = (r as { error: unknown }).error; - return yield* Effect.fail( - new ExecutionToolError({ - message: renderToolErrorMessage(error), - cause: error, - }), - ); + if (!isToolResultEnvelope(result)) { + return result; + } + if (hasToolResultError(result)) { + return yield* new ExecutionToolError({ + message: renderToolErrorMessage(result.error), + cause: result.error, + }); } - if (r !== null && typeof r === "object" && "data" in r) { - return (r as { data: unknown }).data; + if ("data" in result) { + return result.data; } - return r; + return result; }), }); const isElicitationDeclinedError = ( value: unknown, ): value is { readonly _tag: "ElicitationDeclinedError"; readonly toolId: string; readonly action: "cancel" | "decline" } => + Predicate.isTagged(value, "ElicitationDeclinedError") && value !== null && typeof value === "object" && - "_tag" in value && - value._tag === "ElicitationDeclinedError" && "toolId" in value && typeof value.toolId === "string" && "action" in value && @@ -383,7 +396,15 @@ export const searchTools = Effect.fn("executor.tools.search")(function* ( return empty; } - const all = yield* executor.tools.list({ includeAnnotations: false }).pipe(Effect.orDie); + const all = yield* executor.tools.list({ includeAnnotations: false }).pipe( + Effect.mapError( + (cause) => + new ExecutionToolError({ + message: "Failed to list tools for search", + cause, + }), + ), + ); const ranked = all .filter((tool: Tool) => matchesNamespace(tool, options?.namespace)) .map((tool: Tool) => scoreToolMatch(tool, query)) @@ -413,7 +434,15 @@ export const listExecutorSources = Effect.fn("executor.sources.list")(function* const normalizedQuery = normalizeSearchText(options?.query ?? ""); const limit = options?.limit ?? 50; const offset = options?.offset ?? 
0; - const sources = yield* executor.sources.list().pipe(Effect.orDie); + const sources = yield* executor.sources.list().pipe( + Effect.mapError( + (cause) => + new ExecutionToolError({ + message: "Failed to list executor sources", + cause, + }), + ), + ); const filtered = normalizedQuery.length === 0 @@ -424,7 +453,15 @@ export const listExecutorSources = Effect.fn("executor.sources.list")(function* }); // Single query for all tools, then count per source in memory. - const allTools = yield* executor.tools.list({ includeAnnotations: false }).pipe(Effect.orDie); + const allTools = yield* executor.tools.list({ includeAnnotations: false }).pipe( + Effect.mapError( + (cause) => + new ExecutionToolError({ + message: "Failed to list tools for source counts", + cause, + }), + ), + ); const toolCountBySource = new Map(); for (const tool of allTools) { toolCountBySource.set(tool.sourceId, (toolCountBySource.get(tool.sourceId) ?? 0) + 1); From 2cae07e5c443778ef3e994b24ec29bc51bde2a4b Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:29:09 -0700 Subject: [PATCH 020/108] Use Effect assertions in OpenAPI plugin tests --- .../plugins/openapi/src/sdk/plugin.test.ts | 48 ++++++++++--------- 1 file changed, 25 insertions(+), 23 deletions(-) diff --git a/packages/plugins/openapi/src/sdk/plugin.test.ts b/packages/plugins/openapi/src/sdk/plugin.test.ts index 670605876..96896db88 100644 --- a/packages/plugins/openapi/src/sdk/plugin.test.ts +++ b/packages/plugins/openapi/src/sdk/plugin.test.ts @@ -1,5 +1,5 @@ import { expect, layer } from "@effect/vitest"; -import { Effect, Layer, Schema } from "effect"; +import { Effect, Layer, Predicate, Schema } from "effect"; import { HttpApi, HttpApiBuilder, HttpApiEndpoint, HttpApiGroup, OpenApi } from "effect/unstable/httpapi"; import { HttpClient, HttpRouter, HttpServerRequest } from "effect/unstable/http"; import * as NodeHttpServer from 
"@effect/platform-node/NodeHttpServer"; @@ -373,8 +373,10 @@ layer(TestLayer)("OpenAPI Plugin", (it) => { executor.tools.invoke("noauth.items.listItems", {}, autoApprove), ); - expect((error as { _tag: string })._tag).toBe("ToolInvocationError"); - expect((error as { message: string }).message).toContain("missing-token"); + expect(Predicate.isTagged(error, "ToolInvocationError")).toBe(true); + expect(error).toMatchObject({ + message: expect.stringContaining("missing-token"), + }); }), ); @@ -606,7 +608,7 @@ layer(TestLayer)("OpenAPI Plugin", (it) => { // Org-level base source yield* executor.openapi.addSpec({ spec: specJson, - scope: ORG_SCOPE as string, + scope: String(ORG_SCOPE), namespace: "shared", baseUrl: "", name: "Org Source", @@ -615,20 +617,20 @@ layer(TestLayer)("OpenAPI Plugin", (it) => { // Per-user shadow with the same namespace yield* executor.openapi.addSpec({ spec: specJson, - scope: USER_SCOPE as string, + scope: String(USER_SCOPE), namespace: "shared", name: "User Source", }); - const userView = yield* executor.openapi.getSource("shared", USER_SCOPE as string); - const orgView = yield* executor.openapi.getSource("shared", ORG_SCOPE as string); + const userView = yield* executor.openapi.getSource("shared", String(USER_SCOPE)); + const orgView = yield* executor.openapi.getSource("shared", String(ORG_SCOPE)); // Both rows must coexist — innermost-wins reads come from the // executor; the store's scope-pinned getters return the exact row. 
expect(userView?.name).toBe("User Source"); - expect(userView?.scope).toBe(USER_SCOPE as string); + expect(userView?.scope).toBe(String(USER_SCOPE)); expect(orgView?.name).toBe("Org Source"); - expect(orgView?.scope).toBe(ORG_SCOPE as string); + expect(orgView?.scope).toBe(String(ORG_SCOPE)); }), ); @@ -652,20 +654,20 @@ layer(TestLayer)("OpenAPI Plugin", (it) => { yield* executor.openapi.addSpec({ spec: specJson, - scope: ORG_SCOPE as string, + scope: String(ORG_SCOPE), namespace: "shared", baseUrl: "https://org.example.com", name: "Org Source", }); yield* executor.openapi.addSpec({ spec: specJson, - scope: USER_SCOPE as string, + scope: String(USER_SCOPE), namespace: "shared", name: "User Source", }); findManyCalls.length = 0; - const userView = yield* executor.openapi.getSource("shared", USER_SCOPE as string); + const userView = yield* executor.openapi.getSource("shared", String(USER_SCOPE)); expect(userView?.config.baseUrl).toBe("https://org.example.com"); expect(findManyCalls.some((call) => call.model === "openapi_source")).toBe(false); @@ -689,23 +691,23 @@ layer(TestLayer)("OpenAPI Plugin", (it) => { yield* executor.openapi.addSpec({ spec: specJson, - scope: ORG_SCOPE as string, + scope: String(ORG_SCOPE), namespace: "shared", baseUrl: "", name: "Org Source", }); yield* executor.openapi.addSpec({ spec: specJson, - scope: USER_SCOPE as string, + scope: String(USER_SCOPE), namespace: "shared", baseUrl: "", name: "User Source", }); - yield* executor.openapi.removeSpec("shared", USER_SCOPE as string); + yield* executor.openapi.removeSpec("shared", String(USER_SCOPE)); - const userView = yield* executor.openapi.getSource("shared", USER_SCOPE as string); - const orgView = yield* executor.openapi.getSource("shared", ORG_SCOPE as string); + const userView = yield* executor.openapi.getSource("shared", String(USER_SCOPE)); + const orgView = yield* executor.openapi.getSource("shared", String(ORG_SCOPE)); expect(userView).toBeNull(); expect(orgView?.name).toBe("Org 
Source"); @@ -729,26 +731,26 @@ layer(TestLayer)("OpenAPI Plugin", (it) => { yield* executor.openapi.addSpec({ spec: specJson, - scope: ORG_SCOPE as string, + scope: String(ORG_SCOPE), namespace: "shared", baseUrl: "https://org.example.com", name: "Org Source", }); yield* executor.openapi.addSpec({ spec: specJson, - scope: USER_SCOPE as string, + scope: String(USER_SCOPE), namespace: "shared", baseUrl: "https://user.example.com", name: "User Source", }); - yield* executor.openapi.updateSource("shared", USER_SCOPE as string, { + yield* executor.openapi.updateSource("shared", String(USER_SCOPE), { name: "User Renamed", baseUrl: "https://user-new.example.com", }); - const userView = yield* executor.openapi.getSource("shared", USER_SCOPE as string); - const orgView = yield* executor.openapi.getSource("shared", ORG_SCOPE as string); + const userView = yield* executor.openapi.getSource("shared", String(USER_SCOPE)); + const orgView = yield* executor.openapi.getSource("shared", String(ORG_SCOPE)); expect(userView?.name).toBe("User Renamed"); expect(userView?.config.baseUrl).toBe("https://user-new.example.com"); @@ -939,7 +941,7 @@ layer(TestLayer)("OpenAPI Plugin", (it) => { const failure = yield* executor.secrets .remove(SecretId.make("locked")) .pipe(Effect.flip); - expect((failure as { _tag: string })._tag).toBe("SecretInUseError"); + expect(Predicate.isTagged(failure, "SecretInUseError")).toBe(true); // Detach the binding, then remove succeeds. 
yield* executor.openapi.removeSourceBinding( From ff6dbd3860175c28a71d8c70acd4632991be9c9f Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:30:17 -0700 Subject: [PATCH 021/108] Use typed OAuth API boundaries --- .../src/services/sources-api.node.test.ts | 35 +++++++++--------- packages/core/api/src/handlers/oauth.ts | 36 ++++++++++++++----- 2 files changed, 45 insertions(+), 26 deletions(-) diff --git a/apps/cloud/src/services/sources-api.node.test.ts b/apps/cloud/src/services/sources-api.node.test.ts index 0e0f0d0cc..e196fc69f 100644 --- a/apps/cloud/src/services/sources-api.node.test.ts +++ b/apps/cloud/src/services/sources-api.node.test.ts @@ -4,7 +4,7 @@ // single org. import { describe, expect, it } from "@effect/vitest"; -import { Effect } from "effect"; +import { Effect, Result, Schema } from "effect"; import http from "node:http"; import { readFileSync } from "node:fs"; import type { AddressInfo } from "node:net"; @@ -173,6 +173,11 @@ const GRAPHQL_INTROSPECTION_RESPONSE = { }, }; +const GraphqlRequestSchema = Schema.Struct({ + query: Schema.optional(Schema.String), + variables: Schema.optional(Schema.Record(Schema.String, Schema.Unknown)), +}); + const startGraphqlServer = () => { const requests: Array<{ readonly query: string; readonly variables: unknown }> = []; const server = http.createServer(async (req, res) => { @@ -182,10 +187,9 @@ const startGraphqlServer = () => { return; } - const parsed = JSON.parse(await readBody(req)) as { - readonly query?: string; - readonly variables?: Record; - }; + const parsed = await Schema.decodeUnknownPromise(Schema.fromJsonString(GraphqlRequestSchema))( + await readBody(req), + ); const query = parsed.query ?? ""; requests.push({ query, variables: parsed.variables ?? 
null }); @@ -411,9 +415,8 @@ describe("sources api (HTTP)", () => { }), ); - if (execution.status !== "completed") { - throw new Error(`Expected completed execution, got ${execution.status}`); - } + expect(execution.status).toBe("completed"); + if (execution.status !== "completed") return; expect(execution.isError).toBe(false); expect(execution.structured).toMatchObject({ status: "completed", @@ -448,7 +451,7 @@ describe("sources api (HTTP)", () => { }) .pipe(Effect.result), ); - expect(addResult._tag).toBe("Failure"); + expect(Result.isFailure(addResult)).toBe(true); const fetched = yield* asOrg(org, (client) => client.mcp.getSource({ params: { scopeId, namespace } }), @@ -518,9 +521,8 @@ describe("sources api (HTTP)", () => { }), ); - if (execution.status !== "completed") { - throw new Error(`Expected completed execution, got ${execution.status}`); - } + expect(execution.status).toBe("completed"); + if (execution.status !== "completed") return; expect(execution.isError).toBe(false); expect(execution.structured).toMatchObject({ status: "completed", @@ -587,9 +589,8 @@ describe("sources api (HTTP)", () => { }), ); - if (execution.status !== "completed") { - throw new Error(`Expected completed execution, got ${execution.status}`); - } + expect(execution.status).toBe("completed"); + if (execution.status !== "completed") return; expect(execution.isError).toBe(false); expect(execution.structured).toMatchObject({ status: "completed", @@ -635,7 +636,7 @@ describe("sources api (HTTP)", () => { .remove({ params: { scopeId: ScopeId.make(org), sourceId: ghost } }) .pipe(Effect.result), ); - expect(result._tag).toBe("Success"); + expect(Result.isSuccess(result)).toBe(true); }), ); @@ -651,7 +652,7 @@ describe("sources api (HTTP)", () => { .remove({ params: { scopeId: ScopeId.make(org), sourceId: "openapi" } }) .pipe(Effect.result), ); - expect(result._tag).toBe("Failure"); + expect(Result.isFailure(result)).toBe(true); }), ); diff --git 
a/packages/core/api/src/handlers/oauth.ts b/packages/core/api/src/handlers/oauth.ts index 443a3d812..b850ea263 100644 --- a/packages/core/api/src/handlers/oauth.ts +++ b/packages/core/api/src/handlers/oauth.ts @@ -6,7 +6,7 @@ import { HttpApiBuilder } from "effect/unstable/httpapi"; import { HttpServerResponse } from "effect/unstable/http"; -import { Effect } from "effect"; +import { Effect, Option, Predicate, Schema } from "effect"; import { runOAuthCallback } from "../oauth-popup"; import { @@ -39,19 +39,35 @@ const resolveOAuthSecretBackedMap = makeError(`Secret not found for "${name}"`), }).pipe( Effect.mapError((error) => - error instanceof OAuthProbeError || error instanceof OAuthStartError + Predicate.isTagged(error, "OAuthProbeError") || Predicate.isTagged(error, "OAuthStartError") ? (error as E) : makeError("Secret resolution failed"), ), ); +const decodeOAuthStartError = Schema.decodeUnknownOption(OAuthStartError); +const decodeOAuthCompleteError = Schema.decodeUnknownOption(OAuthCompleteError); +const decodeOAuthProbeError = Schema.decodeUnknownOption(OAuthProbeError); +const decodeOAuthSessionNotFoundError = Schema.decodeUnknownOption(OAuthSessionNotFoundError); + +const getOAuthErrorMessage = ( + error: unknown, + decode: (input: unknown) => Option.Option, +): string | undefined => + Option.match(decode(error), { + onNone: () => undefined, + onSome: (oauthError) => oauthError.message, + }); + const toPopupErrorMessage = (error: unknown): string => { - if (error instanceof OAuthStartError) return error.message; - if (error instanceof OAuthCompleteError) return error.message; - if (error instanceof OAuthProbeError) return error.message; - if (error instanceof OAuthSessionNotFoundError) { - return `OAuth session not found: ${error.sessionId}`; - } + const message = + getOAuthErrorMessage(error, decodeOAuthStartError) ?? + getOAuthErrorMessage(error, decodeOAuthCompleteError) ?? 
+ getOAuthErrorMessage(error, decodeOAuthProbeError); + if (message) return message; + + const sessionNotFound = decodeOAuthSessionNotFoundError(error); + if (Option.isSome(sessionNotFound)) return `OAuth session not found: ${sessionNotFound.value.sessionId}`; return "Authentication failed"; }; @@ -147,7 +163,9 @@ export const OAuthHandlers = HttpApiBuilder.group(ExecutorApi, "oauth", (handler Effect.tapError((cause) => Effect.logError("OAuth callback completion failed", cause), ), - Effect.catchCause(() => Effect.fail(new Error("Authentication failed"))), + Effect.catchCause(() => + Effect.fail(new OAuthCompleteError({ message: "Authentication failed" })), + ), ), urlParams, toErrorMessage: toPopupErrorMessage, From fdb0bcc1643398625bef0fac1145172bbf7965a1 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:31:51 -0700 Subject: [PATCH 022/108] Document WorkOS Vault test error boundary --- .../plugins/workos-vault/src/sdk/secret-store.test.ts | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/plugins/workos-vault/src/sdk/secret-store.test.ts b/packages/plugins/workos-vault/src/sdk/secret-store.test.ts index ef0e12e08..c36a4c33e 100644 --- a/packages/plugins/workos-vault/src/sdk/secret-store.test.ts +++ b/packages/plugins/workos-vault/src/sdk/secret-store.test.ts @@ -100,9 +100,11 @@ const makeFakeClient = (options?: { readonly context: Record; }) => { if (options?.rejectNamesWithColon && name.includes(":")) { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fake WorkOS SDK promise rejects with status-shaped errors throw new FakeInvalidRequestError(`Invalid object name "${name}"`); } if (objects.has(name)) { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fake WorkOS SDK promise rejects with status-shaped errors throw new FakeConflictError(`Object "${name}" already exists`); } const id = nextId(); @@ -113,15 +115,18 @@ const 
makeFakeClient = (options?: { readObjectByName: async (name: string) => { if (options?.rejectNamesWithColon && name.includes(":")) { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fake WorkOS SDK promise rejects with status-shaped errors throw new FakeInvalidRequestError(`Invalid object name "${name}"`); } if ( options?.rejectReadNamesLongerThan !== undefined && name.length > options.rejectReadNamesLongerThan ) { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fake WorkOS SDK promise rejects with status-shaped errors throw new FakeInvalidRequestError(`Invalid object name "${name}"`); } const object = objects.get(name); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fake WorkOS SDK promise rejects with status-shaped errors if (!object) throw new FakeNotFoundError(`Object "${name}" not found`); return object; }, @@ -136,15 +141,18 @@ const makeFakeClient = (options?: { readonly versionCheck?: string; }) => { const current = [...objects.values()].find((o) => o.id === id); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fake WorkOS SDK promise rejects with status-shaped errors if (!current) throw new FakeNotFoundError(`Object "${id}" not found`); if ( conflictPending && current.name.endsWith("/secrets/conflict") ) { conflictPending = false; + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fake WorkOS SDK promise rejects with status-shaped errors throw new FakeConflictError(`Injected conflict for "${id}"`); } if (versionCheck && current.metadata.versionId !== versionCheck) { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fake WorkOS SDK promise rejects with status-shaped errors throw new FakeConflictError(`Version mismatch for "${id}"`); } const nextVersion = current.metadata.versionId.replace( @@ -166,6 +174,7 @@ const makeFakeClient = (options?: { deleteObject: async ({ id }: { readonly id: string }) => { const 
entry = [...objects.entries()].find(([, o]) => o.id === id); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fake WorkOS SDK promise rejects with status-shaped errors if (!entry) throw new FakeNotFoundError(`Object "${id}" not found`); objects.delete(entry[0]); }, From cf41ce1cc8f300f7772f21f2718f68addb573964 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 19:57:13 -0700 Subject: [PATCH 023/108] Use typed boundaries in MCP OAuth tests --- .../cloud/src/services/mcp-oauth.node.test.ts | 28 ++++++++---- apps/local/src/server/mcp-oauth.test.ts | 43 ++++++++++++++----- 2 files changed, 52 insertions(+), 19 deletions(-) diff --git a/apps/cloud/src/services/mcp-oauth.node.test.ts b/apps/cloud/src/services/mcp-oauth.node.test.ts index 4136983cf..cc9ad3515 100644 --- a/apps/cloud/src/services/mcp-oauth.node.test.ts +++ b/apps/cloud/src/services/mcp-oauth.node.test.ts @@ -27,7 +27,7 @@ import { createServer, type Server } from "node:http"; import type { AddressInfo } from "node:net"; import { createHash, randomBytes } from "node:crypto"; -import { Effect } from "effect"; +import { Effect, Option, Schema } from "effect"; import { ScopeId } from "@executor-js/sdk"; import { asUser, testUserOrgScopeId } from "./__test-harness__/api-harness"; @@ -43,6 +43,15 @@ interface FakeServer { readonly close: () => Promise; } +const RegistrationBody = Schema.Struct({ + redirect_uris: Schema.optional(Schema.Array(Schema.String)), + grant_types: Schema.optional(Schema.Array(Schema.String)), + response_types: Schema.optional(Schema.Array(Schema.String)), +}); +const decodeRegistrationBody = Schema.decodeUnknownOption( + Schema.fromJsonString(RegistrationBody), +); + const startFakeServer = async (): Promise => { const clients = new Map(); const codes = new Map< @@ -82,6 +91,7 @@ const startFakeServer = async (): Promise => { res.end(payload); }; + // oxlint-disable-next-line 
executor/no-try-catch-or-throw -- boundary: fake HTTP server returns stable 500 responses for unexpected handler failures try { if (url.pathname === "/.well-known/oauth-protected-resource") { const origin = `http://${req.headers.host}`; @@ -108,11 +118,11 @@ const startFakeServer = async (): Promise => { if (url.pathname === "/register" && req.method === "POST") { const body = await readBody(req); - const parsed = JSON.parse(body) as { - readonly redirect_uris?: readonly string[]; - readonly grant_types?: readonly string[]; - readonly response_types?: readonly string[]; - }; + const parsedOption = decodeRegistrationBody(body); + if (Option.isNone(parsedOption)) { + return send(400, { error: "invalid_registration" }); + } + const parsed = parsedOption.value; const clientId = next("client"); clients.set(clientId, { redirect_uris: parsed.redirect_uris ?? [] }); registrations += 1; @@ -214,8 +224,8 @@ const startFakeServer = async (): Promise => { } send(404, { error: "not_found", params: url.pathname }); - } catch (e) { - send(500, { error: "server_error", message: String(e) }); + } catch { + send(500, { error: "server_error", message: "fake server failed" }); } }); @@ -244,10 +254,12 @@ const followAuthorize = async ( const response = await fetch(authorizationUrl, { redirect: "manual" }); expect(response.status).toBe(302); const location = response.headers.get("location"); + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: browser redirect helper rejects malformed fake OAuth responses if (!location) throw new Error("no location header on authorize redirect"); const dest = new URL(location); const code = dest.searchParams.get("code"); const state = dest.searchParams.get("state"); + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: browser redirect helper rejects malformed fake OAuth responses if (!code || !state) throw new Error(`redirect missing code/state: 
${location}`); return { code, state }; }; diff --git a/apps/local/src/server/mcp-oauth.test.ts b/apps/local/src/server/mcp-oauth.test.ts index 25e6aab3c..7bca6e10d 100644 --- a/apps/local/src/server/mcp-oauth.test.ts +++ b/apps/local/src/server/mcp-oauth.test.ts @@ -31,7 +31,7 @@ import { migrate } from "drizzle-orm/bun-sqlite/migrator"; import { HttpApi, HttpApiBuilder, HttpApiClient } from "effect/unstable/httpapi"; import { FetchHttpClient, HttpRouter, HttpServer } from "effect/unstable/http"; -import { Effect, Layer } from "effect"; +import { Effect, Layer, Option, Schema } from "effect"; import { addGroup, observabilityMiddleware } from "@executor-js/api"; import { @@ -79,6 +79,15 @@ interface FakeServer { readonly close: () => Promise; } +const RegistrationBody = Schema.Struct({ + redirect_uris: Schema.optional(Schema.Array(Schema.String)), + grant_types: Schema.optional(Schema.Array(Schema.String)), + response_types: Schema.optional(Schema.Array(Schema.String)), +}); +const decodeRegistrationBody = Schema.decodeUnknownOption( + Schema.fromJsonString(RegistrationBody), +); + const startFakeServer = async (): Promise => { const clients = new Map(); const codes = new Map< @@ -115,6 +124,7 @@ const startFakeServer = async (): Promise => { res.end(payload); }; + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fake HTTP server returns stable 500 responses for unexpected handler failures try { if (url.pathname === "/.well-known/oauth-protected-resource") { const origin = `http://${req.headers.host}`; @@ -141,11 +151,11 @@ const startFakeServer = async (): Promise => { if (url.pathname === "/register" && req.method === "POST") { const body = await readBody(req); - const parsed = JSON.parse(body) as { - readonly redirect_uris?: readonly string[]; - readonly grant_types?: readonly string[]; - readonly response_types?: readonly string[]; - }; + const parsedOption = decodeRegistrationBody(body); + if (Option.isNone(parsedOption)) { + return 
send(400, { error: "invalid_registration" }); + } + const parsed = parsedOption.value; const clientId = next("client"); clients.set(clientId, { redirect_uris: parsed.redirect_uris ?? [] }); return send(201, { @@ -225,8 +235,8 @@ const startFakeServer = async (): Promise => { } send(404, { error: "not_found", path: url.pathname }); - } catch (e) { - send(500, { error: "server_error", message: String(e) }); + } catch { + send(500, { error: "server_error", message: "fake server failed" }); } }); @@ -318,8 +328,16 @@ const startHarness = async (tmpDir: string): Promise => { )) as typeof globalThis.fetch, scopeId, dispose: async () => { - await disposeHandler().catch(() => undefined); - await Effect.runPromise(executor.close()).catch(() => undefined); + await Effect.runPromise( + Effect.ignore( + Effect.tryPromise(() => disposeHandler()), + ), + ); + await Effect.runPromise( + Effect.ignore( + Effect.tryPromise(() => Effect.runPromise(executor.close())), + ), + ); sqlite.close(); }, }; @@ -355,12 +373,15 @@ const followAuthorize = async ( const response = await fetch(authorizationUrl, { redirect: "manual" }); expect(response.status).toBe(302); const location = response.headers.get("location"); + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: browser redirect helper rejects malformed fake OAuth responses if (!location) throw new Error("no location header on authorize redirect"); const dest = new URL(location); const code = dest.searchParams.get("code"); const state = dest.searchParams.get("state"); - if (!code || !state) + if (!code || !state) { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: browser redirect helper rejects malformed fake OAuth responses throw new Error(`redirect missing code/state: ${location}`); + } return { code, state }; }; From 33919dd5eafdc51b49d5a8b23fe016a1730cea02 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan 
<39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:01:41 -0700 Subject: [PATCH 024/108] Use Effect boundaries in release smoke test --- tests/release-bootstrap-smoke.test.ts | 58 ++++++++++++++++----------- 1 file changed, 34 insertions(+), 24 deletions(-) diff --git a/tests/release-bootstrap-smoke.test.ts b/tests/release-bootstrap-smoke.test.ts index ef56d9bc4..c6e1bcb6a 100644 --- a/tests/release-bootstrap-smoke.test.ts +++ b/tests/release-bootstrap-smoke.test.ts @@ -5,6 +5,7 @@ import { mkdtemp, mkdir, readFile, rm, cp } from "node:fs/promises"; import { tmpdir } from "node:os"; import { join, resolve } from "node:path"; import { fileURLToPath } from "node:url"; +import { Effect, Exit } from "effect"; type CommandResult = { readonly exitCode: number; @@ -56,28 +57,34 @@ const runCommand = async ( }; const listen = async (server: ReturnType): Promise => - new Promise((resolvePort, reject) => { - server.once("error", reject); - server.listen(0, "127.0.0.1", () => { - const address = server.address(); - if (!address || typeof address === "string") { - reject(new Error("Failed to resolve server address")); - return; - } - resolvePort(address.port); - }); - }); + Effect.runPromise( + Effect.callback((resume) => { + const onError = (cause: unknown) => resume(Effect.fail(cause)); + server.once("error", onError); + server.listen(0, "127.0.0.1", () => { + server.off("error", onError); + const address = server.address(); + if (!address || typeof address === "string") { + resume(Effect.fail("Failed to resolve server address")); + return; + } + resume(Effect.succeed(address.port)); + }); + }), + ); const closeServer = async (server: ReturnType): Promise => - new Promise((resolveClose, reject) => { - server.close((error) => { - if (error) { - reject(error); - return; - } - resolveClose(); - }); - }); + Effect.runPromise( + Effect.callback((resume) => { + server.close((error) => { + if (error) { + resume(Effect.fail(error)); + return; + } + 
resume(Effect.void); + }); + }), + ); const platformName = process.platform === "win32" ? "win32" : process.platform; const archName = process.arch; @@ -121,6 +128,7 @@ describe("release bootstrap smoke", () => { await mkdir(join(installedWrapperDir, "node_modules"), { recursive: true }); await cp(platformDir, installedPlatformDir, { recursive: true }); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: release smoke test must clean temp install files after process checks try { const firstRun = await runCommand( process.execPath, @@ -178,18 +186,20 @@ describe("release bootstrap smoke", () => { webStderr += chunk; }); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: release smoke test must stop the spawned web process try { const deadline = Date.now() + 30_000; let rootResponse: Response | null = null; while (Date.now() < deadline) { await new Promise((resolveDelay) => setTimeout(resolveDelay, 250)); - try { - rootResponse = await fetch(`http://127.0.0.1:${webPort}/`); + const fetchExit = await Effect.runPromiseExit( + Effect.tryPromise(() => fetch(`http://127.0.0.1:${webPort}/`)), + ); + if (Exit.isSuccess(fetchExit)) { + rootResponse = fetchExit.value; if (rootResponse.ok) { break; } - } catch { - // keep polling until the server is ready } } From cf3f93798665a34a5ba20ad91e7e66cdbfdb975e Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:09:26 -0700 Subject: [PATCH 025/108] Use promiseExit in GraphQL source UI --- .../graphql/src/react/AddGraphqlSource.tsx | 58 ++++++++++--------- .../graphql/src/react/EditGraphqlSource.tsx | 57 +++++++++--------- 2 files changed, 60 insertions(+), 55 deletions(-) diff --git a/packages/plugins/graphql/src/react/AddGraphqlSource.tsx b/packages/plugins/graphql/src/react/AddGraphqlSource.tsx index c4c615434..86da5e208 100644 --- a/packages/plugins/graphql/src/react/AddGraphqlSource.tsx +++ 
b/packages/plugins/graphql/src/react/AddGraphqlSource.tsx @@ -1,5 +1,6 @@ import { useCallback, useState } from "react"; import { useAtomSet } from "@effect/atom-react"; +import * as Exit from "effect/Exit"; import { useScope } from "@executor-js/react/api/scope-context"; import { sourceWriteKeys } from "@executor-js/react/api/reactivity-keys"; @@ -55,7 +56,7 @@ export default function AddGraphqlSource(props: { const [tokens, setTokens] = useState(null); const scopeId = useScope(); - const doAdd = useAtomSet(addGraphqlSource, { mode: "promise" }); + const doAdd = useAtomSet(addGraphqlSource, { mode: "promiseExit" }); const { beginAdd } = usePendingSources(); const secretList = useSecretPickerSecrets(); const oauth = useOAuthPopupFlow({ @@ -118,35 +119,36 @@ export default function AddGraphqlSource(props: { kind: "graphql", url: trimmedEndpoint || undefined, }); - try { - await doAdd({ - params: { scopeId }, - payload: { - endpoint: trimmedEndpoint, - name: identity.name.trim() || undefined, - namespace: slugifyNamespace(identity.namespace) || undefined, - ...(Object.keys(headerMap).length > 0 ? { headers: headerMap } : {}), - ...(Object.keys(queryParams).length > 0 - ? { queryParams: queryParams as Record } - : {}), - ...(authMode === "oauth2" && tokens - ? { - auth: { - kind: "oauth2" as const, - connectionId: tokens.connectionId, - }, - } - : {}), - }, - reactivityKeys: sourceWriteKeys, - }); - props.onComplete(); - } catch (e) { - setAddError(e instanceof Error ? e.message : "Failed to add source"); + const exit = await doAdd({ + params: { scopeId }, + payload: { + endpoint: trimmedEndpoint, + name: identity.name.trim() || undefined, + namespace: slugifyNamespace(identity.namespace) || undefined, + ...(Object.keys(headerMap).length > 0 ? { headers: headerMap } : {}), + ...(Object.keys(queryParams).length > 0 + ? { queryParams: queryParams as Record } + : {}), + ...(authMode === "oauth2" && tokens + ? 
{ + auth: { + kind: "oauth2" as const, + connectionId: tokens.connectionId, + }, + } + : {}), + }, + reactivityKeys: sourceWriteKeys, + }); + placeholder.done(); + + if (Exit.isFailure(exit)) { + setAddError("Failed to add source"); setAdding(false); - } finally { - placeholder.done(); + return; } + + props.onComplete(); }; return ( diff --git a/packages/plugins/graphql/src/react/EditGraphqlSource.tsx b/packages/plugins/graphql/src/react/EditGraphqlSource.tsx index 35e8aebd4..90c933706 100644 --- a/packages/plugins/graphql/src/react/EditGraphqlSource.tsx +++ b/packages/plugins/graphql/src/react/EditGraphqlSource.tsx @@ -1,5 +1,6 @@ import { useState } from "react"; import { useAtomValue, useAtomSet } from "@effect/atom-react"; +import * as Exit from "effect/Exit"; import * as AsyncResult from "effect/unstable/reactivity/AsyncResult"; import { graphqlSourceAtom, updateGraphqlSource } from "./atoms"; import { useScope } from "@executor-js/react/api/scope-context"; @@ -41,7 +42,7 @@ const graphqlOAuthConnectionId = (namespaceSlug: string): string => function EditForm(props: { sourceId: string; initial: EditableSource; onSave: () => void }) { const scopeId = useScope(); - const doUpdate = useAtomSet(updateGraphqlSource, { mode: "promise" }); + const doUpdate = useAtomSet(updateGraphqlSource, { mode: "promiseExit" }); const secretList = useSecretPickerSecrets(); const identity = useSourceIdentity({ @@ -71,34 +72,36 @@ function EditForm(props: { sourceId: string; initial: EditableSource; onSave: () setSaving(true); setError(null); const { headers, queryParams } = serializeHttpCredentials(credentials); - try { - await doUpdate({ - params: { scopeId, namespace: props.sourceId }, - payload: { - name: identity.name.trim() || undefined, - endpoint: endpoint.trim() || undefined, - headers, - queryParams: queryParams as Record, - auth: - authMode === "oauth2" - ? { - kind: "oauth2", - connectionId: - props.initial.auth.kind === "oauth2" - ? 
props.initial.auth.connectionId - : graphqlOAuthConnectionId(props.initial.namespace), - } - : { kind: "none" }, - }, - reactivityKeys: sourceWriteKeys, - }); - setDirty(false); - props.onSave(); - } catch (e) { - setError(e instanceof Error ? e.message : "Failed to update source"); - } finally { + const exit = await doUpdate({ + params: { scopeId, namespace: props.sourceId }, + payload: { + name: identity.name.trim() || undefined, + endpoint: endpoint.trim() || undefined, + headers, + queryParams: queryParams as Record, + auth: + authMode === "oauth2" + ? { + kind: "oauth2", + connectionId: + props.initial.auth.kind === "oauth2" + ? props.initial.auth.connectionId + : graphqlOAuthConnectionId(props.initial.namespace), + } + : { kind: "none" }, + }, + reactivityKeys: sourceWriteKeys, + }); + + if (Exit.isFailure(exit)) { + setError("Failed to update source"); setSaving(false); + return; } + + setDirty(false); + props.onSave(); + setSaving(false); }; return ( From c1c6430bf0248e9f7448ccaaeb3b7aadfe1302bb Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:09:58 -0700 Subject: [PATCH 026/108] Use Effect exit for local update check --- apps/local/src/web/shell.tsx | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/apps/local/src/web/shell.tsx b/apps/local/src/web/shell.tsx index be6e7804c..a9cbba822 100644 --- a/apps/local/src/web/shell.tsx +++ b/apps/local/src/web/shell.tsx @@ -1,6 +1,7 @@ import { Link, Outlet, useLocation } from "@tanstack/react-router"; import { useCallback, useEffect, useRef, useState } from "react"; import { useAtomRefresh } from "@effect/atom-react"; +import { Data, Effect, Exit } from "effect"; import * as AsyncResult from "effect/unstable/reactivity/AsyncResult"; import { sourcesAtom, toolsAtom } from "@executor-js/react/api/atoms"; import { useSourcesWithPending } from "@executor-js/react/api/optimistic"; @@ -29,6 +30,10 @@ type 
UpdateChannel = "latest" | "beta"; const EXECUTOR_DIST_TAGS_PATH = "/v1/app/npm/dist-tags"; +class LatestVersionCheckError extends Data.TaggedError("LatestVersionCheckError")<{ + readonly cause: unknown; +}> {} + type ParsedVersion = { readonly major: number; readonly minor: number; @@ -95,15 +100,22 @@ function useLatestVersion(currentVersion: string) { useEffect(() => { let cancelled = false; - fetch(EXECUTOR_DIST_TAGS_PATH) - .then(async (res) => { - if (!res.ok) throw new Error(`Failed to load dist tags: ${res.status}`); - return res.json() as Promise>>; - }) - .then((data) => { - if (!cancelled) setLatestVersion(data[channel] ?? null); - }) - .catch(() => {}); + + const latestVersionEffect = Effect.tryPromise({ + try: async () => { + const res = await fetch(EXECUTOR_DIST_TAGS_PATH); + if (!res.ok) return {}; + return (await res.json()) as Partial>; + }, + catch: (cause) => new LatestVersionCheckError({ cause }), + }); + + void Effect.runPromiseExit(latestVersionEffect).then((exit) => { + if (!cancelled && Exit.isSuccess(exit)) { + setLatestVersion(exit.value[channel] ?? null); + } + }); + return () => { cancelled = true; }; From 26475f9769324a03d63ce9d31b45d1988a8ff6db Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:10:33 -0700 Subject: [PATCH 027/108] Clarify shared UI and config boundaries --- packages/core/config/src/sink.ts | 3 +-- packages/react/src/components/field.tsx | 3 +++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/core/config/src/sink.ts b/packages/core/config/src/sink.ts index a35596bd7..6db78dd09 100644 --- a/packages/core/config/src/sink.ts +++ b/packages/core/config/src/sink.ts @@ -52,8 +52,7 @@ export interface ConfigFileSinkOptions { } const defaultOnError = (op: "upsert" | "remove", err: unknown): void => { - const msg = err instanceof Error ? 
err.message : String(err); - console.warn(`[config-sink] ${op} failed: ${msg}`); + console.warn(`[config-sink] ${op} failed`, err); }; export const makeFileConfigSink = ( diff --git a/packages/react/src/components/field.tsx b/packages/react/src/components/field.tsx index fc4782e20..761717762 100644 --- a/packages/react/src/components/field.tsx +++ b/packages/react/src/components/field.tsx @@ -191,14 +191,17 @@ function FieldError({ return null; } + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: FieldError receives typed UI validation messages, not thrown errors const uniqueErrors = [...new Map(errors.map((error) => [error?.message, error])).values()]; if (uniqueErrors?.length == 1) { + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: FieldError receives typed UI validation messages, not thrown errors return uniqueErrors[0]?.message; } return (
    + {/* oxlint-disable-next-line executor/no-unknown-error-message -- boundary: FieldError receives typed UI validation messages, not thrown errors */} {uniqueErrors.map((error, index) => error?.message &&
  • {error.message}
  • )}
); From 15b056b8d925c97a6a5271d5ca1a7246a4715a5a Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:13:49 -0700 Subject: [PATCH 028/108] Use Effect cleanup in MCP worker transport --- .../src/services/mcp-worker-transport.ts | 38 +++++++++++-------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/apps/cloud/src/services/mcp-worker-transport.ts b/apps/cloud/src/services/mcp-worker-transport.ts index 8a4794fc5..06cff17de 100644 --- a/apps/cloud/src/services/mcp-worker-transport.ts +++ b/apps/cloud/src/services/mcp-worker-transport.ts @@ -1,6 +1,6 @@ import { WorkerTransport, type WorkerTransportOptions } from "agents/mcp"; import type { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; -import { Data, Effect } from "effect"; +import { Data, Effect, Exit } from "effect"; export class McpWorkerTransportError extends Data.TaggedError("McpWorkerTransportError")<{ readonly cause: unknown; @@ -64,19 +64,20 @@ const extractJsonRpcRequestIdKeys = async (request: Request): Promise { - if (!message || typeof message !== "object") return []; - const rpc = message as JsonRpcLike; - if (typeof rpc.method !== "string") return []; - const key = jsonRpcRequestIdKey(rpc.id); - return key ? [key] : []; - }); - } catch { + const parsed = await Effect.runPromiseExit( + Effect.tryPromise(() => request.clone().json()), + ); + if (Exit.isFailure(parsed)) { return []; } + const messages = Array.isArray(parsed.value) ? parsed.value : [parsed.value]; + return messages.flatMap((message) => { + if (!message || typeof message !== "object") return []; + const rpc = message as JsonRpcLike; + if (typeof rpc.method !== "string") return []; + const key = jsonRpcRequestIdKey(rpc.id); + return key ? 
[key] : []; + }); }; // Hard ceiling on how long a same-id JSON-RPC request will wait for an @@ -109,9 +110,12 @@ export class JsonRpcRequestIdQueue { this.inFlight.set(id, current); } + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: promise queue must release in-flight ids after callback completion try { if (previous.length > 0) { - const settled = Promise.all(previous.map((p) => p.catch(() => undefined))); + const settled = Promise.all( + previous.map((p) => Effect.runPromise(Effect.ignore(Effect.tryPromise(() => p)))), + ); const timeout = new Promise<"timeout">((resolve) => setTimeout(() => resolve("timeout"), this.previousTimeoutMs), ); @@ -183,9 +187,13 @@ export const makeMcpWorkerTransport = ( return result.response; }), close: () => - Effect.promise(() => transport.close().catch(() => undefined)).pipe( + Effect.ignore( + Effect.tryPromise({ + try: () => transport.close(), + catch: (cause) => new McpWorkerTransportError({ cause }), + }), + ).pipe( Effect.withSpan("mcp.worker_transport.close"), - Effect.orDie, ), } satisfies McpWorkerTransport; }).pipe(Effect.withSpan("mcp.worker_transport.make")); From f7e6ca406dc3c1e81b6823cdc432906ce8ea0e14 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:14:20 -0700 Subject: [PATCH 029/108] Simplify tenant isolation assertions --- .../services/tenant-isolation.node.test.ts | 24 ++++++++----------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/apps/cloud/src/services/tenant-isolation.node.test.ts b/apps/cloud/src/services/tenant-isolation.node.test.ts index db89cdf87..730216ed6 100644 --- a/apps/cloud/src/services/tenant-isolation.node.test.ts +++ b/apps/cloud/src/services/tenant-isolation.node.test.ts @@ -76,15 +76,13 @@ describe("tenant isolation (HTTP)", () => { }), ); - const result = yield* asOrg(orgB, (client) => - client.openapi - .getSource({ params: { scopeId: ScopeId.make(orgB), namespace: namespaceA } 
}) - .pipe(Effect.result), + const source = yield* asOrg(orgB, (client) => + client.openapi.getSource({ + params: { scopeId: ScopeId.make(orgB), namespace: namespaceA }, + }), ); - expect(result._tag).toBe("Success"); - if (result._tag !== "Success") return; - expect(result.success).toBeNull(); + expect(source).toBeNull(); }), ); @@ -121,15 +119,13 @@ describe("tenant isolation (HTTP)", () => { }), ); - const result = yield* asOrg(orgB, (client) => - client.secrets - .status({ params: { scopeId: ScopeId.make(orgB), secretId: SecretId.make(secretIdA) } }) - .pipe(Effect.result), + const status = yield* asOrg(orgB, (client) => + client.secrets.status({ + params: { scopeId: ScopeId.make(orgB), secretId: SecretId.make(secretIdA) }, + }), ); - expect(result._tag).toBe("Success"); - if (result._tag !== "Success") return; - expect(result.success.status).toBe("missing"); + expect(status.status).toBe("missing"); }), ); From 987638b0d7677ad4fe4687412821a1fb686c6cc9 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:15:35 -0700 Subject: [PATCH 030/108] Use typed errors in postgres storage tests --- .../core/storage-postgres/src/index.test.ts | 36 ++++++++++++------- 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/packages/core/storage-postgres/src/index.test.ts b/packages/core/storage-postgres/src/index.test.ts index 5363c683c..db39c1a5f 100644 --- a/packages/core/storage-postgres/src/index.test.ts +++ b/packages/core/storage-postgres/src/index.test.ts @@ -7,7 +7,7 @@ // cloud test DB on 5434. 
import { describe, expect, it } from "@effect/vitest"; -import { Effect } from "effect"; +import { Data, Effect } from "effect"; import postgres from "postgres"; import { drizzle } from "drizzle-orm/postgres-js"; import { relations } from "drizzle-orm"; @@ -21,7 +21,7 @@ import { jsonb, } from "drizzle-orm/pg-core"; -import type { DBAdapter } from "@executor-js/storage-core"; +import type { DBAdapter, StorageFailure } from "@executor-js/storage-core"; import { conformanceSchema, runAdapterConformance, @@ -31,6 +31,13 @@ import { makePostgresAdapter } from "./index"; const url = "postgresql://postgres:postgres@127.0.0.1:5435/postgres"; +class PostgresTestDatabaseError extends Data.TaggedError( + "PostgresTestDatabaseError", +)<{ + readonly message: string; + readonly cause: unknown; +}> {} + // max=1 so BEGIN/COMMIT sent via `db.execute(sql.raw(...))` always hit // the same connection — postgres.js with a larger pool rejects unsafe // transaction control that isn't bound to a single reserved client. 
@@ -135,9 +142,10 @@ const createConformanceTables = Effect.tryPromise({ ); }, catch: (cause) => - new Error( - `failed to create postgres conformance tables: ${String(cause)}`, - ), + new PostgresTestDatabaseError({ + message: "failed to create postgres conformance tables", + cause, + }), }); const resetTables = Effect.gen(function* () { @@ -145,18 +153,19 @@ const resetTables = Effect.gen(function* () { try: () => sql`DROP TABLE IF EXISTS "source", "tag", "source_tag", "with_defaults", "blob" CASCADE`.then( () => undefined, - ), + ), catch: (cause) => - new Error( - `failed to reset postgres conformance tables: ${String(cause)}`, - ), + new PostgresTestDatabaseError({ + message: "failed to reset postgres conformance tables", + cause, + }), }); yield* createConformanceTables; }); const withAdapter = ( fn: (adapter: DBAdapter) => Effect.Effect, -): Effect.Effect => +): Effect.Effect => Effect.gen(function* () { yield* resetTables; const db = drizzle(sql, { schema: conformanceTables }); @@ -165,7 +174,7 @@ const withAdapter = ( schema: conformanceSchema, }); return yield* fn(adapter); - }) as Effect.Effect; + }); runAdapterConformance("postgres", withAdapter); @@ -191,7 +200,10 @@ const resetScopedTable = Effect.tryPromise({ ); }, catch: (cause) => - new Error(`failed to reset scoped_item table: ${String(cause)}`), + new PostgresTestDatabaseError({ + message: "failed to reset scoped_item table", + cause, + }), }); const makeScopedAdapter = () => From 3e9f3d5ca46f0cf9d2dbad49045a8fa972898ca7 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:16:00 -0700 Subject: [PATCH 031/108] Clarify drizzle generator boundaries --- packages/core/cli/src/generators/drizzle.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/core/cli/src/generators/drizzle.ts b/packages/core/cli/src/generators/drizzle.ts index d84d4e593..7b864213b 100644 --- a/packages/core/cli/src/generators/drizzle.ts 
+++ b/packages/core/cli/src/generators/drizzle.ts @@ -38,6 +38,7 @@ const getType = ( mysql: `mysqlEnum([${type.map((x) => `'${x}'`).join(", ")}])`, }[dialect]; } + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: generator rejects invalid schema input through the SchemaGenerator promise API throw new TypeError( `Invalid field type for field ${name}`, ); @@ -95,8 +96,9 @@ const getType = ( }, }; - const dbTypeMap = typeMap[type as string]; + const dbTypeMap = typeMap[type]; if (!dbTypeMap) { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: generator rejects unsupported schema input through the SchemaGenerator promise API throw new Error( `Unsupported field type '${field.type}' for field '${name}'.`, ); From a02b3cc990a1110a38f64a9278d8a34c6545cddb Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:18:15 -0700 Subject: [PATCH 032/108] Use stable blob store error messages --- packages/core/storage-file/src/blob-store.ts | 8 +++----- packages/core/storage-postgres/src/blob-store.ts | 8 +++----- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/packages/core/storage-file/src/blob-store.ts b/packages/core/storage-file/src/blob-store.ts index 52e13a1bb..d4be4f800 100644 --- a/packages/core/storage-file/src/blob-store.ts +++ b/packages/core/storage-file/src/blob-store.ts @@ -35,13 +35,11 @@ export interface MakeSqliteBlobStoreOptions { const wrapErr = (op: string) => - (cause: unknown): StorageError => { - const msg = cause instanceof Error ? 
cause.message : String(cause); - return new StorageError({ - message: `[storage-file] blob ${op}: ${msg}`, + (cause: unknown): StorageError => + new StorageError({ + message: `[storage-file] blob ${op} failed`, cause, }); - }; export const makeSqliteBlobStore = ( options: MakeSqliteBlobStoreOptions, diff --git a/packages/core/storage-postgres/src/blob-store.ts b/packages/core/storage-postgres/src/blob-store.ts index 40ad03a01..35fd254db 100644 --- a/packages/core/storage-postgres/src/blob-store.ts +++ b/packages/core/storage-postgres/src/blob-store.ts @@ -36,13 +36,11 @@ export interface MakePostgresBlobStoreOptions { const wrapErr = (op: string) => - (cause: unknown): StorageError => { - const msg = cause instanceof Error ? cause.message : String(cause); - return new StorageError({ - message: `[storage-postgres] blob ${op}: ${msg}`, + (cause: unknown): StorageError => + new StorageError({ + message: `[storage-postgres] blob ${op} failed`, cause, }); - }; export const makePostgresBlobStore = ( options: MakePostgresBlobStoreOptions, From b0d47dfdaa9f3064b5a1c451affd8c1c61c5fc36 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:19:07 -0700 Subject: [PATCH 033/108] Use inferred keychain extension type --- packages/plugins/keychain/src/index.ts | 51 +++++++++++++------------- 1 file changed, 26 insertions(+), 25 deletions(-) diff --git a/packages/plugins/keychain/src/index.ts b/packages/plugins/keychain/src/index.ts index 2f87c2301..5e1396df2 100644 --- a/packages/plugins/keychain/src/index.ts +++ b/packages/plugins/keychain/src/index.ts @@ -46,16 +46,7 @@ export interface KeychainPluginConfig { // Plugin extension — public API on executor.keychain // --------------------------------------------------------------------------- -export interface KeychainExtension { - /** Human-readable name for the keychain on this platform */ - readonly displayName: string; - - /** Whether the current platform supports 
system keychain */ - readonly isSupported: boolean; - - /** Check if a secret exists in the system keychain */ - readonly has: (id: string) => Effect.Effect; -} +export type KeychainExtension = ReturnType; // --------------------------------------------------------------------------- // Plugin definition @@ -69,25 +60,35 @@ const scopedServiceName = ( ctx: PluginCtx, options: KeychainPluginConfig | undefined, ): string => - `${resolveServiceName(options?.serviceName)}/${ctx.scopes[0]!.id as string}`; + `${resolveServiceName(options?.serviceName)}/${ctx.scopes[0]!.id}`; + +const makeKeychainExtension = ( + ctx: PluginCtx, + options: KeychainPluginConfig | undefined, +) => { + const serviceName = scopedServiceName(ctx, options); + return { + /** Human-readable name for the keychain on this platform */ + displayName: displayName(), + + /** Whether the current platform supports system keychain */ + isSupported: isSupportedPlatform(), + + /** Check if a secret exists in the system keychain */ + has: (id: string) => + getPassword(serviceName, id).pipe( + Effect.map((v) => v !== null), + Effect.orElseSucceed(() => false), + ), + }; +}; export const keychainPlugin = definePlugin( (options?: KeychainPluginConfig) => ({ id: "keychain" as const, storage: () => ({}), - extension: (ctx): KeychainExtension => { - const serviceName = scopedServiceName(ctx, options); - return { - displayName: displayName(), - isSupported: isSupportedPlatform(), - has: (id) => - getPassword(serviceName, id).pipe( - Effect.map((v) => v !== null), - Effect.orElseSucceed(() => false), - ), - }; - }, + extension: (ctx): KeychainExtension => makeKeychainExtension(ctx, options), secretProviders: (ctx): Effect.Effect => Effect.gen(function* () { @@ -103,9 +104,9 @@ export const keychainPlugin = definePlugin( ), ), Effect.as(true), - Effect.catch((cause) => + Effect.catch(() => Effect.logWarning( - `keychain unavailable, skipping provider registration: ${cause.message}`, + "keychain unavailable, skipping 
provider registration", ).pipe(Effect.as(false)), ), ); From 10308ec4b0c41f754eee8cbf390fa568e17d42f6 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:32:29 -0700 Subject: [PATCH 034/108] Use typed config parse errors --- packages/core/config/src/load.ts | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/packages/core/config/src/load.ts b/packages/core/config/src/load.ts index b993ddfd1..aa811f687 100644 --- a/packages/core/config/src/load.ts +++ b/packages/core/config/src/load.ts @@ -4,13 +4,13 @@ import type { PlatformError } from "effect/PlatformError"; import * as jsonc from "jsonc-parser"; import { ExecutorFileConfig } from "./schema"; -export class ConfigParseError { - readonly _tag = "ConfigParseError"; - constructor( - readonly path: string, - readonly message: string, - ) {} -} +export class ConfigParseError extends Schema.TaggedErrorClass()( + "ConfigParseError", + { + path: Schema.String, + message: Schema.String, + }, +) {} /** * Load and validate an executor config file. 
@@ -38,11 +38,17 @@ export const loadConfig = ( const msg = errors .map((e) => `offset ${e.offset}: ${jsonc.printParseErrorCode(e.error)}`) .join("; "); - return yield* Effect.fail(new ConfigParseError(path, msg)); + return yield* new ConfigParseError({ path, message: msg }); } const decoded = yield* Schema.decodeUnknownEffect(ExecutorFileConfig)(parsed).pipe( - Effect.mapError((e) => new ConfigParseError(path, String(e))), + Effect.mapError( + (error) => + new ConfigParseError({ + path, + message: error.issue.toString(), + }), + ), ); return decoded; From 84e64a00f47eadfaed61a24423eea60a9a89b62d Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:32:34 -0700 Subject: [PATCH 035/108] Parse plugin loader config with Schema --- packages/core/config/src/load-plugins.ts | 163 ++++++++++++++--------- 1 file changed, 102 insertions(+), 61 deletions(-) diff --git a/packages/core/config/src/load-plugins.ts b/packages/core/config/src/load-plugins.ts index b440c4148..ff6961b09 100644 --- a/packages/core/config/src/load-plugins.ts +++ b/packages/core/config/src/load-plugins.ts @@ -26,6 +26,7 @@ import { dirname, isAbsolute, resolve as resolvePath } from "node:path"; import { pathToFileURL } from "node:url"; import * as fs from "node:fs"; import * as jsonc from "jsonc-parser"; +import { Effect, Schema } from "effect"; import type { AnyPlugin } from "@executor-js/sdk"; @@ -36,7 +37,15 @@ import type { AnyPlugin } from "@executor-js/sdk"; // across the runtime boundary. type LooseConfiguredPlugin = (options?: Record) => AnyPlugin; -import type { PluginConfig } from "./schema"; +import { ExecutorFileConfig } from "./schema"; + +export class LoadPluginsError extends Schema.TaggedErrorClass()( + "LoadPluginsError", + { + message: Schema.String, + cause: Schema.optional(Schema.Unknown), + }, +) {} export interface LoadPluginsFromJsoncOptions { /** Absolute path to `executor.jsonc` (or compatible). 
*/ @@ -57,70 +66,102 @@ export interface LoadPluginsFromJsoncOptions { */ export const loadPluginsFromJsonc = async ( options: LoadPluginsFromJsoncOptions, -): Promise => { - const { path, deps } = options; - if (!fs.existsSync(path)) return null; +): Promise => + Effect.runPromise(loadPluginsFromJsoncEffect(options)); - const raw = fs.readFileSync(path, "utf8"); - const errors: jsonc.ParseError[] = []; - const parsed = jsonc.parse(raw, errors) as - | { plugins?: readonly PluginConfig[] } - | undefined; - if (errors.length > 0) { - const msg = errors - .map((e) => `offset ${e.offset}: ${jsonc.printParseErrorCode(e.error)}`) - .join("; "); - throw new Error(`[load-plugins] failed to parse ${path}: ${msg}`); - } +const loadPluginsFromJsoncEffect = ( + options: LoadPluginsFromJsoncOptions, +): Effect.Effect => + Effect.gen(function* () { + const { path, deps } = options; + if (!fs.existsSync(path)) return null; - const entries = parsed?.plugins ?? null; - if (!entries || entries.length === 0) return null; + const raw = fs.readFileSync(path, "utf8"); + const errors: jsonc.ParseError[] = []; + const parsed = jsonc.parse(raw, errors); + if (errors.length > 0) { + const msg = errors + .map((e) => `offset ${e.offset}: ${jsonc.printParseErrorCode(e.error)}`) + .join("; "); + return yield* new LoadPluginsError({ + message: `[load-plugins] failed to parse ${path}: ${msg}`, + }); + } - // jiti is created once per call; `moduleCache: false` ensures a - // restart picks up freshly-installed packages without process restart - // (relevant when the dev server kicks a reload after `executor plugin - // install`). 
- const { createJiti } = await import("jiti"); - const jiti = createJiti(pathToFileURL(path).href, { - interopDefault: true, - moduleCache: false, - }); + const config = yield* Schema.decodeUnknownEffect(ExecutorFileConfig)(parsed).pipe( + Effect.mapError( + (error) => + new LoadPluginsError({ + message: `[load-plugins] failed to decode ${path}: ${error.issue.toString()}`, + cause: error, + }), + ), + ); - const fromDir = dirname(path); - // require.resolve is anchored to the jsonc's directory so plugin - // packages resolve from the host app's `node_modules` regardless of - // CWD. - const require = createRequire( - isAbsolute(path) ? path : resolvePath(fromDir, "_anchor.js"), - ); + const entries = config.plugins ?? null; + if (!entries || entries.length === 0) return null; - const loaded: AnyPlugin[] = []; - for (const entry of entries) { - const serverEntry = `${entry.package}/server`; - let resolved: string; - try { - resolved = require.resolve(serverEntry); - } catch { - throw new Error( - `[load-plugins] cannot resolve "${serverEntry}" from ${fromDir}. ` + - `Is "${entry.package}" installed and does it export "./server"?`, - ); - } - const mod = (await jiti.import(resolved)) as - | { default?: LooseConfiguredPlugin } - | LooseConfiguredPlugin; - const factory = ( - typeof mod === "function" ? mod : (mod.default ?? null) - ) as LooseConfiguredPlugin | null; - if (!factory || typeof factory !== "function") { - throw new Error( - `[load-plugins] "${serverEntry}" did not export a default ` + - `definePlugin(...) factory.`, - ); + // jiti is created once per call; `moduleCache: false` ensures a + // restart picks up freshly-installed packages without process restart + // (relevant when the dev server kicks a reload after `executor plugin + // install`). 
+ const { createJiti } = yield* Effect.tryPromise({ + try: () => import("jiti"), + catch: (cause) => + new LoadPluginsError({ + message: `[load-plugins] failed to import jiti.`, + cause, + }), + }); + const jiti = createJiti(pathToFileURL(path).href, { + interopDefault: true, + moduleCache: false, + }); + + const fromDir = dirname(path); + // require.resolve is anchored to the jsonc's directory so plugin + // packages resolve from the host app's `node_modules` regardless of + // CWD. + const require = createRequire( + isAbsolute(path) ? path : resolvePath(fromDir, "_anchor.js"), + ); + + const loaded: AnyPlugin[] = []; + for (const entry of entries) { + const serverEntry = `${entry.package}/server`; + const resolved = yield* Effect.try({ + try: () => require.resolve(serverEntry), + catch: (cause) => + new LoadPluginsError({ + message: + `[load-plugins] cannot resolve "${serverEntry}" from ${fromDir}. ` + + `Is "${entry.package}" installed and does it export "./server"?`, + cause, + }), + }); + const mod = (yield* Effect.tryPromise({ + try: () => jiti.import(resolved), + catch: (cause) => + new LoadPluginsError({ + message: `[load-plugins] failed to import "${serverEntry}" from ${resolved}.`, + cause, + }), + })) as + | { default?: LooseConfiguredPlugin } + | LooseConfiguredPlugin; + const factory = ( + typeof mod === "function" ? mod : (mod.default ?? null) + ) as LooseConfiguredPlugin | null; + if (!factory || typeof factory !== "function") { + return yield* new LoadPluginsError({ + message: + `[load-plugins] "${serverEntry}" did not export a default ` + + `definePlugin(...) factory.`, + }); + } + const merged = { ...(deps ?? {}), ...(entry.options ?? {}) }; + loaded.push(factory(merged)); } - const merged = { ...(deps ?? {}), ...(entry.options ?? 
{}) }; - loaded.push(factory(merged)); - } - return loaded; -}; + return loaded; + }); From 58fbda2b30ed1ec89105b8b438ea59687fe225f6 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:32:38 -0700 Subject: [PATCH 036/108] Mark core boundary escape hatches --- packages/core/execution/src/promise.ts | 2 ++ packages/core/sdk/src/client.ts | 1 + 2 files changed, 3 insertions(+) diff --git a/packages/core/execution/src/promise.ts b/packages/core/execution/src/promise.ts index 8e316721f..b17e42aef 100644 --- a/packages/core/execution/src/promise.ts +++ b/packages/core/execution/src/promise.ts @@ -58,6 +58,7 @@ export type ExecutionEngine = { * Wrap a Promise-style executor into the Effect shape the engine consumes. */ const fromPromise =
(try_: () => Promise): Effect.Effect => + // oxlint-disable-next-line executor/no-effect-escape-hatch -- boundary: Promise executor facade has already erased the SDK typed error channel Effect.tryPromise({ try: try_, catch: (cause) => cause }).pipe(Effect.orDie); type EffectInvokeOptions = Parameters[2]; @@ -144,6 +145,7 @@ export const toPromiseExecutionEngine = ( Effect.runPromise( engine.execute(code, { onElicitation: (ctx) => + // oxlint-disable-next-line executor/no-effect-escape-hatch -- boundary: host-provided Promise elicitation callback is outside the Effect error model Effect.tryPromise(() => options.onElicitation(ctx)).pipe(Effect.orDie), }), ), diff --git a/packages/core/sdk/src/client.ts b/packages/core/sdk/src/client.ts index 666435cb9..8217e4ecc 100644 --- a/packages/core/sdk/src/client.ts +++ b/packages/core/sdk/src/client.ts @@ -285,6 +285,7 @@ export function ExecutorPluginsProvider( const usePluginsCtx = (hookName: string): ExecutorPluginsContextValue => { const ctx = useContext(ExecutorPluginsContext); if (!ctx) { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: React hook invariant throw new Error( `${hookName} must be called inside an .`, ); From 85c78ab3538d5c0345ad77c19ceb0f9aabe1bfc5 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:32:43 -0700 Subject: [PATCH 037/108] Remove redundant OpenAPI credential casts --- .../plugins/openapi/src/sdk/credential-status.test.ts | 8 ++++---- packages/plugins/openapi/src/sdk/credential-status.ts | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/plugins/openapi/src/sdk/credential-status.test.ts b/packages/plugins/openapi/src/sdk/credential-status.test.ts index 5ecc40480..52c15db11 100644 --- a/packages/plugins/openapi/src/sdk/credential-status.test.ts +++ b/packages/plugins/openapi/src/sdk/credential-status.test.ts @@ -11,8 +11,8 @@ import { const 
userScope = ScopeId.make("user"); const orgScope = ScopeId.make("org"); const scopeRanks = new Map([ - [userScope as string, 0], - [orgScope as string, 1], + [userScope, 0], + [orgScope, 1], ]); const source: SourceForCredentialStatus = { @@ -41,11 +41,11 @@ const bindings = ( slot === "oauth2:oauth2:connection" ? { kind: "connection", - connectionId: ConnectionId.make(`${scopeId as string}-connection`), + connectionId: ConnectionId.make(`${scopeId}-connection`), } : { kind: "secret", - secretId: SecretId.make(`${scopeId as string}-${slot}`), + secretId: SecretId.make(`${scopeId}-${slot}`), }, })); diff --git a/packages/plugins/openapi/src/sdk/credential-status.ts b/packages/plugins/openapi/src/sdk/credential-status.ts index 06c20b8fc..007e8f43a 100644 --- a/packages/plugins/openapi/src/sdk/credential-status.ts +++ b/packages/plugins/openapi/src/sdk/credential-status.ts @@ -23,7 +23,7 @@ export type SourceForCredentialStatus = { }; const scopeRank = (ranks: ReadonlyMap, scopeId: ScopeId): number => - ranks.get(scopeId as string) ?? Number.MAX_SAFE_INTEGER; + ranks.get(scopeId) ?? Number.MAX_SAFE_INTEGER; export const effectiveBindingForScope = ( rows: readonly BindingRowForCredentialStatus[], @@ -60,7 +60,7 @@ const hasConnectionBinding = ( const binding = effectiveBindingForScope(rows, slot, targetScope, ranks); if (binding?.value.kind !== "connection") return false; return liveConnectionIds - ? liveConnectionIds.has(binding.value.connectionId as string) + ? liveConnectionIds.has(binding.value.connectionId) : true; }; @@ -83,7 +83,7 @@ export function missingCredentialLabels( const liveConnectionIds = rawLiveConnectionIds ? rawLiveConnectionIds instanceof Set ? rawLiveConnectionIds - : new Set([...rawLiveConnectionIds].map((id) => id as string)) + : new Set(rawLiveConnectionIds) : undefined; for (const [headerName, value] of Object.entries(source.config.headers ?? 
{})) { From 4194ac66302ac2de5961d3883bbdd1454ec346b2 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:32:49 -0700 Subject: [PATCH 038/108] Use typed OpenAPI OAuth test failures --- .../src/sdk/client-credentials-oauth.test.ts | 24 ++++++++-- .../openapi/src/sdk/oauth-refresh.test.ts | 46 +++++++++++++------ 2 files changed, 51 insertions(+), 19 deletions(-) diff --git a/packages/plugins/openapi/src/sdk/client-credentials-oauth.test.ts b/packages/plugins/openapi/src/sdk/client-credentials-oauth.test.ts index 5a926cf8e..52e780d3d 100644 --- a/packages/plugins/openapi/src/sdk/client-credentials-oauth.test.ts +++ b/packages/plugins/openapi/src/sdk/client-credentials-oauth.test.ts @@ -31,6 +31,13 @@ import { OAuth2Auth } from "./types"; const autoApprove: InvokeOptions = { onElicitation: "accept-all" }; +class OpenApiClientCredentialsTestSetupError extends Schema.TaggedErrorClass()( + "OpenApiClientCredentialsTestSetupError", + { + message: Schema.String, + }, +) {} + // --------------------------------------------------------------------------- // Test API — single endpoint that echoes the Authorization header. 
// --------------------------------------------------------------------------- @@ -147,7 +154,11 @@ layer(TestLayer)("OpenAPI client_credentials OAuth", (it) => { const clientLayer = FetchHttpClient.layer; const server = yield* HttpServer.HttpServer; const address = server.address; - if (address._tag !== "TcpAddress") return yield* Effect.die("test server must bind to TCP"); + if (!("port" in address)) { + return yield* new OpenApiClientCredentialsTestSetupError({ + message: "Test server must bind to TCP", + }); + } const baseUrl = `http://127.0.0.1:${address.port}`; const plugins = [ openApiPlugin({ httpClientLayer: clientLayer }), @@ -231,12 +242,15 @@ layer(TestLayer)("OpenAPI client_credentials OAuth", (it) => { }, }); - if (!started.completedConnection) { - throw new Error("expected completed clientCredentials connection"); + const completedConnection = started.completedConnection; + if (!completedConnection) { + return yield* new OpenApiClientCredentialsTestSetupError({ + message: "Expected completed clientCredentials connection", + }); } const auth = new OAuth2Auth({ kind: "oauth2", - connectionId: started.completedConnection.connectionId, + connectionId: completedConnection.connectionId, securitySchemeName: "oauth2", flow: "clientCredentials", tokenUrl: "https://token.example.com/token", @@ -257,7 +271,7 @@ layer(TestLayer)("OpenAPI client_credentials OAuth", (it) => { // Add the source with OAuth2Auth pointing at the completed connection. 
yield* userExec.openapi.addSpec({ spec: specJson, - scope: userScope.id as string, + scope: userScope.id, namespace: "petstore", baseUrl, oauth2: auth, diff --git a/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts b/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts index 1b7011929..7f9a4f697 100644 --- a/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts +++ b/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts @@ -20,7 +20,6 @@ import * as NodeHttpServer from "@effect/platform-node/NodeHttpServer"; import { ConnectionId, - ConnectionReauthRequiredError, CreateConnectionInput, ScopeId, SecretId, @@ -41,6 +40,13 @@ import { OAuth2Auth } from "./types"; const autoApprove: InvokeOptions = { onElicitation: "accept-all" }; +class OpenApiOauthTestSetupError extends Schema.TaggedErrorClass()( + "OpenApiOauthTestSetupError", + { + message: Schema.String, + }, +) {} + // --------------------------------------------------------------------------- // Test API — one endpoint that echoes the Authorization header so we can // prove which access token was in flight at invoke time. 
@@ -141,11 +147,15 @@ const makeExecutor = () => storage: () => ({}), secretProviders: [memoryProvider], })); - const clientLayer = FetchHttpClient.layer; - const server = yield* HttpServer.HttpServer; - const address = server.address; - if (address._tag !== "TcpAddress") return yield* Effect.die("test server must bind to TCP"); - const baseUrl = `http://127.0.0.1:${address.port}`; + const clientLayer = FetchHttpClient.layer; + const server = yield* HttpServer.HttpServer; + const address = server.address; + if (!("port" in address)) { + return yield* new OpenApiOauthTestSetupError({ + message: "Test server must bind to TCP", + }); + } + const baseUrl = `http://127.0.0.1:${address.port}`; const plugins = [ openApiPlugin({ httpClientLayer: clientLayer }), memorySecretsPlugin(), @@ -277,7 +287,7 @@ layer(TestLayer)("OpenAPI oauth refresh", (it) => { yield* executor.openapi.addSpec({ spec: specJson, - scope: String(scopeId), + scope: scopeId, namespace: "petstore", baseUrl, oauth2: auth, @@ -334,7 +344,7 @@ layer(TestLayer)("OpenAPI oauth refresh", (it) => { yield* executor.openapi.addSpec({ spec: specJson, - scope: String(scopeId), + scope: scopeId, namespace: "petstore", baseUrl, oauth2: auth, @@ -392,7 +402,7 @@ layer(TestLayer)("OpenAPI oauth refresh", (it) => { yield* executor.openapi.addSpec({ spec: specJson, - scope: String(scopeId), + scope: scopeId, namespace: "petstore", baseUrl, oauth2: auth, @@ -402,15 +412,23 @@ layer(TestLayer)("OpenAPI oauth refresh", (it) => { // generic Error (see openapi invokeTool), so we assert against // the `accessToken` call directly too — that's the surface // the UI bridges use to trigger re-auth. 
- const flipped = yield* executor.connections + const reauthRequired = yield* executor.connections .accessToken("conn-refresh-dead") - .pipe(Effect.flip); - expect(flipped._tag).toBe("ConnectionReauthRequiredError"); - expect((flipped as ConnectionReauthRequiredError).provider).toBe( + .pipe( + Effect.flatMap(() => + Effect.fail( + new OpenApiOauthTestSetupError({ + message: "Expected refresh to require re-auth", + }), + ), + ), + Effect.catchTag("ConnectionReauthRequiredError", Effect.succeed), + ); + expect(reauthRequired.provider).toBe( "openapi:oauth2", ); expect( - (flipped as ConnectionReauthRequiredError).message, + reauthRequired.message, ).toMatch(/invalid_grant|revoked/i); }), ); From 2445ee5dca4a4110ca84ff0b7db417e1440b6523 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:32:53 -0700 Subject: [PATCH 039/108] Use promiseExit in 1Password settings --- .../src/react/OnePasswordSettings.tsx | 49 +++++++++---------- 1 file changed, 24 insertions(+), 25 deletions(-) diff --git a/packages/plugins/onepassword/src/react/OnePasswordSettings.tsx b/packages/plugins/onepassword/src/react/OnePasswordSettings.tsx index 8ce73852d..bb81ad7d7 100644 --- a/packages/plugins/onepassword/src/react/OnePasswordSettings.tsx +++ b/packages/plugins/onepassword/src/react/OnePasswordSettings.tsx @@ -1,5 +1,6 @@ import { useState } from "react"; import { useAtomSet, useAtomValue } from "@effect/atom-react"; +import * as Exit from "effect/Exit"; import * as AsyncResult from "effect/unstable/reactivity/AsyncResult"; import { ReactivityKey } from "@executor-js/react/api/reactivity-keys"; import { useScope } from "@executor-js/react/api/scope-context"; @@ -62,15 +63,15 @@ function VaultPicker(props: { isLoading: true, error: null, }), - onError: (error) => ({ + onError: () => ({ vaults: [] as { id: string; name: string }[], isLoading: false, - error: error.message, + error: "Failed to list vaults", }), - onDefect: 
(defect) => ({ + onDefect: () => ({ vaults: [] as { id: string; name: string }[], isLoading: false, - error: defect instanceof Error ? defect.message : "Failed to list vaults", + error: "Failed to list vaults", }), onSuccess: ({ value }) => { const v = value.vaults; @@ -142,7 +143,7 @@ function ConfigDialog(props: { const [error, setError] = useState(null); const scopeId = useScope(); - const doConfigure = useAtomSet(configureOnePassword, { mode: "promise" }); + const doConfigure = useAtomSet(configureOnePassword, { mode: "promiseExit" }); const reset = () => { if (!isEdit) { @@ -159,23 +160,25 @@ function ConfigDialog(props: { if (!accountName.trim() || !vaultId.trim()) return; setSaving(true); setError(null); - try { - const auth = - authKind === "desktop-app" - ? { kind: "desktop-app" as const, accountName: accountName.trim() } - : { kind: "service-account" as const, tokenSecretId: accountName.trim() }; - await doConfigure({ - params: { scopeId }, - payload: { auth, vaultId: vaultId.trim(), name: vaultName.trim() || "1Password" }, - reactivityKeys: [ReactivityKey.secrets], - }); - props.onOpenChange(false); - reset(); - } catch (e) { - setError(e instanceof Error ? e.message : "Failed to save configuration"); + const auth = + authKind === "desktop-app" + ? 
{ kind: "desktop-app" as const, accountName: accountName.trim() } + : { kind: "service-account" as const, tokenSecretId: accountName.trim() }; + + const exit = await doConfigure({ + params: { scopeId }, + payload: { auth, vaultId: vaultId.trim(), name: vaultName.trim() || "1Password" }, + reactivityKeys: [ReactivityKey.secrets], + }); + if (Exit.isFailure(exit)) { + setError("Failed to save configuration"); setSaving(false); + return; } + + props.onOpenChange(false); + reset(); }; return ( @@ -298,14 +301,10 @@ export default function OnePasswordSettings() { const [configOpen, setConfigOpen] = useState(false); const scopeId = useScope(); const configResult = useAtomValue(onepasswordConfigAtom(scopeId)); - const doRemove = useAtomSet(removeOnePasswordConfig, { mode: "promise" }); + const doRemove = useAtomSet(removeOnePasswordConfig, { mode: "promiseExit" }); const handleRemove = async () => { - try { - await doRemove({ params: { scopeId }, reactivityKeys: [ReactivityKey.secrets] }); - } catch { - /* TODO: toast */ - } + await doRemove({ params: { scopeId }, reactivityKeys: [ReactivityKey.secrets] }); }; const config: OnePasswordConfig | null = AsyncResult.match( From 8fe085881e275426ba6f3d3a4172da9070374b7e Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:32:57 -0700 Subject: [PATCH 040/108] Clean daemon state test boundaries --- tests/daemon-state.test.ts | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/tests/daemon-state.test.ts b/tests/daemon-state.test.ts index 8bba958b2..3698604f5 100644 --- a/tests/daemon-state.test.ts +++ b/tests/daemon-state.test.ts @@ -24,7 +24,7 @@ const fileSystemError = (method: string, cause: unknown) => _tag: "Unknown", module: "FileSystem", method, - description: cause instanceof Error ? 
cause.message : String(cause), + description: "FileSystem operation failed", cause, }); @@ -54,22 +54,24 @@ const fileSystemLayer = FileSystem.layerNoop({ const daemonStateLayer = Layer.merge(fileSystemLayer, Path.layer); const withDaemonDataDir = (effect: Effect.Effect) => - Effect.gen(function* () { - const prev = process.env.EXECUTOR_DATA_DIR; - const dir = mkdtempSync(join(tmpdir(), "executor-daemon-state-test-")); - process.env.EXECUTOR_DATA_DIR = dir; - - try { - return yield* effect; - } finally { - if (prev === undefined) { - delete process.env.EXECUTOR_DATA_DIR; - } else { - process.env.EXECUTOR_DATA_DIR = prev; - } - rmSync(dir, { recursive: true, force: true }); - } - }).pipe(Effect.provide(daemonStateLayer)); + Effect.acquireUseRelease( + Effect.sync(() => { + const prev = process.env.EXECUTOR_DATA_DIR; + const dir = mkdtempSync(join(tmpdir(), "executor-daemon-state-test-")); + process.env.EXECUTOR_DATA_DIR = dir; + return { dir, prev }; + }), + () => effect, + ({ dir, prev }) => + Effect.sync(() => { + if (prev === undefined) { + delete process.env.EXECUTOR_DATA_DIR; + } else { + process.env.EXECUTOR_DATA_DIR = prev; + } + rmSync(dir, { recursive: true, force: true }); + }), + ).pipe(Effect.provide(daemonStateLayer)); describe("daemon state", () => { it("normalizes local host aliases", () => { From f157b8040421877e34ba503cad214bed787dd322 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:33:10 -0700 Subject: [PATCH 041/108] Tighten MCP host test boundaries --- packages/hosts/mcp/src/server.test.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/hosts/mcp/src/server.test.ts b/packages/hosts/mcp/src/server.test.ts index 977c57bb3..14b3b7f6b 100644 --- a/packages/hosts/mcp/src/server.test.ts +++ b/packages/hosts/mcp/src/server.test.ts @@ -44,6 +44,7 @@ const withClient = async ( const client = new Client({ name: "test-client", version: "1.0.0" }, { 
capabilities }); await mcpServer.connect(serverTransport); await client.connect(clientTransport); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: test helper must close MCP transports after async client assertions try { await fn(client); } finally { @@ -136,6 +137,7 @@ describe("MCP host server — client with elicitation", () => { it("execute tool hides defect details in MCP error results", async () => { const engine = makeStubEngine({ + // oxlint-disable-next-line executor/no-effect-escape-hatch, executor/no-error-constructor -- boundary: test injects a defect to verify MCP error redaction execute: () => Effect.die(new Error("secret internal detail")), }); @@ -318,7 +320,8 @@ describe("MCP host server — client with form-only elicitation", () => { await withClient(engine, FORM_ONLY_CAPS, async (client) => { client.setRequestHandler(ElicitRequestSchema, async (request) => { - receivedMessage = (request.params as Record).message as string; + receivedMessage = + typeof request.params.message === "string" ? 
request.params.message : undefined; return { action: "accept" as const, content: {} }; }); @@ -527,6 +530,7 @@ describe("MCP host server — elicitation error handling", () => { await withClient(engine, ELICITATION_CAPS, async (client) => { client.setRequestHandler(ElicitRequestSchema, async () => { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: MCP client request handler rejects to exercise server fallback throw new Error("client cannot handle this"); }); From 77e62f3e30ffe472f681c53ea6db9e7e97057bae Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:38:04 -0700 Subject: [PATCH 042/108] Clean OpenAPI invoke error handling --- packages/plugins/openapi/src/sdk/invoke.ts | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/plugins/openapi/src/sdk/invoke.ts b/packages/plugins/openapi/src/sdk/invoke.ts index 7c0a5df19..bba5cd701 100644 --- a/packages/plugins/openapi/src/sdk/invoke.ts +++ b/packages/plugins/openapi/src/sdk/invoke.ts @@ -117,13 +117,13 @@ export const resolveHeaders = ( typeof value === "string" ? Effect.succeed({ name, value }) : secrets.get(value.secretId).pipe( - Effect.mapError((err) => - "_tag" in err && err._tag === "SecretOwnedByConnectionError" - ? 
new OpenApiInvocationError({ - message: `Failed to resolve secret "${value.secretId}" for header "${name}"`, - statusCode: Option.none(), - }) - : err, + Effect.catchTag("SecretOwnedByConnectionError", () => + Effect.fail( + new OpenApiInvocationError({ + message: `Failed to resolve secret "${value.secretId}" for header "${name}"`, + statusCode: Option.none(), + }), + ), ), Effect.flatMap((secret) => secret === null @@ -585,7 +585,7 @@ export const invoke = Effect.fn("OpenApi.invoke")(function* ( Effect.mapError( (err) => new OpenApiInvocationError({ - message: `HTTP request failed: ${err.message}`, + message: "HTTP request failed", statusCode: Option.none(), cause: err, }), @@ -600,9 +600,9 @@ export const invoke = Effect.fn("OpenApi.invoke")(function* ( const contentType = response.headers["content-type"] ?? null; const mapBodyError = Effect.mapError( - (err: { readonly message?: string }) => + (err: unknown) => new OpenApiInvocationError({ - message: `Failed to read response body: ${err.message ?? 
String(err)}`, + message: "Failed to read response body", statusCode: Option.some(status), cause: err, }), From 868d4d428d2a0ade20d4f47cd6eaef6b9cacb2cd Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:38:09 -0700 Subject: [PATCH 043/108] Clean MCP probe shape boundaries --- .../plugins/mcp/src/sdk/probe-shape.test.ts | 20 ++- packages/plugins/mcp/src/sdk/probe-shape.ts | 148 ++++++++++-------- 2 files changed, 100 insertions(+), 68 deletions(-) diff --git a/packages/plugins/mcp/src/sdk/probe-shape.test.ts b/packages/plugins/mcp/src/sdk/probe-shape.test.ts index 629846913..93300efa6 100644 --- a/packages/plugins/mcp/src/sdk/probe-shape.test.ts +++ b/packages/plugins/mcp/src/sdk/probe-shape.test.ts @@ -8,16 +8,23 @@ type FetchStub = ( init?: Parameters[1], ) => Promise; +interface FetchFailure { + readonly failure: unknown; +} + const asFetch = (stub: FetchStub): typeof fetch => stub as typeof fetch; /** * Build a `fetch`-compatible stub that returns the given `Response` (or - * throws the given error) regardless of input. `fetch`'s exact signature + * rejects with the given failure) regardless of input. `fetch`'s exact signature * is a union; a narrow closure is enough for the probe. 
*/ -const stubFetch = (result: Response | Error): typeof fetch => +const stubFetch = (result: Response | FetchFailure): typeof fetch => asFetch(async (_input, _init) => { - if (result instanceof Error) throw result; + if ("failure" in result) { + // oxlint-disable-next-line promise/prefer-await-to-then, executor/no-promise-reject -- boundary: fetch-compatible test stub must reject like fetch + return Promise.reject(result.failure); + } return result; }); @@ -25,7 +32,10 @@ const stubFetchSequence = (results: readonly Response[]): typeof fetch => { let index = 0; return asFetch(async (_input, _init) => { const result = results[index++]; - if (!result) throw new Error("unexpected fetch"); + if (!result) { + // oxlint-disable-next-line promise/prefer-await-to-then, executor/no-promise-reject -- boundary: fetch-compatible test stub must reject like fetch + return Promise.reject({ message: "unexpected fetch" }); + } return result; }); }; @@ -140,7 +150,7 @@ describe("probeMcpEndpointShape", () => { it.effect("reports transport failure as unreachable", () => Effect.gen(function* () { const result = yield* probeMcpEndpointShape("https://missing/", { - fetch: stubFetch(new TypeError("fetch failed")), + fetch: stubFetch({ failure: { message: "fetch failed" } }), }); expect(result.kind).toBe("unreachable"); }), diff --git a/packages/plugins/mcp/src/sdk/probe-shape.ts b/packages/plugins/mcp/src/sdk/probe-shape.ts index 59d98049e..d046a555f 100644 --- a/packages/plugins/mcp/src/sdk/probe-shape.ts +++ b/packages/plugins/mcp/src/sdk/probe-shape.ts @@ -30,7 +30,7 @@ // round-trip, no DCR — every non-MCP endpoint exits here. // --------------------------------------------------------------------------- -import { Effect } from "effect"; +import { Data, Effect, Option, Schema } from "effect"; /** MCP initialize request body used as the shape probe. 
Any real MCP * server either answers it (unauth-OK server) or returns the spec- @@ -62,6 +62,27 @@ const readHeader = (headers: Headers, name: string): string | null => { return null; }; +class ProbeTransportError extends Data.TaggedError("ProbeTransportError")<{ + readonly reason: string; + readonly cause: unknown; +}> {} + +const ErrorMessageShape = Schema.Struct({ message: Schema.String }); +const decodeErrorMessageShape = Schema.decodeUnknownOption(ErrorMessageShape); + +const reasonFromBoundaryCause = (cause: unknown): string => { + const messageShape = decodeErrorMessageShape(cause); + if (Option.isSome(messageShape)) return messageShape.value.message; + if (typeof cause === "string") return cause; + if (typeof cause === "number" || typeof cause === "boolean" || typeof cause === "bigint") { + return `${cause}`; + } + if (typeof cause === "symbol") return cause.description ?? "symbol"; + if (cause === null) return "null"; + if (typeof cause === "undefined") return "undefined"; + return "fetch failed"; +}; + export type McpShapeProbeResult = /** Server answered initialize successfully — either a 2xx with a * JSON-RPC payload, or a 401 + WWW-Authenticate: Bearer (RFC 6750 @@ -99,85 +120,86 @@ export const probeMcpEndpointShape = ( Effect.gen(function* () { const fetchImpl = options.fetch ?? globalThis.fetch; const timeoutMs = options.timeoutMs ?? 
8_000; + const controller = new AbortController(); + const timer = setTimeout(() => controller.abort(), timeoutMs); const outcome = yield* Effect.tryPromise({ try: async (): Promise => { - const controller = new AbortController(); - const timer = setTimeout(() => controller.abort(), timeoutMs); - try { - const classify = (response: Response, method: "GET" | "POST") => { - if (response.status === 401) { - const wwwAuth = readHeader(response.headers, "www-authenticate"); - if (wwwAuth && /^\s*bearer\b/i.test(wwwAuth)) { - return { kind: "mcp", requiresAuth: true } as const; - } - return { - kind: "not-mcp", - reason: - "401 without Bearer WWW-Authenticate — not an MCP auth challenge", - } as const; + const classify = (response: Response, method: "GET" | "POST") => { + if (response.status === 401) { + const wwwAuth = readHeader(response.headers, "www-authenticate"); + if (wwwAuth && /^\s*bearer\b/i.test(wwwAuth)) { + return { kind: "mcp", requiresAuth: true } as const; } + return { + kind: "not-mcp", + reason: + "401 without Bearer WWW-Authenticate — not an MCP auth challenge", + } as const; + } - if (response.status >= 200 && response.status < 300) { - if (method === "GET") { - const contentType = readHeader(response.headers, "content-type") ?? ""; - if (!/^\s*text\/event-stream\b/i.test(contentType)) { - return { - kind: "not-mcp", - reason: "GET response is not an SSE stream", - } as const; - } + if (response.status >= 200 && response.status < 300) { + if (method === "GET") { + const contentType = readHeader(response.headers, "content-type") ?? ""; + if (!/^\s*text\/event-stream\b/i.test(contentType)) { + return { + kind: "not-mcp", + reason: "GET response is not an SSE stream", + } as const; } - return { kind: "mcp", requiresAuth: false } as const; } + return { kind: "mcp", requiresAuth: false } as const; + } - return null; - }; + return null; + }; - const url = new URL(endpoint); - for (const [key, value] of Object.entries(options.queryParams ?? 
{})) { - url.searchParams.set(key, value); - } - const authHeaders = options.headers ?? {}; - - const postResponse = await fetchImpl(url, { - method: "POST", - headers: { - ...authHeaders, - "content-type": "application/json", - accept: "application/json, text/event-stream", - }, - body: INITIALIZE_BODY, - signal: controller.signal, - }); + const url = new URL(endpoint); + for (const [key, value] of Object.entries(options.queryParams ?? {})) { + url.searchParams.set(key, value); + } + const authHeaders = options.headers ?? {}; - const postResult = classify(postResponse, "POST"); - if (postResult) return postResult; - - if ([404, 405, 406, 415].includes(postResponse.status)) { - const getResponse = await fetchImpl(url, { - method: "GET", - headers: { ...authHeaders, accept: "text/event-stream" }, - signal: controller.signal, - }); - const getResult = classify(getResponse, "GET"); - if (getResult) return getResult; - } + const postResponse = await fetchImpl(url, { + method: "POST", + headers: { + ...authHeaders, + "content-type": "application/json", + accept: "application/json, text/event-stream", + }, + body: INITIALIZE_BODY, + signal: controller.signal, + }); + + const postResult = classify(postResponse, "POST"); + if (postResult) return postResult; - return { - kind: "not-mcp", - reason: `unexpected status ${postResponse.status} for initialize`, - }; - } finally { - clearTimeout(timer); + if ([404, 405, 406, 415].includes(postResponse.status)) { + const getResponse = await fetchImpl(url, { + method: "GET", + headers: { ...authHeaders, accept: "text/event-stream" }, + signal: controller.signal, + }); + const getResult = classify(getResponse, "GET"); + if (getResult) return getResult; } + + return { + kind: "not-mcp", + reason: `unexpected status ${postResponse.status} for initialize`, + }; }, - catch: (cause) => cause, + catch: (cause) => + new ProbeTransportError({ + reason: reasonFromBoundaryCause(cause), + cause, + }), }).pipe( + Effect.ensuring(Effect.sync(() 
=> clearTimeout(timer))), Effect.catch((cause) => Effect.succeed({ kind: "unreachable", - reason: cause instanceof Error ? cause.message : String(cause), + reason: cause.reason, }), ), ); From d4dfc9d8ee70f8a5e17f27a854a92d50c75c2f49 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:38:15 -0700 Subject: [PATCH 044/108] Clean local server boundary cleanup --- apps/local/src/server/config-sync.ts | 8 ++--- apps/local/src/server/executor.ts | 49 +++++++++++++++++++++++++--- 2 files changed, 48 insertions(+), 9 deletions(-) diff --git a/apps/local/src/server/config-sync.ts b/apps/local/src/server/config-sync.ts index e83bee43e..d62daad39 100644 --- a/apps/local/src/server/config-sync.ts +++ b/apps/local/src/server/config-sync.ts @@ -5,7 +5,7 @@ // plugin in executor.ts. // --------------------------------------------------------------------------- -import { Effect } from "effect"; +import { Cause, Effect } from "effect"; import { join } from "node:path"; import * as fs from "node:fs"; import * as jsonc from "jsonc-parser"; @@ -88,7 +88,7 @@ const addSourceFromConfig = ( // aware of per-user tenancy. Pin replayed sources to the outermost // scope so a future `[user, org]` stack still sees them via org // fall-through. - const scope = executor.scopes.at(-1)!.id as string; + const scope = executor.scopes.at(-1)!.id; switch (source.kind) { case "openapi": return executor.openapi.addSpec({ @@ -157,11 +157,11 @@ export const syncFromConfig = ( (source) => addSourceFromConfig(executor, source).pipe( Effect.map(() => true as const), - Effect.catchCause((e) => { + Effect.catchCause((cause) => { const ns = "namespace" in source ? source.namespace : ("name" in source ? source.name : "unknown"); console.warn( `[config-sync] Failed to load source "${ns}":`, - e instanceof Error ? 
e.message : String(e), + Cause.pretty(cause), ); return Effect.succeed(false as const); }), diff --git a/apps/local/src/server/executor.ts b/apps/local/src/server/executor.ts index db8ee0658..2eef7f18e 100644 --- a/apps/local/src/server/executor.ts +++ b/apps/local/src/server/executor.ts @@ -1,7 +1,7 @@ import { Database } from "bun:sqlite"; import { drizzle } from "drizzle-orm/bun-sqlite"; import { migrate } from "drizzle-orm/bun-sqlite/migrator"; -import { Context, Effect, Layer, ManagedRuntime } from "effect"; +import { Context, Data, Effect, Layer, ManagedRuntime } from "effect"; import { createHash } from "node:crypto"; import * as fs from "node:fs"; import { homedir, tmpdir } from "node:os"; @@ -107,6 +107,41 @@ class LocalExecutorTag extends Context.Service {} + +const ignorePromiseFailure = ( + operation: LocalExecutorDisposeError["operation"], + try_: () => Promise, +) => + Effect.runPromise( + Effect.ignore( + Effect.tryPromise({ + try: try_, + catch: (cause) => new LocalExecutorDisposeError({ operation, cause }), + }), + ), + ); + +const handleOrNull = (promise: ReturnType) => + Effect.runPromise( + Effect.tryPromise({ + try: () => promise, + catch: (cause) => + new LocalExecutorDisposeError({ operation: "createHandle", cause }), + }).pipe( + Effect.catch(() => + Effect.succeed> | null>( + null, + ), + ), + ), + ); + const createLocalExecutorLayer = () => { const { path: dbPath, legacySecrets } = resolveDbPath(); @@ -202,8 +237,8 @@ export const createExecutorHandle = async () => { executor: bundle.executor, plugins: bundle.plugins, dispose: async () => { - await Effect.runPromise(bundle.executor.close()).catch(() => undefined); - await runtime.dispose().catch(() => undefined); + await Effect.runPromise(Effect.ignore(bundle.executor.close())); + await ignorePromiseFailure("disposeRuntime", () => runtime.dispose()); }, }; }; @@ -226,8 +261,12 @@ export const disposeExecutor = async (): Promise => { const currentHandlePromise = sharedHandlePromise; 
sharedHandlePromise = null; - const handle = await currentHandlePromise?.catch(() => null); - await handle?.dispose().catch(() => undefined); + const handle = currentHandlePromise + ? await handleOrNull(currentHandlePromise) + : null; + if (handle) { + await ignorePromiseFailure("disposeExecutor", () => handle.dispose()); + } }; export const reloadExecutor = () => { From 4d96793401cd305a159a86d594a8b45f9e2d24eb Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:38:20 -0700 Subject: [PATCH 045/108] Parse MCP migration test config with Schema --- .../src/server/migrate-mcp-bindings.test.ts | 355 +++++++++--------- 1 file changed, 180 insertions(+), 175 deletions(-) diff --git a/apps/local/src/server/migrate-mcp-bindings.test.ts b/apps/local/src/server/migrate-mcp-bindings.test.ts index cac869fc3..276e16222 100644 --- a/apps/local/src/server/migrate-mcp-bindings.test.ts +++ b/apps/local/src/server/migrate-mcp-bindings.test.ts @@ -4,200 +4,205 @@ // queryParams), runs the migration runner, asserts the auth columns // are populated and the child tables hold the secret-backed entries. 
-import { describe, expect, it } from "@effect/vitest"; +import { afterEach, describe, expect, it } from "@effect/vitest"; import { Database } from "bun:sqlite"; import { mkdtempSync, rmSync } from "node:fs"; import { join } from "node:path"; import { tmpdir } from "node:os"; import { drizzle } from "drizzle-orm/bun-sqlite"; import { migrate } from "drizzle-orm/bun-sqlite/migrator"; +import { Schema } from "effect"; import { PRE_0007_SQL, stampPriorMigrationsApplied } from "./__test-helpers__/pre-0007-schema"; const MIGRATIONS_FOLDER = join(import.meta.dirname, "../../drizzle"); +const ConfigJson = Schema.fromJsonString( + Schema.Struct({ + auth: Schema.optional(Schema.Unknown), + command: Schema.optional(Schema.String), + endpoint: Schema.optional(Schema.String), + transport: Schema.String, + }), +); + +const tempDirs: Array = []; + +const makeDbPath = () => { + const dir = mkdtempSync(join(tmpdir(), "mcp-mig-")); + tempDirs.push(dir); + return join(dir, "test.sqlite"); +}; + describe("0007_normalize_plugin_secret_refs (mcp)", () => { - it("flattens header auth into auth_kind/auth_secret_id columns", () => { - const dir = mkdtempSync(join(tmpdir(), "mcp-mig-")); - const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - db.prepare( - "INSERT INTO mcp_source (scope_id, id, name, config, created_at) VALUES (?, ?, ?, ?, ?)", - ).run( - "default-scope", - "remote-headers", - "Remote Headers", - JSON.stringify({ - transport: "remote", - endpoint: "https://example.com/mcp", - auth: { - kind: "header", - headerName: "X-API-Key", - secretId: "tok-secret", - prefix: "Bearer ", - }, - }), - Date.now(), - ); - - db.close(); - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); - - const after = new Database(dbPath, { readonly: true }); - const row = after - .prepare( - "SELECT auth_kind, auth_header_name, auth_secret_id, 
auth_secret_prefix, config FROM mcp_source WHERE id = ?", - ) - .get("remote-headers") as { - auth_kind: string; - auth_header_name: string; - auth_secret_id: string; - auth_secret_prefix: string; - config: string; - }; - expect(row.auth_kind).toBe("header"); - expect(row.auth_header_name).toBe("X-API-Key"); - expect(row.auth_secret_id).toBe("tok-secret"); - expect(row.auth_secret_prefix).toBe("Bearer "); - // The auth key should be stripped from config json after migration. - const config = JSON.parse(row.config); - expect(config.auth).toBeUndefined(); - expect(config.transport).toBe("remote"); - expect(config.endpoint).toBe("https://example.com/mcp"); - after.close(); - } finally { + afterEach(() => { + for (const dir of tempDirs.splice(0)) { rmSync(dir, { recursive: true, force: true }); } }); + it("flattens header auth into auth_kind/auth_secret_id columns", () => { + const dbPath = makeDbPath(); + const db = new Database(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); + + db.prepare( + "INSERT INTO mcp_source (scope_id, id, name, config, created_at) VALUES (?, ?, ?, ?, ?)", + ).run( + "default-scope", + "remote-headers", + "Remote Headers", + JSON.stringify({ + transport: "remote", + endpoint: "https://example.com/mcp", + auth: { + kind: "header", + headerName: "X-API-Key", + secretId: "tok-secret", + prefix: "Bearer ", + }, + }), + Date.now(), + ); + + db.close(); + const drizzleDb = drizzle(new Database(dbPath)); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + + const after = new Database(dbPath, { readonly: true }); + const row = after + .prepare( + "SELECT auth_kind, auth_header_name, auth_secret_id, auth_secret_prefix, config FROM mcp_source WHERE id = ?", + ) + .get("remote-headers") as { + auth_kind: string; + auth_header_name: string; + auth_secret_id: string; + auth_secret_prefix: string; + config: string; + }; + expect(row.auth_kind).toBe("header"); + expect(row.auth_header_name).toBe("X-API-Key"); + 
expect(row.auth_secret_id).toBe("tok-secret"); + expect(row.auth_secret_prefix).toBe("Bearer "); + // The auth key should be stripped from config json after migration. + const config = Schema.decodeUnknownSync(ConfigJson)(row.config); + expect(config.auth).toBeUndefined(); + expect(config.transport).toBe("remote"); + expect(config.endpoint).toBe("https://example.com/mcp"); + after.close(); + }); + it("flattens oauth2 auth and explodes headers into child rows", () => { - const dir = mkdtempSync(join(tmpdir(), "mcp-mig-")); - const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - db.prepare( - "INSERT INTO mcp_source (scope_id, id, name, config, created_at) VALUES (?, ?, ?, ?, ?)", - ).run( - "default-scope", - "remote-oauth", - "Remote OAuth", - JSON.stringify({ - transport: "remote", - endpoint: "https://oauth.example/mcp", - headers: { - "X-Trace": "static", - "X-Token": { secretId: "extra-tok" }, - }, - queryParams: { - org: { secretId: "org-id-secret" }, - }, - auth: { - kind: "oauth2", - connectionId: "conn-1", - clientIdSecretId: "client-id-sec", - clientSecretSecretId: "client-secret-sec", - }, - }), - Date.now(), - ); - - db.close(); - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); - - const after = new Database(dbPath, { readonly: true }); - const row = after - .prepare( - "SELECT auth_kind, auth_connection_id, auth_client_id_secret_id, auth_client_secret_secret_id FROM mcp_source WHERE id = ?", - ) - .get("remote-oauth") as Record; - expect(row.auth_kind).toBe("oauth2"); - expect(row.auth_connection_id).toBe("conn-1"); - expect(row.auth_client_id_secret_id).toBe("client-id-sec"); - expect(row.auth_client_secret_secret_id).toBe("client-secret-sec"); - - const headers = after - .prepare( - "SELECT name, kind, text_value, secret_id FROM mcp_source_header WHERE source_id = ? 
ORDER BY name", - ) - .all("remote-oauth") as ReadonlyArray>; - expect(headers).toHaveLength(2); - const byName = new Map(headers.map((h) => [h.name, h])); - expect(byName.get("X-Trace")).toMatchObject({ - kind: "text", - text_value: "static", - }); - expect(byName.get("X-Token")).toMatchObject({ - kind: "secret", - secret_id: "extra-tok", - }); - - const params = after - .prepare( - "SELECT name, secret_id FROM mcp_source_query_param WHERE source_id = ?", - ) - .all("remote-oauth") as ReadonlyArray>; - expect(params).toHaveLength(1); - expect(params[0]).toMatchObject({ name: "org", secret_id: "org-id-secret" }); - - after.close(); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + const dbPath = makeDbPath(); + const db = new Database(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); + + db.prepare( + "INSERT INTO mcp_source (scope_id, id, name, config, created_at) VALUES (?, ?, ?, ?, ?)", + ).run( + "default-scope", + "remote-oauth", + "Remote OAuth", + JSON.stringify({ + transport: "remote", + endpoint: "https://oauth.example/mcp", + headers: { + "X-Trace": "static", + "X-Token": { secretId: "extra-tok" }, + }, + queryParams: { + org: { secretId: "org-id-secret" }, + }, + auth: { + kind: "oauth2", + connectionId: "conn-1", + clientIdSecretId: "client-id-sec", + clientSecretSecretId: "client-secret-sec", + }, + }), + Date.now(), + ); + + db.close(); + const drizzleDb = drizzle(new Database(dbPath)); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + + const after = new Database(dbPath, { readonly: true }); + const row = after + .prepare( + "SELECT auth_kind, auth_connection_id, auth_client_id_secret_id, auth_client_secret_secret_id FROM mcp_source WHERE id = ?", + ) + .get("remote-oauth") as Record; + expect(row.auth_kind).toBe("oauth2"); + expect(row.auth_connection_id).toBe("conn-1"); + expect(row.auth_client_id_secret_id).toBe("client-id-sec"); + 
expect(row.auth_client_secret_secret_id).toBe("client-secret-sec"); + + const headers = after + .prepare( + "SELECT name, kind, text_value, secret_id FROM mcp_source_header WHERE source_id = ? ORDER BY name", + ) + .all("remote-oauth") as ReadonlyArray>; + expect(headers).toHaveLength(2); + const byName = new Map(headers.map((h) => [h.name, h])); + expect(byName.get("X-Trace")).toMatchObject({ + kind: "text", + text_value: "static", + }); + expect(byName.get("X-Token")).toMatchObject({ + kind: "secret", + secret_id: "extra-tok", + }); + + const params = after + .prepare("SELECT name, secret_id FROM mcp_source_query_param WHERE source_id = ?") + .all("remote-oauth") as ReadonlyArray>; + expect(params).toHaveLength(1); + expect(params[0]).toMatchObject({ name: "org", secret_id: "org-id-secret" }); + + after.close(); }); it("leaves stdio sources alone (no auth, no headers, no queryParams)", () => { - const dir = mkdtempSync(join(tmpdir(), "mcp-mig-")); - const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - db.prepare( - "INSERT INTO mcp_source (scope_id, id, name, config, created_at) VALUES (?, ?, ?, ?, ?)", - ).run( - "default-scope", - "stdio-only", - "Stdio", - JSON.stringify({ - transport: "stdio", - command: "/usr/bin/server", - args: ["--flag"], - }), - Date.now(), - ); - - db.close(); - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); - - const after = new Database(dbPath, { readonly: true }); - const row = after - .prepare( - "SELECT auth_kind, auth_secret_id, config FROM mcp_source WHERE id = ?", - ) - .get("stdio-only") as { - auth_kind: string; - auth_secret_id: string | null; - config: string; - }; - expect(row.auth_kind).toBe("none"); - expect(row.auth_secret_id).toBeNull(); - const config = JSON.parse(row.config); - expect(config.transport).toBe("stdio"); - 
expect(config.command).toBe("/usr/bin/server"); - after.close(); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + const dbPath = makeDbPath(); + const db = new Database(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); + + db.prepare( + "INSERT INTO mcp_source (scope_id, id, name, config, created_at) VALUES (?, ?, ?, ?, ?)", + ).run( + "default-scope", + "stdio-only", + "Stdio", + JSON.stringify({ + transport: "stdio", + command: "/usr/bin/server", + args: ["--flag"], + }), + Date.now(), + ); + + db.close(); + const drizzleDb = drizzle(new Database(dbPath)); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + + const after = new Database(dbPath, { readonly: true }); + const row = after + .prepare("SELECT auth_kind, auth_secret_id, config FROM mcp_source WHERE id = ?") + .get("stdio-only") as { + auth_kind: string; + auth_secret_id: string | null; + config: string; + }; + expect(row.auth_kind).toBe("none"); + expect(row.auth_secret_id).toBeNull(); + const config = Schema.decodeUnknownSync(ConfigJson)(row.config); + expect(config.transport).toBe("stdio"); + expect(config.command).toBe("/usr/bin/server"); + after.close(); }); }); From 0545e1e29cc51f1eab4f76ec43908c9a3ca8e34e Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:38:26 -0700 Subject: [PATCH 046/108] Clean cloud test worker boundaries --- apps/cloud/src/test-worker.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/apps/cloud/src/test-worker.ts b/apps/cloud/src/test-worker.ts index 37dbe55a4..2e0719467 100644 --- a/apps/cloud/src/test-worker.ts +++ b/apps/cloud/src/test-worker.ts @@ -42,10 +42,10 @@ const TestMcpAuthLive = Layer.succeed(McpAuth)({ if (!header?.startsWith("Bearer ")) return mcpUnauthorized("missing_bearer"); const rawToken = header.slice("Bearer ".length); if (rawToken === "test-system-error") { - return yield* Effect.fail(new 
McpJwtVerificationError({ - cause: new Error("simulated jwks fetch failure"), + return yield* new McpJwtVerificationError({ + cause: "simulated_jwks_fetch_failure", reason: "system", - })); + }); } const token = parseTestBearer(rawToken); return token ? mcpAuthorized(token) : mcpUnauthorized("invalid_token"); @@ -89,6 +89,7 @@ const handleSeedOrg = async ( connect_timeout: 10, onnotice: () => undefined, }); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: worker seed endpoint keeps postgres cleanup in native async finalization try { await drizzle(sql, { schema: { organizations } }) .insert(organizations) @@ -98,6 +99,7 @@ const handleSeedOrg = async ( set: { name: body.name }, }); } finally { + // oxlint-disable-next-line executor/no-promise-catch -- boundary: best-effort postgres close during worker seed endpoint cleanup await sql.end({ timeout: 0 }).catch(() => undefined); } return new Response(null, { status: 204 }); From 586955bbcd9f644fad705478096967c80e3a64e1 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:45:01 -0700 Subject: [PATCH 047/108] Clean cloud service boundaries --- apps/cloud/src/mcp-flow.test.ts | 26 +++++-- apps/cloud/src/services/autumn.ts | 32 +++++---- apps/cloud/src/services/slack.ts | 113 ++++++++++++++++++++---------- 3 files changed, 116 insertions(+), 55 deletions(-) diff --git a/apps/cloud/src/mcp-flow.test.ts b/apps/cloud/src/mcp-flow.test.ts index 73b72aa95..0d654dffb 100644 --- a/apps/cloud/src/mcp-flow.test.ts +++ b/apps/cloud/src/mcp-flow.test.ts @@ -137,7 +137,7 @@ const seedOrg = async (id: string, name = "MCP Flow Org"): Promise => { beforeAll(() => { // Env presence guard — avoids confusing errors downstream if the test // wrangler forgot to bind something the DO needs. 
- if (!env.MCP_SESSION) throw new Error("MCP_SESSION binding missing from test wrangler"); + expect(env.MCP_SESSION, "MCP_SESSION binding missing from test wrangler").toBeDefined(); }); afterAll(() => undefined); @@ -384,18 +384,32 @@ describe("/mcp session restore", () => { const getResponse = await mcpGet({ bearer, sessionId: sessionId! }); expect(getResponse.status).toBe(200); expect(getResponse.headers.get("content-type") ?? "").toContain("text/event-stream"); - await getResponse.body?.cancel().catch(() => undefined); + const responseBody = getResponse.body; + if (responseBody) { + await Effect.runPromise( + Effect.ignore( + Effect.tryPromise({ + try: () => responseBody.cancel(), + catch: () => "ResponseBodyCancelFailed" as const, + }), + ), + ); + } - const response = await Promise.race([ + const postResult = await Promise.race([ mcpPost({ bearer, sessionId, body: TOOLS_LIST_REQUEST, - }), - new Promise((_, reject) => - setTimeout(() => reject(new Error("POST did not return after GET restore")), 5_000), + }).then((response) => ({ kind: "response" as const, response })), + new Promise<{ readonly kind: "timeout" }>((resolve) => + setTimeout(() => resolve({ kind: "timeout" }), 5_000), ), ]); + expect(postResult).toEqual(expect.objectContaining({ kind: "response" })); + if (postResult.kind !== "response") return; + + const response = postResult.response; expect(response.status).toBe(200); expect(response.headers.get("content-type") ?? 
"").toContain("application/json"); const body = (await response.json()) as { diff --git a/apps/cloud/src/services/autumn.ts b/apps/cloud/src/services/autumn.ts index 58e3c6bd5..4cd7d3090 100644 --- a/apps/cloud/src/services/autumn.ts +++ b/apps/cloud/src/services/autumn.ts @@ -12,7 +12,8 @@ import { Context, Data, Effect, Layer } from "effect"; // --------------------------------------------------------------------------- export class AutumnError extends Data.TaggedError("AutumnError")<{ - cause: unknown; + message: string; + cause?: unknown; }> {} // --------------------------------------------------------------------------- @@ -38,8 +39,8 @@ const make = Effect.sync(() => { const secretKey = env.AUTUMN_SECRET_KEY; if (!secretKey) { - const notConfigured = Effect.die( - new Error("Autumn not configured — AUTUMN_SECRET_KEY is empty"), + const notConfigured = Effect.fail( + new AutumnError({ message: "Autumn not configured: AUTUMN_SECRET_KEY is empty" }), ); return { use: () => notConfigured, @@ -52,22 +53,27 @@ const make = Effect.sync(() => { const use = (fn: (client: Autumn) => Promise) => Effect.tryPromise({ try: () => fn(client), - catch: (cause) => new AutumnError({ cause }), + catch: (cause) => new AutumnError({ message: "Autumn SDK request failed", cause }), }).pipe(Effect.withSpan(`autumn.${fn.name ?? "use"}`)); const trackExecution = (organizationId: string) => Effect.gen(function* () { yield* Effect.annotateCurrentSpan({ "autumn.customer.id": organizationId }); - const outcome = yield* Effect.result( - use((c) => c.track({ customerId: organizationId, featureId: "executions", value: 1 })), + yield* use((c) => + c.track({ customerId: organizationId, featureId: "executions", value: 1 }), + ).pipe( + Effect.catchTag("AutumnError", (error) => + Effect.gen(function* () { + // Silent billing data loss is worth paging on — autumn.trackExecution + // is fire-and-forget so the caller doesn't handle it themselves. 
+ yield* Effect.sync(() => { + console.error("[billing] track failed:", error); + Sentry.captureException(error); + }); + yield* Effect.annotateCurrentSpan({ "autumn.track.failed": true }); + }), + ), ); - if (outcome._tag === "Failure") { - // Silent billing data loss is worth paging on — autumn.trackExecution - // is fire-and-forget so the caller doesn't handle it themselves. - console.error("[billing] track failed:", outcome.failure); - Sentry.captureException(outcome.failure); - yield* Effect.annotateCurrentSpan({ "autumn.track.failed": true }); - } }).pipe(Effect.withSpan("autumn.trackExecution")); return { use, trackExecution } satisfies IAutumnService; diff --git a/apps/cloud/src/services/slack.ts b/apps/cloud/src/services/slack.ts index ecd76b573..21f646c08 100644 --- a/apps/cloud/src/services/slack.ts +++ b/apps/cloud/src/services/slack.ts @@ -5,7 +5,7 @@ // --------------------------------------------------------------------------- import { env } from "cloudflare:workers"; -import { Context, Data, Effect, Layer } from "effect"; +import { Context, Data, Effect, Layer, Schema } from "effect"; export class SlackError extends Data.TaggedError("SlackError")<{ method: string; @@ -40,41 +40,82 @@ const randomSuffix = (): string => { return Array.from(bytes, (b) => b.toString(16).padStart(2, "0")).join(""); }; -type SlackResponse = { ok: boolean; error?: string } & Record; +const SlackErrorResponse = Schema.Struct({ + ok: Schema.Literal(false), + error: Schema.optional(Schema.String), +}); + +const SlackChannel = Schema.Struct({ id: Schema.String, name: Schema.String }); + +const SlackPostMessageResponse = Schema.Struct({ ok: Schema.Literal(true) }); + +const SlackCreateChannelResponse = Schema.Struct({ + ok: Schema.Literal(true), + channel: SlackChannel, +}); + +const SlackSharedInviteResponse = Schema.Struct({ + ok: Schema.Literal(true), + invite_id: Schema.String, + url: Schema.String, +}); const make = Effect.sync(() => { const token = env.SLACK_BOT_TOKEN; if 
(!token) { const notConfigured = (method: string) => - Effect.fail( - new SlackError({ method, error: "SLACK_BOT_TOKEN is not configured" }), - ); + Effect.fail(new SlackError({ method, error: "SLACK_BOT_TOKEN is not configured" })); return { createConnectInvite: () => notConfigured("createConnectInvite"), } satisfies ISlackService; } - const call = (method: string, body: Record) => - Effect.tryPromise({ - try: async (): Promise => { - const res = await fetch(`https://slack.com/api/${method}`, { - method: "POST", - headers: { - "content-type": "application/json; charset=utf-8", - authorization: `Bearer ${token}`, - }, - body: JSON.stringify(body), - }); - const json = (await res.json()) as A; - if (!json.ok) throw new Error(json.error ?? "unknown_slack_error"); - return json; - }, - catch: (cause) => - new SlackError({ + const call = ( + method: string, + body: Record, + successSchema: Schema.Decoder, + ) => + Effect.gen(function* () { + const json = yield* Effect.tryPromise({ + try: async (): Promise => { + const res = await fetch(`https://slack.com/api/${method}`, { + method: "POST", + headers: { + "content-type": "application/json; charset=utf-8", + authorization: `Bearer ${token}`, + }, + body: JSON.stringify(body), + }); + return res.json(); + }, + catch: () => + new SlackError({ + method, + error: "Failed to read Slack API response", + }), + }); + + const response = yield* Schema.decodeUnknownEffect( + Schema.Union([successSchema, SlackErrorResponse]), + )(json).pipe( + Effect.mapError( + () => + new SlackError({ + method, + error: "Unexpected Slack API response", + }), + ), + ); + + if (!response.ok) { + return yield* new SlackError({ method, - error: cause instanceof Error ? cause.message : String(cause), - }), + error: response.error ?? 
"unknown_slack_error", + }); + } + + return response; }).pipe(Effect.withSpan(`slack.${method}`)); const createConnectInvite: ISlackService["createConnectInvite"] = ({ @@ -87,10 +128,7 @@ const make = Effect.sync(() => { // Slack channel names: lowercase, no spaces, max 80 chars, unique per workspace. const baseName = `shared-${slugifyEmail(email)}`.slice(0, 80); const tryCreate = (n: string) => - call( - "conversations.create", - { name: n, is_private: false }, - ); + call("conversations.create", { name: n, is_private: false }, SlackCreateChannelResponse); const created = yield* tryCreate(baseName).pipe( Effect.catchTag("SlackError", (err) => @@ -108,12 +146,16 @@ const make = Effect.sync(() => { note ? `Note: ${note}` : null, ].filter(Boolean) as string[]; - yield* call("chat.postMessage", { - channel: channel.id, - text: helloLines.join("\n"), - }); + yield* call( + "chat.postMessage", + { + channel: channel.id, + text: helloLines.join("\n"), + }, + SlackPostMessageResponse, + ); - const invite = yield* call( + const invite = yield* call( "conversations.inviteShared", { channel: channel.id, @@ -122,15 +164,14 @@ const make = Effect.sync(() => { // which is what we want for a focused 1:1 support conversation. 
external_limited: true, }, + SlackSharedInviteResponse, ); return { channel: { id: channel.id, name: channel.name }, invite: { invite_id: invite.invite_id, url: invite.url }, }; - }).pipe( - Effect.withSpan("slack.createConnectInvite", { attributes: { "slack.email": email } }), - ); + }).pipe(Effect.withSpan("slack.createConnectInvite", { attributes: { "slack.email": email } })); return { createConnectInvite } satisfies ISlackService; }); From daa1763da57a663db40aade243948345e75cb89c Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:45:09 -0700 Subject: [PATCH 048/108] Parse local migration test rows with Schema --- .../migrate-google-discovery-bindings.test.ts | 351 ++++++++------- .../server/migrate-graphql-bindings.test.ts | 420 ++++++++++-------- 2 files changed, 407 insertions(+), 364 deletions(-) diff --git a/apps/local/src/server/migrate-google-discovery-bindings.test.ts b/apps/local/src/server/migrate-google-discovery-bindings.test.ts index f5b44dff0..06fbcf501 100644 --- a/apps/local/src/server/migrate-google-discovery-bindings.test.ts +++ b/apps/local/src/server/migrate-google-discovery-bindings.test.ts @@ -4,8 +4,9 @@ // containing auth/credentials), runs the migration, asserts the new // columns and child tables are populated. 
-import { describe, expect, it } from "@effect/vitest"; +import { afterEach, describe, expect, it } from "@effect/vitest"; import { Database } from "bun:sqlite"; +import { Schema } from "effect"; import { mkdtempSync, rmSync } from "node:fs"; import { join } from "node:path"; import { tmpdir } from "node:os"; @@ -16,189 +17,195 @@ import { PRE_0007_SQL, stampPriorMigrationsApplied } from "./__test-helpers__/pr const MIGRATIONS_FOLDER = join(import.meta.dirname, "../../drizzle"); -describe("0007_normalize_plugin_secret_refs (google-discovery)", () => { - it("flattens oauth2 auth into columns", () => { - const dir = mkdtempSync(join(tmpdir(), "gd-mig-")); - const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - db.prepare( - "INSERT INTO google_discovery_source (scope_id, id, name, config, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)", - ).run( - "default-scope", - "drive", - "Drive", - JSON.stringify({ - name: "Drive", - discoveryUrl: "https://www.googleapis.com/discovery/v1/apis/drive/v3/rest", - service: "drive", - version: "v3", - rootUrl: "https://www.googleapis.com/", - servicePath: "drive/v3/", - auth: { - kind: "oauth2", - connectionId: "conn-1", - clientIdSecretId: "client-id", - clientSecretSecretId: "client-secret", - scopes: ["https://www.googleapis.com/auth/drive"], - }, - }), - Date.now(), - Date.now(), - ); +const migratedConfig = Schema.Struct({ + auth: Schema.optional(Schema.Unknown), + service: Schema.String, +}); +const decodeMigratedConfig = Schema.decodeUnknownSync(Schema.fromJsonString(migratedConfig)); - db.close(); - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); +const tempDirs = new Set(); - const after = new Database(dbPath, { readonly: true }); - const row = after - .prepare( - "SELECT auth_kind, auth_connection_id, auth_client_id_secret_id, auth_client_secret_secret_id, 
auth_scopes, config FROM google_discovery_source WHERE id = ?", - ) - .get("drive") as Record; - expect(row.auth_kind).toBe("oauth2"); - expect(row.auth_connection_id).toBe("conn-1"); - expect(row.auth_client_id_secret_id).toBe("client-id"); - expect(row.auth_client_secret_secret_id).toBe("client-secret"); - // auth_scopes column is text-typed (string[] gets stored as JSON in sqlite). - expect(row.auth_scopes).toContain("drive"); - // The auth key should be stripped from config json. - const config = JSON.parse(row.config!); - expect(config.auth).toBeUndefined(); - expect(config.service).toBe("drive"); - after.close(); - } finally { +const createTempDbPath = () => { + const dir = mkdtempSync(join(tmpdir(), "gd-mig-")); + tempDirs.add(dir); + return join(dir, "test.sqlite"); +}; + +describe("0007_normalize_plugin_secret_refs (google-discovery)", () => { + afterEach(() => { + for (const dir of tempDirs) { rmSync(dir, { recursive: true, force: true }); } + tempDirs.clear(); + }); + + it("flattens oauth2 auth into columns", () => { + const dbPath = createTempDbPath(); + const db = new Database(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); + + db.prepare( + "INSERT INTO google_discovery_source (scope_id, id, name, config, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)", + ).run( + "default-scope", + "drive", + "Drive", + JSON.stringify({ + name: "Drive", + discoveryUrl: "https://www.googleapis.com/discovery/v1/apis/drive/v3/rest", + service: "drive", + version: "v3", + rootUrl: "https://www.googleapis.com/", + servicePath: "drive/v3/", + auth: { + kind: "oauth2", + connectionId: "conn-1", + clientIdSecretId: "client-id", + clientSecretSecretId: "client-secret", + scopes: ["https://www.googleapis.com/auth/drive"], + }, + }), + Date.now(), + Date.now(), + ); + + db.close(); + const drizzleDb = drizzle(new Database(dbPath)); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + + const after = new Database(dbPath, { readonly: true }); + 
const row = after + .prepare( + "SELECT auth_kind, auth_connection_id, auth_client_id_secret_id, auth_client_secret_secret_id, auth_scopes, config FROM google_discovery_source WHERE id = ?", + ) + .get("drive") as Record; + expect(row.auth_kind).toBe("oauth2"); + expect(row.auth_connection_id).toBe("conn-1"); + expect(row.auth_client_id_secret_id).toBe("client-id"); + expect(row.auth_client_secret_secret_id).toBe("client-secret"); + // auth_scopes column is text-typed (string[] gets stored as JSON in sqlite). + expect(row.auth_scopes).toContain("drive"); + // The auth key should be stripped from config json. + const config = decodeMigratedConfig(row.config); + expect(config.auth).toBeUndefined(); + expect(config.service).toBe("drive"); + after.close(); }); it("explodes credentials.headers and queryParams into child rows", () => { - const dir = mkdtempSync(join(tmpdir(), "gd-mig-")); - const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - db.prepare( - "INSERT INTO google_discovery_source (scope_id, id, name, config, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)", - ).run( - "default-scope", - "with-creds", - "With Creds", - JSON.stringify({ - name: "With Creds", - discoveryUrl: "https://example.com/discovery", - service: "svc", - version: "v1", - rootUrl: "https://example.com/", - servicePath: "svc/v1/", - auth: { kind: "none" }, - credentials: { - headers: { - "X-Static": "literal", - Authorization: { secretId: "tok-secret", prefix: "Bearer " }, - }, - queryParams: { - api_key: { secretId: "key-secret" }, - }, + const dbPath = createTempDbPath(); + const db = new Database(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); + + db.prepare( + "INSERT INTO google_discovery_source (scope_id, id, name, config, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)", + ).run( + "default-scope", + "with-creds", + "With Creds", + JSON.stringify({ + name: "With 
Creds", + discoveryUrl: "https://example.com/discovery", + service: "svc", + version: "v1", + rootUrl: "https://example.com/", + servicePath: "svc/v1/", + auth: { kind: "none" }, + credentials: { + headers: { + "X-Static": "literal", + Authorization: { secretId: "tok-secret", prefix: "Bearer " }, }, - }), - Date.now(), - Date.now(), - ); - - db.close(); - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); - - const after = new Database(dbPath, { readonly: true }); - const headers = after - .prepare( - "SELECT name, kind, text_value, secret_id, secret_prefix FROM google_discovery_source_credential_header WHERE source_id = ? ORDER BY name", - ) - .all("with-creds") as ReadonlyArray>; - expect(headers).toHaveLength(2); - const byName = new Map(headers.map((h) => [h.name!, h])); - expect(byName.get("X-Static")).toMatchObject({ - kind: "text", - text_value: "literal", - }); - expect(byName.get("Authorization")).toMatchObject({ - kind: "secret", - secret_id: "tok-secret", - secret_prefix: "Bearer ", - }); - - const params = after - .prepare( - "SELECT name, secret_id FROM google_discovery_source_credential_query_param WHERE source_id = ?", - ) - .all("with-creds") as ReadonlyArray>; - expect(params).toHaveLength(1); - expect(params[0]).toMatchObject({ name: "api_key", secret_id: "key-secret" }); - - after.close(); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + queryParams: { + api_key: { secretId: "key-secret" }, + }, + }, + }), + Date.now(), + Date.now(), + ); + + db.close(); + const drizzleDb = drizzle(new Database(dbPath)); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + + const after = new Database(dbPath, { readonly: true }); + const headers = after + .prepare( + "SELECT name, kind, text_value, secret_id, secret_prefix FROM google_discovery_source_credential_header WHERE source_id = ? 
ORDER BY name", + ) + .all("with-creds") as ReadonlyArray>; + expect(headers).toHaveLength(2); + const byName = new Map(headers.map((h) => [h.name!, h])); + expect(byName.get("X-Static")).toMatchObject({ + kind: "text", + text_value: "literal", + }); + expect(byName.get("Authorization")).toMatchObject({ + kind: "secret", + secret_id: "tok-secret", + secret_prefix: "Bearer ", + }); + + const params = after + .prepare( + "SELECT name, secret_id FROM google_discovery_source_credential_query_param WHERE source_id = ?", + ) + .all("with-creds") as ReadonlyArray>; + expect(params).toHaveLength(1); + expect(params[0]).toMatchObject({ name: "api_key", secret_id: "key-secret" }); + + after.close(); }); it("survives auth.kind=none with no credentials", () => { - const dir = mkdtempSync(join(tmpdir(), "gd-mig-")); - const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - db.prepare( - "INSERT INTO google_discovery_source (scope_id, id, name, config, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)", - ).run( - "default-scope", - "bare", - "Bare", - JSON.stringify({ - name: "Bare", - discoveryUrl: "https://example.com/discovery", - service: "svc", - version: "v1", - rootUrl: "https://example.com/", - servicePath: "svc/v1/", - auth: { kind: "none" }, - }), - Date.now(), - Date.now(), - ); - - db.close(); - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); - - const after = new Database(dbPath, { readonly: true }); - const row = after + const dbPath = createTempDbPath(); + const db = new Database(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); + + db.prepare( + "INSERT INTO google_discovery_source (scope_id, id, name, config, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)", + ).run( + "default-scope", + "bare", + "Bare", + JSON.stringify({ + name: "Bare", + discoveryUrl: "https://example.com/discovery", 
+ service: "svc", + version: "v1", + rootUrl: "https://example.com/", + servicePath: "svc/v1/", + auth: { kind: "none" }, + }), + Date.now(), + Date.now(), + ); + + db.close(); + const drizzleDb = drizzle(new Database(dbPath)); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + + const after = new Database(dbPath, { readonly: true }); + const row = after + .prepare( + "SELECT auth_kind, auth_connection_id, auth_scopes FROM google_discovery_source WHERE id = ?", + ) + .get("bare") as Record; + expect(row.auth_kind).toBe("none"); + expect(row.auth_connection_id).toBeNull(); + + const headerCount = ( + after .prepare( - "SELECT auth_kind, auth_connection_id, auth_scopes FROM google_discovery_source WHERE id = ?", + "SELECT count(*) as n FROM google_discovery_source_credential_header WHERE source_id = ?", ) - .get("bare") as Record; - expect(row.auth_kind).toBe("none"); - expect(row.auth_connection_id).toBeNull(); - - const headerCount = ( - after - .prepare( - "SELECT count(*) as n FROM google_discovery_source_credential_header WHERE source_id = ?", - ) - .get("bare") as { n: number } - ).n; - expect(headerCount).toBe(0); - after.close(); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + .get("bare") as { n: number } + ).n; + expect(headerCount).toBe(0); + after.close(); }); }); diff --git a/apps/local/src/server/migrate-graphql-bindings.test.ts b/apps/local/src/server/migrate-graphql-bindings.test.ts index 851ddc780..74e410750 100644 --- a/apps/local/src/server/migrate-graphql-bindings.test.ts +++ b/apps/local/src/server/migrate-graphql-bindings.test.ts @@ -4,8 +4,9 @@ // run the migration, assert that the JSON unpacks into the new // normalized columns / child tables and that the JSON columns are gone. 
-import { describe, expect, it } from "@effect/vitest"; +import { afterEach, beforeEach, describe, expect, it } from "@effect/vitest"; import { Database } from "bun:sqlite"; +import { Schema } from "effect"; import { mkdtempSync, rmSync } from "node:fs"; import { join } from "node:path"; import { tmpdir } from "node:os"; @@ -16,231 +17,266 @@ import { PRE_0007_SQL, stampPriorMigrationsApplied } from "./__test-helpers__/pr const MIGRATIONS_FOLDER = join(import.meta.dirname, "../../drizzle"); +const NullableString = Schema.NullOr(Schema.String); + +const GraphqlAuthRow = Schema.Struct({ + auth_kind: Schema.String, + auth_connection_id: NullableString, +}); + +const TableInfoRow = Schema.Struct({ + name: Schema.String, +}); + +const GraphqlHeaderRow = Schema.Struct({ + name: Schema.String, + kind: Schema.String, + text_value: NullableString, + secret_id: NullableString, + secret_prefix: NullableString, +}); + +const GraphqlQueryParamRow = Schema.Struct({ + kind: Schema.String, + secret_id: Schema.String, +}); + +const CountRow = Schema.Struct({ + n: Schema.Number, +}); + +const GraphqlHeaderIdRow = Schema.Struct({ + id: Schema.String, + source_id: Schema.String, + name: Schema.String, + text_value: Schema.String, +}); + +const decodeAuthRow = Schema.decodeUnknownSync(GraphqlAuthRow); +const decodeTableInfoRows = Schema.decodeUnknownSync(Schema.Array(TableInfoRow)); +const decodeHeaderRows = Schema.decodeUnknownSync(Schema.Array(GraphqlHeaderRow)); +const decodeQueryParamRow = Schema.decodeUnknownSync(GraphqlQueryParamRow); +const decodeCountRow = Schema.decodeUnknownSync(CountRow); +const decodeHeaderIdRows = Schema.decodeUnknownSync( + Schema.Array(GraphqlHeaderIdRow), +); + +let dir: string; + +beforeEach(() => { + dir = mkdtempSync(join(tmpdir(), "graphql-mig-")); +}); + +afterEach(() => { + rmSync(dir, { recursive: true, force: true }); +}); + describe("0007_normalize_plugin_secret_refs (graphql)", () => { it("flattens auth json into auth_kind/auth_connection_id 
columns", () => { - const dir = mkdtempSync(join(tmpdir(), "graphql-mig-")); const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - db.prepare( - "INSERT INTO graphql_source (scope_id, id, name, endpoint, auth) VALUES (?, ?, ?, ?, ?)", - ).run( - "default-scope", - "github", - "GitHub", - "https://api.github.com/graphql", - JSON.stringify({ kind: "oauth2", connectionId: "conn-1" }), - ); - - db.close(); - - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); - - const after = new Database(dbPath, { readonly: true }); - const row = after + const db = new Database(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); + + db.prepare( + "INSERT INTO graphql_source (scope_id, id, name, endpoint, auth) VALUES (?, ?, ?, ?, ?)", + ).run( + "default-scope", + "github", + "GitHub", + "https://api.github.com/graphql", + JSON.stringify({ kind: "oauth2", connectionId: "conn-1" }), + ); + + db.close(); + + const drizzleDb = drizzle(new Database(dbPath)); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + + const after = new Database(dbPath, { readonly: true }); + const row = decodeAuthRow( + after .prepare( "SELECT auth_kind, auth_connection_id FROM graphql_source WHERE id = ?", ) - .get("github") as { auth_kind: string; auth_connection_id: string }; - expect(row.auth_kind).toBe("oauth2"); - expect(row.auth_connection_id).toBe("conn-1"); - // Old json column is gone. - const cols = after + .get("github"), + ); + expect(row.auth_kind).toBe("oauth2"); + expect(row.auth_connection_id).toBe("conn-1"); + // Old json column is gone. 
+ const cols = decodeTableInfoRows( + after .prepare("PRAGMA table_info('graphql_source')") - .all() as ReadonlyArray<{ name: string }>; - expect(cols.some((c) => c.name === "auth")).toBe(false); - expect(cols.some((c) => c.name === "headers")).toBe(false); - expect(cols.some((c) => c.name === "query_params")).toBe(false); - after.close(); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + .all(), + ); + expect(cols.some((c) => c.name === "auth")).toBe(false); + expect(cols.some((c) => c.name === "headers")).toBe(false); + expect(cols.some((c) => c.name === "query_params")).toBe(false); + after.close(); }); it("explodes header/query_param json into child rows", () => { - const dir = mkdtempSync(join(tmpdir(), "graphql-mig-")); const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - const headers = { - // Literal text header. - "X-Static": "literal-value", - // Secret-backed header without prefix. - Authorization: { secretId: "sec-token" }, - // Secret-backed with prefix. - "X-Bearer": { secretId: "sec-bearer", prefix: "Bearer " }, - }; - const queryParams = { - api_key: { secretId: "sec-key" }, - }; - - db.prepare( - "INSERT INTO graphql_source (scope_id, id, name, endpoint, headers, query_params, auth) VALUES (?, ?, ?, ?, ?, ?, ?)", - ).run( - "default-scope", - "example", - "Example", - "https://example.com/graphql", - JSON.stringify(headers), - JSON.stringify(queryParams), - JSON.stringify({ kind: "none" }), - ); - - db.close(); - - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); - - const after = new Database(dbPath, { readonly: true }); - const headerRows = after + const db = new Database(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); + + const headers = { + // Literal text header. + "X-Static": "literal-value", + // Secret-backed header without prefix. 
+ Authorization: { secretId: "sec-token" }, + // Secret-backed with prefix. + "X-Bearer": { secretId: "sec-bearer", prefix: "Bearer " }, + }; + const queryParams = { + api_key: { secretId: "sec-key" }, + }; + + db.prepare( + "INSERT INTO graphql_source (scope_id, id, name, endpoint, headers, query_params, auth) VALUES (?, ?, ?, ?, ?, ?, ?)", + ).run( + "default-scope", + "example", + "Example", + "https://example.com/graphql", + JSON.stringify(headers), + JSON.stringify(queryParams), + JSON.stringify({ kind: "none" }), + ); + + db.close(); + + const drizzleDb = drizzle(new Database(dbPath)); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + + const after = new Database(dbPath, { readonly: true }); + const headerRows = decodeHeaderRows( + after .prepare( "SELECT name, kind, text_value, secret_id, secret_prefix FROM graphql_source_header WHERE source_id = ? ORDER BY name", ) - .all("example") as ReadonlyArray<{ - name: string; - kind: string; - text_value: string | null; - secret_id: string | null; - secret_prefix: string | null; - }>; - expect(headerRows).toHaveLength(3); - - const byName = new Map(headerRows.map((r) => [r.name, r])); - expect(byName.get("X-Static")).toMatchObject({ - kind: "text", - text_value: "literal-value", - secret_id: null, - }); - expect(byName.get("Authorization")).toMatchObject({ - kind: "secret", - text_value: null, - secret_id: "sec-token", - secret_prefix: null, - }); - expect(byName.get("X-Bearer")).toMatchObject({ - kind: "secret", - secret_id: "sec-bearer", - secret_prefix: "Bearer ", - }); - - const paramRow = after + .all("example"), + ); + expect(headerRows).toHaveLength(3); + + const byName = new Map(headerRows.map((r) => [r.name, r])); + expect(byName.get("X-Static")).toMatchObject({ + kind: "text", + text_value: "literal-value", + secret_id: null, + }); + expect(byName.get("Authorization")).toMatchObject({ + kind: "secret", + text_value: null, + secret_id: "sec-token", + secret_prefix: null, + }); + 
expect(byName.get("X-Bearer")).toMatchObject({ + kind: "secret", + secret_id: "sec-bearer", + secret_prefix: "Bearer ", + }); + + const paramRow = decodeQueryParamRow( + after .prepare( "SELECT kind, secret_id FROM graphql_source_query_param WHERE source_id = ?", ) - .get("example") as { kind: string; secret_id: string }; - expect(paramRow).toMatchObject({ kind: "secret", secret_id: "sec-key" }); + .get("example"), + ); + expect(paramRow).toMatchObject({ kind: "secret", secret_id: "sec-key" }); - after.close(); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + after.close(); }); it("handles graphql_source rows with null json (empty config)", () => { - const dir = mkdtempSync(join(tmpdir(), "graphql-mig-")); const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); + const db = new Database(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); - db.prepare( - "INSERT INTO graphql_source (scope_id, id, name, endpoint) VALUES (?, ?, ?, ?)", - ).run("default-scope", "bare", "Bare", "https://bare.example/graphql"); - db.close(); + db.prepare( + "INSERT INTO graphql_source (scope_id, id, name, endpoint) VALUES (?, ?, ?, ?)", + ).run("default-scope", "bare", "Bare", "https://bare.example/graphql"); + db.close(); - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + const drizzleDb = drizzle(new Database(dbPath)); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); - const after = new Database(dbPath, { readonly: true }); - const row = after + const after = new Database(dbPath, { readonly: true }); + const row = decodeAuthRow( + after .prepare( "SELECT auth_kind, auth_connection_id FROM graphql_source WHERE id = ?", ) - .get("bare") as { auth_kind: string; auth_connection_id: string | null }; - expect(row.auth_kind).toBe("none"); - expect(row.auth_connection_id).toBeNull(); - - const 
headerCount = ( - after - .prepare( - "SELECT count(*) as n FROM graphql_source_header WHERE source_id = ?", - ) - .get("bare") as { n: number } - ).n; - expect(headerCount).toBe(0); - after.close(); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + .get("bare"), + ); + expect(row.auth_kind).toBe("none"); + expect(row.auth_connection_id).toBeNull(); + + const headerCount = decodeCountRow( + after + .prepare( + "SELECT count(*) as n FROM graphql_source_header WHERE source_id = ?", + ) + .get("bare"), + ).n; + expect(headerCount).toBe(0); + after.close(); }); it("does not collapse child rows whose source/name pairs share colon-concatenated ids", () => { - const dir = mkdtempSync(join(tmpdir(), "graphql-mig-")); const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - const insert = db.prepare( - "INSERT INTO graphql_source (scope_id, id, name, endpoint, headers) VALUES (?, ?, ?, ?, ?)", - ); - insert.run( - "default-scope", - "a:b", - "First", - "https://first.example/graphql", - JSON.stringify({ c: "first" }), - ); - insert.run( - "default-scope", - "a", - "Second", - "https://second.example/graphql", - JSON.stringify({ "b:c": "second" }), - ); - db.close(); - - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); - - const after = new Database(dbPath, { readonly: true }); - const rows = after + const db = new Database(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); + + const insert = db.prepare( + "INSERT INTO graphql_source (scope_id, id, name, endpoint, headers) VALUES (?, ?, ?, ?, ?)", + ); + insert.run( + "default-scope", + "a:b", + "First", + "https://first.example/graphql", + JSON.stringify({ c: "first" }), + ); + insert.run( + "default-scope", + "a", + "Second", + "https://second.example/graphql", + JSON.stringify({ "b:c": "second" }), + ); + db.close(); + + const 
drizzleDb = drizzle(new Database(dbPath)); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + + const after = new Database(dbPath, { readonly: true }); + const rows = decodeHeaderIdRows( + after .prepare( "SELECT id, source_id, name, text_value FROM graphql_source_header ORDER BY source_id, name", ) - .all() as ReadonlyArray<{ - id: string; - source_id: string; - name: string; - text_value: string; - }>; - expect(rows).toHaveLength(2); - expect(rows).toEqual([ - { - id: '["a","b:c"]', - source_id: "a", - name: "b:c", - text_value: "second", - }, - { - id: '["a:b","c"]', - source_id: "a:b", - name: "c", - text_value: "first", - }, - ]); - after.close(); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + .all(), + ); + expect(rows).toHaveLength(2); + expect(rows).toEqual([ + { + id: '["a","b:c"]', + source_id: "a", + name: "b:c", + text_value: "second", + }, + { + id: '["a:b","c"]', + source_id: "a:b", + name: "c", + text_value: "first", + }, + ]); + after.close(); }); }); From 9f564b4ac8ff891693da761b38cd15279df6e5e0 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:45:15 -0700 Subject: [PATCH 049/108] Use Effect timeouts in tool invoker tests --- .../core/execution/src/tool-invoker.test.ts | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/packages/core/execution/src/tool-invoker.test.ts b/packages/core/execution/src/tool-invoker.test.ts index f8d85c218..beb100725 100644 --- a/packages/core/execution/src/tool-invoker.test.ts +++ b/packages/core/execution/src/tool-invoker.test.ts @@ -522,19 +522,16 @@ describe("pause/resume with multiple elicitations", () => { // Resume first pause — execution continues to second elicitation. // resume() must not hang; it should return (either a new paused // result or the completion). 
- const outcome2 = yield* Effect.promise(() => - Promise.race([ - Effect.runPromise(engine.resume(paused1.execution.id, { action: "accept" })), - new Promise((_, reject) => - setTimeout( - () => reject(new Error("resume hung — second elicitation not surfaced")), - 5000, - ), - ), - ]), + const outcome2 = yield* Effect.race( + engine + .resume(paused1.execution.id, { action: "accept" }) + .pipe(Effect.map((outcome) => ({ kind: "resumed" as const, outcome }))), + Effect.sleep("5 seconds").pipe(Effect.as({ kind: "hung" as const })), ); - expect(outcome2).not.toBeNull(); + expect(outcome2.kind).toBe("resumed"); + if (outcome2.kind !== "resumed") return; + expect(outcome2.outcome).not.toBeNull(); }), { timeout: 10000 }, ); @@ -568,15 +565,19 @@ describe("pause/resume with multiple elicitations", () => { ); expect(exitProbe).toBe("still-running"); - const outcome2 = await Promise.race([ - Effect.runPromise(engine.resume(paused1.execution.id, { action: "accept" })), - new Promise((_, reject) => - setTimeout(() => reject(new Error("resume hung across runPromise boundaries")), 2000), + const outcome2 = await Effect.runPromise( + Effect.race( + engine + .resume(paused1.execution.id, { action: "accept" }) + .pipe(Effect.map((outcome) => ({ kind: "resumed" as const, outcome }))), + Effect.sleep("2 seconds").pipe(Effect.as({ kind: "hung" as const })), ), - ]); + ); - expect(outcome2).not.toBeNull(); - const resumed = outcome2 as NonNullable; + expect(outcome2.kind).toBe("resumed"); + if (outcome2.kind !== "resumed") return; + expect(outcome2.outcome).not.toBeNull(); + const resumed = outcome2.outcome as NonNullable; expect(resumed.status).toBe("completed"); if (resumed.status !== "completed") return; expect(resumed.result.error).toBeUndefined(); From 976f585d66f0b06688cc70d0a7296f261b7406b1 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:45:18 -0700 Subject: [PATCH 050/108] Stabilize GraphQL extraction 
errors --- packages/plugins/graphql/src/sdk/extract.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/plugins/graphql/src/sdk/extract.ts b/packages/plugins/graphql/src/sdk/extract.ts index 0666d907c..a120381d6 100644 --- a/packages/plugins/graphql/src/sdk/extract.ts +++ b/packages/plugins/graphql/src/sdk/extract.ts @@ -251,8 +251,8 @@ export const extract = ( definitions, }; }, - catch: (err) => + catch: () => new GraphqlExtractionError({ - message: `Failed to extract GraphQL schema: ${err instanceof Error ? err.message : String(err)}`, + message: "Failed to extract GraphQL schema", }), }); From f534b9d583e49180924c5e2b7fe80d7dd59f1c05 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:49:28 -0700 Subject: [PATCH 051/108] Clean cloud auth and org test failures --- apps/cloud/src/auth/handlers.ts | 6 +++--- apps/cloud/src/org/handlers.test.ts | 19 ++++++++++++++----- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/apps/cloud/src/auth/handlers.ts b/apps/cloud/src/auth/handlers.ts index 921cd8861..cfc6c2c13 100644 --- a/apps/cloud/src/auth/handlers.ts +++ b/apps/cloud/src/auth/handlers.ts @@ -270,7 +270,7 @@ export const CloudSessionAuthHandlers = HttpApiBuilder.group( }, ); deleteCookie("wos-session", { path: "/" }); - return yield* Effect.fail(new WorkOSError()); + return yield* new WorkOSError(); } setCookie("wos-session", refreshed, COOKIE_OPTIONS); @@ -341,7 +341,7 @@ export const CloudSessionAuthHandlers = HttpApiBuilder.group( yield* Effect.logWarning("acceptInvitation: invitation has no organizationId", { invitationId: payload.invitationId, }); - return yield* Effect.fail(new WorkOSError()); + return yield* new WorkOSError(); } // Mirror the org locally so domain tables can FK against it. 
@@ -369,7 +369,7 @@ export const CloudSessionAuthHandlers = HttpApiBuilder.group( }, ); deleteCookie("wos-session", { path: "/" }); - return yield* Effect.fail(new WorkOSError()); + return yield* new WorkOSError(); } setCookie("wos-session", refreshed, COOKIE_OPTIONS); diff --git a/apps/cloud/src/org/handlers.test.ts b/apps/cloud/src/org/handlers.test.ts index 5cc5a0013..f73a9d451 100644 --- a/apps/cloud/src/org/handlers.test.ts +++ b/apps/cloud/src/org/handlers.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect } from "@effect/vitest"; -import { Effect, Layer } from "effect"; +import { Data, Effect, Layer } from "effect"; import { AuthContext } from "../auth/middleware"; import { WorkOSAuth, type WorkOSAuthService } from "../auth/workos"; @@ -21,15 +21,24 @@ type StubOverrides = { listOrgRoles?: StubFn; }; +class UnstubbedWorkOSMethod extends Data.TaggedError("UnstubbedWorkOSMethod")<{ + method: string; +}> {} + const stubWorkOS = (overrides: StubOverrides = {}) => Layer.succeed( WorkOSAuth, new Proxy({} as WorkOSAuthService, { get: (_target, prop) => { - if (prop in overrides) return (overrides as Record)[prop as string]; - return () => { - throw new Error(`WorkOSAuth.${String(prop)} not stubbed`); - }; + if (typeof prop === "string" && prop in overrides) { + return overrides[prop as keyof StubOverrides]; + } + return () => + Effect.fail( + new UnstubbedWorkOSMethod({ + method: typeof prop === "string" ? prop : (prop.description ?? 
"symbol"), + }), + ); }, }), ); From 46d1c249e784f83ebcdaf93ebc8cf05ed880df6a Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:49:37 -0700 Subject: [PATCH 052/108] Clean local server tooling boundaries --- apps/local/src/server/main.ts | 26 +++++++++++++++++++++----- apps/local/vite.config.ts | 3 +++ 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/apps/local/src/server/main.ts b/apps/local/src/server/main.ts index 6e388cdaa..b7a9aef06 100644 --- a/apps/local/src/server/main.ts +++ b/apps/local/src/server/main.ts @@ -42,10 +42,21 @@ export type ServerHandlers = { }; const closeServerHandlers = async (handlers: ServerHandlers): Promise => { - await Promise.all([ - handlers.api.dispose().catch(() => undefined), - handlers.mcp.close().catch(() => undefined), - ]); + await Effect.runPromise( + Effect.all( + [ + Effect.tryPromise({ + try: () => handlers.api.dispose(), + catch: (cause) => cause, + }).pipe(Effect.ignore), + Effect.tryPromise({ + try: () => handlers.mcp.close(), + catch: (cause) => cause, + }).pipe(Effect.ignore), + ], + { concurrency: "unbounded" }, + ), + ); }; export const createServerHandlers = async (): Promise => { @@ -107,5 +118,10 @@ export const getServerHandlers = (): Promise => serverHandlersRuntime.runPromise(ServerHandlersService.asEffect()); export const disposeServerHandlers = async (): Promise => { - await serverHandlersRuntime.dispose().catch(() => undefined); + await Effect.runPromise( + Effect.tryPromise({ + try: () => serverHandlersRuntime.dispose(), + catch: (cause) => cause, + }).pipe(Effect.ignore), + ); }; diff --git a/apps/local/vite.config.ts b/apps/local/vite.config.ts index 7be2743a5..5dc6b51e3 100644 --- a/apps/local/vite.config.ts +++ b/apps/local/vite.config.ts @@ -6,10 +6,12 @@ import tailwindcss from "@tailwindcss/vite"; import { tanstackRouter } from "@tanstack/router-plugin/vite"; import executorVitePlugin from "@executor-js/vite-plugin"; 
+// oxlint-disable-next-line executor/no-json-parse -- boundary: Vite config reads package metadata from package.json const rootPackage = JSON.parse( readFileSync(new URL("../../package.json", import.meta.url), "utf8"), ) as { version: string; homepage?: string; repository?: string | { url?: string } }; +// oxlint-disable-next-line executor/no-json-parse -- boundary: Vite config reads package metadata from package.json const cliPackage = JSON.parse( readFileSync(new URL("../cli/package.json", import.meta.url), "utf8"), ) as { version?: string }; @@ -43,6 +45,7 @@ function executorApiPlugin(): Plugin { if (!isApi && !isMcp) return next(); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: Vite middleware must convert handler failures into HTTP 500 responses try { if (!handlers) { const { getServerHandlers } = await import("./src/server/main"); From ffc1374c4fc9e716375ce755f8cc9231e56b95d3 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:49:42 -0700 Subject: [PATCH 053/108] Parse OpenAPI migration test rows with Schema --- .../server/migrate-openapi-bindings.test.ts | 446 ++++++++++-------- 1 file changed, 244 insertions(+), 202 deletions(-) diff --git a/apps/local/src/server/migrate-openapi-bindings.test.ts b/apps/local/src/server/migrate-openapi-bindings.test.ts index 408b8fa3b..591521719 100644 --- a/apps/local/src/server/migrate-openapi-bindings.test.ts +++ b/apps/local/src/server/migrate-openapi-bindings.test.ts @@ -5,11 +5,12 @@ // openapi_source_binding.value), runs the migration runner, asserts // the new flat columns + child tables match. 
-import { describe, expect, it } from "@effect/vitest"; +import { afterEach, beforeEach, describe, expect, it } from "@effect/vitest"; import { Database } from "bun:sqlite"; import { mkdtempSync, rmSync } from "node:fs"; import { join } from "node:path"; import { tmpdir } from "node:os"; +import { Schema } from "effect"; import { drizzle } from "drizzle-orm/bun-sqlite"; import { migrate } from "drizzle-orm/bun-sqlite/migrator"; @@ -17,235 +18,276 @@ import { PRE_0007_SQL, stampPriorMigrationsApplied } from "./__test-helpers__/pr const MIGRATIONS_FOLDER = join(import.meta.dirname, "../../drizzle"); +const BindingRow = Schema.Struct({ + id: Schema.String, + kind: Schema.String, + secret_id: Schema.NullOr(Schema.String), + connection_id: Schema.NullOr(Schema.String), + text_value: Schema.NullOr(Schema.String), +}); + +const QueryParamRow = Schema.Struct({ + name: Schema.String, + kind: Schema.String, + text_value: Schema.NullOr(Schema.String), + secret_id: Schema.NullOr(Schema.String), +}); + +const FetchHeaderRow = Schema.Struct({ + name: Schema.String, + kind: Schema.String, + secret_id: Schema.NullOr(Schema.String), + secret_prefix: Schema.NullOr(Schema.String), +}); + +const FetchQueryParamRow = Schema.Struct({ + name: Schema.String, + secret_id: Schema.String, +}); + +const TableInfoRow = Schema.Struct({ + name: Schema.String, +}); + +const CountRow = Schema.Struct({ + n: Schema.Number, +}); + describe("0007_normalize_plugin_secret_refs (openapi)", () => { - it("flattens openapi_source_binding.value into kind/secret_id/connection_id/text_value", () => { - const dir = mkdtempSync(join(tmpdir(), "openapi-mig-")); - const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - // Seed three bindings, one per kind. 
- const insert = db.prepare( - "INSERT INTO openapi_source_binding (id, source_id, source_scope_id, target_scope_id, slot, value, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", - ); - const now = Date.now(); - insert.run( - "b1", - "src", - "default-scope", - "default-scope", - "header:authorization", - JSON.stringify({ kind: "secret", secretId: "tok-secret" }), - now, - now, - ); - insert.run( - "b2", - "src", - "default-scope", - "default-scope", - "oauth2:default:connection", - JSON.stringify({ kind: "connection", connectionId: "conn-1" }), - now, - now, - ); - insert.run( - "b3", - "src", - "default-scope", - "default-scope", - "header:x-static", - JSON.stringify({ kind: "text", text: "literal" }), - now, - now, - ); - - // Need the parent openapi_source row so the source_id FK ergonomics - // are satisfied for any cascading delete logic — though the binding - // table has no DB-level FK, code paths assume the parent exists. - db.prepare( - "INSERT INTO openapi_source (scope_id, id, name, spec, invocation_config) VALUES (?, ?, ?, ?, ?)", - ).run("default-scope", "src", "Source", "{}", "{}"); + let dir: string; + let dbPath: string; + let openDatabases: Set; + + beforeEach(() => { + dir = mkdtempSync(join(tmpdir(), "openapi-mig-")); + dbPath = join(dir, "test.sqlite"); + openDatabases = new Set(); + }); + afterEach(() => { + for (const db of openDatabases) { db.close(); + } + openDatabases.clear(); + rmSync(dir, { recursive: true, force: true }); + }); + + const openDatabase = (...args: ConstructorParameters) => { + const db = new Database(...args); + openDatabases.add(db); + return db; + }; + + const closeDatabase = (db: Database) => { + db.close(); + openDatabases.delete(db); + }; - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + it("flattens openapi_source_binding.value into kind/secret_id/connection_id/text_value", () => { + const db = openDatabase(dbPath); + db.exec(PRE_0007_SQL); 
+ stampPriorMigrationsApplied(db); + + // Seed three bindings, one per kind. + const insert = db.prepare( + "INSERT INTO openapi_source_binding (id, source_id, source_scope_id, target_scope_id, slot, value, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + ); + const now = Date.now(); + insert.run( + "b1", + "src", + "default-scope", + "default-scope", + "header:authorization", + JSON.stringify({ kind: "secret", secretId: "tok-secret" }), + now, + now, + ); + insert.run( + "b2", + "src", + "default-scope", + "default-scope", + "oauth2:default:connection", + JSON.stringify({ kind: "connection", connectionId: "conn-1" }), + now, + now, + ); + insert.run( + "b3", + "src", + "default-scope", + "default-scope", + "header:x-static", + JSON.stringify({ kind: "text", text: "literal" }), + now, + now, + ); + + // Need the parent openapi_source row so the source_id FK ergonomics + // are satisfied for any cascading delete logic, though the binding + // table has no DB-level FK, code paths assume the parent exists. 
+ db.prepare( + "INSERT INTO openapi_source (scope_id, id, name, spec, invocation_config) VALUES (?, ?, ?, ?, ?)", + ).run("default-scope", "src", "Source", "{}", "{}"); + + closeDatabase(db); + + const drizzleSqlite = openDatabase(dbPath); + const drizzleDb = drizzle(drizzleSqlite); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + closeDatabase(drizzleSqlite); - const after = new Database(dbPath, { readonly: true }); - const rows = after + const after = openDatabase(dbPath, { readonly: true }); + const rows = Schema.decodeUnknownSync(Schema.Array(BindingRow))( + after .prepare( "SELECT id, kind, secret_id, connection_id, text_value FROM openapi_source_binding ORDER BY id", ) - .all() as ReadonlyArray<{ - id: string; - kind: string; - secret_id: string | null; - connection_id: string | null; - text_value: string | null; - }>; - expect(rows).toHaveLength(3); - expect(rows[0]).toMatchObject({ - id: "b1", - kind: "secret", - secret_id: "tok-secret", - connection_id: null, - text_value: null, - }); - expect(rows[1]).toMatchObject({ - id: "b2", - kind: "connection", - secret_id: null, - connection_id: "conn-1", - text_value: null, - }); - expect(rows[2]).toMatchObject({ - id: "b3", - kind: "text", - secret_id: null, - connection_id: null, - text_value: "literal", - }); - // value json column dropped. 
- const cols = after - .prepare("PRAGMA table_info('openapi_source_binding')") - .all() as ReadonlyArray<{ name: string }>; - expect(cols.some((c) => c.name === "value")).toBe(false); - after.close(); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + .all(), + ); + expect(rows).toHaveLength(3); + expect(rows[0]).toMatchObject({ + id: "b1", + kind: "secret", + secret_id: "tok-secret", + connection_id: null, + text_value: null, + }); + expect(rows[1]).toMatchObject({ + id: "b2", + kind: "connection", + secret_id: null, + connection_id: "conn-1", + text_value: null, + }); + expect(rows[2]).toMatchObject({ + id: "b3", + kind: "text", + secret_id: null, + connection_id: null, + text_value: "literal", + }); + // value json column dropped. + const cols = Schema.decodeUnknownSync(Schema.Array(TableInfoRow))( + after.prepare("PRAGMA table_info('openapi_source_binding')").all(), + ); + expect(cols.some((c) => c.name === "value")).toBe(false); }); it("explodes query_params and specFetchCredentials json into child rows", () => { - const dir = mkdtempSync(join(tmpdir(), "openapi-mig-")); - const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - const queryParams = { - api_key: { secretId: "qp-secret" }, - flag: "true", - }; - const invocationConfig = { - specFetchCredentials: { - headers: { - Authorization: { secretId: "fetch-tok", prefix: "Bearer " }, - }, - queryParams: { token: { secretId: "fetch-qp" } }, + const db = openDatabase(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); + + const queryParams = { + api_key: { secretId: "qp-secret" }, + flag: "true", + }; + const invocationConfig = { + specFetchCredentials: { + headers: { + Authorization: { secretId: "fetch-tok", prefix: "Bearer " }, }, - }; - - db.prepare( - "INSERT INTO openapi_source (scope_id, id, name, spec, query_params, invocation_config) VALUES (?, ?, ?, ?, ?, ?)", - ).run( - 
"default-scope", - "src", - "Source", - "{}", - JSON.stringify(queryParams), - JSON.stringify(invocationConfig), - ); + queryParams: { token: { secretId: "fetch-qp" } }, + }, + }; - db.close(); + db.prepare( + "INSERT INTO openapi_source (scope_id, id, name, spec, query_params, invocation_config) VALUES (?, ?, ?, ?, ?, ?)", + ).run( + "default-scope", + "src", + "Source", + "{}", + JSON.stringify(queryParams), + JSON.stringify(invocationConfig), + ); - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + closeDatabase(db); - const after = new Database(dbPath, { readonly: true }); + const drizzleSqlite = openDatabase(dbPath); + const drizzleDb = drizzle(drizzleSqlite); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + closeDatabase(drizzleSqlite); - const qpRows = after + const after = openDatabase(dbPath, { readonly: true }); + + const qpRows = Schema.decodeUnknownSync(Schema.Array(QueryParamRow))( + after .prepare( "SELECT name, kind, text_value, secret_id FROM openapi_source_query_param WHERE source_id = ? 
ORDER BY name", ) - .all("src") as ReadonlyArray<{ - name: string; - kind: string; - text_value: string | null; - secret_id: string | null; - }>; - expect(qpRows).toHaveLength(2); - const byName = new Map(qpRows.map((r) => [r.name, r])); - expect(byName.get("api_key")).toMatchObject({ - kind: "secret", - secret_id: "qp-secret", - }); - expect(byName.get("flag")).toMatchObject({ - kind: "text", - text_value: "true", - }); - - const fetchHeaders = after + .all("src"), + ); + expect(qpRows).toHaveLength(2); + const byName = new Map(qpRows.map((r) => [r.name, r])); + expect(byName.get("api_key")).toMatchObject({ + kind: "secret", + secret_id: "qp-secret", + }); + expect(byName.get("flag")).toMatchObject({ + kind: "text", + text_value: "true", + }); + + const fetchHeaders = Schema.decodeUnknownSync(Schema.Array(FetchHeaderRow))( + after .prepare( "SELECT name, kind, secret_id, secret_prefix FROM openapi_source_spec_fetch_header WHERE source_id = ?", ) - .all("src") as ReadonlyArray<{ - name: string; - kind: string; - secret_id: string | null; - secret_prefix: string | null; - }>; - expect(fetchHeaders).toHaveLength(1); - expect(fetchHeaders[0]).toMatchObject({ - name: "Authorization", - kind: "secret", - secret_id: "fetch-tok", - secret_prefix: "Bearer ", - }); - - const fetchQp = after + .all("src"), + ); + expect(fetchHeaders).toHaveLength(1); + expect(fetchHeaders[0]).toMatchObject({ + name: "Authorization", + kind: "secret", + secret_id: "fetch-tok", + secret_prefix: "Bearer ", + }); + + const fetchQp = Schema.decodeUnknownSync(Schema.Array(FetchQueryParamRow))( + after .prepare( "SELECT name, secret_id FROM openapi_source_spec_fetch_query_param WHERE source_id = ?", ) - .all("src") as ReadonlyArray<{ name: string; secret_id: string }>; - expect(fetchQp).toHaveLength(1); - expect(fetchQp[0]).toMatchObject({ name: "token", secret_id: "fetch-qp" }); - - // Old json columns dropped. 
- const cols = after - .prepare("PRAGMA table_info('openapi_source')") - .all() as ReadonlyArray<{ name: string }>; - expect(cols.some((c) => c.name === "query_params")).toBe(false); - expect(cols.some((c) => c.name === "invocation_config")).toBe(false); - after.close(); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + .all("src"), + ); + expect(fetchQp).toHaveLength(1); + expect(fetchQp[0]).toMatchObject({ name: "token", secret_id: "fetch-qp" }); + + // Old json columns dropped. + const cols = Schema.decodeUnknownSync(Schema.Array(TableInfoRow))( + after.prepare("PRAGMA table_info('openapi_source')").all(), + ); + expect(cols.some((c) => c.name === "query_params")).toBe(false); + expect(cols.some((c) => c.name === "invocation_config")).toBe(false); }); it("survives empty / missing json on bindings and sources", () => { - const dir = mkdtempSync(join(tmpdir(), "openapi-mig-")); - const dbPath = join(dir, "test.sqlite"); - try { - const db = new Database(dbPath); - db.exec(PRE_0007_SQL); - stampPriorMigrationsApplied(db); - - // Source with empty invocation_config and no query_params. - db.prepare( - "INSERT INTO openapi_source (scope_id, id, name, spec, invocation_config) VALUES (?, ?, ?, ?, ?)", - ).run("default-scope", "bare", "Bare", "{}", JSON.stringify({})); + const db = openDatabase(dbPath); + db.exec(PRE_0007_SQL); + stampPriorMigrationsApplied(db); - db.close(); - const drizzleDb = drizzle(new Database(dbPath)); - migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); - - const after = new Database(dbPath, { readonly: true }); - const qpCount = ( - after - .prepare( - "SELECT count(*) as n FROM openapi_source_query_param WHERE source_id = ?", - ) - .get("bare") as { n: number } - ).n; - expect(qpCount).toBe(0); - after.close(); - } finally { - rmSync(dir, { recursive: true, force: true }); - } + // Source with empty invocation_config and no query_params. 
+ db.prepare( + "INSERT INTO openapi_source (scope_id, id, name, spec, invocation_config) VALUES (?, ?, ?, ?, ?)", + ).run("default-scope", "bare", "Bare", "{}", JSON.stringify({})); + + closeDatabase(db); + const drizzleSqlite = openDatabase(dbPath); + const drizzleDb = drizzle(drizzleSqlite); + migrate(drizzleDb, { migrationsFolder: MIGRATIONS_FOLDER }); + closeDatabase(drizzleSqlite); + + const after = openDatabase(dbPath, { readonly: true }); + const qpCount = Schema.decodeUnknownSync(CountRow)( + after + .prepare( + "SELECT count(*) as n FROM openapi_source_query_param WHERE source_id = ?", + ) + .get("bare"), + ).n; + expect(qpCount).toBe(0); }); }); From 3b01c46bab9cd990260ab7dfd440c96abd8739d6 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:49:48 -0700 Subject: [PATCH 054/108] Remove MCP cross-user redundant casts --- packages/plugins/mcp/src/sdk/cross-user-isolation.test.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/plugins/mcp/src/sdk/cross-user-isolation.test.ts b/packages/plugins/mcp/src/sdk/cross-user-isolation.test.ts index 02be53341..0c0eb69d5 100644 --- a/packages/plugins/mcp/src/sdk/cross-user-isolation.test.ts +++ b/packages/plugins/mcp/src/sdk/cross-user-isolation.test.ts @@ -69,9 +69,9 @@ const makeSharedOrgExecutors = () => return { execA, execB, - aInnerId: aInnerId as string, - bInnerId: bInnerId as string, - orgScopeId: orgScopeId as string, + aInnerId, + bInnerId, + orgScopeId, }; }); From 1e26fe25f5e2a6deafa874c9ca30606951c829d1 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:49:52 -0700 Subject: [PATCH 055/108] Normalize WorkOS Vault errors --- .../plugins/workos-vault/src/sdk/client.ts | 35 +++++++++++-------- .../workos-vault/src/sdk/secret-store.ts | 29 +++++++++++---- 2 files changed, 42 insertions(+), 22 deletions(-) diff --git 
a/packages/plugins/workos-vault/src/sdk/client.ts b/packages/plugins/workos-vault/src/sdk/client.ts index 7b2fe37ed..304631a80 100644 --- a/packages/plugins/workos-vault/src/sdk/client.ts +++ b/packages/plugins/workos-vault/src/sdk/client.ts @@ -4,7 +4,7 @@ import { NotFoundException, WorkOS as WorkOSClient, } from "@workos-inc/node/worker"; -import { Data, Effect, Result } from "effect"; +import { Data, Effect, Option, Result, Schema } from "effect"; export interface WorkOSVaultObjectMetadata { readonly context: Record; @@ -20,44 +20,49 @@ export interface WorkOSVaultObject { readonly value?: string; } -// Minimal shape carrying an HTTP-style status code. Production WorkOS errors -// (`GenericServerException`/`NotFoundException`) and test fakes both populate -// a numeric `status`, so the boundary normalises against this named type -// rather than probing arbitrary unknown shapes. -interface ErrorWithStatus extends Error { - readonly status: number; -} +const WORKOS_KEK_NOT_READY_MESSAGE = + "KEK was created but is not yet ready. This request can be retried."; -const isErrorWithStatus = (cause: unknown): cause is ErrorWithStatus => - cause instanceof Error && typeof (cause as ErrorWithStatus).status === "number"; +const CauseWithStatusSchema = Schema.Struct({ + status: Schema.Number, +}); const statusFromWorkOSCause = (cause: unknown): number | undefined => { if (cause instanceof GenericServerException || cause instanceof NotFoundException) { return cause.status; } - if (isErrorWithStatus(cause)) return cause.status; - return undefined; + return Option.match(Schema.decodeUnknownOption(CauseWithStatusSchema)(cause), { + onNone: () => undefined, + onSome: (decoded) => decoded.status, + }); }; -const messageFromWorkOSCause = (cause: unknown): string => - cause instanceof Error ? cause.message : typeof cause === "string" ? 
cause : ""; +const isKekNotReadyWorkOSCause = (cause: unknown): boolean => + cause instanceof GenericServerException && + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: WorkOS only exposes this retryable Vault condition through its SDK exception message + cause.message.endsWith(WORKOS_KEK_NOT_READY_MESSAGE); export class WorkOSVaultClientError extends Data.TaggedError("WorkOSVaultClientError")<{ readonly cause: unknown; readonly message: string; readonly operation: string; + readonly retryKind?: "kek_not_ready"; readonly status?: number; }> { constructor(options: { readonly cause: unknown; readonly message?: string; readonly operation: string; + readonly retryKind?: "kek_not_ready"; readonly status?: number; }) { super({ cause: options.cause, - message: options.message ?? messageFromWorkOSCause(options.cause), + message: options.message ?? `WorkOS Vault ${options.operation} failed`, operation: options.operation, + retryKind: + options.retryKind ?? + (isKekNotReadyWorkOSCause(options.cause) ? "kek_not_ready" : undefined), status: options.status ?? statusFromWorkOSCause(options.cause), }); } diff --git a/packages/plugins/workos-vault/src/sdk/secret-store.ts b/packages/plugins/workos-vault/src/sdk/secret-store.ts index c487ce8aa..bcff7f1f5 100644 --- a/packages/plugins/workos-vault/src/sdk/secret-store.ts +++ b/packages/plugins/workos-vault/src/sdk/secret-store.ts @@ -158,7 +158,7 @@ const isStatusError = (error: WorkOSVaultClientError, status: number): boolean = error.status === status; const isKekNotReadyError = (error: WorkOSVaultClientError): boolean => - error.message.includes("KEK was created but is not yet ready"); + error.retryKind === "kek_not_ready"; // Default context builder. 
Each semantic piece of a scope id lives in // its own vault-context key so WorkOS's KEK matcher sees individual @@ -302,9 +302,6 @@ const deleteSecretValue = ( return true; }); -const formatVaultError = (error: WorkOSVaultClientError): StorageError => - new StorageError({ message: error.message, cause: error.cause }); - // --------------------------------------------------------------------------- // makeWorkOSVaultSecretProvider — builds a SecretProvider backed by // WorkOS Vault for values and the plugin's own metadata table for @@ -343,7 +340,13 @@ export const makeWorkOSVaultSecretProvider = ( const meta = yield* store.get(id, scope); if (!meta) return null; const object = yield* loadSecretObject(client, prefix, scope, id).pipe( - Effect.mapError(formatVaultError), + Effect.mapError( + (error) => + new StorageError({ + message: "WorkOS Vault secret read failed", + cause: error, + }), + ), ); if (!object || !object.value) return null; return object.value; @@ -353,7 +356,13 @@ export const makeWorkOSVaultSecretProvider = ( Effect.gen(function* () { const existing = yield* store.get(id, scope); yield* upsertSecretValue(client, prefix, scope, id, value, contextForScope).pipe( - Effect.mapError(formatVaultError), + Effect.mapError( + (error) => + new StorageError({ + message: "WorkOS Vault secret write failed", + cause: error, + }), + ), ); yield* store.upsert({ id, @@ -369,7 +378,13 @@ export const makeWorkOSVaultSecretProvider = ( const meta = yield* store.get(id, scope); if (!meta) return false; yield* deleteSecretValue(client, prefix, scope, id).pipe( - Effect.mapError(formatVaultError), + Effect.mapError( + (error) => + new StorageError({ + message: "WorkOS Vault secret delete failed", + cause: error, + }), + ), ); yield* store.remove(id, scope); return true; From d401d65dd7db18d8bb9694fe27ef69fd6cb889bc Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:56:07 -0700 Subject: [PATCH 056/108] 
Clean cloud auth and Autumn boundaries --- apps/cloud/src/api/autumn.ts | 28 ++++++++++------------- apps/cloud/src/auth/handlers.node.test.ts | 15 ++++++++---- apps/cloud/src/auth/workos.ts | 12 ++++++++-- apps/cloud/src/observability.test.ts | 3 +++ 4 files changed, 36 insertions(+), 22 deletions(-) diff --git a/apps/cloud/src/api/autumn.ts b/apps/cloud/src/api/autumn.ts index bc07ea634..9cd991248 100644 --- a/apps/cloud/src/api/autumn.ts +++ b/apps/cloud/src/api/autumn.ts @@ -1,5 +1,5 @@ import { env } from "cloudflare:workers"; -import { Effect } from "effect"; +import { Cause, Effect } from "effect"; import { HttpRouter, HttpServerRequest, @@ -30,13 +30,11 @@ const handler = Effect.gen(function* () { const session = yield* workos.authenticateRequest(webRequest); if (!session || !session.organizationId) { - return yield* Effect.fail( - new HttpResponseError({ - status: 401, - code: "unauthorized", - message: "Unauthorized", - }), - ); + return yield* new HttpResponseError({ + status: 401, + code: "unauthorized", + message: "Unauthorized", + }); } const url = new URL(webRequest.url); @@ -74,20 +72,18 @@ const handler = Effect.gen(function* () { if (statusCode >= 400) { console.error("[autumn] upstream error:", statusCode, response); - return yield* Effect.fail( - new HttpResponseError({ - status: statusCode, - code: "billing_request_failed", - message: "Billing request failed", - }), - ); + return yield* new HttpResponseError({ + status: statusCode, + code: "billing_request_failed", + message: "Billing request failed", + }); } return HttpServerResponse.jsonUnsafe(response, { status: statusCode }); }).pipe( Effect.catchCause((err) => { if (isServerError(err)) { - console.error("[autumn] request failed:", err instanceof Error ? 
err.stack : err); + console.error("[autumn] request failed:", Cause.pretty(err)); } return Effect.succeed(toErrorServerResponse(err)); }), diff --git a/apps/cloud/src/auth/handlers.node.test.ts b/apps/cloud/src/auth/handlers.node.test.ts index eb4c7b0e5..108c2385f 100644 --- a/apps/cloud/src/auth/handlers.node.test.ts +++ b/apps/cloud/src/auth/handlers.node.test.ts @@ -1,7 +1,7 @@ import { HttpApiBuilder, HttpApi } from "effect/unstable/httpapi"; import { HttpRouter, HttpServer } from "effect/unstable/http"; import { describe, expect, it } from "@effect/vitest"; -import { Effect, Layer } from "effect"; +import { Data, Effect, Layer } from "effect"; import type { Effect as EffectType } from "effect/Effect"; import { CloudAuthPublicApi } from "./api"; @@ -31,15 +31,22 @@ const fakeUser: AuthenticateWithCodeResult["user"] = { metadata: {}, }; +class UnstubbedWorkOSMethod extends Data.TaggedError("UnstubbedWorkOSMethod")<{ + method: string; +}> {} + const makeAuthFetch = (workos: Partial) => { const WorkOSTest = Layer.succeed( WorkOSAuth, new Proxy(workos as WorkOSAuth["Service"], { get: (target, prop) => { if (prop in target) return target[prop as keyof typeof target]; - return () => { - throw new Error(`WorkOSAuth.${String(prop)} not stubbed`); - }; + return () => + Effect.fail( + new UnstubbedWorkOSMethod({ + method: typeof prop === "string" ? prop : (prop.description ?? 
"symbol"), + }), + ); }, }), ); diff --git a/apps/cloud/src/auth/workos.ts b/apps/cloud/src/auth/workos.ts index a0d86a37a..2bce812ad 100644 --- a/apps/cloud/src/auth/workos.ts +++ b/apps/cloud/src/auth/workos.ts @@ -3,11 +3,17 @@ // --------------------------------------------------------------------------- import { env } from "cloudflare:workers"; -import { Context, Effect, Layer } from "effect"; +import { Context, Data, Effect, Layer } from "effect"; import { GeneratePortalLinkIntent, WorkOS } from "@workos-inc/node/worker"; import { WorkOSError, tryPromiseService, withServiceLogging } from "./errors"; const COOKIE_NAME = "wos-session"; +const INVALID_COOKIE_PASSWORD_MESSAGE = + "WORKOS_COOKIE_PASSWORD must be at least 32 characters"; + +class WorkOSAuthConfigurationError extends Data.TaggedError("WorkOSAuthConfigurationError")<{ + readonly message: string; +}> {} // --------------------------------------------------------------------------- // Service @@ -19,7 +25,9 @@ const make = Effect.gen(function* () { const cookiePassword = env.WORKOS_COOKIE_PASSWORD; if (!cookiePassword || cookiePassword.length < 32) { - return yield* Effect.die(new Error("WORKOS_COOKIE_PASSWORD must be at least 32 characters")); + return yield* new WorkOSAuthConfigurationError({ + message: INVALID_COOKIE_PASSWORD_MESSAGE, + }); } const workos = new WorkOS({ apiKey, clientId }); diff --git a/apps/cloud/src/observability.test.ts b/apps/cloud/src/observability.test.ts index 3572e8a0f..dffaab2a7 100644 --- a/apps/cloud/src/observability.test.ts +++ b/apps/cloud/src/observability.test.ts @@ -15,6 +15,7 @@ describe("sentryPayloadForCause", () => { // Reproduces the production chain: an inner runPromise rejects with a // CauseImpl (from Effect v4's causeSquash), Effect.promise re-wraps it // as Die(CauseImpl), and the outer catchCause receives this shape. 
+ // oxlint-disable-next-line executor/no-error-constructor -- boundary: observability test must build a real Error for Sentry-compatible payload assertions const innerCause = Cause.fail(new Error("inner failure")); const outerCause = Cause.die(innerCause); @@ -25,11 +26,13 @@ describe("sentryPayloadForCause", () => { }); it("hands Sentry a real Error for an ordinary failed Cause", () => { + // oxlint-disable-next-line executor/no-error-constructor -- boundary: observability test must build a real Error for Sentry-compatible payload assertions const { primary } = sentryPayloadForCause(Cause.fail(new Error("plain failure"))); expect(looksLikeErrorToSentry(primary)).toBe(true); }); it("forwards non-Cause inputs as-is with no pretty cause attached", () => { + // oxlint-disable-next-line executor/no-error-constructor -- boundary: observability test must build a real Error for Sentry-compatible payload assertions const err = new Error("raw"); const { primary, pretty } = sentryPayloadForCause(err); expect(primary).toBe(err); From 78035f25b08b34871521e6a95e7fc5214241f8ae Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:56:15 -0700 Subject: [PATCH 057/108] Clean local DB migration boundaries --- apps/local/src/server/db-upgrade.ts | 2 + .../src/server/migrate-connections.test.ts | 39 +++++++++++++++---- 2 files changed, 33 insertions(+), 8 deletions(-) diff --git a/apps/local/src/server/db-upgrade.ts b/apps/local/src/server/db-upgrade.ts index 2e5fe4f8b..f2364e76c 100644 --- a/apps/local/src/server/db-upgrade.ts +++ b/apps/local/src/server/db-upgrade.ts @@ -21,6 +21,7 @@ import * as fs from "node:fs"; export const isPreScopeSchema = (dbPath: string): boolean => { if (!fs.existsSync(dbPath)) return false; const db = new Database(dbPath, { readonly: true }); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: local SQLite schema probe must close the DB handle try { const tableExists = 
db .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='source'") @@ -73,6 +74,7 @@ export interface LegacySecret { export const readLegacySecrets = (dbPath: string): readonly LegacySecret[] => { if (!fs.existsSync(dbPath)) return []; const db = new Database(dbPath, { readonly: true }); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: local SQLite legacy-row read must close the DB handle try { const tableExists = db .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='secret'") diff --git a/apps/local/src/server/migrate-connections.test.ts b/apps/local/src/server/migrate-connections.test.ts index 7943975f9..2c562f9d1 100644 --- a/apps/local/src/server/migrate-connections.test.ts +++ b/apps/local/src/server/migrate-connections.test.ts @@ -2,6 +2,7 @@ import { afterEach, beforeEach, describe, expect, it } from "@effect/vitest"; import { Database } from "bun:sqlite"; import { migrate } from "drizzle-orm/bun-sqlite/migrator"; import { drizzle } from "drizzle-orm/bun-sqlite"; +import { Schema } from "effect"; import { mkdtempSync, rmSync } from "node:fs"; import { tmpdir } from "node:os"; import { join } from "node:path"; @@ -9,15 +10,26 @@ import { join } from "node:path"; import { migrateLegacyConnections } from "./migrate-connections"; let workDir: string; +let databases: Array; beforeEach(() => { workDir = mkdtempSync(join(tmpdir(), "executor-migrate-connections-")); + databases = []; }); afterEach(() => { + for (const db of databases) { + db.close(); + } rmSync(workDir, { recursive: true, force: true }); }); +const openDatabase = (): Database => { + const db = new Database(join(workDir, "data.db")); + databases.push(db); + return db; +}; + const columnNames = (db: Database, table: string): ReadonlyArray => ( db.prepare(`PRAGMA table_info('${table}')`).all() as ReadonlyArray<{ @@ -25,9 +37,24 @@ const columnNames = (db: Database, table: string): ReadonlyArray => }> ).map((column) => column.name); +const 
MigratedMcpConfig = Schema.Struct({ + auth: Schema.optional(Schema.Unknown), +}); +const decodeMigratedMcpConfig = Schema.decodeUnknownSync( + Schema.fromJsonString(MigratedMcpConfig), +); + +const MigratedOpenApiOAuth2 = Schema.Struct({ + kind: Schema.Literal("oauth2"), + connectionId: Schema.String, +}); +const decodeMigratedOpenApiOAuth2 = Schema.decodeUnknownSync( + Schema.fromJsonString(MigratedOpenApiOAuth2), +); + describe("migrateLegacyConnections", () => { it("backfills legacy MCP OAuth rows after connection.kind has been dropped", async () => { - const db = new Database(join(workDir, "data.db")); + const db = openDatabase(); migrate(drizzle(db), { migrationsFolder: join(import.meta.dirname, "../../drizzle"), }); @@ -98,7 +125,7 @@ describe("migrateLegacyConnections", () => { readonly auth_kind: string; readonly auth_connection_id: string; }; - expect(JSON.parse(source.config).auth).toBeUndefined(); + expect(decodeMigratedMcpConfig(source.config).auth).toBeUndefined(); expect(source.auth_kind).toBe("oauth2"); expect(source.auth_connection_id).toBe("mcp-oauth2-remote-mcp"); @@ -117,12 +144,10 @@ describe("migrateLegacyConnections", () => { owned_by_connection_id: "mcp-oauth2-remote-mcp", }, ]); - - db.close(); }); it("backfills legacy OpenAPI OAuth from oauth2 column after invocation_config has been dropped", async () => { - const db = new Database(join(workDir, "data.db")); + const db = openDatabase(); migrate(drizzle(db), { migrationsFolder: join(import.meta.dirname, "../../drizzle"), }); @@ -196,10 +221,8 @@ describe("migrateLegacyConnections", () => { const source = db .prepare("SELECT oauth2 FROM openapi_source WHERE scope_id = ? 
AND id = ?") .get("scope-1", "legacy-openapi") as { readonly oauth2: string }; - const oauth2 = JSON.parse(source.oauth2); + const oauth2 = decodeMigratedOpenApiOAuth2(source.oauth2); expect(oauth2.kind).toBe("oauth2"); expect(oauth2.connectionId).toBe(connection?.id); - - db.close(); }); }); From 9812c248a272bdb3c786bc488e8b09932f56fa85 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:56:21 -0700 Subject: [PATCH 058/108] Clean core API observability errors --- packages/core/api/src/observability.test.ts | 4 ++-- packages/core/api/src/observability.ts | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/core/api/src/observability.test.ts b/packages/core/api/src/observability.test.ts index 311a25995..c65c6b6bd 100644 --- a/packages/core/api/src/observability.test.ts +++ b/packages/core/api/src/observability.test.ts @@ -32,7 +32,7 @@ describe("capture", () => { it.effect("translates StorageError to InternalError with ErrorCapture trace id", () => Effect.gen(function* () { const { layer, seen } = yield* makeRecorder("trace-abc"); - const err = new StorageError({ message: "db down", cause: new Error("x") }); + const err = new StorageError({ message: "db down", cause: "x" }); const eff = capture(Effect.fail(err)); const result = yield* Effect.flip(eff).pipe(Effect.provide(layer)); @@ -81,7 +81,7 @@ describe("capture", () => { DomainError >; const result = yield* Effect.flip(capture(eff)); - expect(result._tag).toBe("DomainError"); + expect(result).toBeInstanceOf(DomainError); }), ); }); diff --git a/packages/core/api/src/observability.ts b/packages/core/api/src/observability.ts index cb622356c..b233a358d 100644 --- a/packages/core/api/src/observability.ts +++ b/packages/core/api/src/observability.ts @@ -97,6 +97,7 @@ export const capture = ( eff: Effect.Effect, ): Effect.Effect | InternalError, R> => (eff as Effect.Effect).pipe( + // oxlint-disable-next-line 
executor/no-effect-escape-hatch -- boundary: unique conflicts that reach the HTTP edge are unexpected defects captured by observabilityMiddleware Effect.catchTag("UniqueViolationError", (err) => Effect.die(err)), Effect.catchTag("StorageError", (err) => resolveCapture.pipe( @@ -126,14 +127,14 @@ export const captureEngineError = ( ): Effect.Effect => eff.pipe( Effect.catch((err) => - err instanceof InternalError + Schema.is(InternalError)(err) ? Effect.fail(err) : resolveCapture.pipe( Effect.flatMap((c) => c.captureException(Cause.fail(err))), Effect.flatMap((traceId) => Effect.fail(new InternalError({ traceId })), ), - ), + ), ), ); From f2f87210fa69da8ad580b8736fd42f11672e78c6 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:56:24 -0700 Subject: [PATCH 059/108] Mark JSON schema adapter boundaries --- packages/kernel/core/src/json-schema.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/kernel/core/src/json-schema.ts b/packages/kernel/core/src/json-schema.ts index cb3f6f213..dca5ebe7a 100644 --- a/packages/kernel/core/src/json-schema.ts +++ b/packages/kernel/core/src/json-schema.ts @@ -33,6 +33,7 @@ const pointerToPath = (pointer: string | undefined): ReadonlyArray const toIssueMessage = (error: ErrorObject): string => { const keyword = error.keyword.trim(); + // oxlint-disable-next-line executor/no-unknown-error-message -- typed AJV ErrorObject exposes optional validation message copy const message = (error.message ?? "Invalid value").trim(); return keyword.length > 0 ? 
`${keyword}: ${message}` : message; }; @@ -44,6 +45,7 @@ export const standardSchemaFromJsonSchema = ( fallback?: StandardSchema; }, ): StandardSchema => { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: AJV compile throws for invalid schemas and this adapter preserves fallback behavior try { const validate = ajv.compile(schema as Record); From 432ae9639fd9d256ac9376144272ab9d71cad49b Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:56:33 -0700 Subject: [PATCH 060/108] Clean plugin API handler tests --- .../google-discovery/src/api/handlers.test.ts | 2 ++ packages/plugins/mcp/src/api/handlers.test.ts | 16 ++++++++++------ 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/packages/plugins/google-discovery/src/api/handlers.test.ts b/packages/plugins/google-discovery/src/api/handlers.test.ts index fcc19ac43..23ff9e413 100644 --- a/packages/plugins/google-discovery/src/api/handlers.test.ts +++ b/packages/plugins/google-discovery/src/api/handlers.test.ts @@ -19,9 +19,11 @@ import { GoogleDiscoveryStoredSourceData } from "../sdk/types"; import { GoogleDiscoveryExtensionService, GoogleDiscoveryHandlers } from "./handlers"; import { GoogleDiscoveryGroup } from "./group"; +// oxlint-disable-next-line executor/no-error-constructor -- boundary: test injects a defect to verify opaque handler error responses const unused = Effect.die(new Error("unused")); const failingExtension: GoogleDiscoveryPluginExtension = { + // oxlint-disable-next-line executor/no-error-constructor -- boundary: test injects a defect to verify opaque handler error responses probeDiscovery: () => Effect.die(new Error("Not implemented")), addSource: () => unused, removeSource: (_namespace: string, _scope: string) => unused, diff --git a/packages/plugins/mcp/src/api/handlers.test.ts b/packages/plugins/mcp/src/api/handlers.test.ts index 862e4fddd..5e941bb3e 100644 --- 
a/packages/plugins/mcp/src/api/handlers.test.ts +++ b/packages/plugins/mcp/src/api/handlers.test.ts @@ -10,7 +10,7 @@ import { HttpApiBuilder } from "effect/unstable/httpapi"; import { HttpRouter, HttpServer } from "effect/unstable/http"; import { describe, expect, it } from "@effect/vitest"; -import { Effect, Layer } from "effect"; +import { Effect, Layer, Schema } from "effect"; import { addGroup, observabilityMiddleware } from "@executor-js/api"; import { CoreHandlers, ExecutionEngineService, ExecutorService } from "@executor-js/api/server"; @@ -22,6 +22,7 @@ import { McpGroup } from "./group"; const unused = Effect.die("unused"); const failingExtension: McpPluginExtension = { + // oxlint-disable-next-line executor/no-error-constructor -- boundary: test injects a defect to verify opaque handler error responses probeEndpoint: () => Effect.die(new Error("Not implemented")), addSource: () => unused, removeSource: () => unused, @@ -62,6 +63,11 @@ const webHandlerFor = (extension: McpPluginExtension) => // framework services available to the router itself. 
const WebHandler = webHandlerFor(failingExtension); +const McpConnectionErrorResponse = Schema.Struct({ + _tag: Schema.Literal("McpConnectionError"), + message: Schema.String, +}); + describe("McpHandlers", () => { it.effect( "defect-returning methods produce an opaque InternalError, no leakage", @@ -108,11 +114,9 @@ describe("McpHandlers", () => { ); expect(response.status).toBe(400); - const body = (yield* Effect.promise(() => response.json())) as { - _tag?: string; - message?: string; - }; - expect(body._tag).toBe("McpConnectionError"); + const body = yield* Schema.decodeUnknownEffect(McpConnectionErrorResponse)( + yield* Effect.promise(() => response.json()), + ); expect(body.message).toContain("Do you need to provide an API key"); }), ); From 9e84f999a6a350ac1433e54fcbe35be0b8641f6a Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 20:56:38 -0700 Subject: [PATCH 061/108] Use a typed chart payload guard --- packages/react/src/components/chart.tsx | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/packages/react/src/components/chart.tsx b/packages/react/src/components/chart.tsx index a37531827..c0e716750 100644 --- a/packages/react/src/components/chart.tsx +++ b/packages/react/src/components/chart.tsx @@ -303,26 +303,25 @@ function ChartLegendContent({ } // Helper to extract item config from a payload. +const isRecord = (value: unknown): value is Record => + typeof value === "object" && value !== null && !Array.isArray(value); + function getPayloadConfigFromPayload(config: ChartConfig, payload: unknown, key: string) { - if (typeof payload !== "object" || payload === null) { + if (!isRecord(payload)) { return undefined; } - const payloadPayload = - "payload" in payload && typeof payload.payload === "object" && payload.payload !== null - ? payload.payload - : undefined; + const payloadPayload = isRecord(payload.payload) ? 
payload.payload : undefined; let configLabelKey: string = key; - if (key in payload && typeof payload[key as keyof typeof payload] === "string") { - configLabelKey = payload[key as keyof typeof payload] as string; + if (typeof payload[key] === "string") { + configLabelKey = payload[key]; } else if ( payloadPayload && - key in payloadPayload && - typeof payloadPayload[key as keyof typeof payloadPayload] === "string" + typeof payloadPayload[key] === "string" ) { - configLabelKey = payloadPayload[key as keyof typeof payloadPayload] as string; + configLabelKey = payloadPayload[key]; } return configLabelKey in config ? config[configLabelKey] : config[key]; From c61c7f012930950d52c816a513bbe09a18754ec8 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:09:07 -0700 Subject: [PATCH 062/108] Fix storage typed boundary lint --- packages/core/storage-core/src/factory.ts | 348 ++++++--------- packages/core/storage-drizzle/src/adapter.ts | 434 +++++++++---------- 2 files changed, 350 insertions(+), 432 deletions(-) diff --git a/packages/core/storage-core/src/factory.ts b/packages/core/storage-core/src/factory.ts index 163d9f1e7..959241332 100644 --- a/packages/core/storage-core/src/factory.ts +++ b/packages/core/storage-core/src/factory.ts @@ -25,7 +25,7 @@ // to running the callback against the current adapter // --------------------------------------------------------------------------- -import { Effect } from "effect"; +import { Effect, Option, Schema } from "effect"; import type { CleanedWhere, @@ -72,8 +72,7 @@ const withApplyDefault = ( // when they passed null for a required field (upstream convention — // explicit null on an optional/nullable field is preserved). Without the // `required` gate we'd silently overwrite legitimate null writes. 
- const triggerDefault = - value === undefined || (field.required === true && value === null); + const triggerDefault = value === undefined || (field.required === true && value === null); if (triggerDefault && field.defaultValue !== undefined) { return typeof field.defaultValue === "function" ? (field.defaultValue as () => DBPrimitive)() @@ -96,9 +95,7 @@ export interface CreateAdapterOptions { * Wrap a CustomAdapter into a full DBAdapter that applies schema-driven * transforms. This is the single codepath every backend shares. */ -export const createAdapter = ( - options: CreateAdapterOptions, -): DBAdapter => { +export const createAdapter = (options: CreateAdapterOptions): DBAdapter => { const { schema, adapter: inner } = options; const typedOutput = (value: unknown): T => value as T; const config: Required< @@ -111,7 +108,8 @@ export const createAdapter = ( | "supportsArrays" | "disableIdGeneration" > - > & DBAdapterFactoryConfig = { + > & + DBAdapterFactoryConfig = { ...options.config, supportsJSON: options.config.supportsJSON ?? false, supportsDates: options.config.supportsDates ?? true, @@ -127,47 +125,34 @@ export const createAdapter = ( return defaultGenerateId(); }; - const getModelDef = ( - model: string, - ): Effect.Effect => + const getModelDef = (model: string): Effect.Effect => Effect.gen(function* () { const def = schema[model]; if (!def) { - return yield* Effect.fail( - new StorageError({ - message: `[storage-core] unknown model "${model}"`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: `[storage-core] unknown model "${model}"`, + cause: undefined, + }); } return def; }); - // Sync accessor for call sites that can't sit inside Effect.gen (cleanWhere, - // getModelName, getPhysicalField). These are all fed model names that have - // already been validated upstream by the typed API, so unknown-model throws - // here are a caller bug, not a runtime failure channel. 
- const getModelDefSync = (model: string): DBSchema[string] => { - const def = schema[model]; - if (!def) throw new Error(`[storage-core] unknown model "${model}"`); - return def; - }; - // Map physical table name → logical model key, for renaming incoming model // arg in mapKeysTransformInput/Output when callers pass physical names. // We deliberately *don't* support plural or physical-name inputs — our // plugins always pass the logical key — so getModelName is identity. - const getModelName = (model: string): string => - getModelDefSync(model).modelName ?? model; + const getModelName = (model: string): Effect.Effect => + getModelDef(model).pipe(Effect.map((def) => def.modelName ?? model)); // Field name (logical → physical). Honors mapKeysTransformInput override. - const getPhysicalField = (model: string, logical: string): string => { - if (logical === "id") return config.mapKeysTransformInput?.["id"] ?? "id"; - const override = config.mapKeysTransformInput?.[logical]; - if (override) return override; - const attr = getModelDefSync(model).fields[logical]; - return attr?.fieldName ?? logical; - }; + const getPhysicalField = (model: string, logical: string): Effect.Effect => + Effect.gen(function* () { + if (logical === "id") return config.mapKeysTransformInput?.["id"] ?? "id"; + const override = config.mapKeysTransformInput?.[logical]; + if (override) return override; + const attr = (yield* getModelDef(model)).fields[logical]; + return attr?.fieldName ?? logical; + }); // Inverse of mapKeysTransformOutput: on the output path we may need to // rename a logical field to a different output key for the caller (symmetric @@ -180,10 +165,7 @@ export const createAdapter = ( // Value encode / decode based on supports* flags. 
// --------------------------------------------------------------------------- - const encodeValue = ( - attr: DBFieldAttribute | undefined, - value: unknown, - ): unknown => { + const encodeValue = (attr: DBFieldAttribute | undefined, value: unknown): unknown => { if (value === undefined) return undefined; if (value === null) return null; if (!attr) return value; @@ -216,19 +198,17 @@ export const createAdapter = ( return value; }; - const decodeValue = ( - attr: DBFieldAttribute | undefined, - value: unknown, - ): unknown => { + const decodeJsonFallback = (value: string): unknown => + Schema.decodeUnknownOption(Schema.UnknownFromJsonString)(value).pipe( + Option.getOrElse((): unknown => value), + ); + + const decodeValue = (attr: DBFieldAttribute | undefined, value: unknown): unknown => { if (value === undefined || value === null) return value; if (!attr) return value; const type = attr.type; if (type === "json" && typeof value === "string") { - try { - return JSON.parse(value); - } catch { - return value; - } + return decodeJsonFallback(value); } if (type === "date") { if (value instanceof Date) return value; @@ -240,15 +220,8 @@ export const createAdapter = ( if (type === "boolean" && typeof value === "number") { return value === 1; } - if ( - (type === "string[]" || type === "number[]") && - typeof value === "string" - ) { - try { - return JSON.parse(value); - } catch { - return value; - } + if ((type === "string[]" || type === "number[]") && typeof value === "string") { + return decodeJsonFallback(value); } if (type === "number" && typeof value === "string") { const n = Number(value); @@ -274,9 +247,9 @@ export const createAdapter = ( // id handling on create if (action === "create") { if (forceAllowId && "id" in data && data.id !== undefined && data.id !== null) { - out[getPhysicalField(model, "id")] = data.id; + out[yield* getPhysicalField(model, "id")] = data.id; } else if (!config.disableIdGeneration) { - out[getPhysicalField(model, "id")] = idGen(model); 
+ out[yield* getPhysicalField(model, "id")] = idGen(model); } } @@ -293,11 +266,7 @@ export const createAdapter = ( !(value instanceof Date) && typeof value === "string" ) { - try { - value = new Date(value); - } catch { - // leave as-is - } + value = new Date(value); } // defaultValue / onUpdate @@ -313,7 +282,7 @@ export const createAdapter = ( try: () => res as Promise, catch: (cause) => new StorageError({ - message: `[storage-core] transform.input for "${model}.${logical}" failed: ${cause instanceof Error ? cause.message : String(cause)}`, + message: `[storage-core] transform.input for "${model}.${logical}" failed`, cause, }), }); @@ -324,7 +293,7 @@ export const createAdapter = ( if (value === undefined) continue; - const physical = getPhysicalField(model, logical); + const physical = yield* getPhysicalField(model, logical); let encoded = encodeValue(attr, value); // customTransformInput — user-land per-field hook, runs after the @@ -337,7 +306,7 @@ export const createAdapter = ( fieldAttributes: attr, field: physical, action, - model: getModelName(model), + model: yield* getModelName(model), schema, }); } @@ -363,7 +332,7 @@ export const createAdapter = ( const out: Record = {}; // id always returned - const idPhysical = getPhysicalField(model, "id"); + const idPhysical = yield* getPhysicalField(model, "id"); const idOutputKey = getOutputKey("id"); if (idPhysical in row && row[idPhysical] !== undefined) { out[idOutputKey] = row[idPhysical]; @@ -376,7 +345,7 @@ export const createAdapter = ( if (attr.returned === false) continue; if (select && select.length > 0 && !select.includes(logical)) continue; - const physical = getPhysicalField(model, logical); + const physical = yield* getPhysicalField(model, logical); if (!(physical in row)) continue; let value: unknown = decodeValue(attr, row[physical]); @@ -388,7 +357,7 @@ export const createAdapter = ( try: () => res as Promise, catch: (cause) => new StorageError({ - message: `[storage-core] transform.output for 
"${model}.${logical}" failed: ${cause instanceof Error ? cause.message : String(cause)}`, + message: `[storage-core] transform.output for "${model}.${logical}" failed`, cause, }), }); @@ -405,7 +374,7 @@ export const createAdapter = ( fieldAttributes: attr, field: logical, select: select ?? [], - model: getModelName(model), + model: yield* getModelName(model), schema, }); } @@ -441,95 +410,94 @@ export const createAdapter = ( // join that the schema can't resolve is a bug, not a runtime state. // --------------------------------------------------------------------------- - const resolveJoin = (base: string, join: JoinOption): JoinConfig => { - const baseDef = getModelDefSync(base); - const out: JoinConfig = {}; - for (const [target, raw] of Object.entries(join)) { - if (raw === false) continue; - const targetDef = getModelDefSync(target); - const limit = - typeof raw === "object" && raw.limit !== undefined ? raw.limit : undefined; - - // child → parent - let found: JoinConfig[string] | undefined; - for (const [fieldName, attr] of Object.entries(baseDef.fields)) { - if (attr.references?.model === target) { - found = { - on: { - from: getPhysicalField(base, fieldName), - to: - getPhysicalField(target, attr.references.field) || - attr.references.field, - }, - relation: "one-to-one", - ...(limit !== undefined ? { limit } : {}), - }; - break; - } - } - // parent → children - if (!found) { - for (const [fieldName, attr] of Object.entries(targetDef.fields)) { - if (attr.references?.model === base) { + const resolveJoin = (base: string, join: JoinOption): Effect.Effect => + Effect.gen(function* () { + const baseDef = yield* getModelDef(base); + const out: JoinConfig = {}; + for (const [target, raw] of Object.entries(join)) { + if (raw === false) continue; + const targetDef = yield* getModelDef(target); + const limit = typeof raw === "object" && raw.limit !== undefined ? 
raw.limit : undefined; + + // child → parent + let found: JoinConfig[string] | undefined; + for (const [fieldName, attr] of Object.entries(baseDef.fields)) { + if (attr.references?.model === target) { found = { on: { - from: - getPhysicalField(base, attr.references.field) || - attr.references.field, - to: getPhysicalField(target, fieldName), + from: yield* getPhysicalField(base, fieldName), + to: + (yield* getPhysicalField(target, attr.references.field)) || attr.references.field, }, - relation: "one-to-many", + relation: "one-to-one", ...(limit !== undefined ? { limit } : {}), }; break; } } + // parent → children + if (!found) { + for (const [fieldName, attr] of Object.entries(targetDef.fields)) { + if (attr.references?.model === base) { + found = { + on: { + from: + (yield* getPhysicalField(base, attr.references.field)) || attr.references.field, + to: yield* getPhysicalField(target, fieldName), + }, + relation: "one-to-many", + ...(limit !== undefined ? { limit } : {}), + }; + break; + } + } + } + if (!found) { + return yield* new StorageError({ + message: `[storage-core] cannot resolve join "${base}" -> "${target}": neither model declares a \`references\` for the other`, + cause: undefined, + }); + } + out[target] = found; } - if (!found) { - throw new Error( - `[storage-core] cannot resolve join "${base}" → "${target}": neither model declares a \`references\` for the other`, - ); - } - out[target] = found; - } - return out; - }; + return out; + }); const cleanWhere = ( model: string, where: readonly Where[] | undefined, - ): CleanedWhere[] | undefined => { - if (!where) return undefined; - const def = getModelDefSync(model); - return where.map((w) => { - const operator = w.operator ?? "eq"; - const connector = w.connector ?? "AND"; - const mode = w.mode ?? "sensitive"; - const logical = w.field; - const attr = - logical === "id" ? 
undefined : def.fields[logical]; - const physical = getPhysicalField(model, logical); - - let value: Where["value"] = w.value; - if (attr) { - if (Array.isArray(value)) { - value = (value as unknown[]).map((v) => - encodeValue(attr, v), - ) as typeof value; - } else { - value = encodeValue(attr, value) as typeof value; + ): Effect.Effect => + Effect.gen(function* () { + if (!where) return undefined; + const def = yield* getModelDef(model); + const out: CleanedWhere[] = []; + for (const w of where) { + const operator = w.operator ?? "eq"; + const connector = w.connector ?? "AND"; + const mode = w.mode ?? "sensitive"; + const logical = w.field; + const attr = logical === "id" ? undefined : def.fields[logical]; + const physical = yield* getPhysicalField(model, logical); + + let value: Where["value"] = w.value; + if (attr) { + if (Array.isArray(value)) { + value = (value as unknown[]).map((v) => encodeValue(attr, v)) as typeof value; + } else { + value = encodeValue(attr, value) as typeof value; + } } - } - return { - operator, - connector, - mode, - field: physical, - value, - } satisfies CleanedWhere; + out.push({ + operator, + connector, + mode, + field: physical, + value, + } satisfies CleanedWhere); + } + return out; }); - }; // --------------------------------------------------------------------------- // Transform skip helpers — disableTransformInput/Output let backend authors @@ -577,10 +545,7 @@ export const createAdapter = ( const decoded: unknown[] = []; for (const n of nested) { if (n && typeof n === "object") { - const t = yield* transformOutput( - target, - n as Record, - ); + const t = yield* transformOutput(target, n as Record); decoded.push(t); } else { decoded.push(n); @@ -588,10 +553,7 @@ export const createAdapter = ( } merged[target] = decoded; } else if (typeof nested === "object") { - merged[target] = yield* transformOutput( - target, - nested as Record, - ); + merged[target] = yield* transformOutput(target, nested as Record); } else { 
merged[target] = nested; } @@ -604,9 +566,7 @@ export const createAdapter = ( row: Record | null, select?: string[], ): Effect.Effect | null, StorageFailure> => - config.disableTransformOutput - ? Effect.succeed(row) - : transformOutput(model, row, select); + config.disableTransformOutput ? Effect.succeed(row) : transformOutput(model, row, select); // --------------------------------------------------------------------------- // DBAdapter surface @@ -629,7 +589,7 @@ export const createAdapter = ( data.forceAllowId === true, ); const res = yield* inner.create({ - model: getModelName(data.model), + model: yield* getModelName(data.model), data: input, select: data.select, }); @@ -672,18 +632,14 @@ export const createAdapter = ( ); } const res = yield* inner.createMany({ - model: getModelName(data.model), + model: yield* getModelName(data.model), data: inputs, }); const out: R[] = []; for (const row of res) { out.push( typedOutput( - yield* maybeTransformOutput( - data.model, - row as Record, - undefined, - ), + yield* maybeTransformOutput(data.model, row as Record, undefined), ), ); } @@ -705,10 +661,10 @@ export const createAdapter = ( join?: JoinOption | undefined; }) => Effect.gen(function* () { - const where = cleanWhere(data.model, data.where) ?? []; - const join = data.join ? resolveJoin(data.model, data.join) : undefined; + const where = (yield* cleanWhere(data.model, data.where)) ?? []; + const join = data.join ? yield* resolveJoin(data.model, data.join) : undefined; const res = yield* inner.findOne>({ - model: getModelName(data.model), + model: yield* getModelName(data.model), where, select: data.select, join, @@ -735,16 +691,16 @@ export const createAdapter = ( join?: JoinOption | undefined; }) => Effect.gen(function* () { - const where = cleanWhere(data.model, data.where); + const where = yield* cleanWhere(data.model, data.where); const sortBy = data.sortBy ? 
{ - field: getPhysicalField(data.model, data.sortBy.field), + field: yield* getPhysicalField(data.model, data.sortBy.field), direction: data.sortBy.direction, } : undefined; - const join = data.join ? resolveJoin(data.model, data.join) : undefined; + const join = data.join ? yield* resolveJoin(data.model, data.join) : undefined; const res = yield* inner.findMany>({ - model: getModelName(data.model), + model: yield* getModelName(data.model), where, limit: data.limit, select: data.select, @@ -770,9 +726,9 @@ export const createAdapter = ( count: (data: { model: string; where?: Where[] | undefined }) => Effect.gen(function* () { - const where = cleanWhere(data.model, data.where); + const where = yield* cleanWhere(data.model, data.where); return yield* inner.count({ - model: getModelName(data.model), + model: yield* getModelName(data.model), where, }); }).pipe( @@ -784,21 +740,12 @@ export const createAdapter = ( }), ), - update: (data: { - model: string; - where: Where[]; - update: Record; - }) => + update: (data: { model: string; where: Where[]; update: Record }) => Effect.gen(function* () { - const where = cleanWhere(data.model, data.where) ?? []; - const update = yield* maybeTransformInput( - data.model, - data.update, - "update", - false, - ); + const where = (yield* cleanWhere(data.model, data.where)) ?? []; + const update = yield* maybeTransformInput(data.model, data.update, "update", false); const res = yield* inner.update>({ - model: getModelName(data.model), + model: yield* getModelName(data.model), where, update, }); @@ -813,21 +760,12 @@ export const createAdapter = ( }), ), - updateMany: (data: { - model: string; - where: Where[]; - update: Record; - }) => + updateMany: (data: { model: string; where: Where[]; update: Record }) => Effect.gen(function* () { - const where = cleanWhere(data.model, data.where) ?? 
[]; - const update = yield* maybeTransformInput( - data.model, - data.update, - "update", - false, - ); + const where = (yield* cleanWhere(data.model, data.where)) ?? []; + const update = yield* maybeTransformInput(data.model, data.update, "update", false); return yield* inner.updateMany({ - model: getModelName(data.model), + model: yield* getModelName(data.model), where, update, }); @@ -842,9 +780,9 @@ export const createAdapter = ( delete: (data: { model: string; where: Where[] }) => Effect.gen(function* () { - const where = cleanWhere(data.model, data.where) ?? []; + const where = (yield* cleanWhere(data.model, data.where)) ?? []; yield* inner.delete({ - model: getModelName(data.model), + model: yield* getModelName(data.model), where, }); }).pipe( @@ -858,9 +796,9 @@ export const createAdapter = ( deleteMany: (data: { model: string; where: Where[] }) => Effect.gen(function* () { - const where = cleanWhere(data.model, data.where) ?? []; + const where = (yield* cleanWhere(data.model, data.where)) ?? []; return yield* inner.deleteMany({ - model: getModelName(data.model), + model: yield* getModelName(data.model), where, }); }).pipe( @@ -872,9 +810,7 @@ export const createAdapter = ( }), ), - transaction: ( - callback: (trx: DBTransactionAdapter) => Effect.Effect, - ) => { + transaction: (callback: (trx: DBTransactionAdapter) => Effect.Effect) => { const txFn = config.transaction; const ran = !txFn ? callback(self) : txFn(callback); return ran.pipe( @@ -890,9 +826,7 @@ export const createAdapter = ( // Forward the backend's createSchema verbatim. Upstream better-auth // mutates the `tables` set here to drop session when secondaryStorage // is set; we intentionally don't replicate that auth-specific concern. - createSchema: inner.createSchema - ? (props) => inner.createSchema!(props) - : undefined, + createSchema: inner.createSchema ? 
(props) => inner.createSchema!(props) : undefined, // Expose the full factory config + the inner adapter's own options to // plugin authors at runtime. Mirrors upstream's `options` field on diff --git a/packages/core/storage-drizzle/src/adapter.ts b/packages/core/storage-drizzle/src/adapter.ts index 47cbaae11..cb3d76ddf 100644 --- a/packages/core/storage-drizzle/src/adapter.ts +++ b/packages/core/storage-drizzle/src/adapter.ts @@ -11,7 +11,7 @@ // generation, encode/decode all happen in storage-core // --------------------------------------------------------------------------- -import { Effect, Result, Schedule } from "effect"; +import { Effect, Predicate, Result, Schedule } from "effect"; import { and, asc, @@ -41,11 +41,7 @@ import type { DBSchema, JoinConfig, } from "@executor-js/storage-core"; -import { - StorageError, - UniqueViolationError, - createAdapter, -} from "@executor-js/storage-core"; +import { StorageError, UniqueViolationError, createAdapter } from "@executor-js/storage-core"; // Mirrors `StorageFailure` from @executor-js/storage-core/adapter — kept // local so we don't force a new named export on the public index. 
Both @@ -59,8 +55,7 @@ type DrizzleTransactionCapable = { transaction: (fn: (tx: unknown) => Promise) => Promise; }; const rowAs = (row: Record): T => row as T; -const rowsAs = (rows: readonly Record[]): T[] => - rows.map(rowAs); +const rowsAs = (rows: readonly Record[]): T[] => rows.map(rowAs); // --------------------------------------------------------------------------- // Types @@ -101,11 +96,9 @@ const ilikeOrLike = (col: AnyTable, pattern: string, provider: DrizzleProvider) return sql`LOWER(${col}) LIKE LOWER(${pattern})`; }; -const insensitiveEq = (col: AnyTable, value: string) => - sql`LOWER(${col}) = LOWER(${value})`; +const insensitiveEq = (col: AnyTable, value: string) => sql`LOWER(${col}) = LOWER(${value})`; -const insensitiveNe = (col: AnyTable, value: string) => - sql`LOWER(${col}) <> LOWER(${value})`; +const insensitiveNe = (col: AnyTable, value: string) => sql`LOWER(${col}) <> LOWER(${value})`; // --------------------------------------------------------------------------- // Where compiler — CleanedWhere[] → drizzle-orm SQL @@ -123,109 +116,114 @@ const buildCond = ( table: AnyTable, w: CleanedWhere, provider: DrizzleProvider, -): SQL | undefined => { - const col = table[w.field]; - if (!col) { - throw new Error( - `[storage-drizzle] unknown column "${w.field}" on drizzle table`, - ); - } - const mode = w.mode; - const isInsensitive = - mode === "insensitive" && - (typeof w.value === "string" || - (Array.isArray(w.value) && - (w.value as unknown[]).every((v) => typeof v === "string"))); - - switch (w.operator) { - case "in": - if (!Array.isArray(w.value)) - throw new Error("Value must be an array for `in`"); - if (isInsensitive) { - const values = w.value as readonly string[]; - if (values.length === 0) return sql`1 = 0`; - const lowered = values.map((v) => v.toLowerCase()); - return sql`LOWER(${col}) IN ${lowered}`; - } - return inArray(col, w.value as unknown[]); - case "not_in": - if (!Array.isArray(w.value)) - throw new Error("Value must be an 
array for `not_in`"); - if (isInsensitive) { - const values = w.value as readonly string[]; - if (values.length === 0) return sql`1 = 1`; - const lowered = values.map((v) => v.toLowerCase()); - return sql`LOWER(${col}) NOT IN ${lowered}`; - } - return notInArray(col, w.value as unknown[]); - case "contains": - if (isInsensitive && typeof w.value === "string") { - return ilikeOrLike(col, `%${w.value}%`, provider); - } - return like(col, `%${w.value}%`); - case "starts_with": - if (isInsensitive && typeof w.value === "string") { - return ilikeOrLike(col, `${w.value}%`, provider); - } - return like(col, `${w.value}%`); - case "ends_with": - if (isInsensitive && typeof w.value === "string") { - return ilikeOrLike(col, `%${w.value}`, provider); - } - return like(col, `%${w.value}`); - case "lt": - return lt(col, w.value); - case "lte": - return lte(col, w.value); - case "gt": - return gt(col, w.value); - case "gte": - return gte(col, w.value); - case "ne": - if (w.value === null) return isNotNull(col); - if (isInsensitive && typeof w.value === "string") { - return insensitiveNe(col, w.value); - } - return ne(col, w.value); - case "eq": - default: - if (w.value === null) return isNull(col); - if (isInsensitive && typeof w.value === "string") { - return insensitiveEq(col, w.value); - } - return eq(col, w.value); - } -}; +): Effect.Effect => + Effect.gen(function* () { + const col = table[w.field]; + if (!col) { + return yield* new StorageError({ + message: `[storage-drizzle] unknown column "${w.field}" on drizzle table`, + cause: undefined, + }); + } + const mode = w.mode; + const isInsensitive = + mode === "insensitive" && + (typeof w.value === "string" || + (Array.isArray(w.value) && (w.value as unknown[]).every((v) => typeof v === "string"))); + + switch (w.operator) { + case "in": + if (!Array.isArray(w.value)) { + return yield* new StorageError({ + message: "Value must be an array for `in`", + cause: w, + }); + } + if (isInsensitive) { + const values = w.value as 
readonly string[]; + if (values.length === 0) return sql`1 = 0`; + const lowered = values.map((v) => v.toLowerCase()); + return sql`LOWER(${col}) IN ${lowered}`; + } + return inArray(col, w.value as unknown[]); + case "not_in": + if (!Array.isArray(w.value)) { + return yield* new StorageError({ + message: "Value must be an array for `not_in`", + cause: w, + }); + } + if (isInsensitive) { + const values = w.value as readonly string[]; + if (values.length === 0) return sql`1 = 1`; + const lowered = values.map((v) => v.toLowerCase()); + return sql`LOWER(${col}) NOT IN ${lowered}`; + } + return notInArray(col, w.value as unknown[]); + case "contains": + if (isInsensitive && typeof w.value === "string") { + return ilikeOrLike(col, `%${w.value}%`, provider); + } + return like(col, `%${w.value}%`); + case "starts_with": + if (isInsensitive && typeof w.value === "string") { + return ilikeOrLike(col, `${w.value}%`, provider); + } + return like(col, `${w.value}%`); + case "ends_with": + if (isInsensitive && typeof w.value === "string") { + return ilikeOrLike(col, `%${w.value}`, provider); + } + return like(col, `%${w.value}`); + case "lt": + return lt(col, w.value); + case "lte": + return lte(col, w.value); + case "gt": + return gt(col, w.value); + case "gte": + return gte(col, w.value); + case "ne": + if (w.value === null) return isNotNull(col); + if (isInsensitive && typeof w.value === "string") { + return insensitiveNe(col, w.value); + } + return ne(col, w.value); + case "eq": + default: + if (w.value === null) return isNull(col); + if (isInsensitive && typeof w.value === "string") { + return insensitiveEq(col, w.value); + } + return eq(col, w.value); + } + }); const compileWhere = ( table: AnyTable, where: readonly CleanedWhere[] | undefined, provider: DrizzleProvider, -): SQL | undefined => { - if (!where || where.length === 0) return undefined; - if (where.length === 1) { - return buildCond(table, where[0]!, provider); - } - const andGroup = where.filter( - (w) => 
w.connector === "AND" || !w.connector, - ); - const orGroup = where.filter((w) => w.connector === "OR"); - const andClause = - andGroup.length > 0 - ? and(...andGroup.map((w) => buildCond(table, w, provider))) - : undefined; - const orClause = - orGroup.length > 0 - ? or(...orGroup.map((w) => buildCond(table, w, provider))) - : undefined; - if (andClause && orClause) return and(andClause, orClause); - return andClause ?? orClause; -}; +): Effect.Effect => + Effect.gen(function* () { + if (!where || where.length === 0) return undefined; + if (where.length === 1) { + return yield* buildCond(table, where[0]!, provider); + } + const andGroup = where.filter((w) => w.connector === "AND" || !w.connector); + const orGroup = where.filter((w) => w.connector === "OR"); + const andClause = + andGroup.length > 0 + ? and(...(yield* Effect.all(andGroup.map((w) => buildCond(table, w, provider))))) + : undefined; + const orClause = + orGroup.length > 0 + ? or(...(yield* Effect.all(orGroup.map((w) => buildCond(table, w, provider))))) + : undefined; + if (andClause && orClause) return and(andClause, orClause); + return andClause ?? 
orClause; + }); -const rowIdentityClause = ( - table: AnyTable, - row: Record, -): SQL => { +const rowIdentityClause = (table: AnyTable, row: Record): SQL => { const idClause = eq(table.id, row.id); if (table.scope_id && typeof row.scope_id === "string") { return and(eq(table.scope_id, row.scope_id), idClause) as SQL; @@ -301,19 +299,23 @@ const unwrapDriverCause = (cause: unknown): unknown => { return cur; }; -const classifyError = ( - op: string, - model: string | undefined, - cause: unknown, -): StorageFailure => { +const hasStringMessage = (value: unknown): value is { readonly message: string } => + typeof value === "object" && + value !== null && + "message" in value && + typeof value.message === "string"; + +const readStringMessage = (value: { readonly message: string }): string => value.message; + +const classifyError = (op: string, model: string | undefined, cause: unknown): StorageFailure => { const driverCause = unwrapDriverCause(cause); if (isUniqueViolation(driverCause)) { - return model !== undefined - ? new UniqueViolationError({ model }) - : new UniqueViolationError({}); + return model !== undefined ? new UniqueViolationError({ model }) : new UniqueViolationError({}); } return new StorageError({ - message: `[storage-drizzle] ${op} failed: ${driverCause instanceof Error ? driverCause.message : String(driverCause)}`, + message: `[storage-drizzle] ${op} failed${ + hasStringMessage(driverCause) ? `: ${readStringMessage(driverCause)}` : "" + }`, cause: driverCause, }); }; @@ -326,8 +328,9 @@ const classifyError = ( // retry transient errors twice with short exponential backoff. Unique // violations and anything else fail fast. 
export const isTransientStorageError = (err: StorageFailure): boolean => { - if (err._tag !== "StorageError") return false; - const msg = err.message; + if (!Predicate.isTagged(err, "StorageError")) return false; + if (!hasStringMessage(err)) return false; + const msg = readStringMessage(err); return ( msg.includes("Network connection lost") || msg.includes("CONNECTION_CLOSED") || @@ -387,21 +390,16 @@ const withReturning = ( model, )) as Record[]; if (!rows[0]) - return yield* Effect.fail( - new StorageError({ - message: "[storage-drizzle] mysql insert: no row returned", - cause: undefined, - }), - ); + return yield* new StorageError({ + message: "[storage-drizzle] mysql insert: no row returned", + cause: undefined, + }); return rows[0]; } - return yield* Effect.fail( - new StorageError({ - message: - "[storage-drizzle] mysql insert: id not provided, cannot recover row", - cause: undefined, - }), - ); + return yield* new StorageError({ + message: "[storage-drizzle] mysql insert: id not provided, cannot recover row", + cause: undefined, + }); } const rows = (yield* runPromise( "insert returning", @@ -409,12 +407,10 @@ const withReturning = ( model, )) as Record[]; if (!rows[0]) - return yield* Effect.fail( - new StorageError({ - message: "[storage-drizzle] insert returned no rows", - cause: undefined, - }), - ); + return yield* new StorageError({ + message: "[storage-drizzle] insert returned no rows", + cause: undefined, + }); return rows[0]; }); @@ -426,15 +422,18 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { const { db, provider } = options; const fullSchema: Record = db._.fullSchema ?? {}; - const getTable = (model: string): AnyTable => { - const t = fullSchema[model]; - if (!t) - throw new Error( - `[storage-drizzle] unknown model "${model}" — not found in db._.fullSchema. 
` + - `Make sure the table is exported from the generated schema and passed to drizzle().`, - ); - return t; - }; + const getTable = (model: string): Effect.Effect => + Effect.gen(function* () { + const t = fullSchema[model]; + if (!t) + return yield* new StorageError({ + message: + `[storage-drizzle] unknown model "${model}" - not found in db._.fullSchema. ` + + `Make sure the table is exported from the generated schema and passed to drizzle().`, + cause: undefined, + }); + return t; + }); const backendAttrs = (model: string) => ({ "executor.storage.backend": "drizzle" as const, @@ -450,23 +449,16 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { data: T; select?: string[] | undefined; }) => - Effect.gen(function* () { - const table = getTable(model); - const builder = db.insert(table).values(data); - const row = yield* withReturning( - db, - provider, - table, - builder, - data, - model, - ); - return rowAs(row); - }).pipe( - Effect.withSpan("executor.storage.backend.create", { - attributes: backendAttrs(model), - }), - ); + Effect.gen(function* () { + const table = yield* getTable(model); + const builder = db.insert(table).values(data); + const row = yield* withReturning(db, provider, table, builder, data, model); + return rowAs(row); + }).pipe( + Effect.withSpan("executor.storage.backend.create", { + attributes: backendAttrs(model), + }), + ); const createMany: CustomAdapter["createMany"] = >({ model, @@ -477,7 +469,7 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { }) => Effect.gen(function* () { if (data.length === 0) return []; - const table = getTable(model); + const table = yield* getTable(model); const CHUNK = 500; const all: Record[] = []; for (let i = 0; i < data.length; i += CHUNK) { @@ -510,8 +502,8 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { join?: JoinConfig | undefined; }) => Effect.gen(function* () { - const table = getTable(model); - const 
clause = compileWhere(table, where, provider); + const table = yield* getTable(model); + const clause = yield* compileWhere(table, where, provider); if (join && db.query && db.query[model]) { const includes = buildIncludes(join); const rows = (yield* runPromise( @@ -529,11 +521,10 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { } let q = db.select().from(table); if (clause) q = q.where(clause); - const rows = (yield* runPromise( - "findOne select", - () => q.limit(1), - model, - )) as Record[]; + const rows = (yield* runPromise("findOne select", () => q.limit(1), model)) as Record< + string, + unknown + >[]; return rows[0] ? rowAs(rows[0]) : null; }).pipe( Effect.withSpan("executor.storage.backend.find_one", { @@ -558,8 +549,8 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { join?: JoinConfig | undefined; }) => Effect.gen(function* () { - const table = getTable(model); - const clause = compileWhere(table, where, provider); + const table = yield* getTable(model); + const clause = yield* compileWhere(table, where, provider); if (join && db.query && db.query[model]) { const includes = buildIncludes(join); const opts: Record = { @@ -588,8 +579,7 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { q = q.orderBy(fn(col)); } if (limit !== undefined) q = q.limit(limit); - else if (offset !== undefined && provider === "sqlite") - q = q.limit(Number.MAX_SAFE_INTEGER); + else if (offset !== undefined && provider === "sqlite") q = q.limit(Number.MAX_SAFE_INTEGER); if (offset !== undefined) q = q.offset(offset); const rows = (yield* runPromise( "findMany select", @@ -603,14 +593,18 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { }), ); - const updateOne: CustomAdapter["update"] = ({ model, where, update }: { + const updateOne: CustomAdapter["update"] = ({ + model, + where, + update, + }: { model: string; where: CleanedWhere[]; update: T; }) => 
Effect.gen(function* () { - const table = getTable(model); - const clause = compileWhere(table, where, provider); + const table = yield* getTable(model); + const clause = yield* compileWhere(table, where, provider); let findQ = db.select().from(table); if (clause) findQ = findQ.where(clause); const matched = (yield* runPromise( @@ -631,11 +625,7 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { )) as Record[]; return rows[0] ? rowAs(rows[0]) : null; } - yield* runPromise( - "mysql update execute", - () => updQ.execute(), - model, - ); + yield* runPromise("mysql update execute", () => updQ.execute(), model); const reread = (yield* runPromise( "mysql update reread", () => db.select().from(table).where(identity).limit(1), @@ -667,15 +657,13 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { count: ({ model, where }) => Effect.gen(function* () { - const table = getTable(model); - const clause = compileWhere(table, where, provider); + const table = yield* getTable(model); + const clause = yield* compileWhere(table, where, provider); let q = db.select({ c: count() }).from(table); if (clause) q = q.where(clause); - const rows = (yield* runPromise( - "count select", - () => Promise.resolve(q), - model, - )) as { c: number | string | bigint }[]; + const rows = (yield* runPromise("count select", () => Promise.resolve(q), model)) as { + c: number | string | bigint; + }[]; const raw = rows[0]?.c ?? 0; return typeof raw === "number" ? 
raw : Number(raw); }).pipe( @@ -688,8 +676,8 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { updateMany: ({ model, where, update }) => Effect.gen(function* () { - const table = getTable(model); - const clause = compileWhere(table, where, provider); + const table = yield* getTable(model); + const clause = yield* compileWhere(table, where, provider); // Count first for the return value (sqlite's .run returns changes // but we don't want to rely on that in the generic path) let countQ = db.select({ c: count() }).from(table); @@ -703,11 +691,7 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { if (n === 0) return 0; let updQ = db.update(table).set(update); if (clause) updQ = updQ.where(clause); - yield* runPromise( - "updateMany execute", - () => Promise.resolve(updQ), - model, - ); + yield* runPromise("updateMany execute", () => Promise.resolve(updQ), model); return n; }).pipe( Effect.withSpan("executor.storage.backend.update_many", { @@ -717,8 +701,8 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { delete: ({ model, where }) => Effect.gen(function* () { - const table = getTable(model); - const clause = compileWhere(table, where, provider); + const table = yield* getTable(model); + const clause = yield* compileWhere(table, where, provider); // Mirror in-memory semantics: delete first matching row only let findQ = db.select().from(table); if (clause) findQ = findQ.where(clause); @@ -742,8 +726,8 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { deleteMany: ({ model, where }) => Effect.gen(function* () { - const table = getTable(model); - const clause = compileWhere(table, where, provider); + const table = yield* getTable(model); + const clause = yield* compileWhere(table, where, provider); let countQ = db.select({ c: count() }).from(table); if (clause) countQ = countQ.where(clause); const rows = (yield* runPromise( @@ -755,11 +739,7 @@ 
export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { if (n === 0) return 0; let delQ = db.delete(table); if (clause) delQ = delQ.where(clause); - yield* runPromise( - "deleteMany exec", - () => Promise.resolve(delQ), - model, - ); + yield* runPromise("deleteMany exec", () => Promise.resolve(delQ), model); return n; }).pipe( Effect.withSpan("executor.storage.backend.delete_many", { @@ -783,11 +763,9 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { // mysql: same raw-statement path as sqlite, untested in-tree. const txFn: DBAdapterFactoryConfig["transaction"] = options.supportsTransaction ? ( - cb: (trx: Parameters[0] extends ( - t: infer T, - ) => unknown - ? T - : never) => Effect.Effect, + cb: ( + trx: Parameters[0] extends (t: infer T) => unknown ? T : never, + ) => Effect.Effect, ) => { if (provider === "pg") { // Wrap drizzle's real transaction. The nested adapter runs @@ -810,6 +788,7 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { supportsTransaction: false, }) as TxShape; const exit = await Effect.runPromise(Effect.result(cb(nested))); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: drizzle transaction callbacks require throwing to trigger rollback if (Result.isFailure(exit)) throw new TxFailure(exit.failure); return exit.success; }), @@ -837,18 +816,23 @@ export const drizzleAdapter = (options: DrizzleAdapterOptions): DBAdapter => { ? 
dbAny.execute.bind(dbAny) : undefined; const runStmt = (stmt: string) => - Effect.try({ - try: () => { - if (!runner) { - throw new Error("drizzle db has neither run() nor execute()"); - } - const res = runner(sql.raw(stmt)); - if (res && typeof (res as { then?: unknown }).then === "function") { - return res as Promise; - } - return res; - }, - catch: (cause) => classifyError(stmt, undefined, cause), + Effect.gen(function* () { + if (!runner) { + return yield* new StorageError({ + message: "drizzle db has neither run() nor execute()", + cause: undefined, + }); + } + return yield* Effect.try({ + try: () => { + const res = runner(sql.raw(stmt)); + if (res && typeof (res as { then?: unknown }).then === "function") { + return res as Promise; + } + return res; + }, + catch: (cause) => classifyError(stmt, undefined, cause), + }); }); const maybePromise = yield* runStmt("BEGIN"); if (maybePromise && typeof (maybePromise as { then?: unknown }).then === "function") { From 110adf1b3e62cb0cfca0d310d4252186211e7c11 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:09:27 -0700 Subject: [PATCH 063/108] Parse GraphQL store rows with Schema --- .../plugins/graphql/src/sdk/plugin.test.ts | 282 +++++++++--------- packages/plugins/graphql/src/sdk/store.ts | 211 ++++++------- 2 files changed, 223 insertions(+), 270 deletions(-) diff --git a/packages/plugins/graphql/src/sdk/plugin.test.ts b/packages/plugins/graphql/src/sdk/plugin.test.ts index a42f1e2dc..4a55c13eb 100644 --- a/packages/plugins/graphql/src/sdk/plugin.test.ts +++ b/packages/plugins/graphql/src/sdk/plugin.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect } from "@effect/vitest"; -import { Effect } from "effect"; +import { Data, Effect } from "effect"; import { ConnectionId, @@ -19,6 +19,8 @@ import type { IntrospectionResult } from "./introspect"; const TEST_SCOPE = "test-scope"; +class TestServerAddressError extends 
Data.TaggedError("TestServerAddressError")<{}> {} + // --------------------------------------------------------------------------- // Mock introspection response // --------------------------------------------------------------------------- @@ -95,14 +97,12 @@ const makeMemorySecretsPlugin = () => { const provider: SecretProvider = { key: "memory", writable: true, - get: (id, scope) => - Effect.sync(() => store.get(`${scope}\u0000${id}`) ?? null), + get: (id, scope) => Effect.sync(() => store.get(`${scope}\u0000${id}`) ?? null), set: (id, value, scope) => Effect.sync(() => { store.set(`${scope}\u0000${id}`, value); }), - delete: (id, scope) => - Effect.sync(() => store.delete(`${scope}\u0000${id}`)), + delete: (id, scope) => Effect.sync(() => store.delete(`${scope}\u0000${id}`)), list: () => Effect.sync(() => Array.from(store.keys()).map((key) => { @@ -144,9 +144,7 @@ describe("graphqlPlugin", () => { const queryTool = tools.find((t) => t.id === "test_api.query.hello"); expect(queryTool?.description).toBe("Say hello"); - const mutationTool = tools.find( - (t) => t.id === "test_api.mutation.setGreeting", - ); + const mutationTool = tools.find((t) => t.id === "test_api.mutation.setGreeting"); expect(mutationTool?.description).toBe("Set greeting message"); }), ); @@ -218,14 +216,10 @@ describe("graphqlPlugin", () => { }); const tools = yield* executor.tools.list(); - const mutationTool = tools.find( - (t) => t.id === "approval_test.mutation.setGreeting", - ); + const mutationTool = tools.find((t) => t.id === "approval_test.mutation.setGreeting"); expect(mutationTool).toBeDefined(); expect(mutationTool!.annotations?.requiresApproval).toBe(true); - expect(mutationTool!.annotations?.approvalDescription).toBe( - "mutation setGreeting", - ); + expect(mutationTool!.annotations?.approvalDescription).toBe("mutation setGreeting"); const queryTool = tools.find((t) => t.id === "approval_test.query.hello"); expect(queryTool).toBeDefined(); @@ -233,108 +227,111 @@ 
describe("graphqlPlugin", () => { }), ); - it.effect( - "updateSource patches endpoint/headers without re-registering", - () => - Effect.gen(function* () { - const executor = yield* createExecutor( - makeTestConfig({ plugins: [graphqlPlugin()] as const }), - ); + it.effect("updateSource patches endpoint/headers without re-registering", () => + Effect.gen(function* () { + const executor = yield* createExecutor( + makeTestConfig({ plugins: [graphqlPlugin()] as const }), + ); - yield* executor.graphql.addSource({ - endpoint: "http://localhost:4000/graphql", - scope: "test-scope", - introspectionJson, - namespace: "patched", - }); - - yield* executor.graphql.updateSource("patched", TEST_SCOPE, { - endpoint: "http://localhost:5000/graphql", - headers: { "x-custom": "abc" }, - }); - - const source = yield* executor.graphql.getSource("patched", TEST_SCOPE); - expect(source?.endpoint).toBe("http://localhost:5000/graphql"); - expect(source?.headers).toEqual({ "x-custom": "abc" }); - - // Tools still present (no re-register happened, but they were - // already there from addSource and haven't been removed). - const tools = yield* executor.tools.list(); - expect(tools.filter((t) => t.sourceId === "patched").length).toBe(2); - }), + yield* executor.graphql.addSource({ + endpoint: "http://localhost:4000/graphql", + scope: "test-scope", + introspectionJson, + namespace: "patched", + }); + + yield* executor.graphql.updateSource("patched", TEST_SCOPE, { + endpoint: "http://localhost:5000/graphql", + headers: { "x-custom": "abc" }, + }); + + const source = yield* executor.graphql.getSource("patched", TEST_SCOPE); + expect(source?.endpoint).toBe("http://localhost:5000/graphql"); + expect(source?.headers).toEqual({ "x-custom": "abc" }); + + // Tools still present (no re-register happened, but they were + // already there from addSource and haven't been removed). 
+ const tools = yield* executor.tools.list(); + expect(tools.filter((t) => t.sourceId === "patched").length).toBe(2); + }), ); it.effect("invokes OAuth-backed sources with a bearer token", () => Effect.gen(function* () { const http = yield* Effect.promise(() => import("node:http")); let authorizationHeader: string | null = null; - const server = http.createServer((req, res) => { - authorizationHeader = req.headers.authorization ?? null; - res.setHeader("content-type", "application/json"); - res.end(JSON.stringify({ data: { hello: "Hello Ada" } })); - }); - yield* Effect.callback((resume) => { - server.listen(0, "127.0.0.1", () => resume(Effect.void)); - server.once("error", (error) => resume(Effect.fail(error))); - }); + const server = yield* Effect.acquireRelease( + Effect.callback< + { + readonly server: ReturnType; + readonly port: number; + }, + Error | TestServerAddressError + >((resume) => { + const server = http.createServer((req, res) => { + authorizationHeader = req.headers.authorization ?? 
null; + res.setHeader("content-type", "application/json"); + res.end(JSON.stringify({ data: { hello: "Hello Ada" } })); + }); + server.listen(0, "127.0.0.1", () => { + const address = server.address(); + if (!address || typeof address === "string") { + resume(Effect.fail(new TestServerAddressError())); + return; + } + resume(Effect.succeed({ server, port: address.port })); + }); + server.once("error", (error) => resume(Effect.fail(error))); + }), + ({ server }) => + Effect.callback((resume) => { + server.close(() => resume(Effect.void)); + }).pipe(Effect.ignore), + ); - try { - const address = server.address(); - if (!address || typeof address === "string") { - throw new Error("Expected TCP test server address"); - } + const executor = yield* createExecutor( + makeTestConfig({ + plugins: [makeMemorySecretsPlugin()(), graphqlPlugin()] as const, + }), + ); - const executor = yield* createExecutor( - makeTestConfig({ - plugins: [makeMemorySecretsPlugin()(), graphqlPlugin()] as const, - }), - ); - - const connectionId = ConnectionId.make("graphql-oauth2-test"); - yield* executor.connections.create( - new CreateConnectionInput({ - id: connectionId, - scope: ScopeId.make(TEST_SCOPE), - provider: "oauth2", - identityLabel: "GraphQL Test", - accessToken: new TokenMaterial({ - secretId: SecretId.make(`${connectionId}.access_token`), - name: "GraphQL Access Token", - value: "secret-token", - }), - refreshToken: null, - expiresAt: null, - oauthScope: null, - providerState: null, + const connectionId = ConnectionId.make("graphql-oauth2-test"); + yield* executor.connections.create( + new CreateConnectionInput({ + id: connectionId, + scope: ScopeId.make(TEST_SCOPE), + provider: "oauth2", + identityLabel: "GraphQL Test", + accessToken: new TokenMaterial({ + secretId: SecretId.make(`${connectionId}.access_token`), + name: "GraphQL Access Token", + value: "secret-token", }), - ); + refreshToken: null, + expiresAt: null, + oauthScope: null, + providerState: null, + }), + ); - yield* 
executor.graphql.addSource({ - endpoint: `http://127.0.0.1:${address.port}/graphql`, - scope: TEST_SCOPE, - introspectionJson, - namespace: "oauth_graph", - auth: { kind: "oauth2", connectionId }, - }); - - const result = yield* executor.tools.invoke("oauth_graph.query.hello", { - name: "Ada", - }); - - expect(result).toEqual({ - status: 200, - data: { hello: "Hello Ada" }, - errors: null, - }); - expect(authorizationHeader).toBe("Bearer secret-token"); - } finally { - yield* Effect.promise( - () => - new Promise((resolve, reject) => { - server.close((error) => (error ? reject(error) : resolve())); - }), - ); - } + yield* executor.graphql.addSource({ + endpoint: `http://127.0.0.1:${server.port}/graphql`, + scope: TEST_SCOPE, + introspectionJson, + namespace: "oauth_graph", + auth: { kind: "oauth2", connectionId }, + }); + + const result = yield* executor.tools.invoke("oauth_graph.query.hello", { + name: "Ada", + }); + + expect(result).toEqual({ + status: 200, + data: { hello: "Hello Ada" }, + errors: null, + }); + expect(authorizationHeader).toBe("Bearer secret-token"); }), ); @@ -367,12 +364,20 @@ describe("graphqlPlugin", () => { // scenario is reproducible against the pre-fix store. 
// ------------------------------------------------------------------------- - const ORG_SCOPE = ScopeId.make("org-scope"); - const USER_SCOPE = ScopeId.make("user-scope"); + const ORG_SCOPE = "org-scope"; + const USER_SCOPE = "user-scope"; const stackedScopes = [ - new Scope({ id: USER_SCOPE, name: "user", createdAt: new Date() }), - new Scope({ id: ORG_SCOPE, name: "org", createdAt: new Date() }), + new Scope({ + id: ScopeId.make(USER_SCOPE), + name: "user", + createdAt: new Date(), + }), + new Scope({ + id: ScopeId.make(ORG_SCOPE), + name: "org", + createdAt: new Date(), + }), ] as const; it.effect("shadowed addSource does not wipe the outer-scope source", () => @@ -387,7 +392,7 @@ describe("graphqlPlugin", () => { // Org-level base source yield* executor.graphql.addSource({ endpoint: "http://org.example.com/graphql", - scope: ORG_SCOPE as string, + scope: ORG_SCOPE, introspectionJson, namespace: "shared", name: "Org Source", @@ -396,28 +401,22 @@ describe("graphqlPlugin", () => { // Per-user shadow with the same namespace yield* executor.graphql.addSource({ endpoint: "http://user.example.com/graphql", - scope: USER_SCOPE as string, + scope: USER_SCOPE, introspectionJson, namespace: "shared", name: "User Source", }); - const userView = yield* executor.graphql.getSource( - "shared", - USER_SCOPE as string, - ); - const orgView = yield* executor.graphql.getSource( - "shared", - ORG_SCOPE as string, - ); + const userView = yield* executor.graphql.getSource("shared", USER_SCOPE); + const orgView = yield* executor.graphql.getSource("shared", ORG_SCOPE); // Both rows must coexist — innermost-wins reads come from the // executor; the store's scope-pinned getters return the exact row. 
expect(userView?.name).toBe("User Source"); - expect(userView?.scope).toBe(USER_SCOPE as string); + expect(userView?.scope).toBe(USER_SCOPE); expect(userView?.endpoint).toBe("http://user.example.com/graphql"); expect(orgView?.name).toBe("Org Source"); - expect(orgView?.scope).toBe(ORG_SCOPE as string); + expect(orgView?.scope).toBe(ORG_SCOPE); expect(orgView?.endpoint).toBe("http://org.example.com/graphql"); }), ); @@ -433,29 +432,23 @@ describe("graphqlPlugin", () => { yield* executor.graphql.addSource({ endpoint: "http://org.example.com/graphql", - scope: ORG_SCOPE as string, + scope: ORG_SCOPE, introspectionJson, namespace: "shared", name: "Org Source", }); yield* executor.graphql.addSource({ endpoint: "http://user.example.com/graphql", - scope: USER_SCOPE as string, + scope: USER_SCOPE, introspectionJson, namespace: "shared", name: "User Source", }); - yield* executor.graphql.removeSource("shared", USER_SCOPE as string); + yield* executor.graphql.removeSource("shared", USER_SCOPE); - const userView = yield* executor.graphql.getSource( - "shared", - USER_SCOPE as string, - ); - const orgView = yield* executor.graphql.getSource( - "shared", - ORG_SCOPE as string, - ); + const userView = yield* executor.graphql.getSource("shared", USER_SCOPE); + const orgView = yield* executor.graphql.getSource("shared", ORG_SCOPE); expect(userView).toBeNull(); expect(orgView?.name).toBe("Org Source"); @@ -474,32 +467,26 @@ describe("graphqlPlugin", () => { yield* executor.graphql.addSource({ endpoint: "http://org.example.com/graphql", - scope: ORG_SCOPE as string, + scope: ORG_SCOPE, introspectionJson, namespace: "shared", name: "Org Source", }); yield* executor.graphql.addSource({ endpoint: "http://user.example.com/graphql", - scope: USER_SCOPE as string, + scope: USER_SCOPE, introspectionJson, namespace: "shared", name: "User Source", }); - yield* executor.graphql.updateSource("shared", USER_SCOPE as string, { + yield* executor.graphql.updateSource("shared", USER_SCOPE, { name: 
"User Renamed", endpoint: "http://user-new.example.com/graphql", }); - const userView = yield* executor.graphql.getSource( - "shared", - USER_SCOPE as string, - ); - const orgView = yield* executor.graphql.getSource( - "shared", - ORG_SCOPE as string, - ); + const userView = yield* executor.graphql.getSource("shared", USER_SCOPE); + const orgView = yield* executor.graphql.getSource("shared", ORG_SCOPE); expect(userView?.name).toBe("User Renamed"); expect(userView?.endpoint).toBe("http://user-new.example.com/graphql"); @@ -536,7 +523,9 @@ describe("graphqlPlugin", () => { scope: TEST_SCOPE, introspectionJson, namespace: "with_secret", - headers: { Authorization: { secretId: "api-key", prefix: "Bearer " } }, + headers: { + Authorization: { secretId: "api-key", prefix: "Bearer " }, + }, queryParams: { token: { secretId: "api-key" } }, }); @@ -575,9 +564,10 @@ describe("graphqlPlugin", () => { }); const result = yield* executor.secrets.remove(SecretId.make("locked")).pipe( - Effect.flip, + Effect.as("removed"), + Effect.catchTag("SecretInUseError", () => Effect.succeed("SecretInUseError" as const)), ); - expect((result as { _tag: string })._tag).toBe("SecretInUseError"); + expect(result).toBe("SecretInUseError"); // After detaching the source, remove succeeds. 
yield* executor.graphql.removeSource("ref", TEST_SCOPE); diff --git a/packages/plugins/graphql/src/sdk/store.ts b/packages/plugins/graphql/src/sdk/store.ts index 55ac34a55..241c75582 100644 --- a/packages/plugins/graphql/src/sdk/store.ts +++ b/packages/plugins/graphql/src/sdk/store.ts @@ -1,10 +1,6 @@ -import { Effect } from "effect"; +import { Effect, Schema } from "effect"; -import { - defineSchema, - type StorageDeps, - type StorageFailure, -} from "@executor-js/sdk/core"; +import { defineSchema, type StorageDeps, type StorageFailure } from "@executor-js/sdk/core"; import { OperationBinding, @@ -114,53 +110,63 @@ export interface StoredOperation { readonly binding: OperationBinding; } -// Persisted JSON shape for an OperationBinding. Reconstructed into a -// Schema.Class instance on read. -interface BindingJson { - readonly kind: "query" | "mutation"; - readonly fieldName: string; - readonly operationString: string; - readonly variableNames: readonly string[]; -} - const decodeBinding = (value: unknown): OperationBinding => { - const data = - typeof value === "string" - ? 
(JSON.parse(value) as BindingJson) - : (value as BindingJson); - return new OperationBinding({ - kind: data.kind, - fieldName: data.fieldName, - operationString: data.operationString, - variableNames: [...data.variableNames], - }); + if (typeof value === "string") { + return Schema.decodeUnknownSync(Schema.fromJsonString(OperationBinding))(value); + } + return Schema.decodeUnknownSync(OperationBinding)(value); }; -const encodeBinding = (binding: OperationBinding): BindingJson => ({ - kind: binding.kind, - fieldName: binding.fieldName, - operationString: binding.operationString, - variableNames: [...binding.variableNames], +const encodeBinding = Schema.encodeSync(OperationBinding); + +const toJsonRecord = (value: unknown): Record => value as Record; + +const SourceRow = Schema.Struct({ + id: Schema.String, + scope_id: Schema.String, + name: Schema.String, + endpoint: Schema.String, + auth_kind: Schema.Literals(["none", "oauth2"]), + auth_connection_id: Schema.NullOr(Schema.String).pipe(Schema.optionalKey), +}); + +const ChildValueRow = Schema.Struct({ + name: Schema.String, + kind: Schema.Literals(["text", "secret"]), + text_value: Schema.NullOr(Schema.String).pipe(Schema.optionalKey), + secret_id: Schema.NullOr(Schema.String).pipe(Schema.optionalKey), + secret_prefix: Schema.NullOr(Schema.String).pipe(Schema.optionalKey), +}); + +const OperationRow = Schema.Struct({ + id: Schema.String, + source_id: Schema.String, + binding: Schema.Unknown, +}); + +const ChildUsageRowSchema = Schema.Struct({ + source_id: Schema.String, + scope_id: Schema.String, + name: Schema.String, }); -const toJsonRecord = (value: unknown): Record => - value as Record; +const decodeSourceRow = Schema.decodeUnknownSync(SourceRow); +const decodeChildValueRow = Schema.decodeUnknownSync(ChildValueRow); +const decodeOperationRow = Schema.decodeUnknownSync(OperationRow); +const decodeChildUsageRow = Schema.decodeUnknownSync(ChildUsageRowSchema); // Header / query-param rows: collapse the flat columns 
back into a // `SecretBackedValue` map keyed by header name. `kind` discriminates the // shape; `secret_prefix` is optional and only populated when present in // the original config. -const rowsToValueMap = ( - rows: readonly Record[], -): Record => { +const rowsToValueMap = (rows: readonly Record[]): Record => { const out: Record = {}; - for (const row of rows) { - const name = row.name as string; + for (const rawRow of rows) { + const row = decodeChildValueRow(rawRow); + const name = row.name; if (row.kind === "secret" && typeof row.secret_id === "string") { - const prefix = row.secret_prefix as string | undefined | null; - out[name] = prefix - ? { secretId: row.secret_id, prefix } - : { secretId: row.secret_id }; + const prefix = row.secret_prefix; + out[name] = prefix ? { secretId: row.secret_id, prefix } : { secretId: row.secret_id }; } else if (row.kind === "text" && typeof row.text_value === "string") { out[name] = row.text_value; } @@ -201,11 +207,8 @@ const valueToChildRow = ( }; }; -const rowToAuth = (row: Record): GraphqlSourceAuth => { - if ( - row.auth_kind === "oauth2" && - typeof row.auth_connection_id === "string" - ) { +const rowToAuth = (row: typeof SourceRow.Type): GraphqlSourceAuth => { + if (row.auth_kind === "oauth2" && typeof row.auth_connection_id === "string") { return { kind: "oauth2", connectionId: row.auth_connection_id }; } return { kind: "none" }; @@ -229,11 +232,7 @@ const rowToAuth = (row: Record): GraphqlSourceAuth => { /** Flat row shape returned by the usage-lookup helpers. Mirrors the new * child-table columns so callers can build a `Usage` without * re-decoding. 
*/ -export interface ChildUsageRow { - readonly source_id: string; - readonly scope_id: string; - readonly name: string; -} +export type ChildUsageRow = typeof ChildUsageRowSchema.Type; export interface GraphqlStore { readonly upsertSource: ( @@ -258,10 +257,7 @@ export interface GraphqlStore { scope: string, ) => Effect.Effect; - readonly listSources: () => Effect.Effect< - readonly StoredGraphqlSource[], - StorageFailure - >; + readonly listSources: () => Effect.Effect; readonly getOperationByToolId: ( toolId: string, @@ -273,10 +269,7 @@ export interface GraphqlStore { scope: string, ) => Effect.Effect; - readonly removeSource: ( - namespace: string, - scope: string, - ) => Effect.Effect; + readonly removeSource: (namespace: string, scope: string) => Effect.Effect; // --------------------------------------------------------------------- // Usage lookups — power `usagesForSecret` / `usagesForConnection`. @@ -340,26 +333,30 @@ export const makeDefaultGraphqlStore = ({ row: Record, ): Effect.Effect => Effect.gen(function* () { - const sourceId = row.id as string; - const scope = row.scope_id as string; + const source = decodeSourceRow(row); + const sourceId = source.id; + const scope = source.scope_id; const headers = yield* loadHeaders(sourceId, scope); const queryParams = yield* loadQueryParams(sourceId, scope); return { namespace: sourceId, scope, - name: row.name as string, - endpoint: row.endpoint as string, + name: source.name, + endpoint: source.endpoint, headers, queryParams, - auth: rowToAuth(row), + auth: rowToAuth(source), }; }); - const rowToOperation = (row: Record): StoredOperation => ({ - toolId: row.id as string, - sourceId: row.source_id as string, - binding: decodeBinding(row.binding), - }); + const rowToOperation = (row: Record): StoredOperation => { + const operation = decodeOperationRow(row); + return { + toolId: operation.id, + sourceId: operation.source_id, + binding: decodeBinding(operation.binding), + }; + }; // Replace child rows for a 
source by deleting then bulk-inserting. Used // by both upsertSource (full rewrite) and updateSourceMeta (partial @@ -382,9 +379,7 @@ export const makeDefaultGraphqlStore = ({ if (entries.length === 0) return; yield* db.createMany({ model, - data: entries.map(([name, value]) => - valueToChildRow(sourceId, scope, name, value), - ), + data: entries.map(([name, value]) => valueToChildRow(sourceId, scope, name, value)), forceAllowId: true, }); }); @@ -433,8 +428,7 @@ export const makeDefaultGraphqlStore = ({ name: input.name, endpoint: input.endpoint, auth_kind: input.auth.kind, - auth_connection_id: - input.auth.kind === "oauth2" ? input.auth.connectionId : undefined, + auth_connection_id: input.auth.kind === "oauth2" ? input.auth.connectionId : undefined, }, forceAllowId: true, }); @@ -479,8 +473,7 @@ export const makeDefaultGraphqlStore = ({ if (patch.endpoint !== undefined) update.endpoint = patch.endpoint; if (patch.auth !== undefined) { update.auth_kind = patch.auth.kind; - update.auth_connection_id = - patch.auth.kind === "oauth2" ? patch.auth.connectionId : null; + update.auth_connection_id = patch.auth.kind === "oauth2" ? 
patch.auth.connectionId : null; } if (Object.keys(update).length > 0) { yield* db.update({ @@ -493,20 +486,10 @@ export const makeDefaultGraphqlStore = ({ }); } if (patch.headers !== undefined) { - yield* replaceChildren( - "graphql_source_header", - namespace, - scope, - patch.headers, - ); + yield* replaceChildren("graphql_source_header", namespace, scope, patch.headers); } if (patch.queryParams !== undefined) { - yield* replaceChildren( - "graphql_source_query_param", - namespace, - scope, - patch.queryParams, - ); + yield* replaceChildren("graphql_source_query_param", namespace, scope, patch.queryParams); } }), @@ -561,17 +544,7 @@ export const makeDefaultGraphqlStore = ({ model: "graphql_source_header", where: [{ field: "secret_id", value: secretId }], }) - .pipe( - Effect.map((rows) => - rows.map( - (r): ChildUsageRow => ({ - source_id: r.source_id as string, - scope_id: r.scope_id as string, - name: r.name as string, - }), - ), - ), - ), + .pipe(Effect.map((rows) => rows.map((row): ChildUsageRow => decodeChildUsageRow(row)))), findQueryParamRowsBySecret: (secretId) => db @@ -579,41 +552,30 @@ export const makeDefaultGraphqlStore = ({ model: "graphql_source_query_param", where: [{ field: "secret_id", value: secretId }], }) - .pipe( - Effect.map((rows) => - rows.map( - (r): ChildUsageRow => ({ - source_id: r.source_id as string, - scope_id: r.scope_id as string, - name: r.name as string, - }), - ), - ), - ), + .pipe(Effect.map((rows) => rows.map((row): ChildUsageRow => decodeChildUsageRow(row)))), findSourcesByConnection: (connectionId) => Effect.gen(function* () { const rows = yield* db.findMany({ model: "graphql_source", - where: [ - { field: "auth_connection_id", value: connectionId }, - ], + where: [{ field: "auth_connection_id", value: connectionId }], }); // Skip the children load — usage callers only need the parent // row's name + scope. 
Synthesize a minimal StoredGraphqlSource // shape with empty headers/params so the type matches without // a wasted child fetch. - return rows.map( - (row): StoredGraphqlSource => ({ - namespace: row.id as string, - scope: row.scope_id as string, - name: row.name as string, - endpoint: row.endpoint as string, + return rows.map((rawRow): StoredGraphqlSource => { + const row = decodeSourceRow(rawRow); + return { + namespace: row.id, + scope: row.scope_id, + name: row.name, + endpoint: row.endpoint, headers: {}, queryParams: {}, auth: rowToAuth(row), - }), - ); + }; + }); }), lookupSourceNames: (keys) => @@ -626,9 +588,10 @@ export const makeDefaultGraphqlStore = ({ const rows = yield* db.findMany({ model: "graphql_source" }); const requested = new Set(keys); const out = new Map(); - for (const r of rows) { - const key = `${r.scope_id as string}:${r.id as string}`; - if (requested.has(key)) out.set(key, r.name as string); + for (const rawRow of rows) { + const row = decodeSourceRow(rawRow); + const key = `${row.scope_id}:${row.id}`; + if (requested.has(key)) out.set(key, row.name); } return out; }), From de48f2d7da1b018f99c971d0ac1e47d3606cab0e Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:09:33 -0700 Subject: [PATCH 064/108] Fix Google Discovery SDK test boundaries --- .../google-discovery/src/sdk/plugin.test.ts | 890 +++++++++--------- 1 file changed, 421 insertions(+), 469 deletions(-) diff --git a/packages/plugins/google-discovery/src/sdk/plugin.test.ts b/packages/plugins/google-discovery/src/sdk/plugin.test.ts index 56edde038..fa4adce05 100644 --- a/packages/plugins/google-discovery/src/sdk/plugin.test.ts +++ b/packages/plugins/google-discovery/src/sdk/plugin.test.ts @@ -3,7 +3,7 @@ import { readFileSync } from "node:fs"; import { resolve } from "node:path"; import { describe, expect, it, vi } from "@effect/vitest"; -import { Effect } from "effect"; +import { Effect, Schema } from "effect"; 
import { ConnectionId, @@ -24,6 +24,10 @@ const autoApprove: InvokeOptions = { onElicitation: "accept-all" }; const fixturePath = resolve(__dirname, "../../fixtures/drive.json"); const fixtureText = readFileSync(fixturePath, "utf8"); +const DiscoveryFixtureJson = Schema.Record(Schema.String, Schema.Unknown); +const fixtureJson = Schema.decodeUnknownSync(Schema.fromJsonString(DiscoveryFixtureJson))( + fixtureText, +); // --------------------------------------------------------------------------- // Test HTTP server — serves the discovery document and echoes API calls. @@ -68,7 +72,7 @@ const startServer = (): Promise => return; } const dynamicFixture = JSON.stringify({ - ...JSON.parse(fixtureText), + ...fixtureJson, rootUrl: `http://127.0.0.1:${address.port}/`, }); response.statusCode = 200; @@ -84,11 +88,13 @@ const startServer = (): Promise => server.listen(0, "127.0.0.1", (error?: Error) => { if (error) { + // oxlint-disable-next-line executor/no-promise-reject -- boundary: node listen callback reports startup failure through Promise adapter rejectPromise(error); return; } const address = server.address(); if (!address || typeof address === "string") { + // oxlint-disable-next-line executor/no-promise-reject, executor/no-error-constructor -- boundary: node listen callback reports startup failure through Promise adapter rejectPromise(new Error("Failed to resolve test server address")); return; } @@ -99,12 +105,18 @@ const startServer = (): Promise => requests, close: () => new Promise((resolveClose, rejectClose) => { + // oxlint-disable-next-line executor/no-promise-reject -- boundary: node close callback reports shutdown failure through Promise adapter server.close((err) => (err ? 
rejectClose(err) : resolveClose())); }), }); }); }); +const TestServer = Effect.acquireRelease( + Effect.promise(() => startServer()), + (handle) => Effect.promise(() => handle.close()).pipe(Effect.ignore), +); + // --------------------------------------------------------------------------- // Memory secret provider plugin — lets the test store secrets with // `executor.secrets.set` / `ctx.secrets.set`. Without this there's no @@ -118,14 +130,12 @@ const makeMemorySecretsPlugin = () => { const provider: SecretProvider = { key: "memory", writable: true, - get: (id, scope) => - Effect.sync(() => store.get(`${scope}\u0000${id}`) ?? null), + get: (id, scope) => Effect.sync(() => store.get(`${scope}\u0000${id}`) ?? null), set: (id, value, scope) => Effect.sync(() => { store.set(`${scope}\u0000${id}`, value); }), - delete: (id, scope) => - Effect.sync(() => store.delete(`${scope}\u0000${id}`)), + delete: (id, scope) => Effect.sync(() => store.delete(`${scope}\u0000${id}`)), list: () => Effect.sync(() => Array.from(store.keys()).map((k) => { @@ -148,47 +158,50 @@ const makeMemorySecretsPlugin = () => { describe("Google Discovery plugin", () => { it.effect("normalizes legacy googleapis discovery urls", () => Effect.gen(function* () { - const executor = yield* createExecutor( - makeTestConfig({ - plugins: [makeMemorySecretsPlugin()(), googleDiscoveryPlugin()] as const, - }), + const executor = yield* Effect.acquireRelease( + createExecutor( + makeTestConfig({ + plugins: [makeMemorySecretsPlugin()(), googleDiscoveryPlugin()] as const, + }), + ), + (executor) => executor.close().pipe(Effect.ignore), ); const originalFetch = globalThis.fetch; - const fetchMock = vi.spyOn(globalThis, "fetch").mockImplementation((( - input: RequestInfo | URL, - init?: RequestInit, - ) => { - const url = - typeof input === "string" - ? input - : input instanceof URL - ? 
input.toString() - : input.url; - if (url === "https://www.googleapis.com/discovery/v1/apis/drive/v3/rest") { - return Promise.resolve( - new Response(fixtureText, { - status: 200, - headers: { "content-type": "application/json" }, - }), - ); - } - return originalFetch(input, init); - }) as typeof fetch); - - try { - const result = yield* executor.googleDiscovery.probeDiscovery( - "https://drive.googleapis.com/$discovery/rest?version=v3", - ); - expect(result.service).toBe("drive"); - expect(fetchMock).toHaveBeenCalledWith( - "https://www.googleapis.com/discovery/v1/apis/drive/v3/rest", - expect.objectContaining({ signal: expect.any(AbortSignal) }), - ); - } finally { - fetchMock.mockRestore(); - yield* executor.close(); - } + const fetchMock = yield* Effect.acquireRelease( + Effect.sync(() => + vi.spyOn(globalThis, "fetch").mockImplementation((( + input: RequestInfo | URL, + init?: RequestInit, + ) => { + const url = + typeof input === "string" + ? input + : input instanceof URL + ? input.toString() + : input.url; + if (url === "https://www.googleapis.com/discovery/v1/apis/drive/v3/rest") { + return Promise.resolve( + new Response(fixtureText, { + status: 200, + headers: { "content-type": "application/json" }, + }), + ); + } + return originalFetch(input, init); + }) as typeof fetch), + ), + (mock) => Effect.sync(() => mock.mockRestore()), + ); + + const result = yield* executor.googleDiscovery.probeDiscovery( + "https://drive.googleapis.com/$discovery/rest?version=v3", + ); + expect(result.service).toBe("drive"); + expect(fetchMock).toHaveBeenCalledWith( + "https://www.googleapis.com/discovery/v1/apis/drive/v3/rest", + expect.objectContaining({ signal: expect.any(AbortSignal) }), + ); }), ); @@ -199,251 +212,236 @@ describe("Google Discovery plugin", () => { it.effect("starts oauth using caller-supplied discovery scopes", () => Effect.gen(function* () { - const handle = yield* Effect.promise(() => startServer()); - try { - const executor = yield* createExecutor( + 
const handle = yield* TestServer; + const executor = yield* Effect.acquireRelease( + createExecutor( makeTestConfig({ plugins: [makeMemorySecretsPlugin()(), googleDiscoveryPlugin()] as const, }), - ); - - yield* executor.secrets.set( - new SetSecretInput({ - id: SecretId.make("google-client-id"), - scope: "test-scope" as SetSecretInput["scope"], - name: "Google Client ID", - value: "client-123", - }), - ); + ), + (executor) => executor.close().pipe(Effect.ignore), + ); - const connectionId = "google-discovery-oauth2-test-start"; - const result = yield* executor.oauth.start({ - endpoint: handle.discoveryUrl, - redirectUrl: "http://localhost/callback", - connectionId, - tokenScope: "test-scope", - strategy: { - kind: "authorization-code", - authorizationEndpoint: "https://accounts.google.com/o/oauth2/v2/auth", - tokenEndpoint: "https://oauth2.googleapis.com/token", - clientIdSecretId: "google-client-id", - clientSecretSecretId: null, - scopes: ["https://www.googleapis.com/auth/drive"], - extraAuthorizationParams: { - access_type: "offline", - include_granted_scopes: "true", - prompt: "consent", - }, + yield* executor.secrets.set( + new SetSecretInput({ + id: SecretId.make("google-client-id"), + scope: "test-scope" as SetSecretInput["scope"], + name: "Google Client ID", + value: "client-123", + }), + ); + + const connectionId = "google-discovery-oauth2-test-start"; + const result = yield* executor.oauth.start({ + endpoint: handle.discoveryUrl, + redirectUrl: "http://localhost/callback", + connectionId, + tokenScope: "test-scope", + strategy: { + kind: "authorization-code", + authorizationEndpoint: "https://accounts.google.com/o/oauth2/v2/auth", + tokenEndpoint: "https://oauth2.googleapis.com/token", + clientIdSecretId: "google-client-id", + clientSecretSecretId: null, + scopes: ["https://www.googleapis.com/auth/drive"], + extraAuthorizationParams: { + access_type: "offline", + include_granted_scopes: "true", + prompt: "consent", }, - pluginId: "google-discovery", - 
}); + }, + pluginId: "google-discovery", + }); - if (result.authorizationUrl === null) { - throw new Error("expected an authorization URL for authorization-code"); - } - const authorizationUrl = new URL(result.authorizationUrl); - expect(authorizationUrl.searchParams.get("client_id")).toBe("client-123"); - expect(authorizationUrl.searchParams.get("access_type")).toBe("offline"); - expect(authorizationUrl.searchParams.get("prompt")).toBe("consent"); - expect(authorizationUrl.searchParams.get("scope")).toBe( - "https://www.googleapis.com/auth/drive", - ); - - yield* executor.close(); - } finally { - yield* Effect.promise(() => handle.close()); - } + expect(result.authorizationUrl).not.toBeNull(); + const authorizationUrl = new URL(result.authorizationUrl ?? "about:blank"); + expect(authorizationUrl.searchParams.get("client_id")).toBe("client-123"); + expect(authorizationUrl.searchParams.get("access_type")).toBe("offline"); + expect(authorizationUrl.searchParams.get("prompt")).toBe("consent"); + expect(authorizationUrl.searchParams.get("scope")).toBe( + "https://www.googleapis.com/auth/drive", + ); }), ); it.effect("completes oauth and stores token secrets on a connection", () => Effect.gen(function* () { - const handle = yield* Effect.promise(() => startServer()); - try { - const executor = yield* createExecutor( + const handle = yield* TestServer; + const executor = yield* Effect.acquireRelease( + createExecutor( makeTestConfig({ plugins: [makeMemorySecretsPlugin()(), googleDiscoveryPlugin()] as const, }), - ); - - yield* executor.secrets.set( - new SetSecretInput({ - id: SecretId.make("google-client-id"), - scope: "test-scope" as SetSecretInput["scope"], - name: "Google Client ID", - value: "client-123", - }), - ); - yield* executor.secrets.set( - new SetSecretInput({ - id: SecretId.make("google-client-secret"), - scope: "test-scope" as SetSecretInput["scope"], - name: "Google Client Secret", - value: "client-secret-value", - }), - ); - - const originalFetch = 
globalThis.fetch; - let tokenRequestInit: RequestInit | undefined; - const fetchMock = vi.spyOn(globalThis, "fetch").mockImplementation((( - input: RequestInfo | URL, - init?: RequestInit, - ) => { - const url = - typeof input === "string" - ? input - : input instanceof URL - ? input.toString() - : input.url; - if (url === "https://oauth2.googleapis.com/token") { - tokenRequestInit = init; - return Promise.resolve( - new Response( - JSON.stringify({ - access_token: "access-token-value", - refresh_token: "refresh-token-value", - token_type: "Bearer", - expires_in: 3600, - scope: "https://www.googleapis.com/auth/drive", - }), - { status: 200, headers: { "content-type": "application/json" } }, - ), - ); - } - return originalFetch(input, init); - }) as typeof fetch); - - try { - const connectionId = "google-discovery-oauth2-test-complete"; - const started = yield* executor.oauth.start({ - endpoint: handle.discoveryUrl, - redirectUrl: "http://localhost/callback", - connectionId, - tokenScope: "test-scope", - strategy: { - kind: "authorization-code", - authorizationEndpoint: "https://accounts.google.com/o/oauth2/v2/auth", - tokenEndpoint: "https://oauth2.googleapis.com/token", - clientIdSecretId: "google-client-id", - clientSecretSecretId: "google-client-secret", - scopes: ["https://www.googleapis.com/auth/drive"], - extraAuthorizationParams: { - access_type: "offline", - include_granted_scopes: "true", - prompt: "consent", - }, - }, - pluginId: "google-discovery", - }); - - const completed = yield* executor.oauth.complete({ - state: started.sessionId, - code: "code-123", - }); - - expect(completed.connectionId).toBe(connectionId); - expect(tokenRequestInit?.method).toBe("POST"); - - // Tokens live on the SDK connection — resolving via - // ctx.connections.accessToken returns the minted value. 
- const accessToken = yield* executor.connections.accessToken( - completed.connectionId as Parameters[0], - ); - expect(accessToken).toBe("access-token-value"); - - // Backing access-token secret is owned by the connection, so - // it's filtered out of the user-facing secret list. - const secretIds = new Set( - (yield* executor.secrets.list()).map((s) => String(s.id)), - ); - expect(secretIds).not.toContain(`${completed.connectionId}.access_token`); - expect(secretIds).not.toContain(`${completed.connectionId}.refresh_token`); - } finally { - fetchMock.mockRestore(); - yield* executor.close(); - } - } finally { - yield* Effect.promise(() => handle.close()); - } + ), + (executor) => executor.close().pipe(Effect.ignore), + ); + + yield* executor.secrets.set( + new SetSecretInput({ + id: SecretId.make("google-client-id"), + scope: "test-scope" as SetSecretInput["scope"], + name: "Google Client ID", + value: "client-123", + }), + ); + yield* executor.secrets.set( + new SetSecretInput({ + id: SecretId.make("google-client-secret"), + scope: "test-scope" as SetSecretInput["scope"], + name: "Google Client Secret", + value: "client-secret-value", + }), + ); + + const originalFetch = globalThis.fetch; + let tokenRequestInit: RequestInit | undefined; + yield* Effect.acquireRelease( + Effect.sync(() => + vi.spyOn(globalThis, "fetch").mockImplementation((( + input: RequestInfo | URL, + init?: RequestInit, + ) => { + const url = + typeof input === "string" + ? input + : input instanceof URL + ? 
input.toString() + : input.url; + if (url === "https://oauth2.googleapis.com/token") { + tokenRequestInit = init; + return Promise.resolve( + new Response( + JSON.stringify({ + access_token: "access-token-value", + refresh_token: "refresh-token-value", + token_type: "Bearer", + expires_in: 3600, + scope: "https://www.googleapis.com/auth/drive", + }), + { status: 200, headers: { "content-type": "application/json" } }, + ), + ); + } + return originalFetch(input, init); + }) as typeof fetch), + ), + (mock) => Effect.sync(() => mock.mockRestore()), + ); + + const connectionId = "google-discovery-oauth2-test-complete"; + const started = yield* executor.oauth.start({ + endpoint: handle.discoveryUrl, + redirectUrl: "http://localhost/callback", + connectionId, + tokenScope: "test-scope", + strategy: { + kind: "authorization-code", + authorizationEndpoint: "https://accounts.google.com/o/oauth2/v2/auth", + tokenEndpoint: "https://oauth2.googleapis.com/token", + clientIdSecretId: "google-client-id", + clientSecretSecretId: "google-client-secret", + scopes: ["https://www.googleapis.com/auth/drive"], + extraAuthorizationParams: { + access_type: "offline", + include_granted_scopes: "true", + prompt: "consent", + }, + }, + pluginId: "google-discovery", + }); + + const completed = yield* executor.oauth.complete({ + state: started.sessionId, + code: "code-123", + }); + + expect(completed.connectionId).toBe(connectionId); + expect(tokenRequestInit?.method).toBe("POST"); + + // Tokens live on the SDK connection — resolving via + // ctx.connections.accessToken returns the minted value. + const accessToken = yield* executor.connections.accessToken( + completed.connectionId as Parameters[0], + ); + expect(accessToken).toBe("access-token-value"); + + // Backing access-token secret is owned by the connection, so + // it's filtered out of the user-facing secret list. 
+ const secretIds = new Set((yield* executor.secrets.list()).map((s) => String(s.id))); + expect(secretIds).not.toContain(`${completed.connectionId}.access_token`); + expect(secretIds).not.toContain(`${completed.connectionId}.refresh_token`); }), ); it.effect("registers and invokes google discovery tools with oauth headers", () => Effect.gen(function* () { - const handle = yield* Effect.promise(() => startServer()); - try { - const executor = yield* createExecutor( + const handle = yield* TestServer; + const executor = yield* Effect.acquireRelease( + createExecutor( makeTestConfig({ plugins: [makeMemorySecretsPlugin()(), googleDiscoveryPlugin()] as const, }), - ); - - try { - // A connection wraps the access token (+ optional refresh) and - // the invoke path resolves via ctx.connections.accessToken. - const connectionId = ConnectionId.make( - "google-discovery-oauth2-test", - ); - yield* executor.connections.create( - new CreateConnectionInput({ - id: connectionId, - scope: ScopeId.make("test-scope"), - provider: "oauth2", - identityLabel: "Drive Test", - accessToken: new TokenMaterial({ - secretId: SecretId.make(`${connectionId}.access_token`), - name: "Drive Access Token", - value: "secret-token", - }), - refreshToken: null, - expiresAt: null, - oauthScope: null, - providerState: { - clientIdSecretId: "drive-client-id", - clientSecretSecretId: null, - scopes: ["https://www.googleapis.com/auth/drive.readonly"], - }, - }), - ); - - const result = yield* executor.googleDiscovery.addSource({ - name: "Google Drive", - scope: "test-scope", - discoveryUrl: handle.discoveryUrl, - namespace: "drive", - auth: { - kind: "oauth2", - connectionId, - clientIdSecretId: "drive-client-id", - clientSecretSecretId: null, - scopes: ["https://www.googleapis.com/auth/drive.readonly"], - }, - }); - - expect(result.toolCount).toBe(2); - - const invocation = (yield* executor.tools.invoke( - "drive.files.get", - { fileId: "123", fields: "id,name", prettyPrint: true }, - autoApprove, - )) 
as { data: unknown; error: unknown }; - - expect(invocation.error).toBeNull(); - expect(invocation.data).toEqual({ id: "123", name: "Quarterly Plan" }); - - const apiRequest = handle.requests.find((request) => - request.url.startsWith("/drive/v3/files/123"), - ); - expect(apiRequest).toBeDefined(); - expect(apiRequest!.headers.authorization).toBe("Bearer secret-token"); - expect(apiRequest!.url).toContain("fields=id%2Cname"); - expect(apiRequest!.url).toContain("prettyPrint=true"); - } finally { - yield* executor.close(); - } - } finally { - yield* Effect.promise(() => handle.close()); - } + ), + (executor) => executor.close().pipe(Effect.ignore), + ); + + // A connection wraps the access token (+ optional refresh) and + // the invoke path resolves via ctx.connections.accessToken. + const connectionId = ConnectionId.make("google-discovery-oauth2-test"); + yield* executor.connections.create( + new CreateConnectionInput({ + id: connectionId, + scope: ScopeId.make("test-scope"), + provider: "oauth2", + identityLabel: "Drive Test", + accessToken: new TokenMaterial({ + secretId: SecretId.make(`${connectionId}.access_token`), + name: "Drive Access Token", + value: "secret-token", + }), + refreshToken: null, + expiresAt: null, + oauthScope: null, + providerState: { + clientIdSecretId: "drive-client-id", + clientSecretSecretId: null, + scopes: ["https://www.googleapis.com/auth/drive.readonly"], + }, + }), + ); + + const result = yield* executor.googleDiscovery.addSource({ + name: "Google Drive", + scope: "test-scope", + discoveryUrl: handle.discoveryUrl, + namespace: "drive", + auth: { + kind: "oauth2", + connectionId, + clientIdSecretId: "drive-client-id", + clientSecretSecretId: null, + scopes: ["https://www.googleapis.com/auth/drive.readonly"], + }, + }); + + expect(result.toolCount).toBe(2); + + const invocation = (yield* executor.tools.invoke( + "drive.files.get", + { fileId: "123", fields: "id,name", prettyPrint: true }, + autoApprove, + )) as { data: unknown; error: 
unknown }; + + expect(invocation.error).toBeNull(); + expect(invocation.data).toEqual({ id: "123", name: "Quarterly Plan" }); + + const apiRequest = handle.requests.find((request) => + request.url.startsWith("/drive/v3/files/123"), + ); + expect(apiRequest).toBeDefined(); + expect(apiRequest!.headers.authorization).toBe("Bearer secret-token"); + expect(apiRequest!.url).toContain("fields=id%2Cname"); + expect(apiRequest!.url).toContain("prettyPrint=true"); }), ); @@ -456,6 +454,8 @@ describe("Google Discovery plugin", () => { const ORG_SCOPE = ScopeId.make("org-scope"); const USER_SCOPE = ScopeId.make("user-scope"); + const ORG_SCOPE_STRING = String(ORG_SCOPE); + const USER_SCOPE_STRING = String(USER_SCOPE); const stackedScopes = [ new Scope({ id: USER_SCOPE, name: "user", createdAt: new Date() }), @@ -464,165 +464,129 @@ describe("Google Discovery plugin", () => { it.effect("shadowed addSource does not wipe the outer-scope source", () => Effect.gen(function* () { - const handle = yield* Effect.promise(() => startServer()); - try { - const executor = yield* createExecutor( + const handle = yield* TestServer; + const executor = yield* Effect.acquireRelease( + createExecutor( makeTestConfig({ scopes: stackedScopes, plugins: [makeMemorySecretsPlugin()(), googleDiscoveryPlugin()] as const, }), - ); - try { - // Org-level base source - yield* executor.googleDiscovery.addSource({ - name: "Org Drive", - scope: ORG_SCOPE as string, - discoveryUrl: handle.discoveryUrl, - namespace: "shared", - auth: { kind: "none" }, - }); - - // Per-user shadow with the same namespace - yield* executor.googleDiscovery.addSource({ - name: "User Drive", - scope: USER_SCOPE as string, - discoveryUrl: handle.discoveryUrl, - namespace: "shared", - auth: { kind: "none" }, - }); - - const userView = yield* executor.googleDiscovery.getSource( - "shared", - USER_SCOPE as string, - ); - const orgView = yield* executor.googleDiscovery.getSource( - "shared", - ORG_SCOPE as string, - ); - - // Both rows 
must coexist — innermost-wins reads come from the - // executor; the store's scope-pinned getters return the exact row. - expect(userView?.name).toBe("User Drive"); - expect(userView?.scope).toBe(USER_SCOPE as string); - expect(orgView?.name).toBe("Org Drive"); - expect(orgView?.scope).toBe(ORG_SCOPE as string); - } finally { - yield* executor.close(); - } - } finally { - yield* Effect.promise(() => handle.close()); - } + ), + (executor) => executor.close().pipe(Effect.ignore), + ); + // Org-level base source + yield* executor.googleDiscovery.addSource({ + name: "Org Drive", + scope: ORG_SCOPE_STRING, + discoveryUrl: handle.discoveryUrl, + namespace: "shared", + auth: { kind: "none" }, + }); + + // Per-user shadow with the same namespace + yield* executor.googleDiscovery.addSource({ + name: "User Drive", + scope: USER_SCOPE_STRING, + discoveryUrl: handle.discoveryUrl, + namespace: "shared", + auth: { kind: "none" }, + }); + + const userView = yield* executor.googleDiscovery.getSource("shared", USER_SCOPE_STRING); + const orgView = yield* executor.googleDiscovery.getSource("shared", ORG_SCOPE_STRING); + + // Both rows must coexist — innermost-wins reads come from the + // executor; the store's scope-pinned getters return the exact row. 
+ expect(userView?.name).toBe("User Drive"); + expect(userView?.scope).toBe(USER_SCOPE_STRING); + expect(orgView?.name).toBe("Org Drive"); + expect(orgView?.scope).toBe(ORG_SCOPE_STRING); }), ); it.effect("removeSource on user shadow leaves the org row intact", () => Effect.gen(function* () { - const handle = yield* Effect.promise(() => startServer()); - try { - const executor = yield* createExecutor( + const handle = yield* TestServer; + const executor = yield* Effect.acquireRelease( + createExecutor( makeTestConfig({ scopes: stackedScopes, plugins: [makeMemorySecretsPlugin()(), googleDiscoveryPlugin()] as const, }), - ); - try { - yield* executor.googleDiscovery.addSource({ - name: "Org Drive", - scope: ORG_SCOPE as string, - discoveryUrl: handle.discoveryUrl, - namespace: "shared", - auth: { kind: "none" }, - }); - yield* executor.googleDiscovery.addSource({ - name: "User Drive", - scope: USER_SCOPE as string, - discoveryUrl: handle.discoveryUrl, - namespace: "shared", - auth: { kind: "none" }, - }); - - yield* executor.googleDiscovery.removeSource( - "shared", - USER_SCOPE as string, - ); - - const userView = yield* executor.googleDiscovery.getSource( - "shared", - USER_SCOPE as string, - ); - const orgView = yield* executor.googleDiscovery.getSource( - "shared", - ORG_SCOPE as string, - ); - - expect(userView).toBeNull(); - expect(orgView?.name).toBe("Org Drive"); - } finally { - yield* executor.close(); - } - } finally { - yield* Effect.promise(() => handle.close()); - } + ), + (executor) => executor.close().pipe(Effect.ignore), + ); + yield* executor.googleDiscovery.addSource({ + name: "Org Drive", + scope: ORG_SCOPE_STRING, + discoveryUrl: handle.discoveryUrl, + namespace: "shared", + auth: { kind: "none" }, + }); + yield* executor.googleDiscovery.addSource({ + name: "User Drive", + scope: USER_SCOPE_STRING, + discoveryUrl: handle.discoveryUrl, + namespace: "shared", + auth: { kind: "none" }, + }); + + yield* executor.googleDiscovery.removeSource("shared", 
USER_SCOPE_STRING); + + const userView = yield* executor.googleDiscovery.getSource("shared", USER_SCOPE_STRING); + const orgView = yield* executor.googleDiscovery.getSource("shared", ORG_SCOPE_STRING); + + expect(userView).toBeNull(); + expect(orgView?.name).toBe("Org Drive"); }), ); it.effect("re-adding a user shadow does not wipe the org row's bindings", () => Effect.gen(function* () { - const handle = yield* Effect.promise(() => startServer()); - try { - const executor = yield* createExecutor( + const handle = yield* TestServer; + const executor = yield* Effect.acquireRelease( + createExecutor( makeTestConfig({ scopes: stackedScopes, plugins: [makeMemorySecretsPlugin()(), googleDiscoveryPlugin()] as const, }), - ); - try { - yield* executor.googleDiscovery.addSource({ - name: "Org Drive", - scope: ORG_SCOPE as string, - discoveryUrl: handle.discoveryUrl, - namespace: "shared", - auth: { kind: "none" }, - }); - // Add user shadow, then add it again — the internal - // registerManifest sequence does a scope-pinned - // removeBindingsBySource before re-upserting. Without pinning - // scope, the inner re-add would wipe the org-level bindings - // via fall-through. 
- yield* executor.googleDiscovery.addSource({ - name: "User Drive v1", - scope: USER_SCOPE as string, - discoveryUrl: handle.discoveryUrl, - namespace: "shared", - auth: { kind: "none" }, - }); - yield* executor.googleDiscovery.addSource({ - name: "User Drive v2", - scope: USER_SCOPE as string, - discoveryUrl: handle.discoveryUrl, - namespace: "shared", - auth: { kind: "none" }, - }); - - const userView = yield* executor.googleDiscovery.getSource( - "shared", - USER_SCOPE as string, - ); - const orgView = yield* executor.googleDiscovery.getSource( - "shared", - ORG_SCOPE as string, - ); - - expect(userView?.name).toBe("User Drive v2"); - expect(userView?.scope).toBe(USER_SCOPE as string); - expect(orgView?.name).toBe("Org Drive"); - expect(orgView?.scope).toBe(ORG_SCOPE as string); - } finally { - yield* executor.close(); - } - } finally { - yield* Effect.promise(() => handle.close()); - } + ), + (executor) => executor.close().pipe(Effect.ignore), + ); + yield* executor.googleDiscovery.addSource({ + name: "Org Drive", + scope: ORG_SCOPE_STRING, + discoveryUrl: handle.discoveryUrl, + namespace: "shared", + auth: { kind: "none" }, + }); + // Add user shadow, then add it again — the internal + // registerManifest sequence does a scope-pinned + // removeBindingsBySource before re-upserting. Without pinning + // scope, the inner re-add would wipe the org-level bindings + // via fall-through. 
+ yield* executor.googleDiscovery.addSource({ + name: "User Drive v1", + scope: USER_SCOPE_STRING, + discoveryUrl: handle.discoveryUrl, + namespace: "shared", + auth: { kind: "none" }, + }); + yield* executor.googleDiscovery.addSource({ + name: "User Drive v2", + scope: USER_SCOPE_STRING, + discoveryUrl: handle.discoveryUrl, + namespace: "shared", + auth: { kind: "none" }, + }); + + const userView = yield* executor.googleDiscovery.getSource("shared", USER_SCOPE_STRING); + const orgView = yield* executor.googleDiscovery.getSource("shared", ORG_SCOPE_STRING); + + expect(userView?.name).toBe("User Drive v2"); + expect(userView?.scope).toBe(USER_SCOPE_STRING); + expect(orgView?.name).toBe("Org Drive"); + expect(orgView?.scope).toBe(ORG_SCOPE_STRING); }), ); @@ -634,73 +598,61 @@ describe("Google Discovery plugin", () => { it.effect("usagesForSecret returns refs across auth + credential rows", () => Effect.gen(function* () { - const handle = yield* Effect.promise(() => startServer()); - try { - const executor = yield* createExecutor( + const handle = yield* TestServer; + const executor = yield* Effect.acquireRelease( + createExecutor( makeTestConfig({ - plugins: [ - makeMemorySecretsPlugin()(), - googleDiscoveryPlugin(), - ] as const, + plugins: [makeMemorySecretsPlugin()(), googleDiscoveryPlugin()] as const, }), - ); - try { - const connectionId = ConnectionId.make( - "google-discovery-oauth2-usages", - ); - yield* executor.connections.create( - new CreateConnectionInput({ - id: connectionId, - scope: ScopeId.make("test-scope"), - provider: "oauth2", - identityLabel: "Drive Usages", - accessToken: new TokenMaterial({ - secretId: SecretId.make(`${connectionId}.access_token`), - name: "Drive Access Token", - value: "secret-token", - }), - refreshToken: null, - expiresAt: null, - oauthScope: null, - providerState: null, - }), - ); - - yield* executor.googleDiscovery.addSource({ - name: "Drive (Usages)", - scope: "test-scope", - discoveryUrl: handle.discoveryUrl, - 
namespace: "drive_u", - auth: { - kind: "oauth2", - connectionId, - clientIdSecretId: "shared-secret", - clientSecretSecretId: null, - scopes: [], - }, - }); - - // The auth.client_id_secret_id alone holds `shared-secret`. - const usages = yield* executor.secrets.usages( - SecretId.make("shared-secret"), - ); - expect(usages.length).toBe(1); - expect(usages[0]).toMatchObject({ - pluginId: "google-discovery", - ownerKind: "google-discovery-source", - ownerId: "drive_u", - slot: "auth.oauth2.client_id", - }); - - const connUsages = yield* executor.connections.usages(connectionId); - expect(connUsages.length).toBe(1); - expect(connUsages[0].slot).toBe("auth.oauth2.connection"); - } finally { - yield* executor.close(); - } - } finally { - yield* Effect.promise(() => handle.close()); - } + ), + (executor) => executor.close().pipe(Effect.ignore), + ); + const connectionId = ConnectionId.make("google-discovery-oauth2-usages"); + yield* executor.connections.create( + new CreateConnectionInput({ + id: connectionId, + scope: ScopeId.make("test-scope"), + provider: "oauth2", + identityLabel: "Drive Usages", + accessToken: new TokenMaterial({ + secretId: SecretId.make(`${connectionId}.access_token`), + name: "Drive Access Token", + value: "secret-token", + }), + refreshToken: null, + expiresAt: null, + oauthScope: null, + providerState: null, + }), + ); + + yield* executor.googleDiscovery.addSource({ + name: "Drive (Usages)", + scope: "test-scope", + discoveryUrl: handle.discoveryUrl, + namespace: "drive_u", + auth: { + kind: "oauth2", + connectionId, + clientIdSecretId: "shared-secret", + clientSecretSecretId: null, + scopes: [], + }, + }); + + // The auth.client_id_secret_id alone holds `shared-secret`. 
+ const usages = yield* executor.secrets.usages(SecretId.make("shared-secret")); + expect(usages.length).toBe(1); + expect(usages[0]).toMatchObject({ + pluginId: "google-discovery", + ownerKind: "google-discovery-source", + ownerId: "drive_u", + slot: "auth.oauth2.client_id", + }); + + const connUsages = yield* executor.connections.usages(connectionId); + expect(connUsages.length).toBe(1); + expect(connUsages[0].slot).toBe("auth.oauth2.connection"); }), ); }); From 1858ea28333020aa2f6ce47d8c3d40e23b31e7f8 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:09:43 -0700 Subject: [PATCH 065/108] Use promiseExit in source UI boundaries --- apps/marketing/src/pages/api/detect.ts | 3 + .../src/react/AddGoogleDiscoverySource.tsx | 139 +++-- .../plugins/mcp/src/react/AddMcpSource.tsx | 126 +++-- .../openapi/src/react/AddOpenApiSource.tsx | 504 ++++++++++-------- .../openapi/src/react/EditOpenApiSource.tsx | 348 ++++++------ packages/react/src/plugins/secret-form.tsx | 41 +- 6 files changed, 640 insertions(+), 521 deletions(-) diff --git a/apps/marketing/src/pages/api/detect.ts b/apps/marketing/src/pages/api/detect.ts index b45732010..f6b9d2178 100644 --- a/apps/marketing/src/pages/api/detect.ts +++ b/apps/marketing/src/pages/api/detect.ts @@ -45,6 +45,7 @@ function formatTools(tools: readonly Tool[]) { } export const POST: APIRoute = async ({ request }) => { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: Astro route converts request/parsing failures to a stable HTTP response try { const body = (await request.json()) as { url?: string }; const url = body.url?.trim(); @@ -55,6 +56,7 @@ export const POST: APIRoute = async ({ request }) => { }); } + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: URL constructor is the platform validator for request input try { new URL(url); } catch { @@ -70,6 +72,7 @@ export const POST: APIRoute = async ({ request }) => { }); 
const executor = yield* createExecutor(config); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: ensure executor cleanup runs after best-effort marketing detection try { // Detect what kind of source lives at this URL const detected = yield* executor.sources.detect(url).pipe(Effect.timeout("10 seconds")); diff --git a/packages/plugins/google-discovery/src/react/AddGoogleDiscoverySource.tsx b/packages/plugins/google-discovery/src/react/AddGoogleDiscoverySource.tsx index bca1c86aa..41b63b1db 100644 --- a/packages/plugins/google-discovery/src/react/AddGoogleDiscoverySource.tsx +++ b/packages/plugins/google-discovery/src/react/AddGoogleDiscoverySource.tsx @@ -1,5 +1,6 @@ import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { useAtomSet } from "@effect/atom-react"; +import { Exit, Option, Schema } from "effect"; import { usePendingSources } from "@executor-js/react/api/optimistic"; import { sourceWriteKeys } from "@executor-js/react/api/reactivity-keys"; @@ -95,24 +96,50 @@ type GoogleDiscoveryTemplate = GoogleDiscoveryPreset & { }; const GOOGLE_G_ICON = "https://fonts.gstatic.com/s/i/productlogos/googleg/v6/192px.svg"; +const PublicErrorMessage = Schema.Struct({ + _tag: Schema.Literals([ + "GoogleDiscoveryParseError", + "GoogleDiscoveryOAuthError", + "GoogleDiscoverySourceError", + ]), + message: Schema.String, +}); + +const messageFromExit = (exit: Exit.Exit, fallback: string): string => { + const error = Exit.findErrorOption(exit); + if (Option.isNone(error)) return fallback; + const errorMessage = Schema.decodeUnknownOption(PublicErrorMessage)(error.value); + return Option.match(errorMessage, { + onNone: () => fallback, + onSome: (value) => value.message, + }); +}; -function parseGoogleDiscoveryPreset(preset: GoogleDiscoveryPreset): GoogleDiscoveryTemplate { +const parseUrlOption = (value: string): URL | null => { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: URL constructor is the 
platform URL parser try { - const url = new URL(preset.url); - const parts = url.pathname.split("/").filter(Boolean); - const apisIndex = parts.indexOf("apis"); - const service = apisIndex >= 0 ? parts[apisIndex + 1] : undefined; - const version = - apisIndex >= 0 ? parts[apisIndex + 2] : (url.searchParams.get("version") ?? undefined); - return { - ...preset, - discoveryUrl: preset.url, - service: service ?? url.hostname.replace(/\.googleapis\.com$/, ""), - version: version ?? "", - }; + return new URL(value); } catch { + return null; + } +}; + +function parseGoogleDiscoveryPreset(preset: GoogleDiscoveryPreset): GoogleDiscoveryTemplate { + const url = parseUrlOption(preset.url); + if (!url) { return { ...preset, discoveryUrl: preset.url, service: preset.id, version: "" }; } + const parts = url.pathname.split("/").filter(Boolean); + const apisIndex = parts.indexOf("apis"); + const service = apisIndex >= 0 ? parts[apisIndex + 1] : undefined; + const version = + apisIndex >= 0 ? parts[apisIndex + 2] : (url.searchParams.get("version") ?? undefined); + return { + ...preset, + discoveryUrl: preset.url, + service: service ?? url.hostname.replace(/\.googleapis\.com$/, ""), + version: version ?? 
"", + }; } const GOOGLE_DISCOVERY_TEMPLATES = googleDiscoveryPresets.map(parseGoogleDiscoveryPreset); @@ -202,8 +229,8 @@ export default function AddGoogleDiscoverySource(props: { "google"; const scopeId = useScope(); - const doProbe = useAtomSet(probeGoogleDiscovery, { mode: "promise" }); - const doAdd = useAtomSet(addGoogleDiscoverySource, { mode: "promise" }); + const doProbe = useAtomSet(probeGoogleDiscovery, { mode: "promiseExit" }); + const doAdd = useAtomSet(addGoogleDiscoverySource, { mode: "promiseExit" }); const { beginAdd } = usePendingSources(); const secretList = useSecretPickerSecrets(); const oauth = useOAuthPopupFlow({ @@ -235,25 +262,26 @@ export default function AddGoogleDiscoverySource(props: { setError(null); setOauthAuth(null); setShowScopes(false); - try { - const result = await doProbe({ - params: { scopeId }, - payload: { discoveryUrl: discoveryUrl.trim() }, - }); - setProbe({ - ...result, - scopes: [...result.scopes], - operations: [...result.operations], - }); - if (result.scopes.length === 0) { - setAuthKind("none"); - } - } catch (e) { + const exit = await doProbe({ + params: { scopeId }, + payload: { discoveryUrl: discoveryUrl.trim() }, + }); + if (Exit.isFailure(exit)) { setProbe(null); - setError(e instanceof Error ? 
e.message : "Failed to inspect discovery document"); - } finally { setLoadingProbe(false); + setError(messageFromExit(exit, "Failed to inspect discovery document")); + return; + } + const result = exit.value; + setProbe({ + ...result, + scopes: [...result.scopes], + operations: [...result.operations], + }); + if (result.scopes.length === 0) { + setAuthKind("none"); } + setLoadingProbe(false); }, [discoveryUrl, doProbe, scopeId]); // Keep the latest handleProbe in a ref so the debounced effect can call it @@ -331,33 +359,32 @@ export default function AddGoogleDiscoverySource(props: { name: displayName, kind: "google-discovery", }); - try { - await doAdd({ - params: { scopeId }, - payload: { - name: displayName, - discoveryUrl: discoveryUrl.trim(), - namespace, - auth: - authKind === "oauth2" && oauthAuth - ? { - kind: "oauth2" as const, - connectionId: oauthAuth.connectionId, - clientIdSecretId: oauthAuth.clientIdSecretId, - clientSecretSecretId: oauthAuth.clientSecretSecretId, - scopes: oauthAuth.scopes, - } - : { kind: "none" as const }, - }, - reactivityKeys: [...sourceWriteKeys], - }); - props.onComplete(); - } catch (e) { - setError(e instanceof Error ? e.message : "Failed to add source"); + const exit = await doAdd({ + params: { scopeId }, + payload: { + name: displayName, + discoveryUrl: discoveryUrl.trim(), + namespace, + auth: + authKind === "oauth2" && oauthAuth + ? 
{ + kind: "oauth2" as const, + connectionId: oauthAuth.connectionId, + clientIdSecretId: oauthAuth.clientIdSecretId, + clientSecretSecretId: oauthAuth.clientSecretSecretId, + scopes: oauthAuth.scopes, + } + : { kind: "none" as const }, + }, + reactivityKeys: [...sourceWriteKeys], + }); + placeholder.done(); + if (Exit.isFailure(exit)) { + setError(messageFromExit(exit, "Failed to add source")); setAdding(false); - } finally { - placeholder.done(); + return; } + props.onComplete(); }, [ probe, doAdd, diff --git a/packages/plugins/mcp/src/react/AddMcpSource.tsx b/packages/plugins/mcp/src/react/AddMcpSource.tsx index 35c8db9db..aa0d3b06b 100644 --- a/packages/plugins/mcp/src/react/AddMcpSource.tsx +++ b/packages/plugins/mcp/src/react/AddMcpSource.tsx @@ -1,5 +1,6 @@ import { useReducer, useCallback, useEffect, useRef, useState, type ReactNode } from "react"; import { useAtomSet } from "@effect/atom-react"; +import { Exit, Option, Schema } from "effect"; import { useScope } from "@executor-js/react/api/scope-context"; import { Button } from "@executor-js/react/components/button"; @@ -53,6 +54,21 @@ import { usePendingSources } from "@executor-js/react/api/optimistic"; import { probeMcpEndpoint, addMcpSource } from "./atoms"; import { mcpPresets, type McpPreset } from "../sdk/presets"; +const PublicErrorMessage = Schema.Struct({ + _tag: Schema.Literals(["McpConnectionError", "McpToolDiscoveryError", "McpOAuthError"]), + message: Schema.String, +}); + +const messageFromExit = (exit: Exit.Exit, fallback: string): string => { + const error = Exit.findErrorOption(exit); + if (Option.isNone(error)) return fallback; + const errorMessage = Schema.decodeUnknownOption(PublicErrorMessage)(error.value); + return Option.match(errorMessage, { + onNone: () => fallback, + onSome: (value) => value.message, + }); +}; + // --------------------------------------------------------------------------- // Preset lookup // 
--------------------------------------------------------------------------- @@ -268,8 +284,8 @@ export default function AddMcpSource(props: { ); const scopeId = useScope(); - const doProbe = useAtomSet(probeMcpEndpoint, { mode: "promise" }); - const doAdd = useAtomSet(addMcpSource, { mode: "promise" }); + const doProbe = useAtomSet(probeMcpEndpoint, { mode: "promiseExit" }); + const doAdd = useAtomSet(addMcpSource, { mode: "promiseExit" }); const { beginAdd } = usePendingSources(); const secretList = useSecretPickerSecrets(); const oauth = useOAuthPopupFlow({ @@ -333,24 +349,24 @@ export default function AddMcpSource(props: { const handleProbe = useCallback(async () => { dispatch({ type: "probe-start" }); - try { - const { headers, queryParams } = serializeHttpCredentials(remoteCredentials); - const result = await doProbe({ - params: { scopeId }, - payload: { - endpoint: state.url.trim(), - ...(Object.keys(headers).length > 0 ? { headers } : {}), - ...(Object.keys(queryParams).length > 0 ? { queryParams } : {}), - }, - }); - setRemoteAuthMode(result.requiresOAuth ? "oauth2" : "none"); - dispatch({ type: "probe-ok", probe: result }); - } catch (e) { + const { headers, queryParams } = serializeHttpCredentials(remoteCredentials); + const exit = await doProbe({ + params: { scopeId }, + payload: { + endpoint: state.url.trim(), + ...(Object.keys(headers).length > 0 ? { headers } : {}), + ...(Object.keys(queryParams).length > 0 ? { queryParams } : {}), + }, + }); + if (Exit.isFailure(exit)) { dispatch({ type: "probe-fail", - error: e instanceof Error ? e.message : "Failed to connect", + error: messageFromExit(exit, "Failed to connect"), }); + return; } + setRemoteAuthMode(exit.value.requiresOAuth ? 
"oauth2" : "none"); + dispatch({ type: "probe-ok", probe: exit.value }); }, [state.url, scopeId, doProbe, remoteCredentials]); // Keep the latest handleProbe in a ref so the debounced effect can call it @@ -473,33 +489,30 @@ export default function AddMcpSource(props: { kind: "mcp", url: state.url.trim(), }); - try { - await doAdd({ - params: { scopeId }, - payload: { - transport: "remote" as const, - name: displayName, - namespace: slugNamespace || undefined, - endpoint: state.url.trim(), - auth, - ...(Object.keys(remoteRequestHeaders).length > 0 - ? { headers: remoteRequestHeaders } - : {}), - ...(Object.keys(credentials.queryParams).length > 0 - ? { queryParams: credentials.queryParams } - : {}), - }, - reactivityKeys: sourceWriteKeys, - }); - props.onComplete(); - } catch (e) { + const exit = await doAdd({ + params: { scopeId }, + payload: { + transport: "remote" as const, + name: displayName, + namespace: slugNamespace || undefined, + endpoint: state.url.trim(), + auth, + ...(Object.keys(remoteRequestHeaders).length > 0 ? { headers: remoteRequestHeaders } : {}), + ...(Object.keys(credentials.queryParams).length > 0 + ? { queryParams: credentials.queryParams } + : {}), + }, + reactivityKeys: sourceWriteKeys, + }); + placeholder.done(); + if (Exit.isFailure(exit)) { dispatch({ type: "add-fail", - error: e instanceof Error ? 
e.message : "Failed to add source", + error: messageFromExit(exit, "Failed to add source"), }); - } finally { - placeholder.done(); + return; } + props.onComplete(); }, [ probe, remoteAuthMode, @@ -553,26 +566,25 @@ export default function AddMcpSource(props: { name: displayName, kind: "mcp", }); - try { - await doAdd({ - params: { scopeId }, - payload: { - transport: "stdio" as const, - name: displayName, - namespace: slugNamespace || undefined, - command: cmd, - args: parseStdioArgs(stdioArgs), - env: parseStdioEnv(stdioEnv), - }, - reactivityKeys: sourceWriteKeys, - }); - props.onComplete(); - } catch (e) { - setStdioError(e instanceof Error ? e.message : "Failed to add source"); + const exit = await doAdd({ + params: { scopeId }, + payload: { + transport: "stdio" as const, + name: displayName, + namespace: slugNamespace || undefined, + command: cmd, + args: parseStdioArgs(stdioArgs), + env: parseStdioEnv(stdioEnv), + }, + reactivityKeys: sourceWriteKeys, + }); + placeholder.done(); + if (Exit.isFailure(exit)) { + setStdioError(messageFromExit(exit, "Failed to add source")); setStdioAdding(false); - } finally { - placeholder.done(); + return; } + props.onComplete(); }, [stdioCommand, stdioArgs, stdioEnv, stdioIdentity, doAdd, scopeId, props, beginAdd]); // ---- Render ---- diff --git a/packages/plugins/openapi/src/react/AddOpenApiSource.tsx b/packages/plugins/openapi/src/react/AddOpenApiSource.tsx index 251174fe4..618c49959 100644 --- a/packages/plugins/openapi/src/react/AddOpenApiSource.tsx +++ b/packages/plugins/openapi/src/react/AddOpenApiSource.tsx @@ -1,6 +1,6 @@ import { useCallback, useEffect, useRef, useState } from "react"; import { useAtomSet } from "@effect/atom-react"; -import { Option } from "effect"; +import { Effect, Exit, Option, Schema } from "effect"; import { ConnectionId, ScopeId, SecretId } from "@executor-js/sdk/core"; import { startOAuth } from "@executor-js/react/api/atoms"; @@ -83,6 +83,31 @@ import { export const 
OPENAPI_OAUTH_POPUP_NAME = "openapi-oauth"; export const OPENAPI_OAUTH_CALLBACK_PATH = "/api/oauth/callback"; +const PublicErrorMessage = Schema.Struct({ + _tag: Schema.Literals(["OpenApiParseError", "OpenApiExtractionError", "OpenApiOAuthError"]), + message: Schema.String, +}); + +const messageFromExit = (exit: Exit.Exit, fallback: string): string => { + const error = Exit.findErrorOption(exit); + if (Option.isNone(error)) return fallback; + const errorMessage = Schema.decodeUnknownOption(PublicErrorMessage)(error.value); + return Option.match(errorMessage, { + onNone: () => fallback, + onSome: (value) => value.message, + }); +}; + +const failPromise = (message: string): Promise => Effect.runPromise(Effect.fail(message)); + +const parseUrlOption = (url: string, baseUrl?: string): URL | null => { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: URL constructor is the platform URL parser + try { + return baseUrl === undefined ? new URL(url) : new URL(url, baseUrl); + } catch { + return null; + } +}; const substituteUrlVariables = (url: string, values: Record): string => { let out = url; @@ -109,25 +134,15 @@ export const openApiOAuthConnectionId = ( */ export function resolveOAuthUrl(url: string, baseUrl: string): string { if (!url) return url; - try { - new URL(url); + if (parseUrlOption(url)) { return url; - } catch { - if (!baseUrl) return url; - try { - return new URL(url, baseUrl).toString(); - } catch { - return url; - } } + if (!baseUrl) return url; + return parseUrlOption(url, baseUrl)?.toString() ?? url; } export function inferOAuthIssuerUrl(authorizationUrl: string): string | null { - try { - return new URL(authorizationUrl).origin; - } catch { - return null; - } + return parseUrlOption(authorizationUrl)?.origin ?? 
null; } type StrategySelection = @@ -242,10 +257,10 @@ export default function AddOpenApiSource(props: { const scopeId = useScope(); const userScope = useUserScope(); - const doPreview = useAtomSet(previewOpenApiSpec, { mode: "promise" }); - const doAdd = useAtomSet(addOpenApiSpec, { mode: "promise" }); - const doStartOAuth = useAtomSet(startOAuth, { mode: "promise" }); - const doSetBinding = useAtomSet(setOpenApiSourceBinding, { mode: "promise" }); + const doPreview = useAtomSet(previewOpenApiSpec, { mode: "promiseExit" }); + const doAdd = useAtomSet(addOpenApiSpec, { mode: "promiseExit" }); + const doStartOAuth = useAtomSet(startOAuth, { mode: "promiseExit" }); + const doSetBinding = useAtomSet(setOpenApiSourceBinding, { mode: "promiseExit" }); const { beginAdd } = usePendingSources(); const secretList = useSecretPickerSecrets(); const oauth = useOAuthPopupFlow({ @@ -371,45 +386,46 @@ export default function AddOpenApiSource(props: { setAnalyzing(true); setAnalyzeError(null); setAddError(null); - try { - const credentials = serializeHttpCredentials(specFetchCredentials); - const result = await doPreview({ - params: { scopeId }, - payload: { - spec: specUrl, - specFetchCredentials: credentials, - }, - }); - setPreview(result); - - const firstServer = result.servers[0]; - if (firstServer) { - setSelectedServerIndex(0); - setVariableSelections(defaultSelectionsFor(firstServer)); - setCustomBaseUrl(""); - } else { - setSelectedServerIndex(-1); - setVariableSelections({}); - setCustomBaseUrl(""); - } - - const firstPreset = result.headerPresets[0]; - if (firstPreset) { - setStrategy({ kind: "header", presetIndex: 0 }); - setCustomHeaders(entriesFromSpecPreset(firstPreset)); - } else { - // No header presets — default to "custom" so the headers editor is - // visible immediately. Specs with no `security` block (e.g. Microsoft - // Graph) would otherwise leave the user staring at just the - // Authentication heading with no way to add headers. 
- setStrategy({ kind: "custom" }); - setCustomHeaders([]); - } - } catch (e) { - setAnalyzeError(e instanceof Error ? e.message : "Failed to parse spec"); - } finally { + const credentials = serializeHttpCredentials(specFetchCredentials); + const previewExit = await doPreview({ + params: { scopeId }, + payload: { + spec: specUrl, + specFetchCredentials: credentials, + }, + }); + if (Exit.isFailure(previewExit)) { + setAnalyzeError(messageFromExit(previewExit, "Failed to parse spec")); setAnalyzing(false); + return; + } + const result = previewExit.value; + setPreview(result); + + const firstServer = result.servers[0]; + if (firstServer) { + setSelectedServerIndex(0); + setVariableSelections(defaultSelectionsFor(firstServer)); + setCustomBaseUrl(""); + } else { + setSelectedServerIndex(-1); + setVariableSelections({}); + setCustomBaseUrl(""); + } + + const firstPreset = result.headerPresets[0]; + if (firstPreset) { + setStrategy({ kind: "header", presetIndex: 0 }); + setCustomHeaders(entriesFromSpecPreset(firstPreset)); + } else { + // No header presets — default to "custom" so the headers editor is + // visible immediately. Specs with no `security` block (e.g. Microsoft + // Graph) would otherwise leave the user staring at just the + // Authentication heading with no way to add headers. + setStrategy({ kind: "custom" }); + setCustomHeaders([]); } + setAnalyzing(false); }; handleAnalyzeRef.current = handleAnalyze; @@ -470,122 +486,126 @@ export default function AddOpenApiSource(props: { if (!selectedOAuth2Preset || !oauth2ClientIdSecretId || !preview) return; oauth.cancel(); setOauth2Error(null); - try { - const displayName = identity.name.trim() || selectedOAuth2Preset.securitySchemeName; - - const tokenUrl = resolveOAuthUrl(selectedOAuth2Preset.tokenUrl, resolvedBaseUrl); - - if (selectedOAuth2Preset.flow === "clientCredentials") { - // RFC 6749 §4.4: no user-interactive consent step. 
The client_secret - // is mandatory; the backend exchanges tokens inline and returns a - // completed OAuth2Auth we can attach to the source directly. - if (!oauth2ClientSecretSecretId) { - setOauth2Error("client_credentials requires a client secret"); - return; - } - setStartingOAuth(true); - const connectionId = openApiOAuthConnectionId(resolvedSourceId, selectedOAuth2Preset.flow); - const response = await doStartOAuth({ + const displayName = identity.name.trim() || selectedOAuth2Preset.securitySchemeName; + + const tokenUrl = resolveOAuthUrl(selectedOAuth2Preset.tokenUrl, resolvedBaseUrl); + + if (selectedOAuth2Preset.flow === "clientCredentials") { + // RFC 6749 §4.4: no user-interactive consent step. The client_secret + // is mandatory; the backend exchanges tokens inline and returns a + // completed OAuth2Auth we can attach to the source directly. + if (!oauth2ClientSecretSecretId) { + setOauth2Error("client_credentials requires a client secret"); + return; + } + setStartingOAuth(true); + const connectionId = openApiOAuthConnectionId(resolvedSourceId, selectedOAuth2Preset.flow); + const startExit = await doStartOAuth({ + params: { scopeId }, + payload: { + endpoint: tokenUrl, + redirectUrl: tokenUrl, + connectionId, + tokenScope: scopeId, + strategy: { + kind: "client-credentials", + tokenEndpoint: tokenUrl, + clientIdSecretId: oauth2ClientIdSecretId, + clientSecretSecretId: oauth2ClientSecretSecretId, + scopes: [...oauth2SelectedScopes], + }, + pluginId: "openapi", + identityLabel: `${displayName} OAuth`, + }, + }); + setStartingOAuth(false); + if (Exit.isFailure(startExit)) { + setOauth2Error(messageFromExit(startExit, "Failed to start OAuth")); + return; + } + const response = startExit.value; + if (!response.completedConnection) { + setOauth2Error("client_credentials flow did not mint a connection"); + return; + } + setOauth2AuthState({ + fingerprint: selectedOAuth2Fingerprint, + auth: new OAuth2Auth({ + kind: "oauth2", + connectionId: 
response.completedConnection.connectionId, + securitySchemeName: selectedOAuth2Preset.securitySchemeName, + flow: "clientCredentials", + tokenUrl, + authorizationUrl: null, + clientIdSecretId: oauth2ClientIdSecretId, + clientSecretSecretId: oauth2ClientSecretSecretId, + scopes: [...oauth2SelectedScopes], + }), + }); + setOauth2Error(null); + return; + } + + const authorizationUrl = resolveOAuthUrl( + Option.getOrElse(selectedOAuth2Preset.authorizationUrl, () => ""), + resolvedBaseUrl, + ); + const issuerUrl = inferOAuthIssuerUrl(authorizationUrl); + + await oauth.openAuthorization({ + run: async () => { + const startExit = await doStartOAuth({ params: { scopeId }, payload: { - endpoint: tokenUrl, - redirectUrl: tokenUrl, - connectionId, - tokenScope: scopeId as string, + endpoint: authorizationUrl, + connectionId: openApiOAuthConnectionId(resolvedSourceId, selectedOAuth2Preset.flow), + tokenScope: scopeId, + redirectUrl: oauth2RedirectUrl, strategy: { - kind: "client-credentials", + kind: "authorization-code", + authorizationEndpoint: authorizationUrl, tokenEndpoint: tokenUrl, + issuerUrl, clientIdSecretId: oauth2ClientIdSecretId, - clientSecretSecretId: oauth2ClientSecretSecretId, + clientSecretSecretId: oauth2ClientSecretSecretId ?? 
null, scopes: [...oauth2SelectedScopes], }, pluginId: "openapi", identityLabel: `${displayName} OAuth`, }, }); - setStartingOAuth(false); - if (!response.completedConnection) { - setOauth2Error("client_credentials flow did not mint a connection"); - return; + if (Exit.isFailure(startExit)) { + return failPromise(messageFromExit(startExit, "Failed to start OAuth")); } + const response = startExit.value; + if (response.authorizationUrl === null) { + return failPromise("Unexpected response flow from server"); + } + return { + sessionId: response.sessionId, + authorizationUrl: response.authorizationUrl, + }; + }, + onSuccess: (result) => { setOauth2AuthState({ fingerprint: selectedOAuth2Fingerprint, auth: new OAuth2Auth({ kind: "oauth2", - connectionId: response.completedConnection.connectionId, + connectionId: result.connectionId, securitySchemeName: selectedOAuth2Preset.securitySchemeName, - flow: "clientCredentials", + flow: "authorizationCode", tokenUrl, - authorizationUrl: null, + authorizationUrl, + issuerUrl, clientIdSecretId: oauth2ClientIdSecretId, clientSecretSecretId: oauth2ClientSecretSecretId, scopes: [...oauth2SelectedScopes], }), }); setOauth2Error(null); - return; - } - - const authorizationUrl = resolveOAuthUrl( - Option.getOrElse(selectedOAuth2Preset.authorizationUrl, () => ""), - resolvedBaseUrl, - ); - const issuerUrl = inferOAuthIssuerUrl(authorizationUrl); - - await oauth.openAuthorization({ - run: async () => { - const response = await doStartOAuth({ - params: { scopeId }, - payload: { - endpoint: authorizationUrl, - connectionId: openApiOAuthConnectionId(resolvedSourceId, selectedOAuth2Preset.flow), - tokenScope: scopeId as string, - redirectUrl: oauth2RedirectUrl, - strategy: { - kind: "authorization-code", - authorizationEndpoint: authorizationUrl, - tokenEndpoint: tokenUrl, - issuerUrl, - clientIdSecretId: oauth2ClientIdSecretId, - clientSecretSecretId: oauth2ClientSecretSecretId ?? 
null, - scopes: [...oauth2SelectedScopes], - }, - pluginId: "openapi", - identityLabel: `${displayName} OAuth`, - }, - }); - if (response.authorizationUrl === null) { - throw new Error("Unexpected response flow from server"); - } - return { - sessionId: response.sessionId, - authorizationUrl: response.authorizationUrl, - }; - }, - onSuccess: (result) => { - setOauth2AuthState({ - fingerprint: selectedOAuth2Fingerprint, - auth: new OAuth2Auth({ - kind: "oauth2", - connectionId: result.connectionId, - securitySchemeName: selectedOAuth2Preset.securitySchemeName, - flow: "authorizationCode", - tokenUrl, - authorizationUrl, - issuerUrl, - clientIdSecretId: oauth2ClientIdSecretId, - clientSecretSecretId: oauth2ClientSecretSecretId, - scopes: [...oauth2SelectedScopes], - }), - }); - setOauth2Error(null); - }, - onError: setOauth2Error, - }); - } catch (e) { - setStartingOAuth(false); - setOauth2Error(e instanceof Error ? e.message : "Failed to start OAuth"); - } + }, + onError: setOauth2Error, + }); }, [ selectedOAuth2Preset, oauth2ClientIdSecretId, @@ -621,103 +641,127 @@ export default function AddOpenApiSource(props: { kind: "openapi", url: resolvedBaseUrl || undefined, }); - try { - const result = await doAdd({ + const addExit = await doAdd({ + params: { scopeId }, + payload: { + spec: specUrl, + specFetchCredentials: serializeHttpCredentials(specFetchCredentials), + name: identity.name.trim() || undefined, + namespace: slugifyNamespace(identity.namespace) || undefined, + baseUrl: resolvedBaseUrl || undefined, + ...(hasHeaders ? { headers: configuredHeaders } : {}), + ...(Object.keys(serializeHttpCredentials(runtimeCredentials).queryParams).length > 0 + ? { queryParams: serializeHttpCredentials(runtimeCredentials).queryParams } + : {}), + ...(configuredOAuth2 ? 
{ oauth2: configuredOAuth2 } : {}), + }, + reactivityKeys: addSpecWriteKeys, + }); + if (Exit.isFailure(addExit)) { + placeholder.done(); + setAddError(messageFromExit(addExit, "Failed to add source")); + setAdding(false); + return; + } + + const sourceId = addExit.value.namespace; + const sourceScope = ScopeId.make(scopeId); + const bindingScope = ScopeId.make(userScope); + + for (const binding of headerBindings) { + const bindingExit = await doSetBinding({ params: { scopeId }, payload: { - spec: specUrl, - specFetchCredentials: serializeHttpCredentials(specFetchCredentials), - name: identity.name.trim() || undefined, - namespace: slugifyNamespace(identity.namespace) || undefined, - baseUrl: resolvedBaseUrl || undefined, - ...(hasHeaders ? { headers: configuredHeaders } : {}), - ...(Object.keys(serializeHttpCredentials(runtimeCredentials).queryParams).length > 0 - ? { queryParams: serializeHttpCredentials(runtimeCredentials).queryParams } - : {}), - ...(configuredOAuth2 ? { oauth2: configuredOAuth2 } : {}), + sourceId, + sourceScope, + scope: bindingScope, + slot: binding.slot, + value: { + kind: "secret", + secretId: SecretId.make(binding.secretId), + }, }, - reactivityKeys: addSpecWriteKeys, + reactivityKeys: bindingWriteKeys, }); - - const sourceId = result.namespace; - const sourceScope = ScopeId.make(scopeId); - const bindingScope = ScopeId.make(userScope); - - for (const binding of headerBindings) { - await doSetBinding({ - params: { scopeId }, - payload: { - sourceId, - sourceScope, - scope: bindingScope, - slot: binding.slot, - value: { - kind: "secret", - secretId: SecretId.make(binding.secretId), - }, - }, - reactivityKeys: bindingWriteKeys, - }); + if (Exit.isFailure(bindingExit)) { + placeholder.done(); + setAddError(messageFromExit(bindingExit, "Failed to add source")); + setAdding(false); + return; } + } - if (configuredOAuth2 && oauth2ClientIdSecretId) { - await doSetBinding({ - params: { scopeId }, - payload: { - sourceId, - sourceScope, - scope: 
bindingScope, - slot: configuredOAuth2.clientIdSlot, - value: { - kind: "secret", - secretId: SecretId.make(oauth2ClientIdSecretId), - }, + if (configuredOAuth2 && oauth2ClientIdSecretId) { + const bindingExit = await doSetBinding({ + params: { scopeId }, + payload: { + sourceId, + sourceScope, + scope: bindingScope, + slot: configuredOAuth2.clientIdSlot, + value: { + kind: "secret", + secretId: SecretId.make(oauth2ClientIdSecretId), }, - reactivityKeys: bindingWriteKeys, - }); + }, + reactivityKeys: bindingWriteKeys, + }); + if (Exit.isFailure(bindingExit)) { + placeholder.done(); + setAddError(messageFromExit(bindingExit, "Failed to add source")); + setAdding(false); + return; } + } - if (configuredOAuth2?.clientSecretSlot && oauth2ClientSecretSecretId) { - await doSetBinding({ - params: { scopeId }, - payload: { - sourceId, - sourceScope, - scope: bindingScope, - slot: configuredOAuth2.clientSecretSlot, - value: { - kind: "secret", - secretId: SecretId.make(oauth2ClientSecretSecretId), - }, + if (configuredOAuth2?.clientSecretSlot && oauth2ClientSecretSecretId) { + const bindingExit = await doSetBinding({ + params: { scopeId }, + payload: { + sourceId, + sourceScope, + scope: bindingScope, + slot: configuredOAuth2.clientSecretSlot, + value: { + kind: "secret", + secretId: SecretId.make(oauth2ClientSecretSecretId), }, - reactivityKeys: bindingWriteKeys, - }); + }, + reactivityKeys: bindingWriteKeys, + }); + if (Exit.isFailure(bindingExit)) { + placeholder.done(); + setAddError(messageFromExit(bindingExit, "Failed to add source")); + setAdding(false); + return; } + } - if (configuredOAuth2 && oauth2Auth) { - await doSetBinding({ - params: { scopeId }, - payload: { - sourceId, - sourceScope, - scope: bindingScope, - slot: configuredOAuth2.connectionSlot, - value: { - kind: "connection", - connectionId: ConnectionId.make(oauth2Auth.connectionId), - }, + if (configuredOAuth2 && oauth2Auth) { + const bindingExit = await doSetBinding({ + params: { scopeId }, + payload: 
{ + sourceId, + sourceScope, + scope: bindingScope, + slot: configuredOAuth2.connectionSlot, + value: { + kind: "connection", + connectionId: ConnectionId.make(oauth2Auth.connectionId), }, - reactivityKeys: bindingWriteKeys, - }); + }, + reactivityKeys: bindingWriteKeys, + }); + if (Exit.isFailure(bindingExit)) { + placeholder.done(); + setAddError(messageFromExit(bindingExit, "Failed to add source")); + setAdding(false); + return; } - - props.onComplete(); - } catch (e) { - setAddError(e instanceof Error ? e.message : "Failed to add source"); - setAdding(false); - } finally { - placeholder.done(); } + + placeholder.done(); + props.onComplete(); }; // ---- Render ---- diff --git a/packages/plugins/openapi/src/react/EditOpenApiSource.tsx b/packages/plugins/openapi/src/react/EditOpenApiSource.tsx index b5cb3f5ff..193b7019b 100644 --- a/packages/plugins/openapi/src/react/EditOpenApiSource.tsx +++ b/packages/plugins/openapi/src/react/EditOpenApiSource.tsx @@ -1,5 +1,6 @@ import { useEffect, useMemo, useRef, useState } from "react"; import { useAtomSet, useAtomValue } from "@effect/atom-react"; +import { Effect, Exit, Option, Schema } from "effect"; import * as AsyncResult from "effect/unstable/reactivity/AsyncResult"; import { connectionsAtom, sourceAtom, startOAuth } from "@executor-js/react/api/atoms"; @@ -79,12 +80,37 @@ const openApiOAuthConnectionId = ( targetScope: ScopeId, ): ConnectionId => ConnectionId.make( - `openapi-oauth-${slugify(sourceId)}-${slugify(securitySchemeName)}-${shortHash(targetScope as string)}`, + `openapi-oauth-${slugify(sourceId)}-${slugify(securitySchemeName)}-${shortHash(targetScope)}`, ); const bindingSecretId = (sourceId: string, slot: string, scopeId: string): string => `source-binding-${slugify(sourceId)}-${slugify(slot)}-${slugify(scopeId)}`; +const PublicErrorMessage = Schema.Struct({ + _tag: Schema.Literals(["OpenApiParseError", "OpenApiExtractionError", "OpenApiOAuthError"]), + message: Schema.String, +}); +const 
SecretBindingValue = Schema.Struct({ + kind: Schema.Literal("secret"), + secretId: Schema.String, +}); +const ConnectionBindingValue = Schema.Struct({ + kind: Schema.Literal("connection"), + connectionId: Schema.String, +}); + +const messageFromExit = (exit: Exit.Exit, fallback: string): string => { + const error = Exit.findErrorOption(exit); + if (Option.isNone(error)) return fallback; + const errorMessage = Schema.decodeUnknownOption(PublicErrorMessage)(error.value); + return Option.match(errorMessage, { + onNone: () => fallback, + onSome: (value) => value.message, + }); +}; + +const failPromise = (message: string): Promise => Effect.runPromise(Effect.fail(message)); + const effectiveClientSecretSlot = (oauth2: { readonly securitySchemeName: string; readonly clientSecretSlot: string | null; @@ -101,7 +127,7 @@ const exactBindingForScope = ( ) => rows.find((row) => row.slot === slot && row.scopeId === scopeId) ?? null; const scopeRank = (ranks: ReadonlyMap, scopeId: ScopeId): number => - ranks.get(scopeId as string) ?? Number.MAX_SAFE_INTEGER; + ranks.get(scopeId) ?? 
Number.MAX_SAFE_INTEGER; const effectiveBindingForScope = ( rows: readonly { @@ -120,20 +146,12 @@ const effectiveBindingForScope = ( const isSecretBindingValue = ( value: unknown, ): value is Extract => - typeof value === "object" && - value !== null && - "kind" in value && - (value as { kind?: unknown }).kind === "secret" && - "secretId" in value; + Option.isSome(Schema.decodeUnknownOption(SecretBindingValue)(value)); const isConnectionBindingValue = ( value: unknown, ): value is Extract => - typeof value === "object" && - value !== null && - "kind" in value && - (value as { kind?: unknown }).kind === "connection" && - "connectionId" in value; + Option.isSome(Schema.decodeUnknownOption(ConnectionBindingValue)(value)); export default function EditOpenApiSource(props: { readonly sourceId: string; @@ -150,7 +168,7 @@ export default function EditOpenApiSource(props: { const sourceScopeId = sourceSummary?.scopeId ?? displayScope; const sourceScope = ScopeId.make(sourceScopeId); const scopeRanks = useMemo( - () => new Map(scopeStack.map((scope, index) => [scope.id as string, index] as const)), + () => new Map(scopeStack.map((scope, index) => [scope.id, index] as const)), [scopeStack], ); @@ -161,10 +179,10 @@ export default function EditOpenApiSource(props: { const connectionsResult = useAtomValue(connectionsAtom(displayScope)); const secretList = useSecretPickerSecrets(); - const doUpdate = useAtomSet(updateOpenApiSource, { mode: "promise" }); - const doSetBinding = useAtomSet(setOpenApiSourceBinding, { mode: "promise" }); - const doRemoveBinding = useAtomSet(removeOpenApiSourceBinding, { mode: "promise" }); - const doStartOAuth = useAtomSet(startOAuth, { mode: "promise" }); + const doUpdate = useAtomSet(updateOpenApiSource, { mode: "promiseExit" }); + const doSetBinding = useAtomSet(setOpenApiSourceBinding, { mode: "promiseExit" }); + const doRemoveBinding = useAtomSet(removeOpenApiSourceBinding, { mode: "promiseExit" }); + const doStartOAuth = useAtomSet(startOAuth, 
{ mode: "promiseExit" }); const oauth = useOAuthPopupFlow({ popupName: OPENAPI_OAUTH_POPUP_NAME, popupBlockedMessage: "OAuth popup was blocked by the browser", @@ -224,28 +242,29 @@ export default function EditOpenApiSource(props: { const seq = ++sourceSaveSeq.current; setSourceSaveState("saving"); setError(null); - void doUpdate({ - params: { scopeId: ScopeId.make(sourceScopeId), namespace: props.sourceId }, - payload: { - name: nextName || undefined, - baseUrl: nextBaseUrl || undefined, - headers: source.config.headers, - oauth2: source.config.oauth2, - }, - reactivityKeys: openApiWriteKeys, - }) - .then(() => { - if (sourceSaveSeq.current !== seq) return; - setSourceSaveState("saved"); - window.setTimeout(() => { - if (sourceSaveSeq.current === seq) setSourceSaveState("idle"); - }, 1600); - }) - .catch((e: unknown) => { + void (async () => { + const exit = await doUpdate({ + params: { scopeId: ScopeId.make(sourceScopeId), namespace: props.sourceId }, + payload: { + name: nextName || undefined, + baseUrl: nextBaseUrl || undefined, + headers: source.config.headers, + oauth2: source.config.oauth2, + }, + reactivityKeys: openApiWriteKeys, + }); + if (Exit.isFailure(exit)) { if (sourceSaveSeq.current !== seq) return; setSourceSaveState("idle"); - setError(e instanceof Error ? 
e.message : "Failed to save source details"); - }); + setError(messageFromExit(exit, "Failed to save source details")); + return; + } + if (sourceSaveSeq.current !== seq) return; + setSourceSaveState("saved"); + window.setTimeout(() => { + if (sourceSaveSeq.current === seq) setSourceSaveState("idle"); + }, 1600); + })(); }, 600); return () => window.clearTimeout(timeout); @@ -321,44 +340,40 @@ export default function EditOpenApiSource(props: { if (!trimmed) return; setBusyKey(inputKey); setError(null); - try { - await doSetBinding({ - params: { scopeId: displayScope }, - payload: { - sourceId: props.sourceId, - sourceScope, - scope: targetScope, - slot, - value: { kind: "secret", secretId: SecretId.make(trimmed) }, - }, - reactivityKeys: sourceWriteKeys, - }); - } catch (e) { - setError(e instanceof Error ? e.message : "Failed to save credential binding"); - } finally { - setBusyKey(null); + const exit = await doSetBinding({ + params: { scopeId: displayScope }, + payload: { + sourceId: props.sourceId, + sourceScope, + scope: targetScope, + slot, + value: { kind: "secret", secretId: SecretId.make(trimmed) }, + }, + reactivityKeys: sourceWriteKeys, + }); + if (Exit.isFailure(exit)) { + setError(messageFromExit(exit, "Failed to save credential binding")); } + setBusyKey(null); }; const clearBinding = async (targetScope: ScopeId, slot: string) => { setBusyKey(`${targetScope}:${slot}:clear`); setError(null); - try { - await doRemoveBinding({ - params: { scopeId: displayScope }, - payload: { - sourceId: props.sourceId, - sourceScope, - slot, - scope: targetScope, - }, - reactivityKeys: sourceWriteKeys, - }); - } catch (e) { - setError(e instanceof Error ? 
e.message : "Failed to clear credential binding"); - } finally { - setBusyKey(null); + const exit = await doRemoveBinding({ + params: { scopeId: displayScope }, + payload: { + sourceId: props.sourceId, + sourceScope, + slot, + scope: targetScope, + }, + reactivityKeys: sourceWriteKeys, + }); + if (Exit.isFailure(exit)) { + setError(messageFromExit(exit, "Failed to clear credential binding")); } + setBusyKey(null); }; const connectOAuth = async (targetScope: ScopeId) => { @@ -410,35 +425,109 @@ export default function EditOpenApiSource(props: { setPendingOAuthConnection({ scopeId: targetScope, slot: oauth2.connectionSlot, - connectionId: connectionId as string, + connectionId, }); setError(null); - try { - const displayName = source.name; - const tokenUrl = resolveOAuthUrl(oauth2.tokenUrl, source.config.baseUrl ?? ""); - if (oauth2.flow === "clientCredentials") { - const response = await doStartOAuth({ + const displayName = source.name; + const tokenUrl = resolveOAuthUrl(oauth2.tokenUrl, source.config.baseUrl ?? 
""); + if (oauth2.flow === "clientCredentials") { + const startExit = await doStartOAuth({ + params: { scopeId: displayScope }, + payload: { + endpoint: tokenUrl, + redirectUrl: tokenUrl, + connectionId, + tokenScope: targetScope, + strategy: { + kind: "client-credentials", + tokenEndpoint: tokenUrl, + clientIdSecretId, + clientSecretSecretId: clientSecretValue!.secretId, + scopes: [...oauth2.scopes], + }, + pluginId: "openapi", + identityLabel: `${displayName} OAuth`, + }, + }); + if (Exit.isFailure(startExit)) { + setError(messageFromExit(startExit, "Failed to connect OAuth")); + setPendingOAuthConnection(null); + setBusyKey(null); + return; + } + const response = startExit.value; + if (!response.completedConnection) { + setError("Unexpected OAuth response"); + setPendingOAuthConnection(null); + setBusyKey(null); + return; + } + const bindingExit = await doSetBinding({ + params: { scopeId: displayScope }, + payload: { + sourceId: props.sourceId, + sourceScope, + scope: targetScope, + slot: oauth2.connectionSlot, + value: { + kind: "connection", + connectionId: ConnectionId.make(response.completedConnection.connectionId), + }, + }, + reactivityKeys: [...sourceWriteKeys, ...connectionWriteKeys], + }); + if (Exit.isFailure(bindingExit)) { + setError(messageFromExit(bindingExit, "Failed to connect OAuth")); + } + setPendingOAuthConnection(null); + setBusyKey(null); + return; + } + + const authorizationUrl = resolveOAuthUrl( + oauth2.authorizationUrl ?? "", + source.config.baseUrl ?? "", + ); + const issuerUrl = oauth2.issuerUrl ?? 
inferOAuthIssuerUrl(authorizationUrl); + await oauth.openAuthorization({ + run: async () => { + const startExit = await doStartOAuth({ params: { scopeId: displayScope }, payload: { - endpoint: tokenUrl, - redirectUrl: tokenUrl, - connectionId: connectionId as string, - tokenScope: targetScope as string, + endpoint: authorizationUrl, + connectionId, + tokenScope: targetScope, + redirectUrl: oauth2RedirectUrl, strategy: { - kind: "client-credentials", + kind: "authorization-code", + authorizationEndpoint: authorizationUrl, tokenEndpoint: tokenUrl, + issuerUrl, clientIdSecretId, - clientSecretSecretId: clientSecretValue!.secretId, + clientSecretSecretId: + clientSecretBinding && isSecretBindingValue(clientSecretBinding.value) + ? clientSecretBinding.value.secretId + : null, scopes: [...oauth2.scopes], }, pluginId: "openapi", identityLabel: `${displayName} OAuth`, }, }); - if (!response.completedConnection) { - throw new Error("Unexpected OAuth response"); + if (Exit.isFailure(startExit)) { + return failPromise(messageFromExit(startExit, "Failed to connect OAuth")); + } + const response = startExit.value; + if (response.authorizationUrl === null) { + return failPromise("Unexpected OAuth response"); } - await doSetBinding({ + return { + sessionId: response.sessionId, + authorizationUrl: response.authorizationUrl, + }; + }, + onSuccess: async (result) => { + const bindingExit = await doSetBinding({ params: { scopeId: displayScope }, payload: { sourceId: props.sourceId, @@ -447,83 +536,26 @@ export default function EditOpenApiSource(props: { slot: oauth2.connectionSlot, value: { kind: "connection", - connectionId: ConnectionId.make(response.completedConnection.connectionId), + connectionId: ConnectionId.make(result.connectionId), }, }, reactivityKeys: [...sourceWriteKeys, ...connectionWriteKeys], }); - setPendingOAuthConnection(null); - setBusyKey(null); - return; - } - - const authorizationUrl = resolveOAuthUrl( - oauth2.authorizationUrl ?? "", - source.config.baseUrl ?? 
"", - ); - const issuerUrl = oauth2.issuerUrl ?? inferOAuthIssuerUrl(authorizationUrl); - await oauth.openAuthorization({ - run: async () => { - const response = await doStartOAuth({ - params: { scopeId: displayScope }, - payload: { - endpoint: authorizationUrl, - connectionId: connectionId as string, - tokenScope: targetScope as string, - redirectUrl: oauth2RedirectUrl, - strategy: { - kind: "authorization-code", - authorizationEndpoint: authorizationUrl, - tokenEndpoint: tokenUrl, - issuerUrl, - clientIdSecretId, - clientSecretSecretId: - clientSecretBinding && isSecretBindingValue(clientSecretBinding.value) - ? clientSecretBinding.value.secretId - : null, - scopes: [...oauth2.scopes], - }, - pluginId: "openapi", - identityLabel: `${displayName} OAuth`, - }, - }); - if (response.authorizationUrl === null) { - throw new Error("Unexpected OAuth response"); - } - return { - sessionId: response.sessionId, - authorizationUrl: response.authorizationUrl, - }; - }, - onSuccess: async (result) => { - await doSetBinding({ - params: { scopeId: displayScope }, - payload: { - sourceId: props.sourceId, - sourceScope, - scope: targetScope, - slot: oauth2.connectionSlot, - value: { - kind: "connection", - connectionId: ConnectionId.make(result.connectionId), - }, - }, - reactivityKeys: [...sourceWriteKeys, ...connectionWriteKeys], - }); + if (Exit.isFailure(bindingExit)) { + setError(messageFromExit(bindingExit, "Failed to connect OAuth")); setPendingOAuthConnection(null); setBusyKey(null); - }, - onError: (message) => { - setError(message); - setPendingOAuthConnection(null); - setBusyKey(null); - }, - }); - } catch (e) { - setError(e instanceof Error ? 
e.message : "Failed to connect OAuth"); - setPendingOAuthConnection(null); - setBusyKey(null); - } + return; + } + setPendingOAuthConnection(null); + setBusyKey(null); + }, + onError: (message) => { + setError(message); + setPendingOAuthConnection(null); + setBusyKey(null); + }, + }); }; return ( @@ -590,10 +622,10 @@ export default function EditOpenApiSource(props: { ({ - value: entry.scopeId as string, + value: entry.scopeId, label: entry.label, }))} - value={activeCredentialScopeId as string} + value={activeCredentialScopeId} onChange={setSelectedCredentialScope} /> @@ -618,9 +650,9 @@ export default function EditOpenApiSource(props: { isSecretBindingValue(effective.value); const currentSecretId = exact && isSecretBindingValue(exact.value) - ? (exact.value.secretId as string) + ? exact.value.secretId : inherited && effective && isSecretBindingValue(effective.value) - ? (effective.value.secretId as string) + ? effective.value.secretId : null; return ( (null); function useSecretForm(): SecretFormContextValue { const ctx = use(SecretFormContext); if (!ctx) { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: React context invariant surfaces programmer misuse during render throw new Error("SecretForm parts must be rendered inside "); } return ctx; @@ -110,7 +109,7 @@ function SecretFormProvider(props: SecretFormProviderProps) { const defaultScope = useScope(); const scopeId = scopeIdProp ?? 
defaultScope; - const doSet = useAtomSet(setSecret, { mode: "promise" }); + const doSet = useAtomSet(setSecret, { mode: "promiseExit" }); const [state, setState] = useState(() => ({ name: "", @@ -142,27 +141,27 @@ function SecretFormProvider(props: SecretFormProviderProps) { const submit = async () => { if (!canSubmit) return; setState((s) => ({ ...s, status: { kind: "submitting" } })); - try { - await doSet({ - params: { scopeId }, - payload: { - id: SecretId.make(id.trim()), - name: displayName || id.trim(), - value: state.value.trim(), - provider: state.provider === "auto" ? undefined : state.provider, - }, - reactivityKeys: secretWriteKeys, - }); - onCreated(id.trim()); - } catch (e) { + const exit = await doSet({ + params: { scopeId }, + payload: { + id: SecretId.make(id.trim()), + name: displayName || id.trim(), + value: state.value.trim(), + provider: state.provider === "auto" ? undefined : state.provider, + }, + reactivityKeys: secretWriteKeys, + }); + if (Exit.isFailure(exit)) { setState((s) => ({ ...s, status: { kind: "error", - message: e instanceof Error ? e.message : "Failed to save secret", + message: "Failed to save secret", }, })); + return; } + onCreated(id.trim()); }; const value: SecretFormContextValue = { @@ -255,7 +254,9 @@ function ValueField(props: { revealable?: boolean; placeholder?: string }) { )} - {errored && {state.status.kind === "error" ? state.status.message : ""}} + {errored && ( + {state.status.kind === "error" ? 
state.status.message : ""} + )} ); } From 70d01de442ea56406357e6dbc9b7ca49c4341bc2 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:19:48 -0700 Subject: [PATCH 066/108] Fix core OAuth typed boundaries --- packages/core/sdk/src/oauth-discovery.ts | 272 +++++----- packages/core/sdk/src/oauth-service.ts | 658 +++++++++-------------- 2 files changed, 388 insertions(+), 542 deletions(-) diff --git a/packages/core/sdk/src/oauth-discovery.ts b/packages/core/sdk/src/oauth-discovery.ts index cd33e9b87..4a9eaadfa 100644 --- a/packages/core/sdk/src/oauth-discovery.ts +++ b/packages/core/sdk/src/oauth-discovery.ts @@ -18,7 +18,7 @@ // callers actually need. // --------------------------------------------------------------------------- -import { Data, Effect, Result, Schema } from "effect"; +import { Data, Effect, Option, Predicate, Result, Schema } from "effect"; import * as oauth from "oauth4webapi"; import { @@ -37,30 +37,23 @@ import { * token-endpoint failures. A plugin's refresh path should never have * to inspect error messages to tell "metadata drifted, re-discover" * apart from "refresh token is no longer honoured". 
*/ -export class OAuthDiscoveryError extends Data.TaggedError( - "OAuthDiscoveryError", -)<{ +export class OAuthDiscoveryError extends Data.TaggedError("OAuthDiscoveryError")<{ readonly message: string; readonly status?: number; readonly cause?: unknown; }> {} -const discoveryError = ( - message: string, - options: { status?: number; cause?: unknown } = {}, -): OAuthDiscoveryError => - new OAuthDiscoveryError({ - message, - status: options.status, - cause: options.cause, - }); - // --------------------------------------------------------------------------- // Schemas (narrow structural parsing — the RFCs leave many fields // optional; we validate only the subset consumers read) // --------------------------------------------------------------------------- const StringArray = Schema.Array(Schema.String); +const JsonValueSchema = Schema.fromJsonString(Schema.Unknown); +const DcrErrorBodySchema = Schema.Struct({ + error: Schema.NonEmptyString, + error_description: Schema.optional(Schema.String), +}); export const OAuthProtectedResourceMetadataSchema = Schema.Struct({ resource: Schema.optional(Schema.String), @@ -69,8 +62,7 @@ export const OAuthProtectedResourceMetadataSchema = Schema.Struct({ bearer_methods_supported: Schema.optional(StringArray), resource_documentation: Schema.optional(Schema.String), }).annotate({ identifier: "OAuthProtectedResourceMetadata" }); -export type OAuthProtectedResourceMetadata = - typeof OAuthProtectedResourceMetadataSchema.Type; +export type OAuthProtectedResourceMetadata = typeof OAuthProtectedResourceMetadataSchema.Type; export const OAuthAuthorizationServerMetadataSchema = Schema.Struct({ issuer: Schema.String, @@ -87,8 +79,7 @@ export const OAuthAuthorizationServerMetadataSchema = Schema.Struct({ userinfo_endpoint: Schema.optional(Schema.String), id_token_signing_alg_values_supported: Schema.optional(StringArray), }).annotate({ identifier: "OAuthAuthorizationServerMetadata" }); -export type OAuthAuthorizationServerMetadata = - 
typeof OAuthAuthorizationServerMetadataSchema.Type; +export type OAuthAuthorizationServerMetadata = typeof OAuthAuthorizationServerMetadataSchema.Type; export type DynamicClientMetadata = { readonly client_name?: string; @@ -127,15 +118,9 @@ export const OAuthClientInformationSchema = Schema.Struct({ }).annotate({ identifier: "OAuthClientInformation" }); export type OAuthClientInformation = typeof OAuthClientInformationSchema.Type; -const decodeResourceMetadata = Schema.decodeUnknownEffect( - OAuthProtectedResourceMetadataSchema, -); -const decodeAuthServerMetadata = Schema.decodeUnknownEffect( - OAuthAuthorizationServerMetadataSchema, -); -const decodeClientInformation = Schema.decodeUnknownEffect( - OAuthClientInformationSchema, -); +const decodeResourceMetadata = Schema.decodeUnknownEffect(OAuthProtectedResourceMetadataSchema); +const decodeAuthServerMetadata = Schema.decodeUnknownEffect(OAuthAuthorizationServerMetadataSchema); +const decodeClientInformation = Schema.decodeUnknownEffect(OAuthClientInformationSchema); export interface DiscoveryRequestOptions { /** Injected for tests. Defaults to the global `fetch`. 
*/ @@ -154,23 +139,28 @@ export interface DiscoveryRequestOptions { const MCP_PROTOCOL_VERSION_HEADER = "mcp-protocol-version"; -const isLoopbackHttpUrl = (value: string): boolean => { +const parseUrlOption = (value: string): URL | null => { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: URL constructor is the platform URL parser try { - const url = new URL(value); - if (url.protocol !== "http:") return false; - const hostname = url.hostname.toLowerCase(); - return ( - hostname === "localhost" || - hostname === "0.0.0.0" || - hostname === "::1" || - hostname === "[::1]" || - hostname.startsWith("127.") - ); + return new URL(value); } catch { - return false; + return null; } }; +const isLoopbackHttpUrl = (value: string): boolean => { + const parsed = parseUrlOption(value); + if (!parsed || parsed.protocol !== "http:") return false; + const hostname = parsed.hostname.toLowerCase(); + return ( + hostname === "localhost" || + hostname === "0.0.0.0" || + hostname === "::1" || + hostname === "[::1]" || + hostname.startsWith("127.") + ); +}; + const oauth4webapiOptions = ( options: DiscoveryRequestOptions, targetUrl?: string, @@ -178,9 +168,7 @@ const oauth4webapiOptions = ( const out: Record = {}; if (options.fetch) (out as { [customFetch]?: typeof fetch })[customFetch] = options.fetch; if (targetUrl && isLoopbackHttpUrl(targetUrl)) { - (out as { [oauth.allowInsecureRequests]?: boolean })[ - oauth.allowInsecureRequests - ] = true; + (out as { [oauth.allowInsecureRequests]?: boolean })[oauth.allowInsecureRequests] = true; } const signal = AbortSignal.timeout(options.timeoutMs ?? 
OAUTH2_DEFAULT_TIMEOUT_MS); out.signal = signal; @@ -231,8 +219,7 @@ export const discoverProtectedResourceMetadata = ( resourceUrl: string, options: DiscoveryRequestOptions = {}, ): Effect.Effect< - | { readonly metadataUrl: string; readonly metadata: OAuthProtectedResourceMetadata } - | null, + { readonly metadataUrl: string; readonly metadata: OAuthProtectedResourceMetadata } | null, OAuthDiscoveryError > => Effect.gen(function* () { @@ -260,30 +247,35 @@ export const discoverProtectedResourceMetadata = ( } const text = await response.text(); if (text.length === 0) return "skip" as const; - return { status: response.status, body: JSON.parse(text) } as const; + return { status: response.status, body: text } as const; }, catch: (cause) => - discoveryError( - `Failed to fetch ${url}: ${cause instanceof Error ? cause.message : String(cause)}`, - { cause }, - ), + new OAuthDiscoveryError({ + message: `Failed to fetch ${url}`, + cause, + }), }); if (result === "skip") continue; if (!("body" in result)) { - return yield* Effect.fail( - discoveryError( - `Protected resource metadata returned status ${result.status}`, - { status: result.status }, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `Protected resource metadata returned status ${result.status}`, + status: result.status, + }); } - const metadata = yield* decodeResourceMetadata(result.body).pipe( + const parsedBody = yield* Schema.decodeUnknownEffect(JsonValueSchema)(result.body).pipe( Effect.mapError( (err) => new OAuthDiscoveryError({ - message: `Protected resource metadata is malformed: ${ - Schema.isSchemaError(err) ? 
err.message : String(err) - }`, + message: "Protected resource metadata is malformed: invalid JSON", + cause: err, + }), + ), + ); + const metadata = yield* decodeResourceMetadata(parsedBody).pipe( + Effect.mapError( + (err) => + new OAuthDiscoveryError({ + message: "Protected resource metadata is malformed: invalid shape", cause: err, }), ), @@ -308,9 +300,7 @@ const wellKnownUrlFor = ( ): string => { // Mirrors the library's own well-known composition so the URL we // surface matches what was actually fetched. - const suffix = algorithm === "oauth2" - ? "oauth-authorization-server" - : "openid-configuration"; + const suffix = algorithm === "oauth2" ? "oauth-authorization-server" : "openid-configuration"; return issuerPath && issuerPath !== "/" ? `${issuerOrigin}/.well-known/${suffix}${issuerPath}` : `${issuerOrigin}/.well-known/${suffix}`; @@ -320,11 +310,10 @@ export const discoverAuthorizationServerMetadata = ( issuer: string, options: DiscoveryRequestOptions = {}, ): Effect.Effect< - | { - readonly metadataUrl: string; - readonly metadata: OAuthAuthorizationServerMetadata; - } - | null, + { + readonly metadataUrl: string; + readonly metadata: OAuthAuthorizationServerMetadata; + } | null, OAuthDiscoveryError > => Effect.gen(function* () { @@ -349,13 +338,11 @@ export const discoverAuthorizationServerMetadata = ( }; }, catch: (cause) => { - if (cause instanceof OAuthDiscoveryError) return cause; - return discoveryError( - `Discovery (${algorithm}) failed for ${issuer}: ${ - cause instanceof Error ? 
cause.message : String(cause) - }`, - { cause }, - ); + if (Predicate.isTagged("OAuthDiscoveryError")(cause)) return cause; + return new OAuthDiscoveryError({ + message: `Discovery (${algorithm}) failed for ${issuer}`, + cause, + }); }, }).pipe( // If one algorithm fails mid-roundtrip (network, parse, issuer @@ -370,9 +357,7 @@ export const discoverAuthorizationServerMetadata = ( Effect.mapError( (err) => new OAuthDiscoveryError({ - message: `Authorization server metadata is malformed: ${ - Schema.isSchemaError(err) ? err.message : String(err) - }`, + message: "Authorization server metadata is malformed: invalid shape", cause: err, }), ), @@ -431,29 +416,19 @@ const buildDcrBody = (m: DynamicClientMetadata): Record => { return body; }; -const interpretDcrFailure = ( - status: number, - text: string, -): DcrErrorBody | DcrTransport => { +const interpretDcrFailure = (status: number, text: string): DcrErrorBody | DcrTransport => { // RFC 6749 error envelope: `{error, error_description?}` with 4xx. if (status >= 400 && status < 500) { - const parsed = Result.try({ - try: () => (text ? (JSON.parse(text) as unknown) : null), - catch: () => null, - }); - const body = Result.isSuccess(parsed) ? parsed.success : null; - if ( - body && - typeof body === "object" && - "error" in body && - typeof body.error === "string" && - body.error.length > 0 - ) { - const desc = - "error_description" in body && typeof body.error_description === "string" - ? body.error_description - : undefined; - return new DcrErrorBody({ status, error: body.error, error_description: desc }); + const parsedJson = Schema.decodeUnknownOption(JsonValueSchema)(text); + const parsed = Option.isSome(parsedJson) + ? 
Schema.decodeUnknownOption(DcrErrorBodySchema)(parsedJson.value) + : Option.none(); + if (Option.isSome(parsed)) { + return new DcrErrorBody({ + status, + error: parsed.value.error, + error_description: parsed.value.error_description, + }); } } return new DcrTransport({ @@ -468,10 +443,7 @@ export const registerDynamicClient = ( ): Effect.Effect => Effect.gen(function* () { const url = new URL(input.registrationEndpoint); - if ( - url.protocol !== "https:" && - !isLoopbackHttpUrl(input.registrationEndpoint) - ) { + if (url.protocol !== "https:" && !isLoopbackHttpUrl(input.registrationEndpoint)) { return yield* new DcrTransport({ message: `registration_endpoint must be HTTPS or a loopback HTTP URL (got ${url.protocol}//${url.host})`, }); @@ -497,7 +469,7 @@ export const registerDynamicClient = ( }), catch: (cause) => new DcrTransport({ - message: `Dynamic Client Registration request failed: ${cause instanceof Error ? cause.message : String(cause)}`, + message: "Dynamic Client Registration request failed", cause, }), }); @@ -505,9 +477,15 @@ export const registerDynamicClient = ( // Accept both 200 and 201 as success — RFC 7591 mandates 201, but // Todoist (and others) return 200 OK with the client information body. 
if (response.status !== 200 && response.status !== 201) { - const text = yield* Effect.promise(() => - response.text().catch(() => ""), - ); + const text = yield* Effect.tryPromise({ + try: () => response.text(), + catch: (cause) => + new DcrTransport({ + message: "Dynamic Client Registration error response could not be read", + status: response.status, + cause, + }), + }).pipe(Effect.catchTag("DcrTransport", () => Effect.succeed(""))); return yield* interpretDcrFailure(response.status, text); } @@ -520,22 +498,21 @@ export const registerDynamicClient = ( cause, }), }); - const json = yield* Effect.try({ - try: () => JSON.parse(text) as unknown, - catch: (cause) => - new DcrTransport({ - message: "Dynamic Client Registration response was not valid JSON", - status: response.status, - cause, - }), - }); + const json = yield* Schema.decodeUnknownEffect(JsonValueSchema)(text).pipe( + Effect.mapError( + (cause) => + new DcrTransport({ + message: "Dynamic Client Registration response was not valid JSON", + status: response.status, + cause, + }), + ), + ); return yield* decodeClientInformation(json).pipe( Effect.mapError( (err) => new OAuthDiscoveryError({ - message: `Dynamic Client Registration response is malformed: ${ - Schema.isSchemaError(err) ? err.message : String(err) - }`, + message: "Dynamic Client Registration response is malformed: invalid shape", cause: err, }), ), @@ -544,16 +521,18 @@ export const registerDynamicClient = ( Effect.catchTags({ DcrErrorBody: (err) => Effect.fail( - discoveryError( - `Dynamic Client Registration failed: ${err.error}${ + new OAuthDiscoveryError({ + message: `Dynamic Client Registration failed: ${err.error}${ err.error_description ? 
` — ${err.error_description}` : "" }`, - { status: err.status, cause: err }, - ), + status: err.status, + cause: err, + }), ), DcrTransport: (err) => Effect.fail( - discoveryError(`Dynamic Client Registration failed: ${err.message}`, { + new OAuthDiscoveryError({ + message: "Dynamic Client Registration failed", status: err.status, cause: err.cause ?? err, }), @@ -630,11 +609,10 @@ export const beginDynamicAuthorization = ( const authorizationServerUrl = (() => { if (prior.authorizationServerUrl) return prior.authorizationServerUrl; - const fromResource = - resource && resource.metadata.authorization_servers?.[0]; + const fromResource = resource && resource.metadata.authorization_servers?.[0]; if (fromResource) return fromResource; - const u = new URL(input.endpoint); - return `${u.protocol}//${u.host}`; + const u = parseUrlOption(input.endpoint); + return u ? `${u.protocol}//${u.host}` : input.endpoint; })(); const authServer = @@ -643,35 +621,26 @@ export const beginDynamicAuthorization = ( metadata: prior.authorizationServerMetadata, metadataUrl: prior.authorizationServerMetadataUrl, } - : yield* discoverAuthorizationServerMetadata( - authorizationServerUrl, - options, - ); + : yield* discoverAuthorizationServerMetadata(authorizationServerUrl, options); if (!authServer) { - return yield* Effect.fail( - discoveryError( - `No OAuth authorization server metadata at ${authorizationServerUrl}`, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `No OAuth authorization server metadata at ${authorizationServerUrl}`, + }); } const pkceMethods = authServer.metadata.code_challenge_methods_supported ?? 
[]; if (pkceMethods.length > 0 && !pkceMethods.includes("S256")) { - return yield* Effect.fail( - discoveryError( - `Authorization server does not support PKCE S256 (advertised: ${pkceMethods.join(", ")})`, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `Authorization server does not support PKCE S256 (advertised: ${pkceMethods.join(", ")})`, + }); } const responseTypes = authServer.metadata.response_types_supported ?? []; if (responseTypes.length > 0 && !responseTypes.includes("code")) { - return yield* Effect.fail( - discoveryError( - `Authorization server does not support response_type=code (advertised: ${responseTypes.join(", ")})`, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `Authorization server does not support response_type=code (advertised: ${responseTypes.join(", ")})`, + }); } const baseClientMetadata: DynamicClientMetadata = { @@ -689,9 +658,10 @@ export const beginDynamicAuthorization = ( const reg = authServer.metadata.registration_endpoint; if (!reg) { return Effect.fail( - discoveryError( - "Authorization server does not advertise registration_endpoint — cannot auto-register a client", - ), + new OAuthDiscoveryError({ + message: + "Authorization server does not advertise registration_endpoint — cannot auto-register a client", + }), ); } return registerDynamicClient( @@ -701,9 +671,7 @@ export const beginDynamicAuthorization = ( })()); const codeVerifier = createPkceCodeVerifier(); - const codeChallenge = yield* Effect.promise(() => - createPkceCodeChallenge(codeVerifier), - ); + const codeChallenge = yield* Effect.promise(() => createPkceCodeChallenge(codeVerifier)); const scopes = input.scopes ?? authServer.metadata.scopes_supported ?? 
[]; const authorizationUrl = buildAuthorizationUrl({ diff --git a/packages/core/sdk/src/oauth-service.ts b/packages/core/sdk/src/oauth-service.ts index 79cc1e058..1cc65ad81 100644 --- a/packages/core/sdk/src/oauth-service.ts +++ b/packages/core/sdk/src/oauth-service.ts @@ -35,13 +35,9 @@ // every strategy because refresh semantics are strategy-independent. // --------------------------------------------------------------------------- -import { Effect, Schema } from "effect"; +import { Effect, Option, Predicate, Schema } from "effect"; -import type { - DBAdapter, - StorageFailure, - TypedAdapter, -} from "@executor-js/storage-core"; +import type { DBAdapter, StorageFailure, TypedAdapter } from "@executor-js/storage-core"; import { ConnectionRefreshError, @@ -52,9 +48,7 @@ import { type ConnectionRefreshResult, type ConnectionRef, } from "./connections"; -import type { - ConnectionProviderNotRegisteredError, -} from "./errors"; +import type { ConnectionProviderNotRegisteredError } from "./errors"; import type { CoreSchema } from "./core-schema"; import { ConnectionId, ScopeId, SecretId } from "./ids"; import { SetSecretInput, type SecretRef } from "./secrets"; @@ -100,6 +94,8 @@ import { const OAuthAuthorizationServerMetadataJson = Schema.Record(Schema.String, Schema.Unknown); const OAuthClientInformationJson = Schema.Record(Schema.String, Schema.Unknown); +const UnknownRecord = Schema.Record(Schema.String, Schema.Unknown); +const JsonValueSchema = Schema.fromJsonString(Schema.Unknown); const DynamicDcrSessionPayload = Schema.Struct({ kind: Schema.Literal("dynamic-dcr"), @@ -110,9 +106,7 @@ const DynamicDcrSessionPayload = Schema.Struct({ authorizationServerMetadata: OAuthAuthorizationServerMetadataJson, clientInformation: OAuthClientInformationJson, resourceMetadataUrl: Schema.NullOr(Schema.String), - resourceMetadata: Schema.NullOr( - Schema.Record(Schema.String, Schema.Unknown), - ), + resourceMetadata: Schema.NullOr(Schema.Record(Schema.String, Schema.Unknown)), 
scopes: Schema.Array(Schema.String), }); @@ -122,7 +116,9 @@ const AuthorizationCodeSessionPayload = Schema.Struct({ codeVerifier: Schema.String, authorizationEndpoint: Schema.String, tokenEndpoint: Schema.String, - issuerUrl: Schema.NullOr(Schema.String).pipe(Schema.withDecodingDefaultType(Effect.succeed(null))), + issuerUrl: Schema.NullOr(Schema.String).pipe( + Schema.withDecodingDefaultType(Effect.succeed(null)), + ), clientIdSecretId: Schema.String, clientSecretSecretId: Schema.NullOr(Schema.String), scopes: Schema.Array(Schema.String), @@ -144,22 +140,22 @@ const encodeSessionPayload = Schema.encodeSync(OAuthSessionPayload); const coerceJson = (value: unknown): unknown => { if (typeof value !== "string") return value; - try { - return JSON.parse(value); - } catch { - return value; - } + const parsed = Schema.decodeUnknownOption(JsonValueSchema)(value); + return Option.isSome(parsed) ? parsed.value : value; }; const stringArray = (value: unknown): readonly string[] => - Array.isArray(value) - ? value.filter((scope): scope is string => typeof scope === "string") - : []; + Array.isArray(value) ? value.filter((scope): scope is string => typeof scope === "string") : []; const originOrNull = (value: unknown): string | null => { if (typeof value !== "string") return null; + return parseUrlOption(value)?.origin ?? null; +}; + +const parseUrlOption = (value: string): URL | null => { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: URL constructor is the platform URL parser try { - return new URL(value).origin; + return new URL(value); } catch { return null; } @@ -167,8 +163,8 @@ const originOrNull = (value: unknown): string | null => { const decodeProviderState = (value: unknown): OAuthProviderState => { const raw = coerceJson(value); - const record = - raw && typeof raw === "object" ? (raw as Record) : null; + const decodedRecord = Schema.decodeUnknownOption(UnknownRecord)(raw); + const record = Option.isSome(decodedRecord) ? 
decodedRecord.value : null; if (record && !("kind" in record) && "flow" in record && "tokenUrl" in record) { const flow = record.flow; @@ -196,12 +192,7 @@ const decodeProviderState = (value: unknown): OAuthProviderState => { } } - if ( - record && - !("kind" in record) && - "clientIdSecretId" in record && - "scopes" in record - ) { + if (record && !("kind" in record) && "clientIdSecretId" in record && "scopes" in record) { const scopes = stringArray(record.scopes); return Schema.decodeUnknownSync(OAuthProviderStateSchema)({ kind: "authorization-code", @@ -214,48 +205,38 @@ const decodeProviderState = (value: unknown): OAuthProviderState => { }); } - if ( - record && - !("kind" in record) && - "clientInformation" in record && - "endpoint" in record - ) { - const clientInformation = - record.clientInformation && typeof record.clientInformation === "object" - ? (record.clientInformation as Record) - : null; + if (record && !("kind" in record) && "clientInformation" in record && "endpoint" in record) { + const decodedClientInformation = Schema.decodeUnknownOption(UnknownRecord)( + record.clientInformation, + ); + const clientInformation = Option.isSome(decodedClientInformation) + ? decodedClientInformation.value + : null; + const decodedAuthorizationServerMetadata = Schema.decodeUnknownOption(UnknownRecord)( + record.authorizationServerMetadata, + ); + const authorizationServerMetadata = Option.isSome(decodedAuthorizationServerMetadata) + ? decodedAuthorizationServerMetadata.value + : null; return Schema.decodeUnknownSync(OAuthProviderStateSchema)({ kind: "dynamic-dcr", tokenEndpoint: typeof record.tokenEndpoint === "string" ? record.tokenEndpoint - : record.authorizationServerMetadata && - typeof record.authorizationServerMetadata === "object" && - typeof (record.authorizationServerMetadata as Record) - .token_endpoint === "string" - ? 
((record.authorizationServerMetadata as Record) - .token_endpoint as string) + : typeof authorizationServerMetadata?.token_endpoint === "string" + ? authorizationServerMetadata.token_endpoint : "", issuerUrl: - record.authorizationServerMetadata && - typeof record.authorizationServerMetadata === "object" && - typeof (record.authorizationServerMetadata as Record).issuer === - "string" - ? ((record.authorizationServerMetadata as Record) - .issuer as string) + typeof authorizationServerMetadata?.issuer === "string" + ? authorizationServerMetadata.issuer : null, authorizationServerUrl: - typeof record.authorizationServerUrl === "string" - ? record.authorizationServerUrl - : null, + typeof record.authorizationServerUrl === "string" ? record.authorizationServerUrl : null, authorizationServerMetadataUrl: typeof record.authorizationServerMetadataUrl === "string" ? record.authorizationServerMetadataUrl : null, - clientId: - typeof clientInformation?.client_id === "string" - ? clientInformation.client_id - : "", + clientId: typeof clientInformation?.client_id === "string" ? clientInformation.client_id : "", clientSecretSecretId: null, clientAuth: "body", scope: null, @@ -288,10 +269,7 @@ export interface OAuthServiceDeps { * `complete` (and from `start` for `client-credentials`). */ readonly connectionsCreate: ( input: CreateConnectionInput, - ) => Effect.Effect< - ConnectionRef, - ConnectionProviderNotRegisteredError | StorageFailure - >; + ) => Effect.Effect; /** Random session id generator. Tests override to make outputs * deterministic. 
*/ readonly newSessionId?: () => string; @@ -304,9 +282,9 @@ const defaultSessionId = (): string => { if (crypto?.randomUUID) return `oauth2_session_${crypto.randomUUID()}`; const bytes = new Uint8Array(16); crypto.getRandomValues(bytes); - return `oauth2_session_${Array.from(bytes, (byte) => - byte.toString(16).padStart(2, "0"), - ).join("")}`; + return `oauth2_session_${Array.from(bytes, (byte) => byte.toString(16).padStart(2, "0")).join( + "", + )}`; }; const secretIdPart = (value: string): string => @@ -328,11 +306,7 @@ const oauthSecretId = ( const scopedSessionId = (scopeId: string, sessionId: string): string => `${sessionId}_${secretIdPart(scopeId).slice(0, 24)}`; -const terminalRefreshErrors = new Set([ - "invalid_grant", - "invalid_client", - "unauthorized_client", -]); +const terminalRefreshErrors = new Set(["invalid_grant", "invalid_client", "unauthorized_client"]); // --------------------------------------------------------------------------- // Service factory @@ -347,19 +321,16 @@ export const makeOAuth2Service = ( // ------------------------------------------------------------------- // probe // ------------------------------------------------------------------- - const probe = ( - input: OAuthProbeInput, - ): Effect.Effect => + const probe = (input: OAuthProbeInput): Effect.Effect => Effect.gen(function* () { - const resource = yield* discoverProtectedResourceMetadata( - input.endpoint, - { resourceHeaders: input.headers, resourceQueryParams: input.queryParams }, - ).pipe( - Effect.catchTag("OAuthDiscoveryError", (err) => + const resource = yield* discoverProtectedResourceMetadata(input.endpoint, { + resourceHeaders: input.headers, + resourceQueryParams: input.queryParams, + }).pipe( + Effect.catchTag("OAuthDiscoveryError", () => Effect.fail( new OAuthProbeError({ - message: `Protected resource metadata probe failed: ${err.message}`, - + message: "Protected resource metadata probe failed", }), ), ), @@ -368,29 +339,19 @@ export const 
makeOAuth2Service = ( const authorizationServerUrl = (() => { const fromResource = resource?.metadata.authorization_servers?.[0]; if (fromResource) return fromResource; - try { - const u = new URL(input.endpoint); - return `${u.protocol}//${u.host}`; - } catch { - return null; - } + const u = parseUrlOption(input.endpoint); + return u ? `${u.protocol}//${u.host}` : null; })(); const authServer = authorizationServerUrl - ? yield* discoverAuthorizationServerMetadata( - authorizationServerUrl, - ).pipe( - Effect.catchTag("OAuthDiscoveryError", () => - Effect.succeed(null), - ), + ? yield* discoverAuthorizationServerMetadata(authorizationServerUrl).pipe( + Effect.catchTag("OAuthDiscoveryError", () => Effect.succeed(null)), ) : null; const supportsDynamicRegistration = !!( authServer?.metadata.registration_endpoint && - (authServer.metadata.token_endpoint_auth_methods_supported ?? []).includes( - "none", - ) + (authServer.metadata.token_endpoint_auth_methods_supported ?? []).includes("none") ); // Bearer challenge probe — POST the endpoint unauth, look for @@ -400,47 +361,40 @@ export const makeOAuth2Service = ( // challenge"). const isBearerChallengeEndpoint = yield* Effect.tryPromise({ try: async (): Promise => { - const controller = new AbortController(); - const timer = setTimeout(() => controller.abort(), 6_000); - try { - const probeUrl = new URL(input.endpoint); - for (const [key, value] of Object.entries(input.queryParams ?? {})) { - probeUrl.searchParams.set(key, value); - } - const response = await fetch(probeUrl.toString(), { - method: "POST", - headers: { - ...(input.headers ?? 
{}), - "content-type": "application/json", - accept: "application/json, text/event-stream", - }, - body: JSON.stringify({ - jsonrpc: "2.0", - id: 1, - method: "initialize", - params: { - protocolVersion: "2025-06-18", - capabilities: {}, - clientInfo: { name: "executor-probe", version: "0" }, - }, - }), - signal: controller.signal, - }); - if (response.status !== 401) return false; - const wwwAuth = - response.headers.get("www-authenticate") ?? - response.headers.get("WWW-Authenticate"); - return !!wwwAuth && /^\s*bearer\b/i.test(wwwAuth); - } finally { - clearTimeout(timer); + const probeUrl = parseUrlOption(input.endpoint); + if (!probeUrl) return false; + for (const [key, value] of Object.entries(input.queryParams ?? {})) { + probeUrl.searchParams.set(key, value); } + const response = await fetch(probeUrl.toString(), { + method: "POST", + headers: { + ...(input.headers ?? {}), + "content-type": "application/json", + accept: "application/json, text/event-stream", + }, + body: JSON.stringify({ + jsonrpc: "2.0", + id: 1, + method: "initialize", + params: { + protocolVersion: "2025-06-18", + capabilities: {}, + clientInfo: { name: "executor-probe", version: "0" }, + }, + }), + signal: AbortSignal.timeout(6_000), + }); + if (response.status !== 401) return false; + const wwwAuth = + response.headers.get("www-authenticate") ?? response.headers.get("WWW-Authenticate"); + return !!wwwAuth && /^\s*bearer\b/i.test(wwwAuth); }, catch: () => null, }).pipe(Effect.catch(() => Effect.succeed(false))); return { - resourceMetadata: - (resource?.metadata as Record | undefined) ?? null, + resourceMetadata: (resource?.metadata as Record | undefined) ?? null, resourceMetadataUrl: resource?.metadataUrl ?? null, authorizationServerMetadata: (authServer?.metadata as Record | undefined) ?? 
null, @@ -459,20 +413,22 @@ export const makeOAuth2Service = ( strategy: OAuthDynamicDcrStrategy, ): Effect.Effect => Effect.gen(function* () { - const started = yield* beginDynamicAuthorization({ - endpoint: input.endpoint, - redirectUrl: input.redirectUrl, - state: "", - scopes: strategy.scopes, - }, { - resourceHeaders: input.headers, - resourceQueryParams: input.queryParams, - }).pipe( - Effect.catchTag("OAuthDiscoveryError", (err) => + const started = yield* beginDynamicAuthorization( + { + endpoint: input.endpoint, + redirectUrl: input.redirectUrl, + state: "", + scopes: strategy.scopes, + }, + { + resourceHeaders: input.headers, + resourceQueryParams: input.queryParams, + }, + ).pipe( + Effect.catchTag("OAuthDiscoveryError", () => Effect.fail( new OAuthStartError({ - message: `Dynamic authorization setup failed: ${err.message}`, - + message: "Dynamic authorization setup failed", }), ), ), @@ -492,10 +448,7 @@ export const makeOAuth2Service = ( authorizationUrl: started.state.authorizationServerMetadata.authorization_endpoint, clientId: started.state.clientInformation.client_id, redirectUrl: input.redirectUrl, - scopes: - strategy.scopes ?? - started.state.authorizationServerMetadata.scopes_supported ?? - [], + scopes: strategy.scopes ?? started.state.authorizationServerMetadata.scopes_supported ?? [], state: sessionId, codeChallenge, }); @@ -505,22 +458,20 @@ export const makeOAuth2Service = ( identityLabel: input.identityLabel ?? 
null, codeVerifier: started.codeVerifier, authorizationServerUrl: started.state.authorizationServerUrl, - authorizationServerMetadataUrl: - started.state.authorizationServerMetadataUrl, - authorizationServerMetadata: - started.state.authorizationServerMetadata as Record, + authorizationServerMetadataUrl: started.state.authorizationServerMetadataUrl, + authorizationServerMetadata: started.state.authorizationServerMetadata as Record< + string, + unknown + >, clientInformation: (() => { const value: unknown = started.state.clientInformation; return value as Record; })(), resourceMetadataUrl: started.state.resourceMetadataUrl, resourceMetadata: - (started.state.resourceMetadata as Record | null) ?? - null, + (started.state.resourceMetadata as Record | null) ?? null, scopes: [ - ...(strategy.scopes ?? - started.state.authorizationServerMetadata.scopes_supported ?? - []), + ...(strategy.scopes ?? started.state.authorizationServerMetadata.scopes_supported ?? []), ], }; @@ -544,25 +495,22 @@ export const makeOAuth2Service = ( ): Effect.Effect => Effect.gen(function* () { const clientId = yield* deps.secretsGet(strategy.clientIdSecretId).pipe( - Effect.mapError((err) => - // Storage failure propagates; null returns aren't errors — the - // branch below handles them. - err, + Effect.mapError( + (err) => + // Storage failure propagates; null returns aren't errors — the + // branch below handles them. 
+ err, ), ); if (clientId === null) { - return yield* Effect.fail( - new OAuthStartError({ - message: `client_id secret "${strategy.clientIdSecretId}" not found`, - }), - ); + return yield* new OAuthStartError({ + message: `client_id secret "${strategy.clientIdSecretId}" not found`, + }); } const sessionId = scopedSessionId(input.tokenScope, newSessionId()); const codeVerifier = createPkceCodeVerifier(); - const codeChallenge = yield* Effect.promise(() => - createPkceCodeChallenge(codeVerifier), - ); + const codeChallenge = yield* Effect.promise(() => createPkceCodeChallenge(codeVerifier)); const authorizationUrl = buildAuthorizationUrl({ authorizationUrl: strategy.authorizationEndpoint, @@ -611,11 +559,9 @@ export const makeOAuth2Service = ( const clientId = yield* deps.secretsGet(strategy.clientIdSecretId); const clientSecret = yield* deps.secretsGet(strategy.clientSecretSecretId); if (clientId === null || clientSecret === null) { - return yield* Effect.fail( - new OAuthStartError({ - message: "client_id / client_secret secret not found", - }), - ); + return yield* new OAuthStartError({ + message: "client_id / client_secret secret not found", + }); } const tokens = yield* exchangeClientCredentials({ @@ -627,18 +573,15 @@ export const makeOAuth2Service = ( clientAuth: strategy.clientAuth ?? "body", }).pipe( Effect.mapError( - (err) => + () => new OAuthStartError({ - message: `Client credentials exchange failed: ${err.message}`, - + message: "Client credentials exchange failed", }), ), ); const expiresAt = - typeof tokens.expires_in === "number" - ? now() + tokens.expires_in * 1000 - : null; + typeof tokens.expires_in === "number" ? now() + tokens.expires_in * 1000 : null; const providerState: OAuthProviderState = { kind: "client-credentials", @@ -666,19 +609,17 @@ export const makeOAuth2Service = ( refreshToken: null, expiresAt, oauthScope: tokens.scope ?? 
null, - providerState: Schema.encodeSync(OAuthProviderStateSchema)( - providerState, - ) as Record, + providerState: Schema.encodeSync(OAuthProviderStateSchema)(providerState) as Record< + string, + unknown + >, }), ) .pipe( Effect.mapError( - (err) => + () => new OAuthStartError({ - message: `Failed to mint connection: ${ - err instanceof Error ? err.message : String(err) - }`, - + message: "Failed to mint connection", }), ), ); @@ -709,22 +650,24 @@ export const makeOAuth2Service = ( payload: OAuthSessionPayload; strategyKind: string; }): Effect.Effect => - deps.adapter.create({ - model: "oauth2_session", - data: { - id: args.sessionId, - scope_id: args.input.tokenScope, - plugin_id: args.input.pluginId, - strategy: args.strategyKind, - connection_id: args.input.connectionId, - token_scope: args.input.tokenScope, - redirect_url: args.input.redirectUrl, - payload: encodeSessionPayload(args.payload) as Record, - expires_at: now() + OAUTH2_SESSION_TTL_MS, - created_at: new Date(), - }, - forceAllowId: true, - }).pipe(Effect.asVoid); + deps.adapter + .create({ + model: "oauth2_session", + data: { + id: args.sessionId, + scope_id: args.input.tokenScope, + plugin_id: args.input.pluginId, + strategy: args.strategyKind, + connection_id: args.input.connectionId, + token_scope: args.input.tokenScope, + redirect_url: args.input.redirectUrl, + payload: encodeSessionPayload(args.payload) as Record, + expires_at: now() + OAUTH2_SESSION_TTL_MS, + created_at: new Date(), + }, + forceAllowId: true, + }) + .pipe(Effect.asVoid); // ------------------------------------------------------------------- // complete — exchange the code, mint the Connection, delete the session @@ -741,53 +684,45 @@ export const makeOAuth2Service = ( where: [{ field: "id", value: input.state }], }); if (!row) { - return yield* Effect.fail( - new OAuthSessionNotFoundError({ sessionId: input.state }), - ); + return yield* new OAuthSessionNotFoundError({ sessionId: input.state }); } const deleteSession = 
deps.adapter.delete({ model: "oauth2_session", where: [ { field: "id", value: input.state }, - { field: "scope_id", value: row.scope_id as string }, + { field: "scope_id", value: row.scope_id }, ], }); if (input.error) { yield* deleteSession; - return yield* Effect.fail( - new OAuthCompleteError({ - message: `Authorization server returned error: ${input.error}`, - code: input.error, - }), - ); + return yield* new OAuthCompleteError({ + message: `Authorization server returned error: ${input.error}`, + code: input.error, + }); } if (!input.code) { yield* deleteSession; - return yield* Effect.fail( - new OAuthCompleteError({ - message: "Missing authorization code", - }), - ); + return yield* new OAuthCompleteError({ + message: "Missing authorization code", + }); } const expiresAt = Number(row.expires_at as number | bigint); if (expiresAt <= now()) { yield* deleteSession; - return yield* Effect.fail( - new OAuthCompleteError({ - message: "OAuth session expired", - }), - ); + return yield* new OAuthCompleteError({ + message: "OAuth session expired", + }); } const payload = decodeSessionPayload(coerceJson(row.payload)); const endpoint = ""; // not stored on the row — the payload's own - // endpoint fields drive exchange; we just need - // a display string for the identity label. - const connectionId = row.connection_id as string; - const tokenScope = row.token_scope as string; - const redirectUrl = row.redirect_url as string; + // endpoint fields drive exchange; we just need + // a display string for the identity label. + const connectionId = row.connection_id; + const tokenScope = row.token_scope; + const redirectUrl = row.redirect_url; // Dispatch to the strategy-specific exchange. 
const exchangeResult = yield* (() => { @@ -795,11 +730,7 @@ export const makeOAuth2Service = ( case "dynamic-dcr": return exchangeDynamicDcr(payload, input.code, redirectUrl); case "authorization-code": - return exchangeAuthorizationCodeStrategy( - payload, - input.code, - redirectUrl, - ); + return exchangeAuthorizationCodeStrategy(payload, input.code, redirectUrl); } })().pipe(Effect.tapError(() => deleteSession)); @@ -828,11 +759,9 @@ export const makeOAuth2Service = ( .pipe( Effect.as(secretId), Effect.mapError( - (err) => + () => new OAuthCompleteError({ - message: `Failed to persist DCR client_secret: ${ - err instanceof Error ? err.message : String(err) - }`, + message: "Failed to persist DCR client_secret", }), ), ); @@ -842,21 +771,21 @@ export const makeOAuth2Service = ( payload.kind === "dynamic-dcr" ? { kind: "dynamic-dcr", - tokenEndpoint: (payload.authorizationServerMetadata as { - token_endpoint: string; - }).token_endpoint, + tokenEndpoint: ( + payload.authorizationServerMetadata as { + token_endpoint: string; + } + ).token_endpoint, issuerUrl: - (payload.authorizationServerMetadata as { issuer?: string }).issuer ?? - null, + (payload.authorizationServerMetadata as { issuer?: string }).issuer ?? 
null, authorizationServerUrl: payload.authorizationServerUrl, - authorizationServerMetadataUrl: - payload.authorizationServerMetadataUrl, - idTokenSigningAlgValuesSupported: - (payload.authorizationServerMetadata as { + authorizationServerMetadataUrl: payload.authorizationServerMetadataUrl, + idTokenSigningAlgValuesSupported: ( + payload.authorizationServerMetadata as { id_token_signing_alg_values_supported?: string[]; - }).id_token_signing_alg_values_supported, - clientId: (payload.clientInformation as { client_id: string }) - .client_id, + } + ).id_token_signing_alg_values_supported, + clientId: (payload.clientInformation as { client_id: string }).client_id, clientSecretSecretId: dynamicClientSecretSecretId, clientAuth: (payload.clientInformation as { token_endpoint_auth_method?: string }) @@ -901,19 +830,17 @@ export const makeOAuth2Service = ( : null, expiresAt: connectionExpiresAt, oauthScope: exchangeResult.tokens.scope ?? null, - providerState: Schema.encodeSync(OAuthProviderStateSchema)( - providerState, - ) as Record, + providerState: Schema.encodeSync(OAuthProviderStateSchema)(providerState) as Record< + string, + unknown + >, }), ) .pipe( Effect.mapError( - (err) => + () => new OAuthCompleteError({ - message: `Failed to mint connection: ${ - err instanceof Error ? err.message : String(err) - }`, - + message: "Failed to mint connection", }), ), ); @@ -962,19 +889,14 @@ export const makeOAuth2Service = ( redirectUrl, codeVerifier: payload.codeVerifier, code, - idTokenSigningAlgValuesSupported: - md.id_token_signing_alg_values_supported, - clientAuth: - ci.token_endpoint_auth_method === "client_secret_basic" - ? "basic" - : "body", + idTokenSigningAlgValuesSupported: md.id_token_signing_alg_values_supported, + clientAuth: ci.token_endpoint_auth_method === "client_secret_basic" ? 
"basic" : "body", }).pipe( Effect.mapError( (err) => new OAuthCompleteError({ - message: `Token exchange failed: ${err.message}`, + message: "Token exchange failed", code: err.error, - }), ), ); @@ -988,28 +910,21 @@ export const makeOAuth2Service = ( payload: Extract, code: string, redirectUrl: string, - ): Effect.Effect< - ExchangeResult, - OAuthCompleteError | StorageFailure - > => + ): Effect.Effect => Effect.gen(function* () { const clientId = yield* deps.secretsGet(payload.clientIdSecretId); if (clientId === null) { - return yield* Effect.fail( - new OAuthCompleteError({ - message: `client_id secret "${payload.clientIdSecretId}" not found`, - }), - ); + return yield* new OAuthCompleteError({ + message: `client_id secret "${payload.clientIdSecretId}" not found`, + }); } const clientSecret = payload.clientSecretSecretId ? yield* deps.secretsGet(payload.clientSecretSecretId) : null; if (payload.clientSecretSecretId && clientSecret === null) { - return yield* Effect.fail( - new OAuthCompleteError({ - message: `client_secret secret "${payload.clientSecretSecretId}" not found`, - }), - ); + return yield* new OAuthCompleteError({ + message: `client_secret secret "${payload.clientSecretSecretId}" not found`, + }); } const tokens = yield* exchangeAuthorizationCode({ @@ -1025,9 +940,8 @@ export const makeOAuth2Service = ( Effect.mapError( (err) => new OAuthCompleteError({ - message: `Token exchange failed: ${err.message}`, + message: "Token exchange failed", code: err.error, - }), ), ); @@ -1048,7 +962,7 @@ export const makeOAuth2Service = ( model: "oauth2_session", where: [ { field: "id", value: sessionId }, - { field: "scope_id", value: row.scope_id as string }, + { field: "scope_id", value: row.scope_id }, ], }); }); @@ -1071,9 +985,7 @@ export const makeOAuth2Service = ( catch: (cause) => new ConnectionRefreshError({ connectionId: input.connectionId, - message: `oauth2 providerState is malformed: ${ - cause instanceof Error ? 
cause.message : String(cause) - }`, + message: "oauth2 providerState is malformed", cause, }), }); @@ -1095,22 +1007,16 @@ export const makeOAuth2Service = ( case "dynamic-dcr": return Effect.gen(function* () { const csec = state.clientSecretSecretId - ? yield* deps - .secretsGet(state.clientSecretSecretId) - .pipe( - Effect.mapError( - (cause) => - new ConnectionRefreshError({ - connectionId: input.connectionId, - message: `Failed to resolve DCR client_secret: ${ - cause instanceof Error - ? cause.message - : String(cause) - }`, - cause, - }), - ), - ) + ? yield* deps.secretsGet(state.clientSecretSecretId).pipe( + Effect.mapError( + (cause) => + new ConnectionRefreshError({ + connectionId: input.connectionId, + message: "Failed to resolve DCR client_secret", + cause, + }), + ), + ) : null; if (state.clientSecretSecretId && csec === null) { return yield* new ConnectionRefreshError({ @@ -1124,22 +1030,16 @@ export const makeOAuth2Service = ( case "authorization-code": case "client-credentials": return Effect.gen(function* () { - const cid = yield* deps - .secretsGet(state.clientIdSecretId) - .pipe( - Effect.mapError( - (cause) => - new ConnectionRefreshError({ - connectionId: input.connectionId, - message: `Failed to resolve client_id secret: ${ - cause instanceof Error - ? cause.message - : String(cause) - }`, - cause, - }), - ), - ); + const cid = yield* deps.secretsGet(state.clientIdSecretId).pipe( + Effect.mapError( + (cause) => + new ConnectionRefreshError({ + connectionId: input.connectionId, + message: "Failed to resolve client_id secret", + cause, + }), + ), + ); if (cid === null) { return yield* new ConnectionRefreshError({ connectionId: input.connectionId, @@ -1148,22 +1048,16 @@ export const makeOAuth2Service = ( }); } const csec = state.clientSecretSecretId - ? 
yield* deps - .secretsGet(state.clientSecretSecretId) - .pipe( - Effect.mapError( - (cause) => - new ConnectionRefreshError({ - connectionId: input.connectionId, - message: `Failed to resolve client_secret: ${ - cause instanceof Error - ? cause.message - : String(cause) - }`, - cause, - }), - ), - ) + ? yield* deps.secretsGet(state.clientSecretSecretId).pipe( + Effect.mapError( + (cause) => + new ConnectionRefreshError({ + connectionId: input.connectionId, + message: "Failed to resolve client_secret", + cause, + }), + ), + ) : null; if (state.clientSecretSecretId && csec === null) { return yield* new ConnectionRefreshError({ @@ -1179,32 +1073,25 @@ export const makeOAuth2Service = ( const tokenEndpoint = yield* (() => { if (state.tokenEndpoint) return Effect.succeed(state.tokenEndpoint); - if ( - state.kind === "dynamic-dcr" && - state.authorizationServerUrl - ) { - return discoverAuthorizationServerMetadata( - state.authorizationServerUrl, - ).pipe( + if (state.kind === "dynamic-dcr" && state.authorizationServerUrl) { + return discoverAuthorizationServerMetadata(state.authorizationServerUrl).pipe( Effect.flatMap((metadata) => metadata?.metadata.token_endpoint ? Effect.succeed(metadata.metadata.token_endpoint) : Effect.fail( new ConnectionRefreshError({ connectionId: input.connectionId, - message: - "oauth2 legacy MCP providerState is missing token endpoint", + message: "oauth2 legacy MCP providerState is missing token endpoint", reauthRequired: true, }), ), ), Effect.mapError((cause) => - cause instanceof ConnectionRefreshError + Predicate.isTagged("ConnectionRefreshError")(cause) ? 
cause : new ConnectionRefreshError({ connectionId: input.connectionId, - message: - "Failed to discover token endpoint for legacy MCP OAuth connection", + message: "Failed to discover token endpoint for legacy MCP OAuth connection", reauthRequired: true, cause, }), @@ -1220,56 +1107,51 @@ export const makeOAuth2Service = ( ); })(); - const tokens = yield* (state.kind === "client-credentials" - ? exchangeClientCredentials({ - tokenUrl: tokenEndpoint, - clientId, - clientSecret: clientSecret ?? "", - scopes: state.scopes, - scopeSeparator: state.scopeSeparator, - clientAuth: state.clientAuth, - }) - : refreshAccessToken({ - tokenUrl: tokenEndpoint, - issuerUrl: - state.kind === "dynamic-dcr" || state.kind === "authorization-code" - ? (state.issuerUrl ?? undefined) - : undefined, - clientId, - clientSecret: clientSecret ?? undefined, - refreshToken: input.refreshToken!, - scopes: - state.kind === "dynamic-dcr" || state.kind === "authorization-code" - ? state.scopes - : undefined, - scopeSeparator: - state.kind === "dynamic-dcr" || state.kind === "authorization-code" - ? state.scopeSeparator - : undefined, - clientAuth: state.clientAuth, - idTokenSigningAlgValuesSupported: - state.kind === "dynamic-dcr" - ? state.idTokenSigningAlgValuesSupported - : undefined, - })).pipe( + const tokens = yield* ( + state.kind === "client-credentials" + ? exchangeClientCredentials({ + tokenUrl: tokenEndpoint, + clientId, + clientSecret: clientSecret ?? "", + scopes: state.scopes, + scopeSeparator: state.scopeSeparator, + clientAuth: state.clientAuth, + }) + : refreshAccessToken({ + tokenUrl: tokenEndpoint, + issuerUrl: + state.kind === "dynamic-dcr" || state.kind === "authorization-code" + ? (state.issuerUrl ?? undefined) + : undefined, + clientId, + clientSecret: clientSecret ?? undefined, + refreshToken: input.refreshToken!, + scopes: + state.kind === "dynamic-dcr" || state.kind === "authorization-code" + ? 
state.scopes + : undefined, + scopeSeparator: + state.kind === "dynamic-dcr" || state.kind === "authorization-code" + ? state.scopeSeparator + : undefined, + clientAuth: state.clientAuth, + idTokenSigningAlgValuesSupported: + state.kind === "dynamic-dcr" ? state.idTokenSigningAlgValuesSupported : undefined, + }) + ).pipe( Effect.mapError( (err) => new ConnectionRefreshError({ connectionId: input.connectionId, - message: `OAuth refresh failed: ${err.message}`, + message: "OAuth refresh failed", // Terminal RFC 6749 §5.2 errors mean retrying won't heal it. - reauthRequired: err.error - ? terminalRefreshErrors.has(err.error) - : false, - + reauthRequired: err.error ? terminalRefreshErrors.has(err.error) : false, }), ), ); const expiresAt = - typeof tokens.expires_in === "number" - ? now() + tokens.expires_in * 1000 - : null; + typeof tokens.expires_in === "number" ? now() + tokens.expires_in * 1000 : null; const result: ConnectionRefreshResult = { accessToken: tokens.access_token, @@ -1293,9 +1175,5 @@ export const makeOAuth2Service = ( const safeHostname = (value: string | null): string | null => { if (!value) return null; - try { - return new URL(value).host; - } catch { - return value; - } + return parseUrlOption(value)?.host ?? 
value; }; From 23439e6fe2c4d8dff267e543c2b8c49812e7b218 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:19:53 -0700 Subject: [PATCH 067/108] Use promiseExit in OAuth popup flow --- packages/react/src/plugins/oauth-sign-in.tsx | 146 ++++++++++--------- 1 file changed, 80 insertions(+), 66 deletions(-) diff --git a/packages/react/src/plugins/oauth-sign-in.tsx b/packages/react/src/plugins/oauth-sign-in.tsx index c931df54d..9f50ebd5a 100644 --- a/packages/react/src/plugins/oauth-sign-in.tsx +++ b/packages/react/src/plugins/oauth-sign-in.tsx @@ -1,5 +1,7 @@ import { useCallback, useEffect, useRef, useState } from "react"; import { useAtomSet } from "@effect/atom-react"; +import * as Effect from "effect/Effect"; +import * as Exit from "effect/Exit"; import { cancelOAuth, startOAuth } from "../api/atoms"; import { openOAuthPopup, type OAuthPopupResult } from "../api/oauth-popup"; @@ -80,8 +82,8 @@ export function useOAuthPopupFlow< startErrorMessage, } = options; const scopeId = useScope(); - const doStartOAuth = useAtomSet(startOAuth, { mode: "promise" }); - const doCancelOAuth = useAtomSet(cancelOAuth, { mode: "promise" }); + const doStartOAuth = useAtomSet(startOAuth, { mode: "promiseExit" }); + const doCancelOAuth = useAtomSet(cancelOAuth, { mode: "promiseExit" }); const [busy, setBusy] = useState(false); const [error, setError] = useState(null); const cleanupRef = useRef<(() => void) | null>(null); @@ -92,7 +94,7 @@ export function useOAuthPopupFlow< void doCancelOAuth({ params: { scopeId }, payload: { sessionId }, - }).catch(() => undefined); + }); }, [doCancelOAuth, scopeId], ); @@ -122,73 +124,81 @@ export function useOAuthPopupFlow< cancel(); setBusy(true); setError(null); - try { - const response = await input.run(); - if (response.authorizationUrl === null) { - const message = - noAuthorizationUrlMessage ?? 
"OAuth start did not produce an authorization URL"; - setBusy(false); - setError(message); - input.onError?.(message); - return; - } - - sessionRef.current = response.sessionId; - input.onAuthorizationStarted?.(response); - cleanupRef.current = openOAuthPopup({ - url: response.authorizationUrl, - popupName, - channelName: OAUTH_POPUP_MESSAGE_TYPE, - expectedSessionId: response.sessionId, - onResult: async (result: OAuthPopupResult) => { - cleanupRef.current = null; - sessionRef.current = null; + const startExit = await Effect.runPromiseExit( + Effect.tryPromise({ + try: input.run, + catch: (cause) => cause, + }), + ); + if (Exit.isFailure(startExit)) { + const message = startErrorMessage ?? "Failed to start sign-in"; + setBusy(false); + setError(message); + input.onError?.(message); + return; + } + const response = startExit.value; + if (response.authorizationUrl === null) { + const message = + noAuthorizationUrlMessage ?? "OAuth start did not produce an authorization URL"; + setBusy(false); + setError(message); + input.onError?.(message); + return; + } - if (!result.ok) { - setBusy(false); - setError(result.error); - input.onError?.(result.error); - return; - } + sessionRef.current = response.sessionId; + input.onAuthorizationStarted?.(response); + cleanupRef.current = openOAuthPopup({ + url: response.authorizationUrl, + popupName, + channelName: OAUTH_POPUP_MESSAGE_TYPE, + expectedSessionId: response.sessionId, + onResult: async (result: OAuthPopupResult) => { + cleanupRef.current = null; + sessionRef.current = null; - try { - await input.onSuccess(result); - setBusy(false); - } catch (e) { - const message = e instanceof Error ? e.message : "Failed to persist new connection"; - setBusy(false); - setError(message); - input.onError?.(message); - } - }, - onClosed: () => { - cleanupRef.current = null; - sessionRef.current = null; - cancelSession(response.sessionId); - const message = - popupClosedMessage ?? 
- "Sign-in cancelled - popup was closed before completing the flow."; + if (!result.ok) { setBusy(false); - setError(message); - input.onError?.(message); - }, - onOpenFailed: () => { - cleanupRef.current = null; - sessionRef.current = null; - cancelSession(response.sessionId); - const message = popupBlockedMessage ?? "Sign-in popup was blocked by the browser"; + setError(result.error); + input.onError?.(result.error); + return; + } + + const persisted = await Promise.resolve(input.onSuccess(result)).then( + () => true, + () => false, + ); + if (!persisted) { + const message = "Failed to persist new connection"; setBusy(false); setError(message); input.onError?.(message); - }, - }); - } catch (e) { - const message = - e instanceof Error ? e.message : (startErrorMessage ?? "Failed to start sign-in"); - setBusy(false); - setError(message); - input.onError?.(message); - } + return; + } + setBusy(false); + }, + onClosed: () => { + cleanupRef.current = null; + sessionRef.current = null; + cancelSession(response.sessionId); + const message = + popupClosedMessage ?? + "Sign-in cancelled - popup was closed before completing the flow."; + setBusy(false); + setError(message); + input.onError?.(message); + }, + onOpenFailed: () => { + cleanupRef.current = null; + sessionRef.current = null; + cancelSession(response.sessionId); + const message = popupBlockedMessage ?? "Sign-in popup was blocked by the browser"; + setBusy(false); + setError(message); + input.onError?.(message); + }, + }); }, [ cancel, @@ -214,10 +224,14 @@ export function useOAuthPopupFlow< ...input.payload, redirectUrl: input.payload.redirectUrl ?? oauthCallbackUrl(callbackPath), }, - }), + }).then((exit) => + Exit.isSuccess(exit) + ? exit.value + : Effect.runPromise(Effect.fail(startErrorMessage ?? 
"Failed to start sign-in")), + ), }); }, - [callbackPath, doStartOAuth, openAuthorization, scopeId], + [callbackPath, doStartOAuth, openAuthorization, scopeId, startErrorMessage], ); return { From 89bc9c9459f8aef358554cef3ed35a1861fe982a Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:19:58 -0700 Subject: [PATCH 068/108] Fix storage test boundary lint --- .../storage-core/src/testing/conformance.ts | 258 ++++++++---------- .../core/storage-core/src/testing/memory.ts | 59 ++-- 2 files changed, 137 insertions(+), 180 deletions(-) diff --git a/packages/core/storage-core/src/testing/conformance.ts b/packages/core/storage-core/src/testing/conformance.ts index 589b26119..6a7b80da9 100644 --- a/packages/core/storage-core/src/testing/conformance.ts +++ b/packages/core/storage-core/src/testing/conformance.ts @@ -13,7 +13,7 @@ import { describe, it } from "@effect/vitest"; import { expect } from "@effect/vitest"; -import { Effect, Result } from "effect"; +import { Data, Effect, Result } from "effect"; import type { DBAdapter } from "../adapter"; import type { DBSchema } from "../schema"; @@ -75,14 +75,13 @@ export type WithAdapter = ( fn: (adapter: DBAdapter) => Effect.Effect, ) => Effect.Effect; +class TransactionRollbackTestError extends Data.TaggedError("TransactionRollbackTestError")<{}> {} + // --------------------------------------------------------------------------- // Suite // --------------------------------------------------------------------------- -export const runAdapterConformance = ( - name: string, - withAdapter: WithAdapter, -): void => { +export const runAdapterConformance = (name: string, withAdapter: WithAdapter): void => { describe(`conformance: ${name}`, () => { const withDefaultsInput = ( value: unknown, @@ -136,9 +135,7 @@ export const runAdapterConformance = ( expect(found!.id).toBe(created.id); expect(found!.enabled).toBe(true); expect(found!.createdAt instanceof 
Date).toBe(true); - expect(found!.createdAt.toISOString()).toBe( - "2026-04-15T00:00:00.000Z", - ); + expect(found!.createdAt.toISOString()).toBe("2026-04-15T00:00:00.000Z"); expect(found!.metadata).toEqual({ slug: "gh", tags: ["a", "b"] }); }), ), @@ -161,7 +158,7 @@ export const runAdapterConformance = ( }); const found = yield* adapter.findOne<{ createdAt: Date }>({ model: "source", - where: [{ field: "id", value: row.id as string }], + where: [{ field: "id", value: row.id }], }); expect(found!.createdAt.toISOString()).toBe(d.toISOString()); }), @@ -381,10 +378,7 @@ export const runAdapterConformance = ( model: "source", where: [{ field: "priority", value: 5, operator: "gte" }], }); - expect(highPriority.map((r) => r.name).sort()).toEqual([ - "github-edge", - "gitlab", - ]); + expect(highPriority.map((r) => r.name).sort()).toEqual(["github-edge", "gitlab"]); }), ), ); @@ -516,9 +510,7 @@ export const runAdapterConformance = ( update: { touchedAt: explicitDate }, }); expect(updated).not.toBeNull(); - expect(updated!.touchedAt.toISOString()).toBe( - explicitDate.toISOString(), - ); + expect(updated!.touchedAt.toISOString()).toBe(explicitDate.toISOString()); // Sanity: omitting touchedAt should trigger onUpdate. 
const hookDriven = yield* adapter.update<{ id: string; @@ -530,9 +522,7 @@ export const runAdapterConformance = ( update: { name: "rename" }, }); expect(hookDriven).not.toBeNull(); - expect(hookDriven!.touchedAt.toISOString()).toBe( - "2099-01-01T00:00:00.000Z", - ); + expect(hookDriven!.touchedAt.toISOString()).toBe("2099-01-01T00:00:00.000Z"); }), ), ); @@ -546,7 +536,7 @@ export const runAdapterConformance = ( Effect.gen(function* () { yield* trx.create({ model: "tag", data: { label: "tx1" } }); yield* trx.create({ model: "tag", data: { label: "tx2" } }); - return yield* Effect.fail(new Error("boom")); + return yield* new TransactionRollbackTestError(); }), ) .pipe(Effect.result); @@ -571,128 +561,120 @@ export const runAdapterConformance = ( ), ); - it.effect( - "where: mixed AND/OR grouping follows upstream split-group semantics", - () => - // Locks in better-auth drizzle adapter's `convertWhereClause` - // semantics: AND-connector clauses and OR-connector clauses split - // into two groups, recombined as `(AND…) AND (OR…)`. For - // [{priority=1, AND}, {priority=10, OR}, {enabled=true, AND}], - // that's `(priority=1 AND enabled=true) AND (priority=10)` which - // can never match a single row — while a left-to-right fold - // would give `((priority=1 OR priority=10) AND enabled=true)` - // and return two rows. We assert the upstream reading. - withAdapter((adapter) => - Effect.gen(function* () { - yield* adapter.createMany({ - model: "source", - data: [ - { name: "lhs", priority: 1, enabled: true }, - { name: "rhs", priority: 10, enabled: true }, - { name: "off", priority: 1, enabled: false }, - ], - }); - const rows = yield* adapter.findMany<{ name: string }>({ - model: "source", - where: [ - { field: "priority", value: 1, connector: "AND" }, - { field: "priority", value: 10, connector: "OR" }, - { field: "enabled", value: true, connector: "AND" }, - ], - }); - // Upstream split-group: (priority=1 AND enabled=true) AND - // (priority=10). 
`lhs` has priority=1 (fails the OR group's - // priority=10 check) and `rhs` has priority=10 (fails the - // AND group's priority=1 check) — both reject. - expect(rows.map((r) => r.name)).toEqual([]); - - // Sanity: a pure disjunction still works. - const both = yield* adapter.findMany<{ name: string }>({ - model: "source", - where: [ - { field: "priority", value: 1, connector: "OR" }, - { field: "priority", value: 10, connector: "OR" }, - ], - sortBy: { field: "name", direction: "asc" }, - }); - expect(both.map((r) => r.name)).toEqual(["lhs", "off", "rhs"]); - }), - ), + it.effect("where: mixed AND/OR grouping follows upstream split-group semantics", () => + // Locks in better-auth drizzle adapter's `convertWhereClause` + // semantics: AND-connector clauses and OR-connector clauses split + // into two groups, recombined as `(AND…) AND (OR…)`. For + // [{priority=1, AND}, {priority=10, OR}, {enabled=true, AND}], + // that's `(priority=1 AND enabled=true) AND (priority=10)` which + // can never match a single row — while a left-to-right fold + // would give `((priority=1 OR priority=10) AND enabled=true)` + // and return two rows. We assert the upstream reading. + withAdapter((adapter) => + Effect.gen(function* () { + yield* adapter.createMany({ + model: "source", + data: [ + { name: "lhs", priority: 1, enabled: true }, + { name: "rhs", priority: 10, enabled: true }, + { name: "off", priority: 1, enabled: false }, + ], + }); + const rows = yield* adapter.findMany<{ name: string }>({ + model: "source", + where: [ + { field: "priority", value: 1, connector: "AND" }, + { field: "priority", value: 10, connector: "OR" }, + { field: "enabled", value: true, connector: "AND" }, + ], + }); + // Upstream split-group: (priority=1 AND enabled=true) AND + // (priority=10). `lhs` has priority=1 (fails the OR group's + // priority=10 check) and `rhs` has priority=10 (fails the + // AND group's priority=1 check) — both reject. 
+ expect(rows.map((r) => r.name)).toEqual([]); + + // Sanity: a pure disjunction still works. + const both = yield* adapter.findMany<{ name: string }>({ + model: "source", + where: [ + { field: "priority", value: 1, connector: "OR" }, + { field: "priority", value: 10, connector: "OR" }, + ], + sortBy: { field: "name", direction: "asc" }, + }); + expect(both.map((r) => r.name)).toEqual(["lhs", "off", "rhs"]); + }), + ), ); - it.effect( - "findMany resolves join: source → source_tag (one-to-many)", - () => - withAdapter((adapter) => - Effect.gen(function* () { - const src = yield* adapter.create<{ id: string; name: string }>({ - model: "source", - data: { name: "joined-source" }, - }); - yield* adapter.createMany({ - model: "source_tag", - data: [ - { sourceId: src.id, note: "first" }, - { sourceId: src.id, note: "second" }, - ], - }); - - const many = yield* adapter.findMany<{ - id: string; - name: string; - source_tag: ReadonlyArray<{ note: string; sourceId: string }>; - }>({ - model: "source", - where: [{ field: "id", value: src.id }], - join: { source_tag: true }, - }); - expect(many).toHaveLength(1); - const parent = many[0]!; - expect(parent.name).toBe("joined-source"); - expect(Array.isArray(parent.source_tag)).toBe(true); - expect(parent.source_tag).toHaveLength(2); - expect( - parent.source_tag.map((t) => t.note).sort(), - ).toEqual(["first", "second"]); - }), - ), + it.effect("findMany resolves join: source → source_tag (one-to-many)", () => + withAdapter((adapter) => + Effect.gen(function* () { + const src = yield* adapter.create<{ id: string; name: string }>({ + model: "source", + data: { name: "joined-source" }, + }); + yield* adapter.createMany({ + model: "source_tag", + data: [ + { sourceId: src.id, note: "first" }, + { sourceId: src.id, note: "second" }, + ], + }); + + const many = yield* adapter.findMany<{ + id: string; + name: string; + source_tag: ReadonlyArray<{ note: string; sourceId: string }>; + }>({ + model: "source", + where: [{ field: "id", 
value: src.id }], + join: { source_tag: true }, + }); + expect(many).toHaveLength(1); + const parent = many[0]!; + expect(parent.name).toBe("joined-source"); + expect(Array.isArray(parent.source_tag)).toBe(true); + expect(parent.source_tag).toHaveLength(2); + expect(parent.source_tag.map((t) => t.note).sort()).toEqual(["first", "second"]); + }), + ), ); - it.effect( - "findOne resolves join: source_tag → source (one-to-one)", - () => - withAdapter((adapter) => - Effect.gen(function* () { - const src = yield* adapter.create<{ id: string; name: string }>({ - model: "source", - data: { name: "owner" }, - }); - const child = yield* adapter.create<{ - id: string; - sourceId: string; - note: string; - }>({ - model: "source_tag", - data: { sourceId: src.id, note: "only" }, - }); - - const found = yield* adapter.findOne<{ - id: string; - note: string; - sourceId: string; - source: { id: string; name: string } | null; - }>({ - model: "source_tag", - where: [{ field: "id", value: child.id }], - join: { source: true }, - }); - expect(found).not.toBeNull(); - expect(found!.note).toBe("only"); - expect(found!.source).not.toBeNull(); - expect(found!.source!.id).toBe(src.id); - expect(found!.source!.name).toBe("owner"); - }), - ), + it.effect("findOne resolves join: source_tag → source (one-to-one)", () => + withAdapter((adapter) => + Effect.gen(function* () { + const src = yield* adapter.create<{ id: string; name: string }>({ + model: "source", + data: { name: "owner" }, + }); + const child = yield* adapter.create<{ + id: string; + sourceId: string; + note: string; + }>({ + model: "source_tag", + data: { sourceId: src.id, note: "only" }, + }); + + const found = yield* adapter.findOne<{ + id: string; + note: string; + sourceId: string; + source: { id: string; name: string } | null; + }>({ + model: "source_tag", + where: [{ field: "id", value: child.id }], + join: { source: true }, + }); + expect(found).not.toBeNull(); + expect(found!.note).toBe("only"); + 
expect(found!.source).not.toBeNull(); + expect(found!.source!.id).toBe(src.id); + expect(found!.source!.name).toBe("owner"); + }), + ), ); it.effect("nested writes inside a transaction see the tx state", () => diff --git a/packages/core/storage-core/src/testing/memory.ts b/packages/core/storage-core/src/testing/memory.ts index 9d81d6278..c3458a57d 100644 --- a/packages/core/storage-core/src/testing/memory.ts +++ b/packages/core/storage-core/src/testing/memory.ts @@ -31,11 +31,7 @@ type Row = Record; type Store = Record; type Comparable = string | number | boolean | Date; -const compare = ( - a: unknown, - b: unknown, - op: "gt" | "gte" | "lt" | "lte", -): boolean => { +const compare = (a: unknown, b: unknown, op: "gt" | "gte" | "lt" | "lte"): boolean => { if ( !( typeof a === "string" || @@ -43,12 +39,7 @@ const compare = ( typeof a === "boolean" || a instanceof Date ) || - !( - typeof b === "string" || - typeof b === "number" || - typeof b === "boolean" || - b instanceof Date - ) + !(typeof b === "string" || typeof b === "number" || typeof b === "boolean" || b instanceof Date) ) { return false; } @@ -74,27 +65,25 @@ const evalClause = (record: Row, clause: CleanedWhere): boolean => { const isInsensitive = mode === "insensitive" && (typeof value === "string" || - (Array.isArray(value) && - (value as unknown[]).every((v) => typeof v === "string"))); + (Array.isArray(value) && (value as unknown[]).every((v) => typeof v === "string"))); const lhs = record[field]; - const lowerStr = (v: unknown) => - typeof v === "string" ? v.toLowerCase() : v; + const lowerStr = (v: unknown) => (typeof v === "string" ? v.toLowerCase() : v); const cmp = (a: unknown, b: unknown): boolean => isInsensitive ? 
lowerStr(a) === lowerStr(b) : a === b; switch (operator) { case "in": + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: sync test adapter predicate preserves invalid where-clause failure semantics if (!Array.isArray(value)) throw new Error("Value must be an array"); return (value as unknown[]).some((v) => cmp(lhs, v)); case "not_in": + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: sync test adapter predicate preserves invalid where-clause failure semantics if (!Array.isArray(value)) throw new Error("Value must be an array"); return !(value as unknown[]).some((v) => cmp(lhs, v)); case "contains": { if (typeof lhs !== "string" || typeof value !== "string") return false; - return isInsensitive - ? lhs.toLowerCase().includes(value.toLowerCase()) - : lhs.includes(value); + return isInsensitive ? lhs.toLowerCase().includes(value.toLowerCase()) : lhs.includes(value); } case "starts_with": { if (typeof lhs !== "string" || typeof value !== "string") return false; @@ -104,9 +93,7 @@ const evalClause = (record: Row, clause: CleanedWhere): boolean => { } case "ends_with": { if (typeof lhs !== "string" || typeof value !== "string") return false; - return isInsensitive - ? lhs.toLowerCase().endsWith(value.toLowerCase()) - : lhs.endsWith(value); + return isInsensitive ? 
lhs.toLowerCase().endsWith(value.toLowerCase()) : lhs.endsWith(value); } case "ne": return !cmp(lhs, value); @@ -135,14 +122,10 @@ const evalClause = (record: Row, clause: CleanedWhere): boolean => { const matchAll = (record: Row, where: readonly CleanedWhere[]): boolean => { if (where.length === 0) return true; if (where.length === 1) return evalClause(record, where[0]!); - const andGroup = where.filter( - (w) => w.connector === "AND" || !w.connector, - ); + const andGroup = where.filter((w) => w.connector === "AND" || !w.connector); const orGroup = where.filter((w) => w.connector === "OR"); - const andResult = - andGroup.length === 0 ? true : andGroup.every((w) => evalClause(record, w)); - const orResult = - orGroup.length === 0 ? true : orGroup.some((w) => evalClause(record, w)); + const andResult = andGroup.length === 0 ? true : andGroup.every((w) => evalClause(record, w)); + const orResult = orGroup.length === 0 ? true : orGroup.some((w) => evalClause(record, w)); return andResult && orResult; }; @@ -167,9 +150,7 @@ export interface MakeMemoryAdapterOptions { readonly generateId?: () => string; } -export const makeMemoryAdapter = ( - options: MakeMemoryAdapterOptions, -): DBAdapter => { +export const makeMemoryAdapter = (options: MakeMemoryAdapterOptions): DBAdapter => { let store: Store = {}; const tableFor = (model: string): Row[] => { @@ -186,9 +167,7 @@ export const makeMemoryAdapter = ( const out: Row = { ...base }; for (const [target, cfg] of Object.entries(join)) { const targetRows = tableFor(target); - const matches = targetRows.filter( - (r) => r[cfg.on.to] === base[cfg.on.from], - ); + const matches = targetRows.filter((r) => r[cfg.on.to] === base[cfg.on.from]); if (cfg.relation === "one-to-one") { out[target] = matches[0] ?? null; } else { @@ -328,11 +307,9 @@ export const makeMemoryAdapter = ( // Snapshot-based transaction: clone on entry, restore on failure. 
const txFn: DBAdapterFactoryConfig["transaction"] = ( - cb: (trx: Parameters[0] extends ( - t: infer T, - ) => unknown - ? T - : never) => Effect.Effect, + cb: ( + trx: Parameters[0] extends (t: infer T) => unknown ? T : never, + ) => Effect.Effect, ) => Effect.gen(function* () { const snapshot = cloneStore(store); @@ -353,9 +330,7 @@ export const makeMemoryAdapter = ( supportsDates: true, supportsBooleans: true, supportsArrays: true, - customIdGenerator: options.generateId - ? () => options.generateId!() - : undefined, + customIdGenerator: options.generateId ? () => options.generateId!() : undefined, transaction: txFn, }, adapter: custom, From 4256b958608bb1bb9ad4164279a86b67ef0bcf55 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:20:55 -0700 Subject: [PATCH 069/108] Fix OpenAPI plugin typed boundaries --- packages/plugins/openapi/src/sdk/parse.ts | 52 ++++----- packages/plugins/openapi/src/sdk/plugin.ts | 116 ++++++++++----------- 2 files changed, 80 insertions(+), 88 deletions(-) diff --git a/packages/plugins/openapi/src/sdk/parse.ts b/packages/plugins/openapi/src/sdk/parse.ts index 50247a6a7..d98512cc5 100644 --- a/packages/plugins/openapi/src/sdk/parse.ts +++ b/packages/plugins/openapi/src/sdk/parse.ts @@ -40,9 +40,9 @@ export const fetchSpecText = Effect.fn("OpenApi.fetchSpecText")(function* ( const response = yield* client.execute(request).pipe( Effect.timeout(Duration.seconds(20)), Effect.mapError( - (cause) => + (_cause) => new OpenApiParseError({ - message: `Failed to fetch OpenAPI document: ${cause instanceof Error ? cause.message : String(cause)}`, + message: "Failed to fetch OpenAPI document", }), ), ); @@ -53,9 +53,9 @@ export const fetchSpecText = Effect.fn("OpenApi.fetchSpecText")(function* ( } return yield* response.text.pipe( Effect.mapError( - (cause) => + (_cause) => new OpenApiParseError({ - message: `Failed to read OpenAPI document body: ${cause instanceof Error ? 
cause.message : String(cause)}`, + message: "Failed to read OpenAPI document body", }), ), ); @@ -79,13 +79,7 @@ export const resolveSpecText = (input: string, credentials?: SpecFetchCredential * the 128MB Cloudflare Workers memory cap. */ export const parse = Effect.fn("OpenApi.parse")(function* (text: string) { - const api = yield* Effect.try({ - try: () => parseTextToObject(text), - catch: (error) => - new OpenApiParseError({ - message: `Failed to parse OpenAPI document: ${error instanceof Error ? error.message : String(error)}`, - }), - }); + const api = yield* parseTextToObject(text); if (!isOpenApi3(api)) { return yield* new OpenApiExtractionErrorFromParse({ @@ -104,20 +98,28 @@ export const parse = Effect.fn("OpenApi.parse")(function* (text: string) { const isOpenApi3 = (doc: OpenAPI.Document): doc is OpenAPIV3.Document | OpenAPIV3_1.Document => "openapi" in doc && typeof doc.openapi === "string" && doc.openapi.startsWith("3."); -const parseTextToObject = (text: string): OpenAPI.Document => { - const trimmed = text.trim(); - if (trimmed.length === 0) throw new Error("OpenAPI document is empty"); +const parseTextToObject = (text: string): Effect.Effect => + Effect.gen(function* () { + const trimmed = text.trim(); + if (trimmed.length === 0) { + return yield* new OpenApiParseError({ + message: "OpenAPI document is empty", + }); + } - let parsed: unknown; - try { - parsed = JSON.parse(trimmed); - } catch { - parsed = YAML.parse(trimmed); - } + const parsed = yield* Effect.try({ + try: () => YAML.parse(trimmed) as unknown, + catch: () => + new OpenApiParseError({ + message: "Failed to parse OpenAPI document", + }), + }); - if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) { - throw new Error("OpenAPI document must parse to an object"); - } + if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) { + return yield* new OpenApiParseError({ + message: "OpenAPI document must parse to an object", + }); + } - return parsed 
as OpenAPI.Document; -}; + return parsed as OpenAPI.Document; + }); diff --git a/packages/plugins/openapi/src/sdk/plugin.ts b/packages/plugins/openapi/src/sdk/plugin.ts index 70d5130c8..e313286f8 100644 --- a/packages/plugins/openapi/src/sdk/plugin.ts +++ b/packages/plugins/openapi/src/sdk/plugin.ts @@ -1,4 +1,4 @@ -import { Effect, Option, Schema } from "effect"; +import { Effect, Option, Predicate, Schema } from "effect"; import type { Layer } from "effect"; import { FetchHttpClient, HttpClient } from "effect/unstable/http"; @@ -376,13 +376,15 @@ const resolveEffectiveSourceConfig = ( base: StoredSource, ): Effect.Effect => Effect.gen(function* () { - const rank = new Map(ctx.scopes.map((scope, index) => [scope.id as string, index] as const)); + const rank = new Map( + ctx.scopes.map((scope, index) => [scope.id, index] as const), + ); const baseRank = rank.get(base.scope) ?? Infinity; let fallback: StoredSource | null = null; for (let index = baseRank + 1; index < ctx.scopes.length; index++) { const scope = ctx.scopes[index]; if (!scope) continue; - fallback = yield* ctx.storage.getSource(base.namespace, scope.id as string); + fallback = yield* ctx.storage.getSource(base.namespace, scope.id); if (fallback) break; } @@ -435,13 +437,13 @@ const resolveConfiguredHeaders = ( value.slot, ); if (binding?.value.kind === "secret") { - const secret = yield* ctx.secrets.get(binding.value.secretId as string).pipe( - Effect.mapError((err) => - "_tag" in err && err._tag === "SecretOwnedByConnectionError" - ? 
new OpenApiOAuthError({ - message: `Secret not found for header "${name}"`, - }) - : err, + const secret = yield* ctx.secrets.get(binding.value.secretId).pipe( + Effect.catchTag("SecretOwnedByConnectionError", () => + Effect.fail( + new OpenApiOAuthError({ + message: `Secret not found for header "${name}"`, + }), + ), ), ); if (secret === null) { @@ -460,10 +462,8 @@ const resolveConfiguredHeaders = ( if (legacy) { const fallback = yield* resolveHeaders({ [name]: legacy }, ctx.secrets).pipe( Effect.map((headers) => headers[name]!), - Effect.mapError((err) => - err instanceof OpenApiOAuthError - ? err - : new OpenApiOAuthError({ message: err.message }), + Effect.catchTag("OpenApiInvocationError", () => + Effect.fail(new OpenApiOAuthError({ message: "Secret resolution failed" })), ), ); resolved[name] = fallback; @@ -488,14 +488,14 @@ const resolveHeaderValues = ( message: `Secret not found for "${name}"`, }), onError: (err, name) => - "_tag" in err && err._tag === "SecretOwnedByConnectionError" + Predicate.isTagged("SecretOwnedByConnectionError")(err) ? new OpenApiOAuthError({ message: `Secret not found for "${name}"`, }) : err, }).pipe( Effect.mapError((err) => - "_tag" in err && err._tag === "SecretOwnedByConnectionError" + Predicate.isTagged("SecretOwnedByConnectionError")(err) ? new OpenApiOAuthError({ message: "Secret resolution failed" }) : err, ), @@ -518,7 +518,7 @@ const resolveOAuthConnectionId = ( params.oauth2.connectionSlot, ); if (binding?.value.kind === "connection") { - const connectionId = binding.value.connectionId as string; + const connectionId = binding.value.connectionId; const connection = yield* ctx.connections.get(connectionId); return connection ? 
connectionId : null; } @@ -763,7 +763,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { const configFile = options?.configFile; return { - previewSpec: (input) => + previewSpec: (input: string | OpenApiPreviewInput) => Effect.gen(function* () { const previewInput = typeof input === "string" ? { spec: input } : input; const credentials = yield* resolveSpecFetchCredentials( @@ -776,7 +776,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { return yield* previewSpec(specText).pipe(Effect.provide(httpClientLayer)); }), - addSpec: (config) => + addSpec: (config: OpenApiSpecConfig) => Effect.gen(function* () { const result = yield* addSpecInternal(config); if (configFile) { @@ -785,7 +785,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { return result; }), - removeSpec: (namespace, scope) => + removeSpec: (namespace: string, scope: string) => Effect.gen(function* () { yield* ctx.transaction( Effect.gen(function* () { @@ -798,7 +798,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { } }), - getSource: (namespace, scope) => + getSource: (namespace: string, scope: string) => Effect.gen(function* () { const source = yield* ctx.storage.getSource(namespace, scope); if (!source) return null; @@ -809,7 +809,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { }; }), - updateSource: (namespace, scope, input) => + updateSource: (namespace: string, scope: string, input: OpenApiUpdateSourceInput) => Effect.gen(function* () { const existing = yield* ctx.storage.getSource(namespace, scope); if (!existing) return; @@ -847,14 +847,14 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { } }), - listSourceBindings: (sourceId, sourceScope) => + listSourceBindings: (sourceId: string, sourceScope: string) => ctx.storage.listSourceBindings(sourceId, sourceScope), - setSourceBinding: (input) => 
ctx.storage.setSourceBinding(input), + setSourceBinding: (input: OpenApiSourceBindingInput) => ctx.storage.setSourceBinding(input), - removeSourceBinding: (sourceId, sourceScope, slot, scope) => + removeSourceBinding: (sourceId: string, sourceScope: string, slot: string, scope: string) => ctx.storage.removeSourceBinding(sourceId, sourceScope, slot, scope), - } satisfies OpenApiPluginExtension; + }; }, staticSources: (self) => [ @@ -909,7 +909,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { handler: ({ ctx, args }) => self.addSpec({ ...(args as AddSourceInput), - scope: ctx.scopes.at(-1)!.id as string, + scope: ctx.scopes.at(-1)!.id, }), }, ], @@ -923,16 +923,18 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { // openapi_operation + openapi_source rows live at the same // scope, so pin every store lookup to it instead of relying // on the scoped adapter's stack-wide fall-through. - const toolScope = toolRow.scope_id as string; + const toolScope = toolRow.scope_id; const op = yield* ctx.storage.getOperationByToolId(toolRow.id, toolScope); if (!op) { - return yield* Effect.fail( - new Error(`No OpenAPI operation found for tool "${toolRow.id}"`), - ); + return yield* new OpenApiExtractionError({ + message: `No OpenAPI operation found for tool "${toolRow.id}"`, + }); } const source = yield* ctx.storage.getSource(op.sourceId, toolScope); if (!source) { - return yield* Effect.fail(new Error(`No OpenAPI source found for "${op.sourceId}"`)); + return yield* new OpenApiExtractionError({ + message: `No OpenAPI source found for "${op.sourceId}"`, + }); } const effective = yield* resolveEffectiveSourceConfig(ctx, source); @@ -942,10 +944,8 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { sourceScope: effective.headersSource.scope, headers: config.headers ?? 
{}, legacyHeaders: effective.headersSource.legacy?.headers, - }).pipe(Effect.mapError((err) => new Error(err.message))); - const resolvedQueryParams = yield* resolveHeaderValues(ctx, config.queryParams).pipe( - Effect.mapError((err) => new Error(err.message)), - ); + }); + const resolvedQueryParams = yield* resolveHeaderValues(ctx, config.queryParams); // If the source has OAuth2 auth, resolve a guaranteed-fresh // access token from the backing Connection and inject the @@ -959,22 +959,18 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { legacyOAuth2: effective.oauth2Source.legacy?.oauth2, }); if (!connectionId) { - return yield* Effect.fail( - new Error(`OAuth configuration for "${op.sourceId}" is missing a connection binding`), - ); + return yield* new OpenApiOAuthError({ + message: `OAuth configuration for "${op.sourceId}" is missing a connection binding`, + }); } - const accessToken = yield* ctx.connections - .accessToken(connectionId) - .pipe( - Effect.mapError( - (err) => - new Error( - `OAuth connection resolution failed: ${ - "message" in err ? (err as { message: string }).message : String(err) - }`, - ), - ), - ); + const accessToken = yield* ctx.connections.accessToken(connectionId).pipe( + Effect.mapError( + () => + new OpenApiOAuthError({ + message: "OAuth connection resolution failed", + }), + ), + ); resolvedHeaders.authorization = `Bearer ${accessToken}`; } @@ -1000,7 +996,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { // and we don't fall through to the wrong scope's bindings. 
const scopes = new Set(); for (const row of toolRows as readonly ToolRow[]) { - scopes.add(row.scope_id as string); + scopes.add(row.scope_id); } // One listOperationsBySource per scope is independent storage // work; run them in parallel so a shadowed source doesn't @@ -1021,7 +1017,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { const out: Record = {}; for (const row of toolRows as readonly ToolRow[]) { - const binding = byScope.get(row.scope_id as string)?.get(row.id); + const binding = byScope.get(row.scope_id)?.get(row.id); if (binding) { out[row.id] = annotationsForOperation(binding.method, binding.pathTemplate); } @@ -1041,9 +1037,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { usagesForSecret: ({ ctx, args }) => Effect.gen(function* () { const bindings = yield* ctx.storage.findBindingsBySecret(args.secretId); - const childRows = yield* ctx.storage.findChildRowsBySecret( - args.secretId, - ); + const childRows = yield* ctx.storage.findChildRowsBySecret(args.secretId); const sourceKeys = new Set(); for (const b of bindings) { @@ -1062,8 +1056,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { scopeId: ScopeId.make(b.scopeId), ownerKind: "openapi-source-binding", ownerId: b.sourceId, - ownerName: - sources.get(`${b.sourceScopeId}:${b.sourceId}`) ?? null, + ownerName: sources.get(`${b.sourceScopeId}:${b.sourceId}`) ?? 
null, slot: `binding:${b.slot}`, }), ); @@ -1085,9 +1078,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { usagesForConnection: ({ ctx, args }) => Effect.gen(function* () { - const bindings = yield* ctx.storage.findBindingsByConnection( - args.connectionId, - ); + const bindings = yield* ctx.storage.findBindingsByConnection(args.connectionId); const sourceKeys = new Set(); for (const b of bindings) { sourceKeys.add(`${b.sourceScopeId}:${b.sourceId}`); @@ -1100,8 +1091,7 @@ export const openApiPlugin = definePlugin((options?: OpenApiPluginOptions) => { scopeId: ScopeId.make(b.scopeId), ownerKind: "openapi-source-binding", ownerId: b.sourceId, - ownerName: - sources.get(`${b.sourceScopeId}:${b.sourceId}`) ?? null, + ownerName: sources.get(`${b.sourceScopeId}:${b.sourceId}`) ?? null, slot: `binding:${b.slot}`, }), ); From b91524bbac794aa9e68716df15e0f359f1d356ea Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:21:37 -0700 Subject: [PATCH 070/108] Fix MCP SDK store and test boundaries --- packages/plugins/mcp/src/sdk/binding-store.ts | 144 +++----- packages/plugins/mcp/src/sdk/discover.ts | 38 ++- packages/plugins/mcp/src/sdk/plugin.test.ts | 321 ++++++++---------- 3 files changed, 217 insertions(+), 286 deletions(-) diff --git a/packages/plugins/mcp/src/sdk/binding-store.ts b/packages/plugins/mcp/src/sdk/binding-store.ts index 898340c29..8602e4883 100644 --- a/packages/plugins/mcp/src/sdk/binding-store.ts +++ b/packages/plugins/mcp/src/sdk/binding-store.ts @@ -16,13 +16,9 @@ // and is owned by `ctx.oauth`. 
// --------------------------------------------------------------------------- -import { Effect, Schema } from "effect"; +import { Effect, Option, Schema } from "effect"; -import { - defineSchema, - type StorageDeps, - type StorageFailure, -} from "@executor-js/sdk/core"; +import { defineSchema, type StorageDeps, type StorageFailure } from "@executor-js/sdk/core"; import { McpToolBinding, @@ -119,16 +115,19 @@ const encodeSourceData = Schema.encodeSync(McpStoredSourceData); const decodeBinding = Schema.decodeUnknownSync(McpToolBinding); const encodeBinding = Schema.encodeSync(McpToolBinding); +const decodeJson = Schema.decodeUnknownOption(Schema.fromJsonString(Schema.Unknown)); const coerceJson = (value: unknown): unknown => { if (typeof value !== "string") return value; - try { - return JSON.parse(value); - } catch { - return value; - } + return Option.getOrElse(decodeJson(value), () => value); }; +const hasStringFields = ( + row: Record, + fields: Fields, +): row is Record & Record => + fields.every((field) => typeof row[field] === "string"); + // --- auth column packing/unpacking ------------------------------------------ interface AuthColumns { @@ -162,7 +161,7 @@ const authToColumns = (auth: McpConnectionAuth): AuthColumns => { }; const columnsToAuth = (row: Record): McpConnectionAuth => { - const kind = row.auth_kind as string; + const kind = row.auth_kind; if (kind === "header" && typeof row.auth_secret_id === "string") { const prefix = row.auth_secret_prefix as string | null | undefined; return { @@ -179,9 +178,7 @@ const columnsToAuth = (row: Record): McpConnectionAuth => { kind: "oauth2", connectionId: row.auth_connection_id, ...(cid ? { clientIdSecretId: cid } : {}), - ...(csec !== undefined && csec !== null - ? { clientSecretSecretId: csec } - : {}), + ...(csec !== undefined && csec !== null ? 
{ clientSecretSecretId: csec } : {}), }; } return { kind: "none" }; @@ -236,12 +233,11 @@ const rowsToValueMap = ( ): Record => { const out: Record = {}; for (const row of rows) { - const name = row.name as string; + if (typeof row.name !== "string") continue; + const name = row.name; if (row.kind === "secret" && typeof row.secret_id === "string") { const prefix = row.secret_prefix as string | undefined | null; - out[name] = prefix - ? { secretId: row.secret_id, prefix } - : { secretId: row.secret_id }; + out[name] = prefix ? { secretId: row.secret_id, prefix } : { secretId: row.secret_id }; } else if (row.kind === "text" && typeof row.text_value === "string") { out[name] = row.text_value; } @@ -288,7 +284,10 @@ export interface McpBindingStore { namespace: string, scope: string, ) => Effect.Effect< - ReadonlyArray<{ readonly toolId: string; readonly binding: McpToolBinding }>, + ReadonlyArray<{ + readonly toolId: string; + readonly binding: McpToolBinding; + }>, StorageFailure >; @@ -303,7 +302,10 @@ export interface McpBindingStore { readonly putBindings: ( namespace: string, scope: string, - entries: ReadonlyArray<{ readonly toolId: string; readonly binding: McpToolBinding }>, + entries: ReadonlyArray<{ + readonly toolId: string; + readonly binding: McpToolBinding; + }>, ) => Effect.Effect; readonly removeBindingsByNamespace: ( @@ -320,10 +322,7 @@ export interface McpBindingStore { scope: string, ) => Effect.Effect; readonly putSource: (source: McpStoredSource) => Effect.Effect; - readonly removeSource: ( - namespace: string, - scope: string, - ) => Effect.Effect; + readonly removeSource: (namespace: string, scope: string) => Effect.Effect; // --------------------------------------------------------------------- // Usage lookups — back `usagesForSecret` / `usagesForConnection`. @@ -332,9 +331,7 @@ export interface McpBindingStore { /** Source rows whose flattened auth columns reference the given * secret id. 
The `slot` field on each result tags which column * matched so the caller can produce a precise Usage.slot. */ - readonly findSourcesBySecret: ( - secretId: string, - ) => Effect.Effect< + readonly findSourcesBySecret: (secretId: string) => Effect.Effect< readonly { readonly namespace: string; readonly scope_id: string; @@ -345,9 +342,7 @@ export interface McpBindingStore { >; /** Source rows whose oauth2 auth points at the given connection id. */ - readonly findSourcesByConnection: ( - connectionId: string, - ) => Effect.Effect< + readonly findSourcesByConnection: (connectionId: string) => Effect.Effect< readonly { readonly namespace: string; readonly scope_id: string; @@ -379,9 +374,7 @@ export interface McpBindingStore { // Factory // --------------------------------------------------------------------------- -export const makeMcpStore = ({ - adapter: db, -}: StorageDeps): McpBindingStore => { +export const makeMcpStore = ({ adapter: db }: StorageDeps): McpBindingStore => { return { listBindingsBySource: (namespace, scope) => Effect.gen(function* () { @@ -486,18 +479,11 @@ export const makeMcpStore = ({ yield* deleteSourceChildren(source.namespace, source.scope); const auth: McpConnectionAuth = - source.config.transport === "remote" - ? source.config.auth - : { kind: "none" }; + source.config.transport === "remote" ? source.config.auth : { kind: "none" }; const authCols = authToColumns(auth); - const headers = - source.config.transport === "remote" - ? source.config.headers - : undefined; + const headers = source.config.transport === "remote" ? source.config.headers : undefined; const queryParams = - source.config.transport === "remote" - ? source.config.queryParams - : undefined; + source.config.transport === "remote" ? 
source.config.queryParams : undefined; // The encoded config keeps every plugin-private field but // strips auth/headers/queryParams — those moved to columns/ @@ -520,11 +506,7 @@ export const makeMcpStore = ({ forceAllowId: true, }); - const headerRows = valueMapToRows( - source.namespace, - source.scope, - headers, - ); + const headerRows = valueMapToRows(source.namespace, source.scope, headers); if (headerRows.length > 0) { yield* db.createMany({ model: "mcp_source_header", @@ -532,11 +514,7 @@ export const makeMcpStore = ({ forceAllowId: true, }); } - const paramRows = valueMapToRows( - source.namespace, - source.scope, - queryParams, - ); + const paramRows = valueMapToRows(source.namespace, source.scope, queryParams); if (paramRows.length > 0) { yield* db.createMany({ model: "mcp_source_query_param", @@ -579,15 +557,11 @@ export const makeMcpStore = ({ }), db.findMany({ model: "mcp_source", - where: [ - { field: "auth_client_id_secret_id", value: secretId }, - ], + where: [{ field: "auth_client_id_secret_id", value: secretId }], }), db.findMany({ model: "mcp_source", - where: [ - { field: "auth_client_secret_secret_id", value: secretId }, - ], + where: [{ field: "auth_client_secret_secret_id", value: secretId }], }), ], { concurrency: "unbounded" }, @@ -596,19 +570,18 @@ export const makeMcpStore = ({ for (const r of [...byHeader, ...byClientId, ...byClientSecret]) { dedup.set(`${r.scope_id}:${r.id}`, r); } - return [...dedup.values()].map((row) => ({ - namespace: row.id as string, - scope_id: row.scope_id as string, - name: row.name as string, - slot: - (byHeader as readonly Record[]).includes(row) + return [...dedup.values()] + .filter((row) => hasStringFields(row, ["id", "scope_id", "name"])) + .map((row) => ({ + namespace: row.id, + scope_id: row.scope_id, + name: row.name, + slot: (byHeader as readonly Record[]).includes(row) ? "auth.header" - : (byClientId as readonly Record[]).includes( - row, - ) + : (byClientId as readonly Record[]).includes(row) ? 
"auth.oauth2.client_id" : "auth.oauth2.client_secret", - })); + })); }), findSourcesByConnection: (connectionId) => @@ -620,9 +593,9 @@ export const makeMcpStore = ({ .pipe( Effect.map((rows) => rows.map((r) => ({ - namespace: r.id as string, - scope_id: r.scope_id as string, - name: r.name as string, + namespace: r.id, + scope_id: r.scope_id, + name: r.name, slot: "auth.oauth2.connection", })), ), @@ -646,15 +619,15 @@ export const makeMcpStore = ({ return [ ...headers.map((r) => ({ kind: "header" as const, - source_id: r.source_id as string, - scope_id: r.scope_id as string, - name: r.name as string, + source_id: r.source_id, + scope_id: r.scope_id, + name: r.name, })), ...params.map((r) => ({ kind: "query_param" as const, - source_id: r.source_id as string, - scope_id: r.scope_id as string, - name: r.name as string, + source_id: r.source_id, + scope_id: r.scope_id, + name: r.name, })), ]; }), @@ -666,8 +639,8 @@ export const makeMcpStore = ({ const requested = new Set(keys); const out = new Map(); for (const r of rows) { - const key = `${r.scope_id as string}:${r.id as string}`; - if (requested.has(key)) out.set(key, r.name as string); + const key = `${r.scope_id}:${r.id}`; + if (requested.has(key)) out.set(key, r.name); } return out; }), @@ -679,10 +652,7 @@ export const makeMcpStore = ({ function deleteSourceChildren(namespace: string, scope: string) { return Effect.gen(function* () { - for (const model of [ - "mcp_source_header", - "mcp_source_query_param", - ] as const) { + for (const model of ["mcp_source_header", "mcp_source_query_param"] as const) { yield* db.deleteMany({ model, where: [ @@ -740,9 +710,7 @@ export const makeMcpStore = ({ // Keeps the remaining structural fields (transport, endpoint, etc.) in // the JSON config column. Per-transport: only the remote variant has // these fields, so this is a no-op for stdio. 
-const stripExtractedFields = ( - encoded: Record, -): Record => { +const stripExtractedFields = (encoded: Record): Record => { if (encoded.transport !== "remote") return encoded; const { auth, headers, queryParams, ...rest } = encoded; void auth; diff --git a/packages/plugins/mcp/src/sdk/discover.ts b/packages/plugins/mcp/src/sdk/discover.ts index 58f6d7278..c24b6b93c 100644 --- a/packages/plugins/mcp/src/sdk/discover.ts +++ b/packages/plugins/mcp/src/sdk/discover.ts @@ -27,10 +27,10 @@ export const discoverTools = ( // Acquire connection const connection = yield* connector.pipe( Effect.mapError( - (err) => + ({ message }) => new McpToolDiscoveryError({ stage: "connect", - message: `Failed connecting to MCP server: ${err.message}`, + message: `Failed connecting to MCP server: ${message}`, }), ), ); @@ -38,23 +38,19 @@ export const discoverTools = ( // List tools const listResult = yield* Effect.tryPromise({ try: () => connection.client.listTools(), - catch: (cause) => + catch: () => new McpToolDiscoveryError({ stage: "list_tools", - message: `Failed listing MCP tools: ${ - cause instanceof Error ? 
cause.message : String(cause) - }`, + message: "Failed listing MCP tools", }), }); if (!isListToolsResult(listResult)) { - yield* Effect.promise(() => connection.close().catch(() => {})); - return yield* Effect.fail( - new McpToolDiscoveryError({ - stage: "list_tools", - message: "MCP listTools response did not match the expected schema", - }), - ); + yield* closeConnection(connection); + return yield* new McpToolDiscoveryError({ + stage: "list_tools", + message: "MCP listTools response did not match the expected schema", + }); } const manifest = extractManifestFromListToolsResult(listResult, { @@ -62,7 +58,21 @@ export const discoverTools = ( }); // Close the connection after discovery - yield* Effect.promise(() => connection.close().catch(() => {})); + yield* closeConnection(connection); return manifest; }); + +const closeConnection = (connection: { + readonly close: () => Promise; +}): Effect.Effect => + Effect.ignore( + Effect.tryPromise({ + try: () => connection.close(), + catch: () => + new McpToolDiscoveryError({ + stage: "list_tools", + message: "Failed closing MCP connection", + }), + }), + ); diff --git a/packages/plugins/mcp/src/sdk/plugin.test.ts b/packages/plugins/mcp/src/sdk/plugin.test.ts index cfd2ead8c..c9c880657 100644 --- a/packages/plugins/mcp/src/sdk/plugin.test.ts +++ b/packages/plugins/mcp/src/sdk/plugin.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "@effect/vitest"; -import { Effect } from "effect"; +import { Effect, Result } from "effect"; import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import { z } from "zod"; @@ -17,11 +17,7 @@ import { } from "@executor-js/sdk"; import { mcpPlugin } from "./plugin"; -import { - extractManifestFromListToolsResult, - deriveMcpNamespace, - joinToolPath, -} from "./manifest"; +import { extractManifestFromListToolsResult, deriveMcpNamespace, joinToolPath } from "./manifest"; import { serveMcpServer } from "./test-utils"; // 
--------------------------------------------------------------------------- @@ -36,14 +32,12 @@ const makeMemorySecretsPlugin = () => { const provider: SecretProvider = { key: "memory", writable: true, - get: (id, scope) => - Effect.sync(() => store.get(`${scope}${id}`) ?? null), + get: (id, scope) => Effect.sync(() => store.get(`${scope}${id}`) ?? null), set: (id, value, scope) => Effect.sync(() => { store.set(`${scope}${id}`, value); }), - delete: (id, scope) => - Effect.sync(() => store.delete(`${scope}${id}`)), + delete: (id, scope) => Effect.sync(() => store.delete(`${scope}${id}`)), list: () => Effect.sync(() => Array.from(store.keys()).map((k) => { @@ -158,9 +152,7 @@ describe("deriveMcpNamespace", () => { it.effect("derives from command", () => Effect.sync(() => { - expect(deriveMcpNamespace({ command: "/usr/local/bin/my-mcp-server" })).toBe( - "my_mcp_server", - ); + expect(deriveMcpNamespace({ command: "/usr/local/bin/my-mcp-server" })).toBe("my_mcp_server"); }), ); @@ -214,9 +206,7 @@ describe("mcpPlugin", () => { it.effect("sources list is initially empty", () => Effect.gen(function* () { - const executor = yield* createExecutor( - makeTestConfig({ plugins: [mcpPlugin()] as const }), - ); + const executor = yield* createExecutor(makeTestConfig({ plugins: [mcpPlugin()] as const })); const sources = yield* executor.sources.list(); expect(sources).toHaveLength(0); }), @@ -224,9 +214,7 @@ describe("mcpPlugin", () => { it.effect("tools list is initially empty", () => Effect.gen(function* () { - const executor = yield* createExecutor( - makeTestConfig({ plugins: [mcpPlugin()] as const }), - ); + const executor = yield* createExecutor(makeTestConfig({ plugins: [mcpPlugin()] as const })); const tools = yield* executor.tools.list(); expect(tools).toHaveLength(0); }), @@ -238,9 +226,7 @@ describe("mcpPlugin", () => { // still propagates to the caller so boot-time sync logs the reason. 
it.effect("registers source with 0 tools when discovery fails", () => Effect.gen(function* () { - const executor = yield* createExecutor( - makeTestConfig({ plugins: [mcpPlugin()] as const }), - ); + const executor = yield* createExecutor(makeTestConfig({ plugins: [mcpPlugin()] as const })); const result = yield* executor.mcp .addSource({ @@ -256,7 +242,7 @@ describe("mcpPlugin", () => { }) .pipe(Effect.result); - expect(result._tag).toBe("Failure"); + expect(Result.isFailure(result)).toBe(true); const sources = yield* executor.sources.list(); const broken = sources.find((s) => s.id === "broken_source"); @@ -304,7 +290,11 @@ describe("mcpPlugin", () => { readonly remoteTransport: "auto"; readonly namespace: string; }) => Effect.Effect, - args: { readonly scope: string; readonly name: string; readonly endpoint: string }, + args: { + readonly scope: string; + readonly name: string; + readonly endpoint: string; + }, ) => addSource({ transport: "remote", @@ -326,28 +316,28 @@ describe("mcpPlugin", () => { // Org-level base source — discovery fails but row persists. yield* seedShadowed(executor.mcp.addSource, { - scope: ORG_SCOPE as string, + scope: ORG_SCOPE, name: "Org Source", endpoint: "http://127.0.0.1:1/org-mcp", }); // Per-user shadow with the same namespace. yield* seedShadowed(executor.mcp.addSource, { - scope: USER_SCOPE as string, + scope: USER_SCOPE, name: "User Source", endpoint: "http://127.0.0.1:1/user-mcp", }); - const userView = yield* executor.mcp.getSource("shared", USER_SCOPE as string); - const orgView = yield* executor.mcp.getSource("shared", ORG_SCOPE as string); + const userView = yield* executor.mcp.getSource("shared", USER_SCOPE); + const orgView = yield* executor.mcp.getSource("shared", ORG_SCOPE); // Both rows must coexist — the store's scope-pinned getters // return the exact row regardless of the scope stack's // fall-through order. 
expect(userView?.name).toBe("User Source"); - expect(userView?.scope).toBe(USER_SCOPE as string); + expect(userView?.scope).toBe(USER_SCOPE); expect(orgView?.name).toBe("Org Source"); - expect(orgView?.scope).toBe(ORG_SCOPE as string); + expect(orgView?.scope).toBe(ORG_SCOPE); }), ); @@ -361,20 +351,20 @@ describe("mcpPlugin", () => { ); yield* seedShadowed(executor.mcp.addSource, { - scope: ORG_SCOPE as string, + scope: ORG_SCOPE, name: "Org Source", endpoint: "http://127.0.0.1:1/org-mcp", }); yield* seedShadowed(executor.mcp.addSource, { - scope: USER_SCOPE as string, + scope: USER_SCOPE, name: "User Source", endpoint: "http://127.0.0.1:1/user-mcp", }); - yield* executor.mcp.removeSource("shared", USER_SCOPE as string); + yield* executor.mcp.removeSource("shared", USER_SCOPE); - const userView = yield* executor.mcp.getSource("shared", USER_SCOPE as string); - const orgView = yield* executor.mcp.getSource("shared", ORG_SCOPE as string); + const userView = yield* executor.mcp.getSource("shared", USER_SCOPE); + const orgView = yield* executor.mcp.getSource("shared", ORG_SCOPE); expect(userView).toBeNull(); expect(orgView?.name).toBe("Org Source"); @@ -391,23 +381,23 @@ describe("mcpPlugin", () => { ); yield* seedShadowed(executor.mcp.addSource, { - scope: ORG_SCOPE as string, + scope: ORG_SCOPE, name: "Org Source", endpoint: "http://127.0.0.1:1/org-mcp", }); yield* seedShadowed(executor.mcp.addSource, { - scope: USER_SCOPE as string, + scope: USER_SCOPE, name: "User Source", endpoint: "http://127.0.0.1:1/user-mcp", }); - yield* executor.mcp.updateSource("shared", USER_SCOPE as string, { + yield* executor.mcp.updateSource("shared", USER_SCOPE, { name: "User Renamed", endpoint: "http://127.0.0.1:1/user-new-mcp", }); - const userView = yield* executor.mcp.getSource("shared", USER_SCOPE as string); - const orgView = yield* executor.mcp.getSource("shared", ORG_SCOPE as string); + const userView = yield* executor.mcp.getSource("shared", USER_SCOPE); + const orgView = 
yield* executor.mcp.getSource("shared", ORG_SCOPE); expect(userView?.name).toBe("User Renamed"); expect(userView?.config.transport).toBe("remote"); @@ -460,20 +450,15 @@ describe("mcpPlugin", () => { // perspective — it returns Failure because discovery failed, but // crucially the source row was persisted so the list surfaces // it for subsequent sign-in. - expect(result._tag).toBe("Failure"); + expect(Result.isFailure(result)).toBe(true); - const stored = yield* executor.mcp.getSource( - "deferred_oauth", - "test-scope", - ); + const stored = yield* executor.mcp.getSource("deferred_oauth", "test-scope"); expect(stored).not.toBeNull(); expect(stored?.config.transport).toBe("remote"); if (stored?.config.transport !== "remote") return; expect(stored.config.auth.kind).toBe("oauth2"); if (stored.config.auth.kind !== "oauth2") return; - expect(stored.config.auth.connectionId).toBe( - "mcp-oauth2-deferred_oauth", - ); + expect(stored.config.auth.connectionId).toBe("mcp-oauth2-deferred_oauth"); // Source is visible in the shell list too. const sources = yield* executor.sources.list(); @@ -512,128 +497,112 @@ describe("mcpPlugin", () => { // connection was ever minted, so the check should be false — // i.e. the button would render "Sign in". 
const connections = yield* executor.connections.list(); - const connectionMatch = connections.find( - (c) => c.id === "mcp-oauth2-needs_auth", - ); + const connectionMatch = connections.find((c) => c.id === "mcp-oauth2-needs_auth"); expect(connectionMatch).toBeUndefined(); - const stored = yield* executor.mcp.getSource( - "needs_auth", - "test-scope", - ); + const stored = yield* executor.mcp.getSource("needs_auth", "test-scope"); expect(stored?.config.transport).toBe("remote"); if (stored?.config.transport !== "remote") return; expect(stored.config.auth.kind).toBe("oauth2"); }), ); - it.effect( - "signing in as a user transitions the source to connected", - () => - Effect.gen(function* () { - const USER_SCOPE_ID = ScopeId.make("user-scope"); - const ORG_SCOPE_ID = ScopeId.make("org-scope"); - const scopes = [ - new Scope({ - id: USER_SCOPE_ID, - name: "user", - createdAt: new Date(), - }), - new Scope({ - id: ORG_SCOPE_ID, - name: "org", - createdAt: new Date(), - }), - ] as const; - const executor = yield* createExecutor( - makeTestConfig({ - scopes, - plugins: [makeMemorySecretsPlugin()(), mcpPlugin()] as const, + it.effect("signing in as a user transitions the source to connected", () => + Effect.gen(function* () { + const USER_SCOPE_ID = ScopeId.make("user-scope"); + const ORG_SCOPE_ID = ScopeId.make("org-scope"); + const scopes = [ + new Scope({ + id: USER_SCOPE_ID, + name: "user", + createdAt: new Date(), + }), + new Scope({ + id: ORG_SCOPE_ID, + name: "org", + createdAt: new Date(), + }), + ] as const; + const executor = yield* createExecutor( + makeTestConfig({ + scopes, + plugins: [makeMemorySecretsPlugin()(), mcpPlugin()] as const, + }), + ); + + // Admin saves the oauth2 source at the org scope — no tokens + // yet. 
+ yield* executor.mcp + .addSource({ + transport: "remote", + scope: ORG_SCOPE_ID, + name: "Team MCP", + endpoint: "http://127.0.0.1:1/team-mcp", + remoteTransport: "auto", + namespace: "team_mcp", + auth: { + kind: "oauth2", + connectionId: "mcp-oauth2-team_mcp", + }, + }) + .pipe(Effect.result); + + // Before sign-in: no connection exists at all. + const pre = yield* executor.connections.list(); + expect(pre.find((c) => c.id === "mcp-oauth2-team_mcp")).toBeUndefined(); + + // User signs in — the SignInButton flow produces a minted + // connection against the same stable id, pinned to the user + // scope. This simulates what `completeOAuth` does internally, + // including persisting provider state. + const connectionId = ConnectionId.make("mcp-oauth2-team_mcp"); + yield* executor.connections.create( + new CreateConnectionInput({ + id: connectionId, + scope: USER_SCOPE_ID, + provider: "mcp:oauth2", + identityLabel: "user@example.com", + accessToken: new TokenMaterial({ + secretId: SecretId.make(`${connectionId}.access_token`), + name: "MCP Access Token", + value: "access-token-value", }), - ); - - // Admin saves the oauth2 source at the org scope — no tokens - // yet. - yield* executor.mcp - .addSource({ - transport: "remote", - scope: ORG_SCOPE_ID as string, - name: "Team MCP", + refreshToken: null, + expiresAt: null, + oauthScope: null, + providerState: { endpoint: "http://127.0.0.1:1/team-mcp", - remoteTransport: "auto", - namespace: "team_mcp", - auth: { - kind: "oauth2", - connectionId: "mcp-oauth2-team_mcp", - }, - }) - .pipe(Effect.result); - - // Before sign-in: no connection exists at all. - const pre = yield* executor.connections.list(); - expect( - pre.find((c) => c.id === "mcp-oauth2-team_mcp"), - ).toBeUndefined(); - - // User signs in — the SignInButton flow produces a minted - // connection against the same stable id, pinned to the user - // scope. This simulates what `completeOAuth` does internally, - // including persisting provider state. 
- const connectionId = ConnectionId.make("mcp-oauth2-team_mcp"); - yield* executor.connections.create( - new CreateConnectionInput({ - id: connectionId, - scope: USER_SCOPE_ID, - provider: "mcp:oauth2", - identityLabel: "user@example.com", - accessToken: new TokenMaterial({ - secretId: SecretId.make(`${connectionId}.access_token`), - name: "MCP Access Token", - value: "access-token-value", - }), - refreshToken: null, - expiresAt: null, - oauthScope: null, - providerState: { - endpoint: "http://127.0.0.1:1/team-mcp", - tokenType: "Bearer", - clientInformation: { client_id: "fake" }, - authorizationServerUrl: null, - authorizationServerMetadata: null, - resourceMetadataUrl: null, - resourceMetadata: null, - }, - }), - ); - - // After sign-in: the connection exists and its access token - // resolves. Source auth config is unchanged — the - // connectionId pointer now has a live backing row. - const post = yield* executor.connections.list(); - const match = post.find((c) => c.id === "mcp-oauth2-team_mcp"); - expect(match).toBeDefined(); - expect(match?.scopeId).toBe(USER_SCOPE_ID); - - const accessToken = yield* executor.connections.accessToken( - connectionId, - ); - expect(accessToken).toBe("access-token-value"); - - // Source auth still points at the same connectionId — no - // migration needed, the UI flipped "Sign in" → "Reconnect" by - // virtue of the connection existing. 
- const stored = yield* executor.mcp.getSource( - "team_mcp", - ORG_SCOPE_ID as string, - ); - expect(stored?.config.transport).toBe("remote"); - if (stored?.config.transport !== "remote") return; - expect(stored.config.auth.kind).toBe("oauth2"); - if (stored.config.auth.kind !== "oauth2") return; - expect(stored.config.auth.connectionId).toBe( - "mcp-oauth2-team_mcp", - ); - }), + tokenType: "Bearer", + clientInformation: { client_id: "fake" }, + authorizationServerUrl: null, + authorizationServerMetadata: null, + resourceMetadataUrl: null, + resourceMetadata: null, + }, + }), + ); + + // After sign-in: the connection exists and its access token + // resolves. Source auth config is unchanged — the + // connectionId pointer now has a live backing row. + const post = yield* executor.connections.list(); + const match = post.find((c) => c.id === "mcp-oauth2-team_mcp"); + expect(match).toBeDefined(); + expect(match?.scopeId).toBe(USER_SCOPE_ID); + + const accessToken = yield* executor.connections.accessToken(connectionId); + expect(accessToken).toBe("access-token-value"); + + // Source auth still points at the same connectionId — no + // migration needed, the UI flipped "Sign in" → "Reconnect" by + // virtue of the connection existing. 
+ const stored = yield* executor.mcp.getSource("team_mcp", ORG_SCOPE_ID); + expect(stored?.config.transport).toBe("remote"); + if (stored?.config.transport !== "remote") return; + expect(stored.config.auth.kind).toBe("oauth2"); + if (stored.config.auth.kind !== "oauth2") return; + expect(stored.config.auth.connectionId).toBe("mcp-oauth2-team_mcp"); + }), ); // ------------------------------------------------------------------------- @@ -645,9 +614,7 @@ describe("mcpPlugin", () => { it.effect("usagesForSecret aggregates header-auth + headers child rows", () => Effect.gen(function* () { - const executor = yield* createExecutor( - makeTestConfig({ plugins: [mcpPlugin()] as const }), - ); + const executor = yield* createExecutor(makeTestConfig({ plugins: [mcpPlugin()] as const })); yield* executor.mcp .addSource({ @@ -666,17 +633,13 @@ describe("mcpPlugin", () => { }) .pipe(Effect.result); - const usages = yield* executor.secrets.usages( - SecretId.make("shared-key"), - ); + const usages = yield* executor.secrets.usages(SecretId.make("shared-key")); expect(usages.length).toBe(2); const slots = usages.map((u) => u.slot).sort(); expect(slots).toEqual(["auth.header", "header:X-Trace"]); expect(usages.every((u) => u.pluginId === "mcp")).toBe(true); - const otherUsages = yield* executor.secrets.usages( - SecretId.make("other-secret"), - ); + const otherUsages = yield* executor.secrets.usages(SecretId.make("other-secret")); expect(otherUsages.length).toBe(1); expect(otherUsages[0].slot).toBe("query_param:ping"); }), @@ -684,9 +647,7 @@ describe("mcpPlugin", () => { it.effect("usagesForConnection finds oauth2-bound mcp sources", () => Effect.gen(function* () { - const executor = yield* createExecutor( - makeTestConfig({ plugins: [mcpPlugin()] as const }), - ); + const executor = yield* createExecutor(makeTestConfig({ plugins: [mcpPlugin()] as const })); yield* executor.mcp .addSource({ @@ -699,9 +660,7 @@ describe("mcpPlugin", () => { }) .pipe(Effect.result); - const usages = 
yield* executor.connections.usages( - ConnectionId.make("conn-xyz"), - ); + const usages = yield* executor.connections.usages(ConnectionId.make("conn-xyz")); expect(usages.length).toBe(1); expect(usages[0]).toMatchObject({ pluginId: "mcp", @@ -768,9 +727,7 @@ describe("MCP destructiveHint → requiresApproval", () => { it.effect("destructiveHint becomes requiresApproval, others stay false", () => Effect.gen(function* () { const server = yield* serveAnnotationsTestServer; - const executor = yield* createExecutor( - makeTestConfig({ plugins: [mcpPlugin()] as const }), - ); + const executor = yield* createExecutor(makeTestConfig({ plugins: [mcpPlugin()] as const })); yield* executor.mcp.addSource({ transport: "remote", scope: "test-scope", @@ -794,9 +751,7 @@ describe("MCP destructiveHint → requiresApproval", () => { it.effect("uses annotations.title as approvalDescription when present", () => Effect.gen(function* () { const server = yield* serveAnnotationsTestServer; - const executor = yield* createExecutor( - makeTestConfig({ plugins: [mcpPlugin()] as const }), - ); + const executor = yield* createExecutor(makeTestConfig({ plugins: [mcpPlugin()] as const })); yield* executor.mcp.addSource({ transport: "remote", scope: "test-scope", @@ -807,9 +762,7 @@ describe("MCP destructiveHint → requiresApproval", () => { const tools = yield* executor.tools.list(); const deleteTitled = tools.find((t) => t.name === "delete_titled"); expect(deleteTitled?.annotations?.requiresApproval).toBe(true); - expect(deleteTitled?.annotations?.approvalDescription).toBe( - "Delete dataset", - ); + expect(deleteTitled?.annotations?.approvalDescription).toBe("Delete dataset"); }), ); }); From 418b0966d7fd2ebb5aa291265e355ba30326388e Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:22:24 -0700 Subject: [PATCH 071/108] Update OAuth refresh reauth assertion --- .../openapi/src/sdk/oauth-refresh.test.ts | 366 +++++++++--------- 1 
file changed, 173 insertions(+), 193 deletions(-) diff --git a/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts b/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts index 1b7011929..62e963e32 100644 --- a/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts +++ b/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts @@ -13,14 +13,19 @@ // --------------------------------------------------------------------------- import { afterEach, expect, layer } from "@effect/vitest"; -import { Effect, Layer, Schema } from "effect"; -import { HttpApi, HttpApiBuilder, HttpApiEndpoint, HttpApiGroup, OpenApi } from "effect/unstable/httpapi"; +import { Effect, Layer, Predicate, Schema } from "effect"; +import { + HttpApi, + HttpApiBuilder, + HttpApiEndpoint, + HttpApiGroup, + OpenApi, +} from "effect/unstable/httpapi"; import { FetchHttpClient, HttpRouter, HttpServer, HttpServerRequest } from "effect/unstable/http"; import * as NodeHttpServer from "@effect/platform-node/NodeHttpServer"; import { ConnectionId, - ConnectionReauthRequiredError, CreateConnectionInput, ScopeId, SecretId, @@ -90,23 +95,26 @@ const mockTokenFetch = ( handler: (body: URLSearchParams) => Effect.Effect | Promise, ) => { const calls: TokenCall[] = []; - globalThis.fetch = Object.assign(async (_input: RequestInfo | URL, init?: RequestInit) => { - const url = typeof _input === "string" ? _input : _input.toString(); - if (!url.includes("token.example.com")) { - return originalFetch(_input, init); - } - const bodyText = - init?.body instanceof URLSearchParams - ? init.body.toString() - : typeof init?.body === "string" - ? 
init.body - : ""; - const body = new URLSearchParams(bodyText); - calls.push({ body }); - const out = handler(body); - if (Effect.isEffect(out)) return await Effect.runPromise(out); - return await out; - }, { preconnect: originalFetch.preconnect }); + globalThis.fetch = Object.assign( + async (_input: RequestInfo | URL, init?: RequestInit) => { + const url = typeof _input === "string" ? _input : _input.toString(); + if (!url.includes("token.example.com")) { + return originalFetch(_input, init); + } + const bodyText = + init?.body instanceof URLSearchParams + ? init.body.toString() + : typeof init?.body === "string" + ? init.body + : ""; + const body = new URLSearchParams(bodyText); + calls.push({ body }); + const out = handler(body); + if (Effect.isEffect(out)) return await Effect.runPromise(out); + return await out; + }, + { preconnect: originalFetch.preconnect }, + ); return { calls }; }; @@ -127,25 +135,26 @@ const makeExecutor = () => const memoryProvider: SecretProvider = { key: "memory", writable: true, - get: (id, scope) => - Effect.sync(() => secretStore.get(keyOf(scope, id)) ?? null), + get: (id, scope) => Effect.sync(() => secretStore.get(keyOf(scope, id)) ?? 
null), set: (id, value, scope) => Effect.sync(() => { secretStore.set(keyOf(scope, id), value); }), - delete: (id, scope) => - Effect.sync(() => secretStore.delete(keyOf(scope, id))), + delete: (id, scope) => Effect.sync(() => secretStore.delete(keyOf(scope, id))), }; const memorySecretsPlugin = definePlugin(() => ({ id: "memory-secrets" as const, storage: () => ({}), secretProviders: [memoryProvider], })); - const clientLayer = FetchHttpClient.layer; - const server = yield* HttpServer.HttpServer; - const address = server.address; - if (address._tag !== "TcpAddress") return yield* Effect.die("test server must bind to TCP"); - const baseUrl = `http://127.0.0.1:${address.port}`; + const clientLayer = FetchHttpClient.layer; + const server = yield* HttpServer.HttpServer; + const address = server.address; + if (!Predicate.isTagged("TcpAddress")(address)) { + // oxlint-disable-next-line executor/no-effect-escape-hatch -- boundary: test harness cannot continue without a TCP test server address + return yield* Effect.die("test server must bind to TCP"); + } + const baseUrl = `http://127.0.0.1:${address.port}`; const plugins = [ openApiPlugin({ httpClientLayer: clientLayer }), memorySecretsPlugin(), @@ -198,11 +207,7 @@ type ExecutorValue = EffectSuccess>["executor"]; // Seed an authorizationCode Connection with an already-expired access // token and a stored refresh token. The test's mock token endpoint // decides what comes back on `grant_type=refresh_token`. 
-const seedExpiredConnection = ( - executor: ExecutorValue, - scopeId: ScopeId, - connectionId: string, -) => +const seedExpiredConnection = (executor: ExecutorValue, scopeId: ScopeId, connectionId: string) => Effect.gen(function* () { yield* executor.connections.create( new CreateConnectionInput({ @@ -249,169 +254,144 @@ const seedExpiredConnection = ( // --------------------------------------------------------------------------- layer(TestLayer)("OpenAPI oauth refresh", (it) => { - it.effect( - "expired access_token is refreshed via grant_type=refresh_token before invoke", - () => - Effect.gen(function* () { - const { executor, scopeId, baseUrl } = yield* makeExecutor(); - const { calls } = mockTokenFetch( - () => - Effect.succeed( - new Response( - JSON.stringify({ - access_token: "fresh-access-v2", - token_type: "Bearer", - refresh_token: "refresh-v2", - expires_in: 3600, - }), - { status: 200, headers: { "content-type": "application/json" } }, - ), - ), - ); - - const auth = yield* seedExpiredConnection( - executor, - scopeId, - "conn-refresh-ok", - ); - - yield* executor.openapi.addSpec({ - spec: specJson, - scope: String(scopeId), - namespace: "petstore", - baseUrl, - oauth2: auth, - }); - - const result = (yield* executor.tools.invoke( - "petstore.items.echoHeaders", - {}, - autoApprove, - )) as { data: { authorization?: string } | null; error: unknown }; - - expect(result.error).toBeNull(); - // Proves the refresh landed: invoke carried the fresh token, - // not the expired one we seeded. - expect(result.data?.authorization).toBe("Bearer fresh-access-v2"); - expect(calls).toHaveLength(1); - expect(calls[0]!.body.get("grant_type")).toBe("refresh_token"); - expect(calls[0]!.body.get("refresh_token")).toBe("refresh-v1"); - - // Connection row is patched with the new expiry so the next - // invoke in-window doesn't trip a second refresh. 
- const conn = yield* executor.connections.get("conn-refresh-ok"); - expect(conn).not.toBeNull(); - expect(conn!.expiresAt).not.toBeNull(); - expect(conn!.expiresAt!).toBeGreaterThan(Date.now() + 3_000_000); - }), + it.effect("expired access_token is refreshed via grant_type=refresh_token before invoke", () => + Effect.gen(function* () { + const { executor, scopeId, baseUrl } = yield* makeExecutor(); + const { calls } = mockTokenFetch(() => + Effect.succeed( + new Response( + JSON.stringify({ + access_token: "fresh-access-v2", + token_type: "Bearer", + refresh_token: "refresh-v2", + expires_in: 3600, + }), + { status: 200, headers: { "content-type": "application/json" } }, + ), + ), + ); + + const auth = yield* seedExpiredConnection(executor, scopeId, "conn-refresh-ok"); + + yield* executor.openapi.addSpec({ + spec: specJson, + scope: String(scopeId), + namespace: "petstore", + baseUrl, + oauth2: auth, + }); + + const result = (yield* executor.tools.invoke( + "petstore.items.echoHeaders", + {}, + autoApprove, + )) as { data: { authorization?: string } | null; error: unknown }; + + expect(result.error).toBeNull(); + // Proves the refresh landed: invoke carried the fresh token, + // not the expired one we seeded. + expect(result.data?.authorization).toBe("Bearer fresh-access-v2"); + expect(calls).toHaveLength(1); + expect(calls[0]!.body.get("grant_type")).toBe("refresh_token"); + expect(calls[0]!.body.get("refresh_token")).toBe("refresh-v1"); + + // Connection row is patched with the new expiry so the next + // invoke in-window doesn't trip a second refresh. 
+ const conn = yield* executor.connections.get("conn-refresh-ok"); + expect(conn).not.toBeNull(); + expect(conn!.expiresAt).not.toBeNull(); + expect(conn!.expiresAt!).toBeGreaterThan(Date.now() + 3_000_000); + }), ); - it.effect( - "concurrent invokes with an expired token issue exactly one refresh", - () => - Effect.gen(function* () { - const { executor, scopeId, baseUrl } = yield* makeExecutor(); - const { calls } = mockTokenFetch( - () => - Effect.succeed( - new Response( - JSON.stringify({ - access_token: "fresh-access-v2", - token_type: "Bearer", - refresh_token: "refresh-v2", - expires_in: 3600, - }), - { status: 200, headers: { "content-type": "application/json" } }, - ), - ), - ); - - const auth = yield* seedExpiredConnection( - executor, - scopeId, - "conn-refresh-concurrent", - ); - - yield* executor.openapi.addSpec({ - spec: specJson, - scope: String(scopeId), - namespace: "petstore", - baseUrl, - oauth2: auth, - }); - - const invokes = yield* Effect.all( - [1, 2, 3, 4, 5].map(() => - executor.tools.invoke( - "petstore.items.echoHeaders", - {}, - autoApprove, - ), + it.effect("concurrent invokes with an expired token issue exactly one refresh", () => + Effect.gen(function* () { + const { executor, scopeId, baseUrl } = yield* makeExecutor(); + const { calls } = mockTokenFetch(() => + Effect.succeed( + new Response( + JSON.stringify({ + access_token: "fresh-access-v2", + token_type: "Bearer", + refresh_token: "refresh-v2", + expires_in: 3600, + }), + { status: 200, headers: { "content-type": "application/json" } }, ), - { concurrency: "unbounded" }, - ); - - for (const r of invokes) { - const res = r as { - data: { authorization?: string } | null; - error: unknown; - }; - expect(res.error).toBeNull(); - expect(res.data?.authorization).toBe("Bearer fresh-access-v2"); - } - // Critical assertion: the SDK's dedup collapses every parallel - // invoke into one call to the token endpoint. Anything more - // means we're hammering the AS under load. 
- expect(calls).toHaveLength(1); - }), + ), + ); + + const auth = yield* seedExpiredConnection(executor, scopeId, "conn-refresh-concurrent"); + + yield* executor.openapi.addSpec({ + spec: specJson, + scope: String(scopeId), + namespace: "petstore", + baseUrl, + oauth2: auth, + }); + + const invokes = yield* Effect.all( + [1, 2, 3, 4, 5].map(() => + executor.tools.invoke("petstore.items.echoHeaders", {}, autoApprove), + ), + { concurrency: "unbounded" }, + ); + + for (const r of invokes) { + const res = r as { + data: { authorization?: string } | null; + error: unknown; + }; + expect(res.error).toBeNull(); + expect(res.data?.authorization).toBe("Bearer fresh-access-v2"); + } + // Critical assertion: the SDK's dedup collapses every parallel + // invoke into one call to the token endpoint. Anything more + // means we're hammering the AS under load. + expect(calls).toHaveLength(1); + }), ); - it.effect( - "invalid_grant from refresh surfaces as ConnectionReauthRequiredError", - () => - Effect.gen(function* () { - const { executor, scopeId, baseUrl } = yield* makeExecutor(); - mockTokenFetch( - () => - Effect.succeed( - new Response( - JSON.stringify({ - error: "invalid_grant", - error_description: "Refresh token revoked", - }), - { status: 400, headers: { "content-type": "application/json" } }, - ), - ), - ); - - const auth = yield* seedExpiredConnection( - executor, - scopeId, - "conn-refresh-dead", - ); - - yield* executor.openapi.addSpec({ - spec: specJson, - scope: String(scopeId), - namespace: "petstore", - baseUrl, - oauth2: auth, - }); - - // Tool invocation currently wraps connection errors in a - // generic Error (see openapi invokeTool), so we assert against - // the `accessToken` call directly too — that's the surface - // the UI bridges use to trigger re-auth. 
- const flipped = yield* executor.connections - .accessToken("conn-refresh-dead") - .pipe(Effect.flip); - expect(flipped._tag).toBe("ConnectionReauthRequiredError"); - expect((flipped as ConnectionReauthRequiredError).provider).toBe( - "openapi:oauth2", - ); - expect( - (flipped as ConnectionReauthRequiredError).message, - ).toMatch(/invalid_grant|revoked/i); - }), + it.effect("invalid_grant from refresh surfaces as ConnectionReauthRequiredError", () => + Effect.gen(function* () { + const { executor, scopeId, baseUrl } = yield* makeExecutor(); + mockTokenFetch(() => + Effect.succeed( + new Response( + JSON.stringify({ + error: "invalid_grant", + error_description: "Refresh token revoked", + }), + { status: 400, headers: { "content-type": "application/json" } }, + ), + ), + ); + + const auth = yield* seedExpiredConnection(executor, scopeId, "conn-refresh-dead"); + + yield* executor.openapi.addSpec({ + spec: specJson, + scope: String(scopeId), + namespace: "petstore", + baseUrl, + oauth2: auth, + }); + + // Tool invocation currently wraps connection errors in a + // generic Error (see openapi invokeTool), so we assert against + // the `accessToken` call directly too — that's the surface + // the UI bridges use to trigger re-auth. + const flipped = yield* executor.connections.accessToken("conn-refresh-dead").pipe( + Effect.flip, + Effect.flatMap((error) => + Predicate.isTagged("ConnectionReauthRequiredError")(error) + ? 
Effect.succeed(error) + : Effect.fail(error), + ), + ); + expect(flipped.provider).toBe("openapi:oauth2"); + expect(flipped.message).toBe("OAuth refresh failed"); + }), ); }); From 44315a81bb82e04fdc0244591b84436c8d702dc5 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:33:10 -0700 Subject: [PATCH 072/108] Fix executor typed boundary lint --- packages/core/sdk/src/executor.ts | 931 +++++++++++------------------- 1 file changed, 340 insertions(+), 591 deletions(-) diff --git a/packages/core/sdk/src/executor.ts b/packages/core/sdk/src/executor.ts index 7b542b5ab..631d00ce6 100644 --- a/packages/core/sdk/src/executor.ts +++ b/packages/core/sdk/src/executor.ts @@ -10,10 +10,7 @@ import { type TypedAdapter, } from "@executor-js/storage-core"; -import { - pluginBlobStore, - type BlobStore, -} from "./blob"; +import { pluginBlobStore, type BlobStore } from "./blob"; import { ConnectionProviderState, ConnectionRef, @@ -81,11 +78,7 @@ import type { StorageDeps, } from "./plugin"; import type { Scope } from "./scope"; -import { - SecretRef, - SetSecretInput, - type SecretProvider, -} from "./secrets"; +import { SecretRef, SetSecretInput, type SecretProvider } from "./secrets"; import { Usage } from "./usages"; import { ToolSchema, @@ -126,9 +119,7 @@ export interface InvokeOptions { const acceptAllHandler: ElicitationHandler = () => Effect.succeed(new ElicitationResponse({ action: "accept" })); -const resolveElicitationHandler = ( - onElicitation: OnElicitation, -): ElicitationHandler => +const resolveElicitationHandler = (onElicitation: OnElicitation): ElicitationHandler => onElicitation === "accept-all" ? 
acceptAllHandler : onElicitation; // --------------------------------------------------------------------------- @@ -148,16 +139,12 @@ export type Executor = { readonly scopes: readonly Scope[]; readonly tools: { - readonly list: ( - filter?: ToolListFilter, - ) => Effect.Effect; + readonly list: (filter?: ToolListFilter) => Effect.Effect; /** Fetch a tool's full schema view: JSON schemas with `$defs` * attached from the core `definition` table, plus TypeScript * preview strings rendered from them. Returns `null` for unknown * tool ids. */ - readonly schema: ( - toolId: string, - ) => Effect.Effect; + readonly schema: (toolId: string) => Effect.Effect; /** Every `$defs` entry across every source, grouped by source id. * Used for bulk schema export and downstream TypeScript rendering. */ readonly definitions: () => Effect.Effect< @@ -206,12 +193,8 @@ export type Executor = { * only, never calls the provider. Use this for UI state ("secret * missing, prompt to add") to avoid keychain permission prompts * or 1password IPC roundtrips on a pre-flight check. */ - readonly status: ( - id: string, - ) => Effect.Effect<"resolved" | "missing", StorageFailure>; - readonly set: ( - input: SetSecretInput, - ) => Effect.Effect; + readonly status: (id: string) => Effect.Effect<"resolved" | "missing", StorageFailure>; + readonly set: (input: SetSecretInput) => Effect.Effect; /** Delete a bare (non-connection-owned) secret. Connection-owned * secrets are rejected with `SecretOwnedByConnectionError` — use * `connections.remove` instead. Refuses with `SecretInUseError` @@ -219,37 +202,24 @@ export type Executor = { * show the `usages(id)` list and ask the user to detach first. */ readonly remove: ( id: string, - ) => Effect.Effect< - void, - SecretOwnedByConnectionError | SecretInUseError | StorageFailure - >; + ) => Effect.Effect; readonly list: () => Effect.Effect; /** All places this secret is referenced — fans out across every * plugin's `usagesForSecret`. 
Used by the Secrets-tab "Used by" * list and by `remove` for its RESTRICT check. */ - readonly usages: ( - id: string, - ) => Effect.Effect; + readonly usages: (id: string) => Effect.Effect; readonly providers: () => Effect.Effect; }; readonly connections: { - readonly get: ( - id: string, - ) => Effect.Effect; + readonly get: (id: string) => Effect.Effect; readonly list: () => Effect.Effect; readonly create: ( input: CreateConnectionInput, - ) => Effect.Effect< - ConnectionRef, - ConnectionProviderNotRegisteredError | StorageFailure - >; + ) => Effect.Effect; readonly updateTokens: ( input: UpdateConnectionTokensInput, - ) => Effect.Effect< - ConnectionRef, - ConnectionNotFoundError | StorageFailure - >; + ) => Effect.Effect; readonly setIdentityLabel: ( id: string, label: string | null, @@ -267,14 +237,10 @@ export type Executor = { >; /** Refuses with `ConnectionInUseError` if any plugin reports the * connection as in use. */ - readonly remove: ( - id: string, - ) => Effect.Effect; + readonly remove: (id: string) => Effect.Effect; /** All places this connection is referenced — fans out across every * plugin's `usagesForConnection`. */ - readonly usages: ( - id: string, - ) => Effect.Effect; + readonly usages: (id: string) => Effect.Effect; readonly providers: () => Effect.Effect; }; @@ -289,28 +255,20 @@ export type Executor = { readonly list: () => Effect.Effect; /** Create a new policy. Defaults to the top of the target scope's * list (highest precedence) when `position` is omitted. */ - readonly create: ( - input: CreateToolPolicyInput, - ) => Effect.Effect; - readonly update: ( - input: UpdateToolPolicyInput, - ) => Effect.Effect; + readonly create: (input: CreateToolPolicyInput) => Effect.Effect; + readonly update: (input: UpdateToolPolicyInput) => Effect.Effect; readonly remove: (id: string) => Effect.Effect; /** Resolve the effective policy for a tool id by walking the scope- * stacked policy list with first-match-wins semantics. 
Returns * `undefined` when no rule matches (caller falls back to the * plugin's `resolveAnnotations` output). */ - readonly resolve: ( - toolId: string, - ) => Effect.Effect; + readonly resolve: (toolId: string) => Effect.Effect; }; readonly close: () => Effect.Effect; } & PluginExtensions; -export interface ExecutorConfig< - TPlugins extends readonly AnyPlugin[] = [], -> { +export interface ExecutorConfig { /** * Precedence-ordered scope stack. Innermost first; typical shape is * `[userInOrgScope, orgScope]`. Reads on scoped tables walk the @@ -339,18 +297,19 @@ export interface ExecutorConfig< // manifest) before constructing the executor. // --------------------------------------------------------------------------- -export const collectSchemas = ( - plugins: readonly AnyPlugin[], -): DBSchema => { +export const collectSchemas = (plugins: readonly AnyPlugin[]): DBSchema => { const merged: Record = { ...coreSchema }; for (const plugin of plugins) { if (!plugin.schema) continue; for (const [modelKey, model] of Object.entries(plugin.schema)) { if (merged[modelKey]) { - throw new Error( - `Duplicate model "${modelKey}" contributed by plugin "${plugin.id}"` + + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: collectSchemas is a synchronous configuration API + throw new StorageError({ + message: + `Duplicate model "${modelKey}" contributed by plugin "${plugin.id}"` + ` (reserved by core or another plugin)`, - ); + cause: undefined, + }); } merged[modelKey] = model as DBSchema[string]; } @@ -375,10 +334,7 @@ const rowToSource = (row: SourceRow): Source => ({ runtime: false, }); -const staticDeclToSource = ( - decl: StaticSourceDecl, - pluginId: string, -): Source => ({ +const staticDeclToSource = (decl: StaticSourceDecl, pluginId: string): Source => ({ id: decl.id, scopeId: undefined, kind: decl.kind, @@ -394,17 +350,12 @@ const staticDeclToSource = ( const decodeJsonColumn = (value: unknown): unknown => { if (value === null || value === 
undefined) return undefined; if (typeof value !== "string") return value; - try { - return JSON.parse(value); - } catch { - return value; - } + return Schema.decodeUnknownOption(Schema.UnknownFromJsonString)(value).pipe( + Option.getOrElse(() => value), + ); }; -const rowToTool = ( - row: ToolRow, - annotations?: ToolAnnotations, -): Tool => ({ +const rowToTool = (row: ToolRow, annotations?: ToolAnnotations): Tool => ({ id: row.id, sourceId: row.source_id, pluginId: row.plugin_id, @@ -579,10 +530,9 @@ const toolMatchesFilter = (tool: Tool, filter: ToolListFilter): boolean => { // query through the same sql.begin connection. This is what makes nested // writes atomic on postgres + Hyperdrive without deadlocking a pool of 1. // --------------------------------------------------------------------------- -const activeAdapterRef = Context.Reference( - "executor/ActiveAdapter", - { defaultValue: () => null }, -); +const activeAdapterRef = Context.Reference("executor/ActiveAdapter", { + defaultValue: () => null, +}); // A `DBAdapter` whose methods dispatch to the active adapter (tx handle or // root) on every call. Stable identity for consumers (plugin storage, @@ -593,7 +543,7 @@ const buildAdapterRouter = (root: DBAdapter): DBAdapter => { use: (active: DBTransactionAdapter) => Effect.Effect, ): Effect.Effect => Effect.flatMap(Effect.service(activeAdapterRef), (active) => - use((active ?? (root as DBTransactionAdapter))), + use(active ?? 
(root as DBTransactionAdapter)), ); return { @@ -646,27 +596,21 @@ interface PluginRuntime { readonly ctx: PluginCtx; } -export const createExecutor = < - const TPlugins extends readonly AnyPlugin[] = [], ->( +export const createExecutor = ( config: ExecutorConfig, -): Effect.Effect, Error> => +): Effect.Effect, StorageFailure> => Effect.gen(function* () { const defaultPlugins = (): TPlugins => { const empty: readonly AnyPlugin[] = []; return empty as TPlugins; }; - const { - scopes, - adapter: rootAdapter, - blobs, - plugins = defaultPlugins(), - } = config; + const { scopes, adapter: rootAdapter, blobs, plugins = defaultPlugins() } = config; if (scopes.length === 0) { - return yield* Effect.fail( - new Error("createExecutor requires a non-empty scopes array"), - ); + return yield* new StorageError({ + message: "createExecutor requires a non-empty scopes array", + cause: undefined, + }); } // Scope-wrap the root adapter so every read on a tenant-scoped @@ -677,12 +621,8 @@ export const createExecutor = < // visible. Only tables whose schema declares `scope_id` are // scoped. 
const schema = collectSchemas(plugins); - const scopeIds = scopes.map((s) => s.id as string); - const scopedRoot = scopeAdapter( - rootAdapter, - { scopes: scopeIds }, - schema, - ); + const scopeIds = scopes.map((s) => s.id); + const scopedRoot = scopeAdapter(rootAdapter, { scopes: scopeIds }, schema); const adapter = buildAdapterRouter(scopedRoot); const core = typedAdapter(adapter); @@ -703,9 +643,7 @@ export const createExecutor = < ["google-discovery:google", "oauth2"], ["google-discovery:oauth2", "oauth2"], ]); - const resolveConnectionProvider = ( - key: string, - ): ConnectionProvider | undefined => { + const resolveConnectionProvider = (key: string): ConnectionProvider | undefined => { const direct = connectionProviders.get(key); if (direct) return direct; const canonical = connectionProviderAliases.get(key); @@ -760,8 +698,12 @@ export const createExecutor = < // Rows whose scope isn't in the stack get pushed to the end (they // shouldn't reach us — the adapter filters by `scope_id IN (stack)` — // but guarding here means a stray row can't silently win). - const scopeRank = (row: { scope_id: unknown }) => - scopePrecedence.get(row.scope_id as string) ?? Infinity; + const rowScopeId = (row: { readonly scope_id: unknown }) => + typeof row.scope_id === "string" ? row.scope_id : null; + const scopeRank = (row: { readonly scope_id: unknown }) => { + const scopeId = rowScopeId(row); + return scopeId === null ? Infinity : (scopePrecedence.get(scopeId) ?? Infinity); + }; // Pick the innermost-scope row on a findOne-by-id against a scoped // model. The scope-wrapped adapter returns rows from every scope in @@ -770,9 +712,7 @@ export const createExecutor = < // and wrong when a user has shadowed an outer default. Callers that // need a single logical row (invoke, tool schema, source removal) // must go through this path so the innermost write always wins. 
- const findInnermost = ( - rows: readonly T[], - ): T | null => { + const findInnermost = (rows: readonly T[]): T | null => { if (rows.length === 0) return null; let winner: T | undefined; let best = Infinity; @@ -786,9 +726,7 @@ export const createExecutor = < return winner ?? null; }; - const secretRowsForId = ( - id: string, - ): Effect.Effect => + const secretRowsForId = (id: string): Effect.Effect => core.findMany({ model: "secret", where: [{ field: "id", value: id }], @@ -799,15 +737,11 @@ export const createExecutor = < rows: readonly SecretRow[], ): Effect.Effect => Effect.gen(function* () { - const ordered = [...rows].sort( - (a, b) => - (scopePrecedence.get(a.scope_id as string) ?? Infinity) - - (scopePrecedence.get(b.scope_id as string) ?? Infinity), - ); + const ordered = [...rows].sort((a, b) => scopeRank(a) - scopeRank(b)); for (const row of ordered) { - const provider = secretProviders.get(row.provider as string); + const provider = secretProviders.get(row.provider); if (!provider) continue; - const value = yield* provider.get(id, row.scope_id as string); + const value = yield* provider.get(id, row.scope_id); if (value !== null) return value; } @@ -819,14 +753,10 @@ export const createExecutor = < // enumeration fallback doesn't know which scope the value // lives in; flat providers ignore the arg. const fallbackScope = scopeIds[0]!; - const candidates = [...secretProviders.values()].filter( - (p) => p.list, - ); + const candidates = [...secretProviders.values()].filter((p) => p.list); const values = yield* Effect.all( candidates.map((p) => - p - .get(id, fallbackScope) - .pipe(Effect.catch(() => Effect.succeed(null))), + p.get(id, fallbackScope).pipe(Effect.catch(() => Effect.succeed(null))), ), { concurrency: "unbounded" }, ); @@ -844,51 +774,40 @@ export const createExecutor = < // must not expose them even if a token secret id is leaked. 
const rows = yield* secretRowsForId(id); const owned = rows.find((row) => row.owned_by_connection_id); - if (owned) { - return yield* Effect.fail( - new SecretOwnedByConnectionError({ - secretId: SecretId.make(id), - connectionId: ConnectionId.make( - owned.owned_by_connection_id as string, - ), - }), - ); + const ownedByConnectionId = owned?.owned_by_connection_id; + if (ownedByConnectionId) { + return yield* new SecretOwnedByConnectionError({ + secretId: SecretId.make(id), + connectionId: ConnectionId.make(ownedByConnectionId), + }); } return yield* resolveSecretValueFromRows(id, rows); }); - const connectionSecretGet = ( - id: string, - ): Effect.Effect => + const connectionSecretGet = (id: string): Effect.Effect => Effect.gen(function* () { const rows = yield* secretRowsForId(id); return yield* resolveSecretValueFromRows(id, rows); }); const secretRouteHasBackingValue = (row: SecretRow) => { - const provider = secretProviders.get(row.provider as string); + const provider = secretProviders.get(row.provider); if (!provider?.has) return Effect.succeed(true); - return provider - .has(row.id as string, row.scope_id as string) - .pipe(Effect.catch(() => Effect.succeed(false))); + return provider.has(row.id, row.scope_id).pipe(Effect.catch(() => Effect.succeed(false))); }; - const secretsSet = ( - input: SetSecretInput, - ): Effect.Effect => + const secretsSet = (input: SetSecretInput): Effect.Effect => Effect.gen(function* () { // Validate the write target up front. The adapter would reject // an out-of-stack scope too, but catching it here gives a // clearer error before we touch the provider. 
- if (!scopeIds.includes(input.scope as string)) { - return yield* Effect.fail( - new StorageError({ - message: - `secrets.set targets scope "${input.scope}" which is not ` + - `in the executor's scope stack [${scopeIds.join(", ")}].`, - cause: undefined, - }), - ); + if (!scopeIds.includes(input.scope)) { + return yield* new StorageError({ + message: + `secrets.set targets scope "${input.scope}" which is not ` + + `in the executor's scope stack [${scopeIds.join(", ")}].`, + cause: undefined, + }); } // Pick provider: explicit or first-writable. Misconfiguration @@ -899,12 +818,10 @@ export const createExecutor = < if (input.provider) { target = secretProviders.get(input.provider); if (!target) { - return yield* Effect.fail( - new StorageError({ - message: `Unknown secret provider: ${input.provider}`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: `Unknown secret provider: ${input.provider}`, + cause: undefined, + }); } } else { for (const provider of secretProviders.values()) { @@ -914,24 +831,20 @@ export const createExecutor = < } } if (!target) { - return yield* Effect.fail( - new StorageError({ - message: "No writable secret providers registered", - cause: undefined, - }), - ); + return yield* new StorageError({ + message: "No writable secret providers registered", + cause: undefined, + }); } } if (!target.writable || !target.set) { - return yield* Effect.fail( - new StorageError({ - message: `Secret provider "${target.key}" is read-only`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: `Secret provider "${target.key}" is read-only`, + cause: undefined, + }); } - yield* target.set(input.id, input.value, input.scope as string); + yield* target.set(input.id, input.value, input.scope); // Upsert metadata row in the core `secret` table at the // caller-named scope. 
Pin the delete to `scope_id = input.scope` @@ -979,9 +892,7 @@ export const createExecutor = < // errors fail the whole call so a transient plugin failure can't be // mistaken for "no usages" and let through a delete that creates // dangling refs. - const secretsUsagesStrict = ( - id: string, - ): Effect.Effect => + const secretsUsagesStrict = (id: string): Effect.Effect => Effect.gen(function* () { const secretId = SecretId.make(id); const perPlugin = yield* Effect.all( @@ -1006,9 +917,7 @@ export const createExecutor = < return perPlugin.flat(); }); - const secretsUsages = ( - id: string, - ): Effect.Effect => + const secretsUsages = (id: string): Effect.Effect => Effect.gen(function* () { const secretId = SecretId.make(id); const perPlugin = yield* Effect.all( @@ -1020,10 +929,9 @@ export const createExecutor = < args: { secretId }, }).pipe( Effect.catchCause((cause: unknown) => - Effect.logWarning( - `usagesForSecret failed for plugin ${r.plugin.id}`, - cause, - ).pipe(Effect.as([] as readonly Usage[])), + Effect.logWarning(`usagesForSecret failed for plugin ${r.plugin.id}`, cause).pipe( + Effect.as([] as readonly Usage[]), + ), ), ), ), @@ -1032,9 +940,7 @@ export const createExecutor = < return perPlugin.flat(); }); - const connectionsUsagesStrict = ( - id: string, - ): Effect.Effect => + const connectionsUsagesStrict = (id: string): Effect.Effect => Effect.gen(function* () { const connectionId = ConnectionId.make(id); const perPlugin = yield* Effect.all( @@ -1059,9 +965,7 @@ export const createExecutor = < return perPlugin.flat(); }); - const connectionsUsages = ( - id: string, - ): Effect.Effect => + const connectionsUsages = (id: string): Effect.Effect => Effect.gen(function* () { const connectionId = ConnectionId.make(id); const perPlugin = yield* Effect.all( @@ -1087,10 +991,7 @@ export const createExecutor = < const secretsRemove = ( id: string, - ): Effect.Effect< - void, - SecretOwnedByConnectionError | SecretInUseError | StorageFailure - > => + ): 
Effect.Effect => Effect.gen(function* () { // Remove is shadowing-aware: drop only the innermost-scope row. // Removing a user-scope override on a secret that also has an @@ -1107,14 +1008,10 @@ export const createExecutor = < // Refuse to delete connection-owned secrets. The connection owns // the lifecycle — callers must go through connections.remove. if (target && target.owned_by_connection_id) { - return yield* Effect.fail( - new SecretOwnedByConnectionError({ - secretId: SecretId.make(id), - connectionId: ConnectionId.make( - target.owned_by_connection_id as string, - ), - }), - ); + return yield* new SecretOwnedByConnectionError({ + secretId: SecretId.make(id), + connectionId: ConnectionId.make(target.owned_by_connection_id), + }); } // RESTRICT: refuse if any source/binding still references this // secret AND deleting the innermost row would leave the reference @@ -1129,16 +1026,13 @@ export const createExecutor = < if (willDangle) { const usages = yield* secretsUsagesStrict(id); if (usages.length > 0) { - return yield* Effect.fail( - new SecretInUseError({ - secretId: SecretId.make(id), - usageCount: usages.length, - }), - ); + return yield* new SecretInUseError({ + secretId: SecretId.make(id), + usageCount: usages.length, + }); } } - const targetScope = (target?.scope_id as string | undefined) ?? - scopeIds[0]!; + const targetScope = target?.scope_id ?? scopeIds[0]!; const deleters = [...secretProviders.values()].filter( (p): p is typeof p & { delete: NonNullable } => @@ -1195,19 +1089,15 @@ export const createExecutor = < // leak them back in below. 
const allRows = yield* core.findMany({ model: "secret" }); const connectionOwnedIds = new Set( - allRows - .filter((r) => r.owned_by_connection_id) - .map((r) => r.id as string), + allRows.filter((r) => r.owned_by_connection_id).map((r) => r.id), ); const rows = allRows.filter((r) => !r.owned_by_connection_id); - const precedence = new Map(); - scopeIds.forEach((id, index) => precedence.set(id, index)); - const pick = (row: typeof rows[number]) => { + const pick = (row: (typeof rows)[number]) => { const existing = byId.get(row.id); - const incomingScope = row.scope_id as string; - const incomingRank = precedence.get(incomingScope) ?? Number.MAX_SAFE_INTEGER; + const incomingScope = row.scope_id; + const incomingRank = scopeRank(row); if (existing) { - const existingRank = precedence.get(existing.scopeId as string) ?? Number.MAX_SAFE_INTEGER; + const existingRank = scopePrecedence.get(existing.scopeId) ?? Infinity; if (existingRank <= incomingRank) return; } byId.set( @@ -1217,10 +1107,7 @@ export const createExecutor = < scopeId: ScopeId.make(incomingScope), name: row.name, provider: row.provider, - createdAt: - row.created_at instanceof Date - ? row.created_at - : new Date(row.created_at as string), + createdAt: row.created_at instanceof Date ? row.created_at : new Date(row.created_at), }), ); }; @@ -1235,17 +1122,13 @@ export const createExecutor = < // whole list. Merge in registration order afterwards so the // "first provider wins" precedence stays deterministic. 
const attribution = scopes[0]!.id; - const listers = [...secretProviders.entries()].filter( - ([, p]) => p.list, - ); + const listers = [...secretProviders.entries()].filter(([, p]) => p.list); const lists = yield* Effect.all( listers.map(([key, p]) => - p - .list!() - .pipe( - Effect.catch(() => Effect.succeed([] as const)), - Effect.map((entries) => ({ key, entries })), - ), + p.list!().pipe( + Effect.catch(() => Effect.succeed([] as const)), + Effect.map((entries) => ({ key, entries })), + ), ), { concurrency: "unbounded" }, ); @@ -1294,35 +1177,22 @@ export const createExecutor = < // behavior under the new SDK orchestration stays identical. const CONNECTION_REFRESH_SKEW_MS = 60_000; - const decodeProviderState = Schema.decodeUnknownOption( - ConnectionProviderState, - ); + const decodeProviderState = Schema.decodeUnknownOption(ConnectionProviderState); const rowToConnection = (row: ConnectionRow): ConnectionRef => new ConnectionRef({ - id: ConnectionId.make(row.id as string), - scopeId: ScopeId.make(row.scope_id as string), - provider: row.provider as string, - identityLabel: (row.identity_label as string | null | undefined) ?? null, - accessTokenSecretId: SecretId.make(row.access_token_secret_id as string), + id: ConnectionId.make(row.id), + scopeId: ScopeId.make(row.scope_id), + provider: row.provider, + identityLabel: row.identity_label ?? null, + accessTokenSecretId: SecretId.make(row.access_token_secret_id), refreshTokenSecretId: - row.refresh_token_secret_id != null - ? SecretId.make(row.refresh_token_secret_id as string) - : null, - expiresAt: - row.expires_at != null ? Number(row.expires_at as number) : null, - oauthScope: (row.scope as string | null | undefined) ?? null, - providerState: Option.getOrNull( - decodeProviderState(decodeJsonColumn(row.provider_state)), - ), - createdAt: - row.created_at instanceof Date - ? row.created_at - : new Date(row.created_at as string), - updatedAt: - row.updated_at instanceof Date - ? 
row.updated_at - : new Date(row.updated_at as string), + row.refresh_token_secret_id != null ? SecretId.make(row.refresh_token_secret_id) : null, + expiresAt: row.expires_at != null ? Number(row.expires_at) : null, + oauthScope: row.scope ?? null, + providerState: Option.getOrNull(decodeProviderState(decodeJsonColumn(row.provider_state))), + createdAt: row.created_at instanceof Date ? row.created_at : new Date(row.created_at), + updatedAt: row.updated_at instanceof Date ? row.updated_at : new Date(row.updated_at), }); const findInnermostConnectionRow = ( @@ -1336,29 +1206,24 @@ export const createExecutor = < return findInnermost(rows as readonly ConnectionRow[]); }); - const connectionsGet = ( - id: string, - ): Effect.Effect => + const connectionsGet = (id: string): Effect.Effect => Effect.gen(function* () { const row = yield* findInnermostConnectionRow(id); return row ? rowToConnection(row) : null; }); - const connectionsList = (): Effect.Effect< - readonly ConnectionRef[], - StorageFailure - > => + const connectionsList = (): Effect.Effect => Effect.gen(function* () { const rows = yield* core.findMany({ model: "connection" }); // Dedup by id, innermost scope wins — same rule as sources/tools. const byId = new Map(); const byIdRank = new Map(); for (const row of rows as readonly ConnectionRow[]) { - const rank = scopeRank(row as { scope_id: unknown }); - const existing = byIdRank.get(row.id as string); + const rank = scopeRank(row); + const existing = byIdRank.get(row.id); if (existing === undefined || rank < existing) { - byId.set(row.id as string, row); - byIdRank.set(row.id as string, rank); + byId.set(row.id, row); + byIdRank.set(row.id, rank); } } return [...byId.values()].map(rowToConnection); @@ -1367,33 +1232,27 @@ export const createExecutor = < // Write a secret value through a specific provider, bypassing the // bare-secrets ownership check so the SDK can stamp // `owned_by_connection_id` atomically alongside a connection row. 
- const writeOwnedSecret = ( - params: { - id: string; - scope: string; - name: string; - value: string; - provider: string; - ownedByConnectionId: string; - }, - ): Effect.Effect => + const writeOwnedSecret = (params: { + id: string; + scope: string; + name: string; + value: string; + provider: string; + ownedByConnectionId: string; + }): Effect.Effect => Effect.gen(function* () { const target = secretProviders.get(params.provider); if (!target) { - return yield* Effect.fail( - new StorageError({ - message: `Unknown secret provider: ${params.provider}`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: `Unknown secret provider: ${params.provider}`, + cause: undefined, + }); } if (!target.writable || !target.set) { - return yield* Effect.fail( - new StorageError({ - message: `Secret provider "${target.key}" is read-only`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: `Secret provider "${target.key}" is read-only`, + cause: undefined, + }); } yield* target.set(params.id, params.value, params.scope); @@ -1426,50 +1285,39 @@ export const createExecutor = < if (requested) { const p = secretProviders.get(requested); if (!p) { - return yield* Effect.fail( - new StorageError({ - message: `Unknown secret provider: ${requested}`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: `Unknown secret provider: ${requested}`, + cause: undefined, + }); } return p; } for (const p of secretProviders.values()) { if (p.writable && p.set) return p; } - return yield* Effect.fail( - new StorageError({ - message: "No writable secret providers registered", - cause: undefined, - }), - ); + return yield* new StorageError({ + message: "No writable secret providers registered", + cause: undefined, + }); }); const connectionsCreate = ( input: CreateConnectionInput, - ): Effect.Effect< - ConnectionRef, - ConnectionProviderNotRegisteredError | StorageFailure - > => + ): Effect.Effect => Effect.gen(function* () { - 
if (!scopeIds.includes(input.scope as string)) { - return yield* Effect.fail( - new StorageError({ - message: - `connections.create targets scope "${input.scope}" which is not ` + - `in the executor's scope stack [${scopeIds.join(", ")}].`, - cause: undefined, - }), - ); + if (!scopeIds.some((scopeId) => scopeId === input.scope)) { + return yield* new StorageError({ + message: + `connections.create targets scope "${input.scope}" which is not ` + + `in the executor's scope stack [${scopeIds.join(", ")}].`, + cause: undefined, + }); } if (!resolveConnectionProvider(input.provider)) { - return yield* Effect.fail( - new ConnectionProviderNotRegisteredError({ - provider: input.provider, - connectionId: input.id, - }), - ); + return yield* new ConnectionProviderNotRegisteredError({ + provider: input.provider, + connectionId: input.id, + }); } const writable = yield* pickWritableProvider(); @@ -1484,40 +1332,39 @@ export const createExecutor = < yield* core.delete({ model: "connection", where: [ - { field: "id", value: input.id as string }, - { field: "scope_id", value: input.scope as string }, + { field: "id", value: input.id }, + { field: "scope_id", value: input.scope }, ], }); yield* writeOwnedSecret({ - id: input.accessToken.secretId as string, - scope: input.scope as string, + id: input.accessToken.secretId, + scope: input.scope, name: input.accessToken.name, value: input.accessToken.value, provider: writable.key, - ownedByConnectionId: input.id as string, + ownedByConnectionId: input.id, }); if (input.refreshToken) { yield* writeOwnedSecret({ - id: input.refreshToken.secretId as string, - scope: input.scope as string, + id: input.refreshToken.secretId, + scope: input.scope, name: input.refreshToken.name, value: input.refreshToken.value, provider: writable.key, - ownedByConnectionId: input.id as string, + ownedByConnectionId: input.id, }); } yield* core.create({ model: "connection", data: { - id: input.id as string, - scope_id: input.scope as string, + id: input.id, 
+ scope_id: input.scope, provider: input.provider, identity_label: input.identityLabel ?? undefined, - access_token_secret_id: input.accessToken.secretId as string, - refresh_token_secret_id: - input.refreshToken?.secretId ?? undefined, + access_token_secret_id: input.accessToken.secretId, + refresh_token_secret_id: input.refreshToken?.secretId ?? undefined, expires_at: input.expiresAt ?? undefined, scope: input.oauthScope ?? undefined, provider_state: input.providerState ?? undefined, @@ -1533,8 +1380,7 @@ export const createExecutor = < provider: input.provider, identityLabel: input.identityLabel, accessTokenSecretId: input.accessToken.secretId, - refreshTokenSecretId: - input.refreshToken?.secretId ?? null, + refreshTokenSecretId: input.refreshToken?.secretId ?? null, expiresAt: input.expiresAt, oauthScope: input.oauthScope, providerState: input.providerState, @@ -1552,53 +1398,41 @@ export const createExecutor = < // configs still resolve. const connectionsUpdateTokens = ( input: UpdateConnectionTokensInput, - ): Effect.Effect< - ConnectionRef, - ConnectionNotFoundError | StorageFailure - > => + ): Effect.Effect => Effect.gen(function* () { - const row = yield* findInnermostConnectionRow(input.id as string); + const row = yield* findInnermostConnectionRow(input.id); if (!row) { - return yield* Effect.fail( - new ConnectionNotFoundError({ connectionId: input.id }), - ); + return yield* new ConnectionNotFoundError({ connectionId: input.id }); } const writable = yield* pickWritableProvider(); - const accessName = - `Connection ${input.id as string} access token`; - const refreshName = - `Connection ${input.id as string} refresh token`; + const accessName = `Connection ${input.id} access token`; + const refreshName = `Connection ${input.id} refresh token`; return yield* adapter.transaction(() => Effect.gen(function* () { yield* writeOwnedSecret({ - id: row.access_token_secret_id as string, - scope: row.scope_id as string, + id: row.access_token_secret_id, + scope: 
row.scope_id, name: accessName, value: input.accessToken, provider: writable.key, - ownedByConnectionId: row.id as string, + ownedByConnectionId: row.id, }); const rotatedRefresh = input.refreshToken ?? undefined; - if ( - rotatedRefresh && - row.refresh_token_secret_id - ) { + if (rotatedRefresh && row.refresh_token_secret_id) { yield* writeOwnedSecret({ - id: row.refresh_token_secret_id as string, - scope: row.scope_id as string, + id: row.refresh_token_secret_id, + scope: row.scope_id, name: refreshName, value: rotatedRefresh, provider: writable.key, - ownedByConnectionId: row.id as string, + ownedByConnectionId: row.id, }); } const now = new Date(); const patch: Record = { updated_at: now }; - if (input.expiresAt !== undefined) - patch.expires_at = input.expiresAt ?? undefined; - if (input.oauthScope !== undefined) - patch.scope = input.oauthScope ?? undefined; + if (input.expiresAt !== undefined) patch.expires_at = input.expiresAt ?? undefined; + if (input.oauthScope !== undefined) patch.scope = input.oauthScope ?? undefined; if (input.providerState !== undefined) patch.provider_state = input.providerState ?? 
undefined; if (input.identityLabel !== undefined) @@ -1606,18 +1440,16 @@ export const createExecutor = < yield* core.update({ model: "connection", where: [ - { field: "id", value: row.id as string }, - { field: "scope_id", value: row.scope_id as string }, + { field: "id", value: row.id }, + { field: "scope_id", value: row.scope_id }, ], update: patch, }); - const updated = yield* findInnermostConnectionRow( - row.id as string, - ); + const updated = yield* findInnermostConnectionRow(row.id); if (!updated) { - return yield* Effect.fail( - new ConnectionNotFoundError({ connectionId: input.id }), - ); + return yield* new ConnectionNotFoundError({ + connectionId: input.id, + }); } return rowToConnection(updated); }), @@ -1631,17 +1463,15 @@ export const createExecutor = < Effect.gen(function* () { const row = yield* findInnermostConnectionRow(id); if (!row) { - return yield* Effect.fail( - new ConnectionNotFoundError({ - connectionId: ConnectionId.make(id), - }), - ); + return yield* new ConnectionNotFoundError({ + connectionId: ConnectionId.make(id), + }); } yield* core.update({ model: "connection", where: [ { field: "id", value: id }, - { field: "scope_id", value: row.scope_id as string }, + { field: "scope_id", value: row.scope_id }, ], update: { identity_label: label ?? 
undefined, @@ -1667,15 +1497,13 @@ export const createExecutor = < if (willDangle) { const usages = yield* connectionsUsagesStrict(id); if (usages.length > 0) { - return yield* Effect.fail( - new ConnectionInUseError({ - connectionId: ConnectionId.make(id), - usageCount: usages.length, - }), - ); + return yield* new ConnectionInUseError({ + connectionId: ConnectionId.make(id), + usageCount: usages.length, + }); } } - const scope = row.scope_id as string; + const scope = row.scope_id; yield* adapter.transaction(() => Effect.gen(function* () { // Find every owned secret at this scope and drop through @@ -1697,14 +1525,16 @@ export const createExecutor = < for (const secret of owned) { yield* Effect.all( deleters.map((p) => - p.delete(secret.id as string, scope).pipe( - Effect.catchCause((cause) => - Effect.logWarning( - `Failed to delete connection-owned secret from provider ${p.key}`, - cause, - ).pipe(Effect.as(false)), + p + .delete(secret.id, scope) + .pipe( + Effect.catchCause((cause) => + Effect.logWarning( + `Failed to delete connection-owned secret from provider ${p.key}`, + cause, + ).pipe(Effect.as(false)), + ), ), - ), ), { concurrency: "unbounded" }, ); @@ -1743,26 +1573,20 @@ export const createExecutor = < // The actual work of a single refresh cycle, factored out so the // concurrency gate (`connectionsAccessToken`) stays readable. Runs // for the fiber that wins the `refreshInFlight` race. 
- const performRefresh = ( - ref: ConnectionRef, - ): Effect.Effect => + const performRefresh = (ref: ConnectionRef): Effect.Effect => Effect.gen(function* () { const provider = resolveConnectionProvider(ref.provider); if (!provider) { - return yield* Effect.fail( - new ConnectionProviderNotRegisteredError({ - provider: ref.provider, - connectionId: ref.id, - }), - ); + return yield* new ConnectionProviderNotRegisteredError({ + provider: ref.provider, + connectionId: ref.id, + }); } if (!provider.refresh) { - return yield* Effect.fail( - new ConnectionRefreshNotSupportedError({ - connectionId: ref.id, - provider: ref.provider, - }), - ); + return yield* new ConnectionRefreshNotSupportedError({ + connectionId: ref.id, + provider: ref.provider, + }); } const refreshTokenValue = ref.refreshTokenSecretId @@ -1774,31 +1598,28 @@ export const createExecutor = < // stored refresh token can't recover. Translate into the // caller-visible "re-authenticate" error so the UI can // prompt sign-in instead of silently retrying. 
- const rawResult: Result.Result< - ConnectionRefreshResult, - ConnectionRefreshError - > = yield* Effect.result( - provider.refresh({ - connectionId: ref.id, - scopeId: ref.scopeId, - identityLabel: ref.identityLabel, - refreshToken: refreshTokenValue, - providerState: ref.providerState, - oauthScope: ref.oauthScope, - }), - ); + const rawResult: Result.Result = + yield* Effect.result( + provider.refresh({ + connectionId: ref.id, + scopeId: ref.scopeId, + identityLabel: ref.identityLabel, + refreshToken: refreshTokenValue, + providerState: ref.providerState, + oauthScope: ref.oauthScope, + }), + ); if (Result.isFailure(rawResult)) { const err = rawResult.failure; if (err.reauthRequired) { - return yield* Effect.fail( - new ConnectionReauthRequiredError({ - connectionId: err.connectionId, - provider: ref.provider, - message: err.message, - }), - ); + return yield* new ConnectionReauthRequiredError({ + connectionId: err.connectionId, + provider: ref.provider, + // oxlint-disable-next-line executor/no-unknown-error-message -- typed: ConnectionRefreshError.message is provider-facing domain data, not an unknown caught error + message: err["message"], + }); } - return yield* Effect.fail(err); + return yield* err; } const result = rawResult.success; @@ -1826,28 +1647,21 @@ export const createExecutor = < // observes the Deferred and awaits its completion. The Deferred is // pulled out of the map before the refresh result resolves so // later invokes don't reuse a completed slot. 
- const connectionsAccessToken = ( - id: string, - ): Effect.Effect => + const connectionsAccessToken = (id: string): Effect.Effect => Effect.gen(function* () { const row = yield* findInnermostConnectionRow(id); if (!row) { - return yield* Effect.fail( - new ConnectionNotFoundError({ - connectionId: ConnectionId.make(id), - }), - ); + return yield* new ConnectionNotFoundError({ + connectionId: ConnectionId.make(id), + }); } const ref = rowToConnection(row); const now = Date.now(); const needsRefresh = - ref.expiresAt !== null && - ref.expiresAt - CONNECTION_REFRESH_SKEW_MS <= now; + ref.expiresAt !== null && ref.expiresAt - CONNECTION_REFRESH_SKEW_MS <= now; if (!needsRefresh) { - const current = yield* connectionSecretGet( - ref.accessTokenSecretId, - ); + const current = yield* connectionSecretGet(ref.accessTokenSecretId); if (current !== null) return current; // Fall through to refresh if the stored token vanished — a // genuinely-missing secret with no way to refresh is a @@ -1901,17 +1715,12 @@ export const createExecutor = < rawAdapter: adapter, secretsGet: (id) => secretsGet(id).pipe( - Effect.catchTag("SecretOwnedByConnectionError", () => - Effect.succeed(null), - ), + Effect.catchTag("SecretOwnedByConnectionError", () => Effect.succeed(null)), ), secretsSet: (input) => secretsSet(input), connectionsCreate: (input) => connectionsCreate(input), }); - connectionProviders.set( - oauthBundle.connectionProvider.key, - oauthBundle.connectionProvider, - ); + connectionProviders.set(oauthBundle.connectionProvider.key, oauthBundle.connectionProvider); // ------------------------------------------------------------------ // Plugin wiring — build ctx, run extension, populate static pools, @@ -1919,9 +1728,10 @@ export const createExecutor = < // ------------------------------------------------------------------ for (const plugin of plugins) { if (runtimes.has(plugin.id)) { - return yield* Effect.fail( - new Error(`Duplicate plugin id: ${plugin.id}`), - ); + return 
yield* new StorageError({ + message: `Duplicate plugin id: ${plugin.id}`, + cause: undefined, + }); } // Plugin-facing typed view. `StorageError` and `UniqueViolationError` @@ -1957,22 +1767,18 @@ export const createExecutor = < // share the same string space. Fails as `StorageError` // so the HTTP edge surfaces it as `InternalError(traceId)`. if (staticSources.has(input.id)) { - return yield* Effect.fail( - new StorageError({ - message: `Source id "${input.id}" collides with a static source`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: `Source id "${input.id}" collides with a static source`, + cause: undefined, + }); } for (const tool of input.tools) { const fqid = `${input.id}.${tool.name}`; if (staticTools.has(fqid)) { - return yield* Effect.fail( - new StorageError({ - message: `Tool id "${fqid}" collides with a static tool`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: `Tool id "${fqid}" collides with a static tool`, + cause: undefined, + }); } } // Wrap in adapter.transaction so a standalone register() @@ -1981,9 +1787,7 @@ export const createExecutor = < // the router short-circuits to the active tx handle // instead of opening a nested sql.begin — that nested // sql.begin is the postgres.js + pool=1 deadlock path. 
- yield* adapter.transaction(() => - writeSourceInput(core, plugin.id, input), - ); + yield* adapter.transaction(() => writeSourceInput(core, plugin.id, input)); }), unregister: (sourceId: string) => // `unregister` is scoped to a specific source row — look up @@ -2001,32 +1805,28 @@ export const createExecutor = < }); const row = findInnermost(rows); if (!row) return; - yield* deleteSourceById( - core, - sourceId, - row.scope_id as string, - ); + yield* deleteSourceById(core, sourceId, row.scope_id); }), ), update: (input) => - core.update({ - model: "source", - where: [ - { field: "id", value: input.id }, - { field: "scope_id", value: input.scope }, - ], - update: { - ...(input.name !== undefined ? { name: input.name } : {}), - ...(input.url !== undefined ? { url: input.url ?? undefined } : {}), - updated_at: new Date(), - }, - }).pipe(Effect.asVoid), + core + .update({ + model: "source", + where: [ + { field: "id", value: input.id }, + { field: "scope_id", value: input.scope }, + ], + update: { + ...(input.name !== undefined ? { name: input.name } : {}), + ...(input.url !== undefined ? { url: input.url ?? 
undefined } : {}), + updated_at: new Date(), + }, + }) + .pipe(Effect.asVoid), }, definitions: { register: (input: DefinitionsInput) => - adapter.transaction(() => - writeDefinitions(core, plugin.id, input), - ), + adapter.transaction(() => writeDefinitions(core, plugin.id, input)), }, }, secrets: { @@ -2040,8 +1840,7 @@ export const createExecutor = < list: () => connectionsListForCtx(), create: (input) => connectionsCreate(input), updateTokens: (input) => connectionsUpdateTokens(input), - setIdentityLabel: (id, label) => - connectionsSetIdentityLabel(id, label), + setIdentityLabel: (id, label) => connectionsSetIdentityLabel(id, label), accessToken: (id) => connectionsAccessToken(id), remove: (id) => connectionsRemove(id), }, @@ -2052,45 +1851,36 @@ export const createExecutor = < // propagate unchanged; storage failures also stay typed // (`StorageFailure`) so the HTTP edge wrapper can translate them. transaction: (effect: Effect.Effect) => - adapter.transaction(() => effect) as Effect.Effect< - A, - E | StorageFailure - >, + adapter.transaction(() => effect) as Effect.Effect, }; // Build extension FIRST so it's available as `self` when resolving // staticSources. Field ordering in the plugin spec matters — TS // infers TExtension from `extension`'s return type, then NoInfer // locks `self` to that inferred type on `staticSources`. - const extension: object = plugin.extension - ? plugin.extension(ctx) - : {}; + const extension: object = plugin.extension ? plugin.extension(ctx) : {}; if (plugin.extension) { extensions[plugin.id] = extension; } // Resolve static declarations to the in-memory pools. NO DB WRITES. - const decls = plugin.staticSources - ? plugin.staticSources(extension) - : []; + const decls = plugin.staticSources ? 
plugin.staticSources(extension) : []; for (const source of decls) { if (staticSources.has(source.id)) { - return yield* Effect.fail( - new Error( - `Duplicate static source id: ${source.id} (plugin ${plugin.id})`, - ), - ); + return yield* new StorageError({ + message: `Duplicate static source id: ${source.id} (plugin ${plugin.id})`, + cause: undefined, + }); } staticSources.set(source.id, { source, pluginId: plugin.id }); for (const tool of source.tools) { const fqid = `${source.id}.${tool.name}`; if (staticTools.has(fqid)) { - return yield* Effect.fail( - new Error( - `Duplicate static tool id: ${fqid} (plugin ${plugin.id})`, - ), - ); + return yield* new StorageError({ + message: `Duplicate static tool id: ${fqid} (plugin ${plugin.id})`, + cause: undefined, + }); } staticTools.set(fqid, { source, @@ -2111,11 +1901,10 @@ export const createExecutor = < const providers = Effect.isEffect(raw) ? yield* raw : raw; for (const provider of providers) { if (secretProviders.has(provider.key)) { - return yield* Effect.fail( - new Error( - `Duplicate secret provider key: ${provider.key} (from plugin ${plugin.id})`, - ), - ); + return yield* new StorageError({ + message: `Duplicate secret provider key: ${provider.key} (from plugin ${plugin.id})`, + cause: undefined, + }); } secretProviders.set(provider.key, provider); } @@ -2129,11 +1918,10 @@ export const createExecutor = < const providers = Effect.isEffect(raw) ? 
yield* raw : raw; for (const provider of providers) { if (connectionProviders.has(provider.key)) { - return yield* Effect.fail( - new Error( - `Duplicate connection provider key: ${provider.key} (from plugin ${plugin.id})`, - ), - ); + return yield* new StorageError({ + message: `Duplicate connection provider key: ${provider.key} (from plugin ${plugin.id})`, + cause: undefined, + }); } connectionProviders.set(provider.key, provider); } @@ -2151,7 +1939,7 @@ export const createExecutor = < // two rows — their override and the outer default — which is // inconsistent with how `secrets.list` and every other list // surface dedup shadowed entries. - const byId = new Map(); + const byId = new Map(); const byIdRank = new Map(); for (const row of dynamic) { const rank = scopeRank(row); @@ -2201,10 +1989,7 @@ export const createExecutor = < [...groups], ([key, groupRows]) => Effect.gen(function* () { - const [pluginId, sourceId] = key.split("\u0000") as [ - string, - string, - ]; + const [pluginId, sourceId] = key.split("\u0000") as [string, string]; const runtime = runtimes.get(pluginId); if (!runtime?.plugin.resolveAnnotations) return undefined; return yield* runtime.plugin.resolveAnnotations({ @@ -2228,14 +2013,12 @@ export const createExecutor = < Effect.gen(function* () { const dynamic = yield* core.findMany({ model: "tool", - where: filter?.sourceId - ? [{ field: "source_id", value: filter.sourceId }] - : undefined, + where: filter?.sourceId ? [{ field: "source_id", value: filter.sourceId }] : undefined, }); // Dedup by tool id, innermost scope winning — same reason as // `listSources` above: a shadowed id must surface as one entry // (the inner one), not two. - const byId = new Map(); + const byId = new Map(); const byIdRank = new Map(); for (const row of dynamic) { const rank = scopeRank(row); @@ -2261,9 +2044,7 @@ export const createExecutor = < for (const row of dynamicDeduped) { out.push(rowToTool(row, annotations.get(row.id))); } - const filtered = filter - ? 
out.filter((t) => toolMatchesFilter(t, filter)) - : out; + const filtered = filter ? out.filter((t) => toolMatchesFilter(t, filter)) : out; // Drop tools blocked by user policy unless the caller explicitly // asked to see them (the settings UI does, agent surfaces don't). @@ -2306,7 +2087,7 @@ export const createExecutor = < model: "definition", where: [{ field: "source_id", value: sourceId }], }); - const winners = new Map(); + const winners = new Map(); for (const row of defRows) { const rank = scopeRank(row); const existing = winners.get(row.name); @@ -2432,7 +2213,7 @@ export const createExecutor = < const toolsDefinitions = () => Effect.gen(function* () { const rows = yield* core.findMany({ model: "definition" }); - const winners = new Map(); + const winners = new Map(); for (const row of rows) { const key = `${row.source_id}\u0000${row.name}`; const rank = scopeRank(row); @@ -2453,19 +2234,13 @@ export const createExecutor = < return out; }); - const defaultElicitationHandler = resolveElicitationHandler( - config.onElicitation, - ); + const defaultElicitationHandler = resolveElicitationHandler(config.onElicitation); const pickHandler = (options: InvokeOptions | undefined): ElicitationHandler => options?.onElicitation ? resolveElicitationHandler(options.onElicitation) : defaultElicitationHandler; - const buildElicit = ( - toolId: string, - args: unknown, - handler: ElicitationHandler, - ): Elicit => { + const buildElicit = (toolId: string, args: unknown, handler: ElicitationHandler): Elicit => { return (request: ElicitationRequest) => Effect.gen(function* () { const tid = ToolId.make(toolId); @@ -2494,8 +2269,7 @@ export const createExecutor = < // used as today. 
// ------------------------------------------------------------------ - const loadAllPolicies = () => - core.findMany({ model: "tool_policy" }); + const loadAllPolicies = () => core.findMany({ model: "tool_policy" }); const resolveToolPolicyForId = (toolId: string) => Effect.gen(function* () { @@ -2539,13 +2313,13 @@ export const createExecutor = < } }); - const invokeTool = ( - toolId: string, - args: unknown, - options?: InvokeOptions, - ) => { + const invokeTool = (toolId: string, args: unknown, options?: InvokeOptions) => { const handler = pickHandler(options); return Effect.gen(function* () { + const formatInvocationCauseMessage = (cause: unknown): string => { + // oxlint-disable-next-line executor/no-instanceof-error, executor/no-unknown-error-message -- boundary: preserve public invoke error message wrapping for unknown plugin failures + return cause instanceof Error ? cause.message : String(cause); + }; const wrapInvocationError = ( effect: Effect.Effect, ): Effect.Effect => @@ -2554,8 +2328,7 @@ export const createExecutor = < (cause) => new ToolInvocationError({ toolId: ToolId.make(toolId), - message: - cause instanceof Error ? 
cause.message : String(cause), + message: formatInvocationCauseMessage(cause), cause, }), ), @@ -2583,13 +2356,9 @@ export const createExecutor = < "executor.source_kind": staticEntry.source.kind, "executor.plugin_id": staticEntry.pluginId, }); - yield* enforceApproval( - staticEntry.tool.annotations, - toolId, - args, - policy, - handler, - ).pipe(Effect.withSpan("executor.tool.enforce_approval")); + yield* enforceApproval(staticEntry.tool.annotations, toolId, args, policy, handler).pipe( + Effect.withSpan("executor.tool.enforce_approval"), + ); return yield* wrapInvocationError( staticEntry.tool.handler({ ctx: staticEntry.ctx, @@ -2702,14 +2471,10 @@ export const createExecutor = < yield* runtime.plugin.removeSource({ ctx: runtime.ctx, sourceId, - scope: sourceRow.scope_id as string, + scope: sourceRow.scope_id, }); } - yield* deleteSourceById( - core, - sourceId, - sourceRow.scope_id as string, - ); + yield* deleteSourceById(core, sourceId, sourceRow.scope_id); }), ); }); @@ -2730,7 +2495,7 @@ export const createExecutor = < yield* runtime.plugin.refreshSource({ ctx: runtime.ctx, sourceId, - scope: sourceRow.scope_id as string, + scope: sourceRow.scope_id, }); } }); @@ -2739,9 +2504,7 @@ export const createExecutor = < // `detect` hook. Collect all non-null results. Plugin-level detect // implementations should swallow fetch errors and return null, so // one flaky plugin doesn't block the whole dispatch. 
- const detectionConfidenceScore = ( - confidence: SourceDetectionResult["confidence"], - ) => { + const detectionConfidenceScore = (confidence: SourceDetectionResult["confidence"]) => { switch (confidence) { case "high": return 3; @@ -2763,24 +2526,19 @@ export const createExecutor = < if (result) results.push(result); } return results.sort( - (a, b) => - detectionConfidenceScore(b.confidence) - - detectionConfidenceScore(a.confidence), + (a, b) => detectionConfidenceScore(b.confidence) - detectionConfidenceScore(a.confidence), ); }); // Per-source definitions accessor — one query, one mapping pass. - const sourceDefinitions = (sourceId: string) => - loadDefinitionsForSource(sourceId); + const sourceDefinitions = (sourceId: string) => loadDefinitionsForSource(sourceId); // Existence check for user-facing secret pickers. Core `secret` // rows are routing metadata; when a provider can answer `has()`, // confirm the backing value still exists. Providers without `has()` // remain conservative so keychain/1password don't need to return // the value or prompt just to populate picker/status UI. 
- const secretsStatus = ( - id: string, - ): Effect.Effect<"resolved" | "missing", StorageFailure> => + const secretsStatus = (id: string): Effect.Effect<"resolved" | "missing", StorageFailure> => Effect.gen(function* () { const rows = yield* secretRowsForId(id); if (rows.some((row) => row.owned_by_connection_id)) return "missing"; @@ -2853,7 +2611,7 @@ export const createExecutor = < }); let min: string | null = null; for (const row of existing) { - const p = row.position as string; + const p = row.position; if (min === null || p < min) min = p; } position = generateKeyBetween(null, min); @@ -2923,13 +2681,13 @@ export const createExecutor = < model: "tool_policy", where: [ { field: "id", value: input.id }, - { field: "scope_id", value: row.scope_id as string }, + { field: "scope_id", value: row.scope_id }, ], update: { - pattern: updated.pattern as string, - action: updated.action as string, - position: updated.position as string, - updated_at: updated.updated_at as Date, + pattern: updated.pattern, + action: updated.action, + position: updated.position, + updated_at: updated.updated_at, }, }); return rowToToolPolicy(updated); @@ -2944,9 +2702,7 @@ export const createExecutor = < .pipe(Effect.asVoid, Effect.withSpan("executor.policies.remove")); const policiesResolve = (toolId: string) => - resolveToolPolicyForId(toolId).pipe( - Effect.withSpan("executor.policies.resolve"), - ); + resolveToolPolicyForId(toolId).pipe(Effect.withSpan("executor.policies.resolve")); const close = () => Effect.gen(function* () { @@ -2984,10 +2740,7 @@ export const createExecutor = < remove: secretsRemove, list: secretsList, usages: secretsUsages, - providers: () => - Effect.sync( - () => Array.from(secretProviders.keys()) as readonly string[], - ), + providers: () => Effect.sync(() => Array.from(secretProviders.keys()) as readonly string[]), }, connections: { get: connectionsGet, @@ -2999,10 +2752,7 @@ export const createExecutor = < remove: connectionsRemove, usages: 
connectionsUsages, providers: () => - Effect.sync( - () => - Array.from(connectionProviders.keys()) as readonly string[], - ), + Effect.sync(() => Array.from(connectionProviders.keys()) as readonly string[]), }, oauth: oauthBundle.service, policies: { @@ -3020,7 +2770,6 @@ export const createExecutor = < // those leak via the helper functions and won't be cleaned until // every plugin tightens its surface to typed errors. The runtime // shape matches `Executor`. - const toExecutor = (value: unknown): Executor => - value as Executor; + const toExecutor = (value: unknown): Executor => value as Executor; return toExecutor(Object.assign(base, extensions)); }); From fcae413a71778a59b3b56f284e261ac018cc2808 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:34:21 -0700 Subject: [PATCH 073/108] Fix cloud MCP boundary lint --- apps/cloud/src/mcp-session.ts | 40 ++++--- apps/cloud/src/mcp.ts | 52 +++++----- apps/cloud/src/mcp/response-peek.ts | 156 ++++++++++++++++++---------- 3 files changed, 158 insertions(+), 90 deletions(-) diff --git a/apps/cloud/src/mcp-session.ts b/apps/cloud/src/mcp-session.ts index 2752b3d9e..ba6381c29 100644 --- a/apps/cloud/src/mcp-session.ts +++ b/apps/cloud/src/mcp-session.ts @@ -145,6 +145,7 @@ const makeDbHandle = (options: { return { sql, db: drizzle(sql, { schema: combinedSchema }) as DrizzleDb, + // oxlint-disable-next-line executor/no-promise-catch -- boundary: postgres.js close is best-effort during DO/runtime cleanup end: () => sql.end({ timeout: 0 }).catch(() => undefined), }; }; @@ -268,9 +269,13 @@ export class McpSessionDO extends DurableObject { this.transportJsonResponseMode = null; await Promise.all([ + // oxlint-disable-next-line executor/no-promise-catch -- boundary: Durable Object storage cleanup is best-effort after session invalidation this.ctx.storage.delete(TRANSPORT_STATE_KEY).catch(() => false), + // oxlint-disable-next-line executor/no-promise-catch -- 
boundary: Durable Object storage cleanup is best-effort after session invalidation this.ctx.storage.delete(SESSION_META_KEY).catch(() => false), + // oxlint-disable-next-line executor/no-promise-catch -- boundary: Durable Object storage cleanup is best-effort after session invalidation this.ctx.storage.delete(LAST_ACTIVITY_KEY).catch(() => false), + // oxlint-disable-next-line executor/no-promise-catch -- boundary: Durable Object alarm cleanup is best-effort after session invalidation this.ctx.storage.deleteAlarm().catch(() => undefined), ]); }).pipe(Effect.withSpan("mcp.session.clear_state")); @@ -322,6 +327,7 @@ export class McpSessionDO extends DurableObject { } if (self.mcpServer) { const mcpServer = self.mcpServer; + // oxlint-disable-next-line executor/no-promise-catch -- boundary: MCP SDK close failure is ignored during best-effort runtime teardown yield* Effect.promise(() => mcpServer.close().catch(() => undefined)); self.mcpServer = null; } @@ -332,7 +338,10 @@ export class McpSessionDO extends DurableObject { } self.initialized = false; self.transportJsonResponseMode = null; - }).pipe(Effect.orDie); + }).pipe( + // oxlint-disable-next-line executor/no-effect-escape-hatch -- boundary: DO cleanup has no typed failure surface + Effect.orDie, + ); } private installRuntime( @@ -388,6 +397,7 @@ export class McpSessionDO extends DurableObject { "mcp.request.session_id_present": !!request.headers.get("mcp-session-id"), }, }), + // oxlint-disable-next-line executor/no-effect-escape-hatch -- boundary: cold DO restore is re-entered from Promise-only Durable Object method Effect.orDie, ); } @@ -409,7 +419,11 @@ export class McpSessionDO extends DurableObject { yield* Effect.annotateCurrentSpan({ "mcp.session.transport_upgraded_json_response": true, }); - }).pipe(Effect.withSpan("McpSessionDO.ensureJsonResponseTransportForPost"), Effect.orDie); + }).pipe( + Effect.withSpan("McpSessionDO.ensureJsonResponseTransportForPost"), + // oxlint-disable-next-line 
executor/no-effect-escape-hatch -- boundary: transport rebuild is internal DO runtime state + Effect.orDie, + ); } private validateSessionOwner(request: Request): Effect.Effect { @@ -435,17 +449,15 @@ export class McpSessionDO extends DurableObject { const self = this; return Effect.gen(function* () { const dbHandle = makeEphemeralDb(); - try { - const sessionMeta = yield* resolveSessionMeta(token.organizationId, token.userId).pipe( - Effect.provide(makeResolveOrganizationServices(dbHandle)), - ); - yield* Effect.promise(() => self.saveSessionMeta(sessionMeta)).pipe( - Effect.withSpan("mcp.session.save_meta"), - ); - return sessionMeta; - } finally { - yield* Effect.promise(() => dbHandle.end()); - } + return yield* resolveSessionMeta(token.organizationId, token.userId).pipe( + Effect.provide(makeResolveOrganizationServices(dbHandle)), + Effect.tap((sessionMeta) => + Effect.promise(() => self.saveSessionMeta(sessionMeta)).pipe( + Effect.withSpan("mcp.session.save_meta"), + ), + ), + Effect.ensuring(Effect.promise(() => dbHandle.end())), + ); }).pipe(Effect.withSpan("mcp.session.resolve_and_store_meta")); } @@ -463,6 +475,7 @@ export class McpSessionDO extends DurableObject { }), (eff) => withIncomingParent(incoming, eff), Effect.provide(DoTelemetryLive), + // oxlint-disable-next-line executor/no-effect-escape-hatch -- boundary: Durable Object init method can only reject its Promise Effect.orDie, ), ); @@ -511,6 +524,7 @@ export class McpSessionDO extends DurableObject { return yield* Effect.failCause(cause); }), ), + // oxlint-disable-next-line executor/no-effect-escape-hatch -- boundary: doInit is called only from Promise-only Durable Object init Effect.orDie, ); } diff --git a/apps/cloud/src/mcp.ts b/apps/cloud/src/mcp.ts index 0bdc49a8d..2b7f049db 100644 --- a/apps/cloud/src/mcp.ts +++ b/apps/cloud/src/mcp.ts @@ -16,7 +16,7 @@ import { env } from "cloudflare:workers"; import { HttpEffect, HttpServerRequest, HttpServerResponse } from "effect/unstable/http"; 
-import { Cause, Context, Effect, Layer, Option, Schema } from "effect"; +import { Cause, Context, Effect, Layer, Option, Predicate, Result, Schema } from "effect"; import { createCachedRemoteJWKSet } from "./jwks-cache"; import { captureCause } from "./observability"; @@ -171,7 +171,7 @@ export const McpAuthLive = Layer.succeed(McpAuth)({ }), ); if (!verified) return mcpUnauthorized("invalid_token", "The access token is invalid"); - if ("_tag" in verified) return verified; + if (Predicate.isTagged(verified, "Unauthorized")) return verified; if (!verified.accountId) { yield* Effect.annotateCurrentSpan({ "mcp.auth.outcome": "missing_subject" }); return mcpUnauthorized("invalid_token", "The access token is invalid"); @@ -211,8 +211,7 @@ type CfRequestMetadata = { const requestWithCf = (request: Request): Request & { cf?: CfRequestMetadata } => request as Request & { cf?: CfRequestMetadata }; -const getCfMeta = (request: Request): CfRequestMetadata => - requestWithCf(request).cf ?? {}; +const getCfMeta = (request: Request): CfRequestMetadata => requestWithCf(request).cf ?? 
{}; const HEADERS_TO_DUMP = [ "accept", @@ -285,22 +284,28 @@ const InitializeParams = Schema.Struct({ const NamedParams = Schema.Struct({ name: Schema.optional(Schema.String) }); const UriParams = Schema.Struct({ uri: Schema.optional(Schema.String) }); -const decodeJsonRpcEnvelope = Schema.decodeUnknownOption(JsonRpcEnvelope); +const decodeJsonRpcEnvelopeString = Schema.decodeUnknownOption( + Schema.fromJsonString(JsonRpcEnvelope), +); const decodeInitializeParams = Schema.decodeUnknownOption(InitializeParams); const decodeNamedParams = Schema.decodeUnknownOption(NamedParams); const decodeUriParams = Schema.decodeUnknownOption(UriParams); const decodeElicitationReplyResult = Schema.decodeUnknownOption(ElicitationReplyResult); +const isMcpAuthorized = (value: McpAuthResult): value is McpAuthorizedResult => + Predicate.isTagged(value, "Authorized"); +const isMcpUnauthorized = (value: McpAuthResult): value is McpUnauthorizedResult => + Predicate.isTagged(value, "Unauthorized"); + const readJsonRpcEnvelope = (request: Request): Effect.Effect> => - Effect.promise(async () => { - try { - const text = await request.clone().text(); - if (!text) return Option.none(); - return decodeJsonRpcEnvelope(JSON.parse(text)); - } catch { - return Option.none(); - } - }).pipe(Effect.withSpan("mcp.request.read_json_rpc")); + Effect.tryPromise({ + try: () => request.clone().text(), + catch: () => undefined, + }).pipe( + Effect.map((text) => (text ? decodeJsonRpcEnvelopeString(text) : Option.none())), + Effect.catchCause(() => Effect.succeed(Option.none())), + Effect.withSpan("mcp.request.read_json_rpc"), + ); const methodAttrs = (envelope: JsonRpcEnvelope): Record => { const params = envelope.params ?? 
{}; @@ -409,15 +414,14 @@ const protectedResourceMetadata = Effect.sync(() => }), ); -const authorizationServerMetadata = Effect.promise(async () => { - try { +const authorizationServerMetadata = Effect.tryPromise({ + try: async () => { const res = await fetch(`${AUTHKIT_DOMAIN}/.well-known/oauth-authorization-server`); if (!res.ok) return jsonResponse({ error: "upstream_error" }, 502); return jsonResponse(await res.json()); - } catch { - return jsonResponse({ error: "upstream_error" }, 502); - } -}); + }, + catch: () => undefined, +}).pipe(Effect.catchCause(() => Effect.succeed(jsonResponse({ error: "upstream_error" }, 502)))); // --------------------------------------------------------------------------- // DO dispatch @@ -545,7 +549,7 @@ const authorizeMcpOrganization = ( Effect.catchCause((error) => Effect.gen(function* () { yield* Effect.annotateCurrentSpan({ - "mcp.auth.organization_authorize_error": String(error), + "mcp.auth.organization_authorize_error": Cause.pretty(error), }); return false; }), @@ -662,7 +666,7 @@ export const mcpApp: Effect.Effect< const auth = yield* McpAuth; const authResult = yield* auth.verifyBearer(request).pipe(Effect.result); - if (authResult._tag === "Failure") { + if (Result.isFailure(authResult)) { yield* annotateMcpRequest(request, { token: null, parseBody: request.method === "POST", @@ -675,11 +679,11 @@ export const mcpApp: Effect.Effect< // POST bodies are JSON-RPC payloads worth parsing; GET (SSE) and DELETE // don't carry one. yield* annotateMcpRequest(request, { - token: authValue._tag === "Authorized" ? authValue.token : null, + token: isMcpAuthorized(authValue) ? 
authValue.token : null, parseBody: request.method === "POST", }); - if (authValue._tag === "Unauthorized") { + if (isMcpUnauthorized(authValue)) { return unauthorized(authValue, PROTECTED_RESOURCE_METADATA_URL); } const token = authValue.token; diff --git a/apps/cloud/src/mcp/response-peek.ts b/apps/cloud/src/mcp/response-peek.ts index 733742cb5..ad7a78ca2 100644 --- a/apps/cloud/src/mcp/response-peek.ts +++ b/apps/cloud/src/mcp/response-peek.ts @@ -1,23 +1,57 @@ import * as Sentry from "@sentry/cloudflare"; -import { Effect } from "effect"; +import { Cause, Data, Effect, Exit, Option, Schema } from "effect"; import { jsonRpcWebResponse } from "./responses"; const SSE_PEEK_TIMEOUT_MS = 10_000; -type SandboxOutcome = { - readonly status?: string; - readonly error?: { readonly kind?: string; readonly message?: string }; -}; - -type JsonRpcResponseBody = { - readonly jsonrpc?: string; - readonly error?: { readonly code?: number; readonly message?: string }; - readonly result?: { - readonly isError?: boolean; - readonly structuredContent?: SandboxOutcome; - }; -}; +class ResponseBodyTimeoutError extends Data.TaggedError("ResponseBodyTimeoutError")<{ + readonly timeoutMs: number; +}> {} + +class ResponseBodyReadError extends Data.TaggedError("ResponseBodyReadError") {} + +class McpInternalJsonRpcError extends Data.TaggedError("McpInternalJsonRpcError")<{ + readonly message: string; +}> {} + +const ResponseBodyTimeoutErrorData = Schema.Struct({ + _tag: Schema.Literal("ResponseBodyTimeoutError"), + timeoutMs: Schema.Number, +}); +const decodeResponseBodyTimeoutError = Schema.decodeUnknownOption(ResponseBodyTimeoutErrorData); + +const SandboxOutcomeSchema = Schema.Struct({ + status: Schema.optional(Schema.String), + error: Schema.optional( + Schema.Struct({ + kind: Schema.optional(Schema.String), + message: Schema.optional(Schema.String), + }), + ), +}); + +const JsonRpcResponseBodySchema = Schema.Struct({ + jsonrpc: Schema.optional(Schema.String), + error: 
Schema.optional( + Schema.Struct({ + code: Schema.optional(Schema.Number), + message: Schema.optional(Schema.String), + }), + ), + result: Schema.optional( + Schema.Struct({ + isError: Schema.optional(Schema.Boolean), + structuredContent: Schema.optional(SandboxOutcomeSchema), + }), + ), +}); + +const decodeJsonRpcResponseBody = Schema.decodeUnknownOption( + Schema.fromJsonString(JsonRpcResponseBodySchema), +); + +type JsonRpcResponseBody = typeof JsonRpcResponseBodySchema.Type; const responseBodyShape = (body: string): string => { const trimmed = body.trimStart(); @@ -31,18 +65,18 @@ const responseBodyShape = (body: string): string => { const parseFirstJsonRpc = (contentType: string, body: string): JsonRpcResponseBody | null => { if (!body) return null; - try { - if (contentType.includes("text/event-stream")) { - for (const line of body.split(/\r?\n/)) { - if (line.startsWith("data:")) return JSON.parse(line.slice(5).trimStart()); + if (contentType.includes("text/event-stream")) { + for (const line of body.split(/\r?\n/)) { + if (line.startsWith("data:")) { + return Option.getOrNull(decodeJsonRpcResponseBody(line.slice(5).trimStart())); } - return null; } - if (contentType.includes("application/json")) return JSON.parse(body); - return null; - } catch { return null; } + if (contentType.includes("application/json")) { + return Option.getOrNull(decodeJsonRpcResponseBody(body)); + } + return null; }; const jsonRpcResponseAttrs = (payload: JsonRpcResponseBody | null): Record => { @@ -52,8 +86,10 @@ const jsonRpcResponseAttrs = (payload: JsonRpcResponseBody | null): Record => { if (timeoutMs === null) return await response.text(); @@ -85,11 +115,14 @@ const readResponseText = async (response: Response, timeoutMs: number | null): P let timeout: ReturnType | undefined; const timeoutPromise = new Promise((_, reject) => { timeout = setTimeout(() => { + // oxlint-disable-next-line executor/no-promise-catch -- boundary: best-effort stream cancellation inside timeout callback 
void reader.cancel().catch(() => undefined); - reject(new ResponseBodyTimeoutError(timeoutMs)); + // oxlint-disable-next-line executor/no-promise-reject -- boundary: Promise.race timeout adapter for Web ReadableStream + reject(new ResponseBodyTimeoutError({ timeoutMs })); }, timeoutMs); }); const readPromise = (async () => { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: Web stream reader cleanup must clear host timeout after success or failure try { let text = ""; for (;;) { @@ -125,9 +158,18 @@ const withoutBodyHeaders = (response: Response) => { }); }; +const isResponseBodyTimeoutError = (error: unknown) => + Option.isSome(decodeResponseBodyTimeoutError(error)); + +const responsePeekError = (error: unknown): ResponseBodyTimeoutError | ResponseBodyReadError => + Option.match(decodeResponseBodyTimeoutError(error), { + onNone: () => new ResponseBodyReadError(), + onSome: ({ timeoutMs }) => new ResponseBodyTimeoutError({ timeoutMs }), + }); + const responseReadFailure = (error: unknown) => Effect.gen(function* () { - const timedOut = error instanceof ResponseBodyTimeoutError; + const timedOut = isResponseBodyTimeoutError(error); yield* Effect.annotateCurrentSpan({ "mcp.response.status_code": timedOut ? 504 : 500, "mcp.response.content_type": "application/json", @@ -135,7 +177,7 @@ const responseReadFailure = (error: unknown) => "mcp.response.body.length": 0, "mcp.response.jsonrpc.detected": true, "mcp.peek_response.timed_out": timedOut, - "mcp.peek_response.error": String(error), + "mcp.peek_response.error": timedOut ? "ResponseBodyTimeoutError" : "ResponseBodyReadError", }); return jsonRpcWebResponse( timedOut ? 504 : 500, @@ -149,8 +191,8 @@ const responseReadFailure = (error: unknown) => const reportInternalJsonRpcError = (payload: JsonRpcResponseBody | null) => Effect.sync(() => { if (payload?.error?.code !== -32603) return; - const msg = payload.error.message ?? 
"unknown"; - Sentry.captureException(new Error(`MCP internal error (-32603): ${msg}`)); + const message = payload.error["message"] ?? "unknown"; + Sentry.captureException(new McpInternalJsonRpcError({ message })); }); export const peekAndAnnotate = (response: Response): Effect.Effect => @@ -167,21 +209,29 @@ export const peekAndAnnotate = (response: Response): Effect.Effect => const isSseResponse = contentType.includes("text/event-stream"); const timeoutMs = isSseResponse ? SSE_PEEK_TIMEOUT_MS : null; - const textResult = yield* Effect.result(Effect.tryPromise({ - try: () => readResponseText(response, timeoutMs), - catch: (error) => error, - }).pipe( - Effect.withSpan("mcp.peek_response", { - attributes: { - "http.response.content_type": contentType, - "http.response.status_code": response.status, - "mcp.peek_response.timeout_ms": timeoutMs ?? 0, - }, - }), - )); - if (textResult._tag === "Failure") return yield* responseReadFailure(textResult.failure); - - const text = textResult.success; + const textExit = yield* Effect.exit( + Effect.tryPromise({ + try: () => readResponseText(response, timeoutMs), + catch: responsePeekError, + }).pipe( + Effect.withSpan("mcp.peek_response", { + attributes: { + "http.response.content_type": contentType, + "http.response.status_code": response.status, + "mcp.peek_response.timeout_ms": timeoutMs ?? 
0, + }, + }), + ), + ); + if (Exit.isFailure(textExit)) { + const error = Option.getOrElse( + Cause.findErrorOption(textExit.cause), + () => new ResponseBodyReadError(), + ); + return yield* responseReadFailure(error); + } + + const text = textExit.value; const payload = parseFirstJsonRpc(contentType, text); yield* Effect.annotateCurrentSpan({ "mcp.response.status_code": response.status, From 7130b7d812a886cf0810ff48747546460cc4afec Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:34:39 -0700 Subject: [PATCH 074/108] Fix GraphQL plugin boundary lint --- packages/plugins/graphql/src/sdk/plugin.ts | 441 ++++++++------------- 1 file changed, 171 insertions(+), 270 deletions(-) diff --git a/packages/plugins/graphql/src/sdk/plugin.ts b/packages/plugins/graphql/src/sdk/plugin.ts index a785b731e..31c544b39 100644 --- a/packages/plugins/graphql/src/sdk/plugin.ts +++ b/packages/plugins/graphql/src/sdk/plugin.ts @@ -10,7 +10,7 @@ import { ScopeId, SourceDetectionResult, Usage, - type StorageFailure, + type PluginCtx, type ToolAnnotations, type ToolRow, } from "@executor-js/sdk/core"; @@ -30,7 +30,7 @@ import { type IntrospectionTypeRef, } from "./introspect"; import { extract } from "./extract"; -import { GraphqlExtractionError, GraphqlIntrospectionError } from "./errors"; +import { GraphqlIntrospectionError, GraphqlInvocationError } from "./errors"; import { invokeWithLayer, resolveHeaders } from "./invoke"; import { graphqlSchema, @@ -90,63 +90,13 @@ export interface GraphqlUpdateSourceInput { readonly auth?: GraphqlSourceAuth; } -/** - * Errors any GraphQL extension method may surface. `GraphqlIntrospectionError` - * and `GraphqlExtractionError` are plugin-domain tagged errors that flow - * directly to clients (4xx, each carrying its own `HttpApiSchema` status). 
- * `StorageFailure` covers raw backend failures (`StorageError` plus - * `UniqueViolationError`); the HTTP edge (`@executor-js/api`'s `withCapture`) - * translates `StorageError` to the opaque `InternalError({ traceId })` at - * Layer composition. - */ -export type GraphqlExtensionFailure = - | GraphqlIntrospectionError - | GraphqlExtractionError - | StorageFailure; - -export interface GraphqlPluginExtension { - /** Add a GraphQL endpoint and register its operations as tools */ - readonly addSource: ( - config: GraphqlSourceConfig, - ) => Effect.Effect< - { readonly toolCount: number; readonly namespace: string }, - GraphqlExtensionFailure - >; - - /** Remove all tools from a previously added GraphQL source by namespace. - * `scope` pins the cleanup to the exact row — without it a shadowed - * outer-scope source with the same namespace could be wiped instead. */ - readonly removeSource: ( - namespace: string, - scope: string, - ) => Effect.Effect; - - /** Fetch the full stored source by namespace (or null if missing). - * `scope` returns the exact row at that scope. For fall-through - * reads across the executor's scope stack, use `executor.sources.*`. */ - readonly getSource: ( - namespace: string, - scope: string, - ) => Effect.Effect; - - /** Update config (endpoint, headers) for an existing GraphQL source. - * Does NOT re-introspect or re-register tools — just patches the - * stored endpoint/headers used at invoke time. `scope` pins the - * mutation to a single row so shadowed rows at other scopes are - * untouched. 
*/ - readonly updateSource: ( - namespace: string, - scope: string, - input: GraphqlUpdateSourceInput, - ) => Effect.Effect; -} - // --------------------------------------------------------------------------- // Helpers // --------------------------------------------------------------------------- /** Derive a namespace from an endpoint URL */ const namespaceFromEndpoint = (endpoint: string): string => { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: URL construction throws; this helper intentionally falls back to the stable default namespace try { const url = new URL(endpoint); return url.hostname.replace(/[^a-z0-9]+/gi, "_").toLowerCase(); @@ -241,14 +191,10 @@ const prepareOperations = ( typeMap.set(t.name, t); } - const fieldMap = new Map< - string, - { kind: GraphqlOperationKind; field: IntrospectionField } - >(); + const fieldMap = new Map(); const schema = introspection.__schema; for (const rootKind of ["query", "mutation"] as const) { - const typeName = - rootKind === "query" ? schema.queryType?.name : schema.mutationType?.name; + const typeName = rootKind === "query" ? schema.queryType?.name : schema.mutationType?.name; if (!typeName) continue; const rootType = typeMap.get(typeName); if (!rootType?.fields) continue; @@ -264,8 +210,7 @@ const prepareOperations = ( const toolPath = `${prefix}.${extracted.fieldName}`; const description = Option.getOrElse( extracted.description, - () => - `GraphQL ${extracted.kind}: ${extracted.fieldName} -> ${extracted.returnTypeName}`, + () => `GraphQL ${extracted.kind}: ${extracted.fieldName} -> ${extracted.returnTypeName}`, ); const key = `${extracted.kind}.${extracted.fieldName}`; @@ -322,178 +267,158 @@ const toGraphqlConfigEntry = ( headers: headersToConfigValues(config.headers), }); -export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { - const httpClientLayer = options?.httpClientLayer ?? 
FetchHttpClient.layer; +const makeGraphqlExtension = ( + ctx: PluginCtx, + httpClientLayer: Layer.Layer, + configFile: ConfigFileSink | undefined, +) => { + const resolveConfigValues = (values: Record | undefined) => + Effect.gen(function* () { + if (!values) return undefined; + const resolved = yield* resolveHeaders(values, ctx.secrets); + return Object.keys(resolved).length > 0 ? resolved : undefined; + }); - return { - id: "graphql" as const, - packageName: "@executor-js/plugin-graphql", - schema: graphqlSchema, - storage: (deps): GraphqlStore => makeDefaultGraphqlStore(deps), + const resolveOAuthHeader = (auth: GraphqlSourceAuth | undefined) => + Effect.gen(function* () { + if (!auth || auth.kind === "none") return undefined; + const accessToken = yield* ctx.connections.accessToken(auth.connectionId).pipe( + Effect.mapError( + () => + new GraphqlIntrospectionError({ + message: `Failed to resolve OAuth connection "${auth.connectionId}"`, + }), + ), + ); + return { Authorization: `Bearer ${accessToken}` }; + }); - extension: (ctx) => { - const resolveConfigValues = ( - values: Record | undefined, - ) => - Effect.gen(function* () { - if (!values) return undefined; - const resolved = yield* resolveHeaders(values, ctx.secrets); - return Object.keys(resolved).length > 0 ? resolved : undefined; - }); + const resolveRequestHeaders = ( + headers: Record | undefined, + auth: GraphqlSourceAuth | undefined, + ) => + Effect.gen(function* () { + const resolvedHeaders = yield* resolveConfigValues(headers); + const oauthHeader = yield* resolveOAuthHeader(auth); + return { ...(resolvedHeaders ?? {}), ...(oauthHeader ?? 
{}) }; + }); - const resolveOAuthHeader = (auth: GraphqlSourceAuth | undefined) => - Effect.gen(function* () { - if (!auth || auth.kind === "none") return undefined; - const accessToken = yield* ctx.connections - .accessToken(auth.connectionId) - .pipe( - Effect.mapError( - (err) => - new GraphqlIntrospectionError({ - message: `Failed to resolve OAuth connection "${auth.connectionId}": ${ - "message" in err - ? (err as { message: string }).message - : String(err) - }`, - }), - ), - ); - return { Authorization: `Bearer ${accessToken}` }; - }); + const addSourceInternal = (config: GraphqlSourceConfig) => + ctx.transaction( + Effect.gen(function* () { + let introspectionResult: IntrospectionResult; + if (config.introspectionJson) { + introspectionResult = yield* parseIntrospectionJson(config.introspectionJson); + } else { + const resolvedHeaders = yield* resolveRequestHeaders(config.headers, config.auth); + const resolvedQueryParams = yield* resolveConfigValues(config.queryParams); + introspectionResult = yield* introspect( + config.endpoint, + Object.keys(resolvedHeaders).length > 0 ? resolvedHeaders : undefined, + resolvedQueryParams, + ).pipe(Effect.provide(httpClientLayer)); + } - const resolveRequestHeaders = ( - headers: Record | undefined, - auth: GraphqlSourceAuth | undefined, - ) => - Effect.gen(function* () { - const resolvedHeaders = yield* resolveConfigValues(headers); - const oauthHeader = yield* resolveOAuthHeader(auth); - return { ...(resolvedHeaders ?? {}), ...(oauthHeader ?? {}) }; + const { result, definitions } = yield* extract(introspectionResult); + const namespace = config.namespace ?? namespaceFromEndpoint(config.endpoint); + const prepared = prepareOperations(result.fields, introspectionResult); + + const displayName = config.name?.trim() || namespace; + + const storedSource: StoredGraphqlSource = { + namespace, + scope: config.scope, + name: displayName, + endpoint: config.endpoint, + headers: config.headers ?? 
{}, + queryParams: config.queryParams ?? {}, + auth: config.auth ?? { kind: "none" }, + }; + + const storedOps: StoredOperation[] = prepared.map((p) => ({ + toolId: `${namespace}.${p.toolPath}`, + sourceId: namespace, + binding: p.binding, + })); + + yield* ctx.storage.upsertSource(storedSource, storedOps); + + yield* ctx.core.sources.register({ + id: namespace, + scope: config.scope, + kind: "graphql", + name: displayName, + url: config.endpoint, + canRemove: true, + canRefresh: false, + canEdit: true, + tools: prepared.map((p) => ({ + name: p.toolPath, + description: p.description, + inputSchema: p.inputSchema, + })), }); - const addSourceInternal = (config: GraphqlSourceConfig) => - ctx.transaction( + if (Object.keys(definitions).length > 0) { + yield* ctx.core.definitions.register({ + sourceId: namespace, + scope: config.scope, + definitions, + }); + } + + return { toolCount: prepared.length, namespace }; + }), + ); + + return { + addSource: (config: GraphqlSourceConfig) => + addSourceInternal(config).pipe( + Effect.tap((result) => + configFile + ? configFile.upsertSource(toGraphqlConfigEntry(result.namespace, config)) + : Effect.void, + ), + ), + + removeSource: (namespace: string, scope: string) => + Effect.gen(function* () { + yield* ctx.transaction( Effect.gen(function* () { - let introspectionResult: IntrospectionResult; - if (config.introspectionJson) { - introspectionResult = yield* parseIntrospectionJson( - config.introspectionJson, - ); - } else { - const resolvedHeaders = yield* resolveRequestHeaders( - config.headers, - config.auth, - ); - const resolvedQueryParams = yield* resolveConfigValues( - config.queryParams, - ); - introspectionResult = yield* introspect( - config.endpoint, - Object.keys(resolvedHeaders).length > 0 - ? resolvedHeaders - : undefined, - resolvedQueryParams, - ).pipe(Effect.provide(httpClientLayer)); - } - - const { result, definitions } = yield* extract(introspectionResult); - const namespace = - config.namespace ?? 
namespaceFromEndpoint(config.endpoint); - const prepared = prepareOperations( - result.fields, - introspectionResult, - ); - - const displayName = config.name?.trim() || namespace; - - // Persist the source + per-operation bindings first so any - // subsequent core-source register collision rolls back both. - const storedSource: StoredGraphqlSource = { - namespace, - scope: config.scope, - name: displayName, - endpoint: config.endpoint, - headers: config.headers ?? {}, - queryParams: config.queryParams ?? {}, - auth: config.auth ?? { kind: "none" }, - }; - - const storedOps: StoredOperation[] = prepared.map((p) => ({ - toolId: `${namespace}.${p.toolPath}`, - sourceId: namespace, - binding: p.binding, - })); - - yield* ctx.storage.upsertSource(storedSource, storedOps); - - yield* ctx.core.sources.register({ - id: namespace, - scope: config.scope, - kind: "graphql", - name: displayName, - url: config.endpoint, - canRemove: true, - canRefresh: false, - canEdit: true, - tools: prepared.map((p) => ({ - name: p.toolPath, - description: p.description, - inputSchema: p.inputSchema, - })), - }); - - if (Object.keys(definitions).length > 0) { - yield* ctx.core.definitions.register({ - sourceId: namespace, - scope: config.scope, - definitions, - }); - } - - return { toolCount: prepared.length, namespace }; + yield* ctx.storage.removeSource(namespace, scope); + yield* ctx.core.sources.unregister(namespace); }), ); + if (configFile) { + yield* configFile.removeSource(namespace); + } + }), - const configFile = options?.configFile; - - return { - addSource: (config) => - addSourceInternal(config).pipe( - Effect.tap((result) => - configFile - ? 
configFile.upsertSource( - toGraphqlConfigEntry(result.namespace, config), - ) - : Effect.void, - ), - ), - - removeSource: (namespace, scope) => - Effect.gen(function* () { - yield* ctx.transaction( - Effect.gen(function* () { - yield* ctx.storage.removeSource(namespace, scope); - yield* ctx.core.sources.unregister(namespace); - }), - ); - if (configFile) { - yield* configFile.removeSource(namespace); - } - }), + getSource: (namespace: string, scope: string) => ctx.storage.getSource(namespace, scope), - getSource: (namespace, scope) => - ctx.storage.getSource(namespace, scope), + updateSource: (namespace: string, scope: string, input: GraphqlUpdateSourceInput) => + ctx.storage.updateSourceMeta(namespace, scope, { + name: input.name?.trim() || undefined, + endpoint: input.endpoint, + headers: input.headers, + queryParams: input.queryParams, + auth: input.auth, + }), + }; +}; - updateSource: (namespace, scope, input) => - ctx.storage.updateSourceMeta(namespace, scope, { - name: input.name?.trim() || undefined, - endpoint: input.endpoint, - headers: input.headers, - queryParams: input.queryParams, - auth: input.auth, - }), - } satisfies GraphqlPluginExtension; - }, +export type GraphqlPluginExtension = ReturnType; + +export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { + const httpClientLayer = options?.httpClientLayer ?? 
FetchHttpClient.layer; + + return { + id: "graphql" as const, + packageName: "@executor-js/plugin-graphql", + schema: graphqlSchema, + storage: (deps): GraphqlStore => makeDefaultGraphqlStore(deps), + + extension: (ctx) => makeGraphqlExtension(ctx, httpClientLayer, options?.configFile), staticSources: (self) => [ { @@ -503,8 +428,7 @@ export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { tools: [ { name: "addSource", - description: - "Add a GraphQL endpoint and register its operations as tools", + description: "Add a GraphQL endpoint and register its operations as tools", inputSchema: { type: "object", properties: { @@ -533,7 +457,7 @@ export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { handler: ({ ctx, args }) => self.addSource({ ...(args as Omit), - scope: ctx.scopes.at(-1)!.id as string, + scope: ctx.scopes.at(-1)!.id, }), }, ], @@ -547,35 +471,26 @@ export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { // graphql_operation + graphql_source rows live at the same // scope, so pin every store lookup to it instead of relying // on the scoped adapter's stack-wide fall-through. 
- const toolScope = toolRow.scope_id as string; - const op = yield* ctx.storage.getOperationByToolId( - toolRow.id, - toolScope, - ); + const toolScope = toolRow.scope_id; + const op = yield* ctx.storage.getOperationByToolId(toolRow.id, toolScope); if (!op) { - return yield* Effect.fail( - new Error(`No GraphQL operation found for tool "${toolRow.id}"`), - ); + return yield* new GraphqlInvocationError({ + message: `No GraphQL operation found for tool "${toolRow.id}"`, + statusCode: Option.none(), + }); } const source = yield* ctx.storage.getSource(op.sourceId, toolScope); if (!source) { - return yield* Effect.fail( - new Error(`No GraphQL source found for "${op.sourceId}"`), - ); + return yield* new GraphqlInvocationError({ + message: `No GraphQL source found for "${op.sourceId}"`, + statusCode: Option.none(), + }); } - const resolvedHeaders = yield* resolveHeaders( - source.headers, - ctx.secrets, - ); - const resolvedQueryParams = yield* resolveHeaders( - source.queryParams, - ctx.secrets, - ); + const resolvedHeaders = yield* resolveHeaders(source.headers, ctx.secrets); + const resolvedQueryParams = yield* resolveHeaders(source.queryParams, ctx.secrets); if (source.auth.kind === "oauth2") { - const accessToken = yield* ctx.connections.accessToken( - source.auth.connectionId, - ); + const accessToken = yield* ctx.connections.accessToken(source.auth.connectionId); resolvedHeaders.Authorization = `Bearer ${accessToken}`; } @@ -601,7 +516,7 @@ export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { // and we don't fall through to the wrong scope's bindings. 
const scopes = new Set(); for (const row of toolRows as readonly ToolRow[]) { - scopes.add(row.scope_id as string); + scopes.add(row.scope_id); } // One listOperationsBySource per scope is independent storage // work; run them in parallel so a shadowed source doesn't @@ -611,10 +526,7 @@ export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { [...scopes], (scope) => Effect.gen(function* () { - const ops = yield* ctx.storage.listOperationsBySource( - sourceId, - scope, - ); + const ops = yield* ctx.storage.listOperationsBySource(sourceId, scope); const byId = new Map(); for (const op of ops) byId.set(op.toolId, op.binding); return [scope, byId] as const; @@ -625,14 +537,13 @@ export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { const out: Record = {}; for (const row of toolRows as readonly ToolRow[]) { - const binding = byScope.get(row.scope_id as string)?.get(row.id); + const binding = byScope.get(row.scope_id)?.get(row.id); if (binding) out[row.id] = annotationsFor(binding); } return out; }), - removeSource: ({ ctx, sourceId, scope }) => - ctx.storage.removeSource(sourceId, scope), + removeSource: ({ ctx, sourceId, scope }) => ctx.storage.removeSource(sourceId, scope), // Look up every place this secret appears across the plugin's two // child tables (`graphql_source_header`, `graphql_source_query_param`). @@ -644,12 +555,8 @@ export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { // We thread it through `ctx.storage` rather than re-grabbing it // because the store already owns the typed adapter handle; expose // a single helper rather than re-implementing the where/joins. 
- const headerRows = yield* ctx.storage.findHeaderRowsBySecret( - args.secretId, - ); - const paramRows = yield* ctx.storage.findQueryParamRowsBySecret( - args.secretId, - ); + const headerRows = yield* ctx.storage.findHeaderRowsBySecret(args.secretId); + const paramRows = yield* ctx.storage.findQueryParamRowsBySecret(args.secretId); // Resolve owner names by joining to graphql_source. We batch the // distinct (source_id, scope_id) pairs to one findMany rather @@ -668,8 +575,7 @@ export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { scopeId: ScopeId.make(r.scope_id), ownerKind: "graphql-source-header", ownerId: r.source_id, - ownerName: - sources.get(`${r.scope_id}:${r.source_id}`) ?? null, + ownerName: sources.get(`${r.scope_id}:${r.source_id}`) ?? null, slot: `header:${r.name}`, }), ); @@ -681,8 +587,7 @@ export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { scopeId: ScopeId.make(r.scope_id), ownerKind: "graphql-source-query-param", ownerId: r.source_id, - ownerName: - sources.get(`${r.scope_id}:${r.source_id}`) ?? null, + ownerName: sources.get(`${r.scope_id}:${r.source_id}`) ?? null, slot: `query_param:${r.name}`, }), ); @@ -694,9 +599,7 @@ export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { Effect.gen(function* () { // OAuth refs only appear in graphql_source.auth_connection_id — // one indexed lookup. No child tables to scan. 
- const sources = yield* ctx.storage.findSourcesByConnection( - args.connectionId, - ); + const sources = yield* ctx.storage.findSourcesByConnection(args.connectionId); return sources.map( (s) => new Usage({ @@ -717,9 +620,7 @@ export const graphqlPlugin = definePlugin((options?: GraphqlPluginOptions) => { const parsed = yield* Effect.try({ try: () => new URL(trimmed), catch: (cause) => cause, - }).pipe( - Effect.option, - ); + }).pipe(Effect.option); if (Option.isNone(parsed)) return null; const ok = yield* introspect(trimmed).pipe( From 81e97c3963bbc876ab26e42d1623bcb800f47d5d Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:34:57 -0700 Subject: [PATCH 075/108] Fix OnePassword plugin boundary lint --- .../plugins/onepassword/src/sdk/plugin.ts | 263 +++++++----------- .../plugins/onepassword/src/sdk/service.ts | 20 +- 2 files changed, 113 insertions(+), 170 deletions(-) diff --git a/packages/plugins/onepassword/src/sdk/plugin.ts b/packages/plugins/onepassword/src/sdk/plugin.ts index ebb77de56..b11fa2cb0 100644 --- a/packages/plugins/onepassword/src/sdk/plugin.ts +++ b/packages/plugins/onepassword/src/sdk/plugin.ts @@ -10,19 +10,12 @@ import { } from "@executor-js/sdk/core"; import { OnePasswordGroup } from "../api/group"; -import { - OnePasswordExtensionService, - OnePasswordHandlers, -} from "../api/handlers"; +import { OnePasswordExtensionService, OnePasswordHandlers } from "../api/handlers"; import { OnePasswordConfig, Vault, ConnectionStatus } from "./types"; import type { OnePasswordAuth } from "./types"; import { OnePasswordError } from "./errors"; -import { - makeOnePasswordService, - type ResolvedAuth, - type OnePasswordService, -} from "./service"; +import { makeOnePasswordService, type ResolvedAuth, type OnePasswordService } from "./service"; // --------------------------------------------------------------------------- // Constants @@ -50,38 +43,6 @@ export type 
OnePasswordExtensionFailure = OnePasswordError | StorageFailure; // Plugin extension — public API on executor.onepassword // --------------------------------------------------------------------------- -export interface OnePasswordExtension { - /** Configure the 1Password connection */ - readonly configure: ( - config: OnePasswordConfig, - ) => Effect.Effect; - - /** Get current configuration (if any) */ - readonly getConfig: () => Effect.Effect< - OnePasswordConfig | null, - OnePasswordExtensionFailure - >; - - /** Remove the 1Password configuration */ - readonly removeConfig: () => Effect.Effect; - - /** Check connection status */ - readonly status: () => Effect.Effect< - ConnectionStatus, - OnePasswordExtensionFailure - >; - - /** List accessible vaults (requires auth) */ - readonly listVaults: ( - auth: OnePasswordAuth, - ) => Effect.Effect, OnePasswordExtensionFailure>; - - /** Resolve a secret directly by op:// URI */ - readonly resolve: ( - uri: string, - ) => Effect.Effect; -} - // --------------------------------------------------------------------------- // Typed config store — single blob, JSON encoded. Blob I/O failures surface // as `StorageError` (HTTP edge translates to `InternalError`); decode @@ -94,20 +55,17 @@ export interface OnePasswordStore { OnePasswordConfig | null, StorageError | OnePasswordError >; - readonly saveConfig: ( - config: OnePasswordConfig, - ) => Effect.Effect; + readonly saveConfig: (config: OnePasswordConfig) => Effect.Effect; readonly deleteConfig: () => Effect.Effect; } -const decodeConfig = Schema.decodeUnknownSync(OnePasswordConfig); +const decodeConfig = Schema.decodeUnknownEffect(Schema.fromJsonString(OnePasswordConfig)); -const blobStorageError = (operation: string) => +const blobStorageError = + (operation: string) => (cause: unknown): StorageError => new StorageError({ - message: `onepassword blob ${operation}: ${ - cause instanceof Error ? 
cause.message : String(cause) - }`, + message: `onepassword blob ${operation} failed`, cause, }); @@ -123,14 +81,15 @@ export const makeOnePasswordStore = ( Effect.mapError(blobStorageError("read")), Effect.flatMap((raw) => { if (raw === null) return Effect.succeed(null); - return Effect.try({ - try: () => decodeConfig(JSON.parse(raw)), - catch: (cause) => - new OnePasswordError({ - operation: "config decode", - message: cause instanceof Error ? cause.message : String(cause), - }), - }); + return decodeConfig(raw).pipe( + Effect.mapError( + () => + new OnePasswordError({ + operation: "config decode", + message: "Failed to decode 1Password config", + }), + ), + ); }), ), @@ -168,13 +127,13 @@ const resolveAuth = ( }); } return ctx.secrets.get(auth.tokenSecretId).pipe( - Effect.mapError((err) => - "_tag" in err && err._tag === "SecretOwnedByConnectionError" - ? new OnePasswordError({ - operation: "auth resolution", - message: `Service account token secret "${auth.tokenSecretId}" not found`, - }) - : err, + Effect.catchTag("SecretOwnedByConnectionError", () => + Effect.fail( + new OnePasswordError({ + operation: "auth resolution", + message: `Service account token secret "${auth.tokenSecretId}" not found`, + }), + ), ), Effect.flatMap((token) => { if (token === null) { @@ -200,9 +159,7 @@ const getServiceFromConfig = ( preferSdk: boolean | undefined, ): Effect.Effect => resolveAuth(config.auth, ctx).pipe( - Effect.flatMap((resolved) => - makeOnePasswordService(resolved, { timeoutMs, preferSdk }), - ), + Effect.flatMap((resolved) => makeOnePasswordService(resolved, { timeoutMs, preferSdk })), ); // --------------------------------------------------------------------------- @@ -241,10 +198,7 @@ const makeProvider = ( list: () => ctx.storage.getConfig().pipe( Effect.flatMap((config) => { - if (!config) - return Effect.succeed( - [] as ReadonlyArray<{ id: string; name: string }>, - ); + if (!config) return Effect.succeed([] as ReadonlyArray<{ id: string; name: string }>); 
return getServiceFromConfig(config, ctx, timeoutMs, preferSdk).pipe( Effect.flatMap((svc) => svc.listItems(config.vaultId)), Effect.map( @@ -253,12 +207,70 @@ const makeProvider = ( ), ); }), - Effect.orElseSucceed( - () => [] as ReadonlyArray<{ id: string; name: string }>, - ), + Effect.orElseSucceed(() => [] as ReadonlyArray<{ id: string; name: string }>), ), }); +const makeOnePasswordExtension = ( + ctx: PluginCtx, + timeoutMs: number, + preferSdk: boolean | undefined, +) => { + return { + configure: (config: OnePasswordConfig) => ctx.storage.saveConfig(config), + + getConfig: () => ctx.storage.getConfig(), + + removeConfig: () => ctx.storage.deleteConfig(), + + status: () => + Effect.gen(function* () { + const config = yield* ctx.storage.getConfig(); + if (!config) { + return new ConnectionStatus({ + connected: false, + error: "Not configured", + }); + } + const svc = yield* getServiceFromConfig(config, ctx, timeoutMs, preferSdk); + const vaults = yield* svc.listVaults(); + const vault = vaults.find((v) => v.id === config.vaultId); + return new ConnectionStatus({ + connected: true, + vaultName: vault?.title, + }); + }), + + listVaults: (auth: OnePasswordAuth) => + Effect.gen(function* () { + const resolved = yield* resolveAuth(auth, ctx); + const svc = yield* makeOnePasswordService(resolved, { + timeoutMs, + preferSdk, + }); + const vaults = yield* svc.listVaults(); + return vaults + .map((v) => new Vault({ id: v.id, name: v.title })) + .sort((a, b) => a.name.localeCompare(b.name)); + }), + + resolve: (uri: string) => + Effect.gen(function* () { + const config = yield* ctx.storage.getConfig(); + if (!config) { + return yield* new OnePasswordError({ + operation: "resolve", + message: "1Password is not configured", + }); + } + const svc = yield* getServiceFromConfig(config, ctx, timeoutMs, preferSdk); + return yield* svc.resolveSecret(uri); + }), + }; +}; + +export type OnePasswordExtension = ReturnType; + // 
--------------------------------------------------------------------------- // Plugin factory // --------------------------------------------------------------------------- @@ -270,88 +282,21 @@ export interface OnePasswordPluginOptions { readonly preferSdk?: boolean; } -export const onepasswordPlugin = definePlugin( - (options?: OnePasswordPluginOptions) => { - const timeoutMs = options?.timeoutMs ?? DEFAULT_TIMEOUT_MS; - const preferSdk = options?.preferSdk; - - return { - id: "onepassword" as const, - packageName: "@executor-js/plugin-onepassword", - storage: ({ blobs, scopes }) => - makeOnePasswordStore(blobs, scopes.at(-1)!.id as string), - - extension: (ctx) => { - return { - configure: (config) => ctx.storage.saveConfig(config), - - getConfig: () => ctx.storage.getConfig(), - - removeConfig: () => ctx.storage.deleteConfig(), - - status: () => - Effect.gen(function* () { - const config = yield* ctx.storage.getConfig(); - if (!config) { - return new ConnectionStatus({ - connected: false, - error: "Not configured", - }); - } - const svc = yield* getServiceFromConfig( - config, - ctx, - timeoutMs, - preferSdk, - ); - const vaults = yield* svc.listVaults(); - const vault = vaults.find((v) => v.id === config.vaultId); - return new ConnectionStatus({ - connected: true, - vaultName: vault?.title, - }); - }), - - listVaults: (auth) => - Effect.gen(function* () { - const resolved = yield* resolveAuth(auth, ctx); - const svc = yield* makeOnePasswordService(resolved, { - timeoutMs, - preferSdk, - }); - const vaults = yield* svc.listVaults(); - return vaults - .map((v) => new Vault({ id: v.id, name: v.title })) - .sort((a, b) => a.name.localeCompare(b.name)); - }), - - resolve: (uri) => - Effect.gen(function* () { - const config = yield* ctx.storage.getConfig(); - if (!config) { - return yield* Effect.fail( - new OnePasswordError({ - operation: "resolve", - message: "1Password is not configured", - }), - ); - } - const svc = yield* getServiceFromConfig( - config, - ctx, 
- timeoutMs, - preferSdk, - ); - return yield* svc.resolveSecret(uri); - }), - } satisfies OnePasswordExtension; - }, - - secretProviders: (ctx) => [makeProvider(ctx, timeoutMs, preferSdk)], - - routes: () => OnePasswordGroup, - handlers: () => OnePasswordHandlers, - extensionService: OnePasswordExtensionService, - }; - }, -); +export const onepasswordPlugin = definePlugin((options?: OnePasswordPluginOptions) => { + const timeoutMs = options?.timeoutMs ?? DEFAULT_TIMEOUT_MS; + const preferSdk = options?.preferSdk; + + return { + id: "onepassword" as const, + packageName: "@executor-js/plugin-onepassword", + storage: ({ blobs, scopes }) => makeOnePasswordStore(blobs, scopes.at(-1)!.id), + + extension: (ctx) => makeOnePasswordExtension(ctx, timeoutMs, preferSdk), + + secretProviders: (ctx) => [makeProvider(ctx, timeoutMs, preferSdk)], + + routes: () => OnePasswordGroup, + handlers: () => OnePasswordHandlers, + extensionService: OnePasswordExtensionService, + }; +}); diff --git a/packages/plugins/onepassword/src/sdk/service.ts b/packages/plugins/onepassword/src/sdk/service.ts index 11c8e9ab1..eeed75487 100644 --- a/packages/plugins/onepassword/src/sdk/service.ts +++ b/packages/plugins/onepassword/src/sdk/service.ts @@ -53,10 +53,10 @@ type OnePasswordSdkModule = typeof import("@1password/sdk"); const loadOnePasswordSdk = (): Effect.Effect => Effect.tryPromise({ try: () => import("@1password/sdk"), - catch: (cause) => + catch: () => new OnePasswordError({ operation: "sdk module load", - message: cause instanceof Error ? cause.message : String(cause), + message: "Failed to load 1Password SDK", }), }); @@ -99,22 +99,20 @@ export const makeNativeSdkService = ( integrationName: "Executor", integrationVersion: "0.0.0", }), - catch: (cause) => + catch: () => new OnePasswordError({ operation: "client setup", - message: cause instanceof Error ? 
cause.message : String(cause), + message: "Failed to set up 1Password client", }), - }).pipe( - timeoutWithOnePasswordError("client setup", timeoutMs), - ); + }).pipe(timeoutWithOnePasswordError("client setup", timeoutMs)); const wrap = (fn: () => Promise, operation: string): Effect.Effect => Effect.tryPromise({ try: fn, - catch: (cause) => + catch: () => new OnePasswordError({ operation, - message: cause instanceof Error ? cause.message : String(cause), + message: `1Password SDK ${operation} failed`, }), }).pipe( timeoutWithOnePasswordError(operation, timeoutMs), @@ -154,10 +152,10 @@ export const makeCliService = ( const wrapSync = (fn: () => A, operation: string): Effect.Effect => Effect.try({ try: fn, - catch: (cause) => + catch: () => new OnePasswordError({ operation, - message: cause instanceof Error ? cause.message : String(cause), + message: `1Password CLI ${operation} failed`, }), }).pipe(Effect.withSpan(`onepassword.cli.${operation}`)); From 2aa25fb69528618d18389b55955c630faf0f1b88 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:47:06 -0700 Subject: [PATCH 076/108] Fix core OAuth boundary lint --- packages/core/sdk/src/oauth-discovery.ts | 247 ++++---- packages/core/sdk/src/oauth-service.ts | 740 +++++++++++------------ 2 files changed, 448 insertions(+), 539 deletions(-) diff --git a/packages/core/sdk/src/oauth-discovery.ts b/packages/core/sdk/src/oauth-discovery.ts index cd33e9b87..9771ed16f 100644 --- a/packages/core/sdk/src/oauth-discovery.ts +++ b/packages/core/sdk/src/oauth-discovery.ts @@ -18,7 +18,7 @@ // callers actually need. // --------------------------------------------------------------------------- -import { Data, Effect, Result, Schema } from "effect"; +import { Data, Effect, Option, Predicate, Result, Schema } from "effect"; import * as oauth from "oauth4webapi"; import { @@ -37,24 +37,12 @@ import { * token-endpoint failures. 
A plugin's refresh path should never have * to inspect error messages to tell "metadata drifted, re-discover" * apart from "refresh token is no longer honoured". */ -export class OAuthDiscoveryError extends Data.TaggedError( - "OAuthDiscoveryError", -)<{ +export class OAuthDiscoveryError extends Data.TaggedError("OAuthDiscoveryError")<{ readonly message: string; readonly status?: number; readonly cause?: unknown; }> {} -const discoveryError = ( - message: string, - options: { status?: number; cause?: unknown } = {}, -): OAuthDiscoveryError => - new OAuthDiscoveryError({ - message, - status: options.status, - cause: options.cause, - }); - // --------------------------------------------------------------------------- // Schemas (narrow structural parsing — the RFCs leave many fields // optional; we validate only the subset consumers read) @@ -69,8 +57,7 @@ export const OAuthProtectedResourceMetadataSchema = Schema.Struct({ bearer_methods_supported: Schema.optional(StringArray), resource_documentation: Schema.optional(Schema.String), }).annotate({ identifier: "OAuthProtectedResourceMetadata" }); -export type OAuthProtectedResourceMetadata = - typeof OAuthProtectedResourceMetadataSchema.Type; +export type OAuthProtectedResourceMetadata = typeof OAuthProtectedResourceMetadataSchema.Type; export const OAuthAuthorizationServerMetadataSchema = Schema.Struct({ issuer: Schema.String, @@ -87,8 +74,7 @@ export const OAuthAuthorizationServerMetadataSchema = Schema.Struct({ userinfo_endpoint: Schema.optional(Schema.String), id_token_signing_alg_values_supported: Schema.optional(StringArray), }).annotate({ identifier: "OAuthAuthorizationServerMetadata" }); -export type OAuthAuthorizationServerMetadata = - typeof OAuthAuthorizationServerMetadataSchema.Type; +export type OAuthAuthorizationServerMetadata = typeof OAuthAuthorizationServerMetadataSchema.Type; export type DynamicClientMetadata = { readonly client_name?: string; @@ -127,14 +113,12 @@ export const 
OAuthClientInformationSchema = Schema.Struct({ }).annotate({ identifier: "OAuthClientInformation" }); export type OAuthClientInformation = typeof OAuthClientInformationSchema.Type; -const decodeResourceMetadata = Schema.decodeUnknownEffect( - OAuthProtectedResourceMetadataSchema, -); -const decodeAuthServerMetadata = Schema.decodeUnknownEffect( - OAuthAuthorizationServerMetadataSchema, +const decodeResourceMetadataJson = Schema.decodeUnknownEffect( + Schema.fromJsonString(OAuthProtectedResourceMetadataSchema), ); -const decodeClientInformation = Schema.decodeUnknownEffect( - OAuthClientInformationSchema, +const decodeAuthServerMetadata = Schema.decodeUnknownEffect(OAuthAuthorizationServerMetadataSchema); +const decodeClientInformationJson = Schema.decodeUnknownEffect( + Schema.fromJsonString(OAuthClientInformationSchema), ); export interface DiscoveryRequestOptions { @@ -155,6 +139,7 @@ export interface DiscoveryRequestOptions { const MCP_PROTOCOL_VERSION_HEADER = "mcp-protocol-version"; const isLoopbackHttpUrl = (value: string): boolean => { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: URL constructor is the platform parser; invalid URLs are not loopback HTTP try { const url = new URL(value); if (url.protocol !== "http:") return false; @@ -178,9 +163,7 @@ const oauth4webapiOptions = ( const out: Record = {}; if (options.fetch) (out as { [customFetch]?: typeof fetch })[customFetch] = options.fetch; if (targetUrl && isLoopbackHttpUrl(targetUrl)) { - (out as { [oauth.allowInsecureRequests]?: boolean })[ - oauth.allowInsecureRequests - ] = true; + (out as { [oauth.allowInsecureRequests]?: boolean })[oauth.allowInsecureRequests] = true; } const signal = AbortSignal.timeout(options.timeoutMs ?? 
OAUTH2_DEFAULT_TIMEOUT_MS); out.signal = signal; @@ -231,8 +214,7 @@ export const discoverProtectedResourceMetadata = ( resourceUrl: string, options: DiscoveryRequestOptions = {}, ): Effect.Effect< - | { readonly metadataUrl: string; readonly metadata: OAuthProtectedResourceMetadata } - | null, + { readonly metadataUrl: string; readonly metadata: OAuthProtectedResourceMetadata } | null, OAuthDiscoveryError > => Effect.gen(function* () { @@ -260,30 +242,26 @@ export const discoverProtectedResourceMetadata = ( } const text = await response.text(); if (text.length === 0) return "skip" as const; - return { status: response.status, body: JSON.parse(text) } as const; + return { status: response.status, body: text } as const; }, catch: (cause) => - discoveryError( - `Failed to fetch ${url}: ${cause instanceof Error ? cause.message : String(cause)}`, - { cause }, - ), + new OAuthDiscoveryError({ + message: `Failed to fetch protected resource metadata from ${url}`, + cause, + }), }); if (result === "skip") continue; if (!("body" in result)) { - return yield* Effect.fail( - discoveryError( - `Protected resource metadata returned status ${result.status}`, - { status: result.status }, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `Protected resource metadata returned status ${result.status}`, + status: result.status, + }); } - const metadata = yield* decodeResourceMetadata(result.body).pipe( + const metadata = yield* decodeResourceMetadataJson(result.body).pipe( Effect.mapError( (err) => new OAuthDiscoveryError({ - message: `Protected resource metadata is malformed: ${ - Schema.isSchemaError(err) ? err.message : String(err) - }`, + message: "Protected resource metadata is malformed", cause: err, }), ), @@ -308,9 +286,7 @@ const wellKnownUrlFor = ( ): string => { // Mirrors the library's own well-known composition so the URL we // surface matches what was actually fetched. - const suffix = algorithm === "oauth2" - ? 
"oauth-authorization-server" - : "openid-configuration"; + const suffix = algorithm === "oauth2" ? "oauth-authorization-server" : "openid-configuration"; return issuerPath && issuerPath !== "/" ? `${issuerOrigin}/.well-known/${suffix}${issuerPath}` : `${issuerOrigin}/.well-known/${suffix}`; @@ -320,11 +296,10 @@ export const discoverAuthorizationServerMetadata = ( issuer: string, options: DiscoveryRequestOptions = {}, ): Effect.Effect< - | { - readonly metadataUrl: string; - readonly metadata: OAuthAuthorizationServerMetadata; - } - | null, + { + readonly metadataUrl: string; + readonly metadata: OAuthAuthorizationServerMetadata; + } | null, OAuthDiscoveryError > => Effect.gen(function* () { @@ -349,13 +324,13 @@ export const discoverAuthorizationServerMetadata = ( }; }, catch: (cause) => { - if (cause instanceof OAuthDiscoveryError) return cause; - return discoveryError( - `Discovery (${algorithm}) failed for ${issuer}: ${ - cause instanceof Error ? cause.message : String(cause) - }`, - { cause }, - ); + if (Predicate.isTagged(cause, "OAuthDiscoveryError")) { + return cause as OAuthDiscoveryError; + } + return new OAuthDiscoveryError({ + message: `Discovery (${algorithm}) failed for ${issuer}`, + cause, + }); }, }).pipe( // If one algorithm fails mid-roundtrip (network, parse, issuer @@ -370,9 +345,7 @@ export const discoverAuthorizationServerMetadata = ( Effect.mapError( (err) => new OAuthDiscoveryError({ - message: `Authorization server metadata is malformed: ${ - Schema.isSchemaError(err) ? 
err.message : String(err) - }`, + message: "Authorization server metadata is malformed", cause: err, }), ), @@ -407,11 +380,17 @@ class DcrErrorBody extends Data.TaggedError("DcrErrorBody")<{ }> {} class DcrTransport extends Data.TaggedError("DcrTransport")<{ - readonly message: string; + readonly detail: string; readonly status?: number; readonly cause?: unknown; }> {} +const DcrErrorBodyJson = Schema.Struct({ + error: Schema.String, + error_description: Schema.optional(Schema.String), +}); +const decodeDcrErrorBodyJson = Schema.decodeUnknownOption(Schema.fromJsonString(DcrErrorBodyJson)); + const buildDcrBody = (m: DynamicClientMetadata): Record => { const body: Record = { redirect_uris: [...m.redirect_uris] }; if (m.client_name !== undefined) body.client_name = m.client_name; @@ -431,33 +410,37 @@ const buildDcrBody = (m: DynamicClientMetadata): Record => { return body; }; -const interpretDcrFailure = ( - status: number, - text: string, -): DcrErrorBody | DcrTransport => { +const interpretDcrFailure = (status: number, text: string): DcrErrorBody | DcrTransport => { // RFC 6749 error envelope: `{error, error_description?}` with 4xx. if (status >= 400 && status < 500) { - const parsed = Result.try({ - try: () => (text ? (JSON.parse(text) as unknown) : null), - catch: () => null, + const body = text ? decodeDcrErrorBodyJson(text) : null; + return Option.match(body ?? Option.none(), { + onNone: () => + new DcrTransport({ + detail: `Dynamic Client Registration endpoint returned status ${status}${ + text ? ` — ${text.slice(0, 200)}` : "" + }`, + status, + }), + onSome: (parsed) => + parsed.error.length > 0 + ? new DcrErrorBody({ + status, + error: parsed.error, + error_description: parsed.error_description, + }) + : new DcrTransport({ + detail: `Dynamic Client Registration endpoint returned status ${status}${ + text ? ` — ${text.slice(0, 200)}` : "" + }`, + status, + }), }); - const body = Result.isSuccess(parsed) ? 
parsed.success : null; - if ( - body && - typeof body === "object" && - "error" in body && - typeof body.error === "string" && - body.error.length > 0 - ) { - const desc = - "error_description" in body && typeof body.error_description === "string" - ? body.error_description - : undefined; - return new DcrErrorBody({ status, error: body.error, error_description: desc }); - } } return new DcrTransport({ - message: `Dynamic Client Registration endpoint returned status ${status}${text ? ` — ${text.slice(0, 200)}` : ""}`, + detail: `Dynamic Client Registration endpoint returned status ${status}${ + text ? ` — ${text.slice(0, 200)}` : "" + }`, status, }); }; @@ -468,12 +451,9 @@ export const registerDynamicClient = ( ): Effect.Effect => Effect.gen(function* () { const url = new URL(input.registrationEndpoint); - if ( - url.protocol !== "https:" && - !isLoopbackHttpUrl(input.registrationEndpoint) - ) { + if (url.protocol !== "https:" && !isLoopbackHttpUrl(input.registrationEndpoint)) { return yield* new DcrTransport({ - message: `registration_endpoint must be HTTPS or a loopback HTTP URL (got ${url.protocol}//${url.host})`, + detail: `registration_endpoint must be HTTPS or a loopback HTTP URL (got ${url.protocol}//${url.host})`, }); } @@ -497,7 +477,7 @@ export const registerDynamicClient = ( }), catch: (cause) => new DcrTransport({ - message: `Dynamic Client Registration request failed: ${cause instanceof Error ? cause.message : String(cause)}`, + detail: "Dynamic Client Registration request failed", cause, }), }); @@ -505,9 +485,14 @@ export const registerDynamicClient = ( // Accept both 200 and 201 as success — RFC 7591 mandates 201, but // Todoist (and others) return 200 OK with the client information body. 
if (response.status !== 200 && response.status !== 201) { - const text = yield* Effect.promise(() => - response.text().catch(() => ""), - ); + const text = yield* Effect.tryPromise({ + try: () => response.text(), + catch: () => + new DcrTransport({ + detail: "Dynamic Client Registration error response could not be read", + status: response.status, + }), + }).pipe(Effect.catchTag("DcrTransport", () => Effect.succeed(""))); return yield* interpretDcrFailure(response.status, text); } @@ -515,27 +500,16 @@ export const registerDynamicClient = ( try: () => response.text(), catch: (cause) => new DcrTransport({ - message: "Dynamic Client Registration response could not be read", + detail: "Dynamic Client Registration response could not be read", status: response.status, cause, }), }); - const json = yield* Effect.try({ - try: () => JSON.parse(text) as unknown, - catch: (cause) => - new DcrTransport({ - message: "Dynamic Client Registration response was not valid JSON", - status: response.status, - cause, - }), - }); - return yield* decodeClientInformation(json).pipe( + return yield* decodeClientInformationJson(text).pipe( Effect.mapError( (err) => new OAuthDiscoveryError({ - message: `Dynamic Client Registration response is malformed: ${ - Schema.isSchemaError(err) ? err.message : String(err) - }`, + message: "Dynamic Client Registration response is malformed", cause: err, }), ), @@ -544,16 +518,18 @@ export const registerDynamicClient = ( Effect.catchTags({ DcrErrorBody: (err) => Effect.fail( - discoveryError( - `Dynamic Client Registration failed: ${err.error}${ + new OAuthDiscoveryError({ + message: `Dynamic Client Registration failed: ${err.error}${ err.error_description ? 
` — ${err.error_description}` : "" }`, - { status: err.status, cause: err }, - ), + status: err.status, + cause: err, + }), ), DcrTransport: (err) => Effect.fail( - discoveryError(`Dynamic Client Registration failed: ${err.message}`, { + new OAuthDiscoveryError({ + message: `Dynamic Client Registration failed: ${err.detail}`, status: err.status, cause: err.cause ?? err, }), @@ -630,8 +606,7 @@ export const beginDynamicAuthorization = ( const authorizationServerUrl = (() => { if (prior.authorizationServerUrl) return prior.authorizationServerUrl; - const fromResource = - resource && resource.metadata.authorization_servers?.[0]; + const fromResource = resource && resource.metadata.authorization_servers?.[0]; if (fromResource) return fromResource; const u = new URL(input.endpoint); return `${u.protocol}//${u.host}`; @@ -643,35 +618,26 @@ export const beginDynamicAuthorization = ( metadata: prior.authorizationServerMetadata, metadataUrl: prior.authorizationServerMetadataUrl, } - : yield* discoverAuthorizationServerMetadata( - authorizationServerUrl, - options, - ); + : yield* discoverAuthorizationServerMetadata(authorizationServerUrl, options); if (!authServer) { - return yield* Effect.fail( - discoveryError( - `No OAuth authorization server metadata at ${authorizationServerUrl}`, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `No OAuth authorization server metadata at ${authorizationServerUrl}`, + }); } const pkceMethods = authServer.metadata.code_challenge_methods_supported ?? []; if (pkceMethods.length > 0 && !pkceMethods.includes("S256")) { - return yield* Effect.fail( - discoveryError( - `Authorization server does not support PKCE S256 (advertised: ${pkceMethods.join(", ")})`, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `Authorization server does not support PKCE S256 (advertised: ${pkceMethods.join(", ")})`, + }); } const responseTypes = authServer.metadata.response_types_supported ?? 
[]; if (responseTypes.length > 0 && !responseTypes.includes("code")) { - return yield* Effect.fail( - discoveryError( - `Authorization server does not support response_type=code (advertised: ${responseTypes.join(", ")})`, - ), - ); + return yield* new OAuthDiscoveryError({ + message: `Authorization server does not support response_type=code (advertised: ${responseTypes.join(", ")})`, + }); } const baseClientMetadata: DynamicClientMetadata = { @@ -689,9 +655,10 @@ export const beginDynamicAuthorization = ( const reg = authServer.metadata.registration_endpoint; if (!reg) { return Effect.fail( - discoveryError( - "Authorization server does not advertise registration_endpoint — cannot auto-register a client", - ), + new OAuthDiscoveryError({ + message: + "Authorization server does not advertise registration_endpoint — cannot auto-register a client", + }), ); } return registerDynamicClient( @@ -701,9 +668,7 @@ export const beginDynamicAuthorization = ( })()); const codeVerifier = createPkceCodeVerifier(); - const codeChallenge = yield* Effect.promise(() => - createPkceCodeChallenge(codeVerifier), - ); + const codeChallenge = yield* Effect.promise(() => createPkceCodeChallenge(codeVerifier)); const scopes = input.scopes ?? authServer.metadata.scopes_supported ?? []; const authorizationUrl = buildAuthorizationUrl({ diff --git a/packages/core/sdk/src/oauth-service.ts b/packages/core/sdk/src/oauth-service.ts index 79cc1e058..004c903b0 100644 --- a/packages/core/sdk/src/oauth-service.ts +++ b/packages/core/sdk/src/oauth-service.ts @@ -35,13 +35,9 @@ // every strategy because refresh semantics are strategy-independent. 
// --------------------------------------------------------------------------- -import { Effect, Schema } from "effect"; +import { Effect, Option, Schema } from "effect"; -import type { - DBAdapter, - StorageFailure, - TypedAdapter, -} from "@executor-js/storage-core"; +import type { DBAdapter, StorageFailure, TypedAdapter } from "@executor-js/storage-core"; import { ConnectionRefreshError, @@ -52,9 +48,7 @@ import { type ConnectionRefreshResult, type ConnectionRef, } from "./connections"; -import type { - ConnectionProviderNotRegisteredError, -} from "./errors"; +import type { ConnectionProviderNotRegisteredError } from "./errors"; import type { CoreSchema } from "./core-schema"; import { ConnectionId, ScopeId, SecretId } from "./ids"; import { SetSecretInput, type SecretRef } from "./secrets"; @@ -89,6 +83,7 @@ import { createPkceCodeVerifier, exchangeAuthorizationCode, exchangeClientCredentials, + type OAuth2Error, refreshAccessToken, } from "./oauth-helpers"; @@ -110,9 +105,7 @@ const DynamicDcrSessionPayload = Schema.Struct({ authorizationServerMetadata: OAuthAuthorizationServerMetadataJson, clientInformation: OAuthClientInformationJson, resourceMetadataUrl: Schema.NullOr(Schema.String), - resourceMetadata: Schema.NullOr( - Schema.Record(Schema.String, Schema.Unknown), - ), + resourceMetadata: Schema.NullOr(Schema.Record(Schema.String, Schema.Unknown)), scopes: Schema.Array(Schema.String), }); @@ -122,7 +115,9 @@ const AuthorizationCodeSessionPayload = Schema.Struct({ codeVerifier: Schema.String, authorizationEndpoint: Schema.String, tokenEndpoint: Schema.String, - issuerUrl: Schema.NullOr(Schema.String).pipe(Schema.withDecodingDefaultType(Effect.succeed(null))), + issuerUrl: Schema.NullOr(Schema.String).pipe( + Schema.withDecodingDefaultType(Effect.succeed(null)), + ), clientIdSecretId: Schema.String, clientSecretSecretId: Schema.NullOr(Schema.String), scopes: Schema.Array(Schema.String), @@ -144,20 +139,20 @@ const encodeSessionPayload = 
Schema.encodeSync(OAuthSessionPayload); const coerceJson = (value: unknown): unknown => { if (typeof value !== "string") return value; - try { - return JSON.parse(value); - } catch { - return value; - } + return Schema.decodeUnknownOption(Schema.fromJsonString(Schema.Unknown))(value).pipe( + Option.getOrElse(() => value), + ); }; const stringArray = (value: unknown): readonly string[] => - Array.isArray(value) - ? value.filter((scope): scope is string => typeof scope === "string") - : []; + Array.isArray(value) ? value.filter((scope): scope is string => typeof scope === "string") : []; + +const isRecord = (value: unknown): value is Record => + value !== null && typeof value === "object"; const originOrNull = (value: unknown): string | null => { if (typeof value !== "string") return null; + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: URL constructor is the platform parser; invalid legacy issuer values decode to null try { return new URL(value).origin; } catch { @@ -167,8 +162,7 @@ const originOrNull = (value: unknown): string | null => { const decodeProviderState = (value: unknown): OAuthProviderState => { const raw = coerceJson(value); - const record = - raw && typeof raw === "object" ? (raw as Record) : null; + const record = isRecord(raw) ? 
raw : null; if (record && !("kind" in record) && "flow" in record && "tokenUrl" in record) { const flow = record.flow; @@ -196,12 +190,7 @@ const decodeProviderState = (value: unknown): OAuthProviderState => { } } - if ( - record && - !("kind" in record) && - "clientIdSecretId" in record && - "scopes" in record - ) { + if (record && !("kind" in record) && "clientIdSecretId" in record && "scopes" in record) { const scopes = stringArray(record.scopes); return Schema.decodeUnknownSync(OAuthProviderStateSchema)({ kind: "authorization-code", @@ -214,48 +203,30 @@ const decodeProviderState = (value: unknown): OAuthProviderState => { }); } - if ( - record && - !("kind" in record) && - "clientInformation" in record && - "endpoint" in record - ) { - const clientInformation = - record.clientInformation && typeof record.clientInformation === "object" - ? (record.clientInformation as Record) - : null; + if (record && !("kind" in record) && "clientInformation" in record && "endpoint" in record) { + const clientInformation = isRecord(record.clientInformation) ? record.clientInformation : null; + const authorizationServerMetadata = isRecord(record.authorizationServerMetadata) + ? record.authorizationServerMetadata + : null; return Schema.decodeUnknownSync(OAuthProviderStateSchema)({ kind: "dynamic-dcr", tokenEndpoint: typeof record.tokenEndpoint === "string" ? record.tokenEndpoint - : record.authorizationServerMetadata && - typeof record.authorizationServerMetadata === "object" && - typeof (record.authorizationServerMetadata as Record) - .token_endpoint === "string" - ? ((record.authorizationServerMetadata as Record) - .token_endpoint as string) + : typeof authorizationServerMetadata?.token_endpoint === "string" + ? authorizationServerMetadata.token_endpoint : "", issuerUrl: - record.authorizationServerMetadata && - typeof record.authorizationServerMetadata === "object" && - typeof (record.authorizationServerMetadata as Record).issuer === - "string" - ? 
((record.authorizationServerMetadata as Record) - .issuer as string) + typeof authorizationServerMetadata?.issuer === "string" + ? authorizationServerMetadata.issuer : null, authorizationServerUrl: - typeof record.authorizationServerUrl === "string" - ? record.authorizationServerUrl - : null, + typeof record.authorizationServerUrl === "string" ? record.authorizationServerUrl : null, authorizationServerMetadataUrl: typeof record.authorizationServerMetadataUrl === "string" ? record.authorizationServerMetadataUrl : null, - clientId: - typeof clientInformation?.client_id === "string" - ? clientInformation.client_id - : "", + clientId: typeof clientInformation?.client_id === "string" ? clientInformation.client_id : "", clientSecretSecretId: null, clientAuth: "body", scope: null, @@ -288,10 +259,7 @@ export interface OAuthServiceDeps { * `complete` (and from `start` for `client-credentials`). */ readonly connectionsCreate: ( input: CreateConnectionInput, - ) => Effect.Effect< - ConnectionRef, - ConnectionProviderNotRegisteredError | StorageFailure - >; + ) => Effect.Effect; /** Random session id generator. Tests override to make outputs * deterministic. 
*/ readonly newSessionId?: () => string; @@ -304,9 +272,9 @@ const defaultSessionId = (): string => { if (crypto?.randomUUID) return `oauth2_session_${crypto.randomUUID()}`; const bytes = new Uint8Array(16); crypto.getRandomValues(bytes); - return `oauth2_session_${Array.from(bytes, (byte) => - byte.toString(16).padStart(2, "0"), - ).join("")}`; + return `oauth2_session_${Array.from(bytes, (byte) => byte.toString(16).padStart(2, "0")).join( + "", + )}`; }; const secretIdPart = (value: string): string => @@ -328,11 +296,7 @@ const oauthSecretId = ( const scopedSessionId = (scopeId: string, sessionId: string): string => `${sessionId}_${secretIdPart(scopeId).slice(0, 24)}`; -const terminalRefreshErrors = new Set([ - "invalid_grant", - "invalid_client", - "unauthorized_client", -]); +const terminalRefreshErrors = new Set(["invalid_grant", "invalid_client", "unauthorized_client"]); // --------------------------------------------------------------------------- // Service factory @@ -347,50 +311,42 @@ export const makeOAuth2Service = ( // ------------------------------------------------------------------- // probe // ------------------------------------------------------------------- - const probe = ( - input: OAuthProbeInput, - ): Effect.Effect => + const probe = (input: OAuthProbeInput): Effect.Effect => Effect.gen(function* () { - const resource = yield* discoverProtectedResourceMetadata( - input.endpoint, - { resourceHeaders: input.headers, resourceQueryParams: input.queryParams }, - ).pipe( - Effect.catchTag("OAuthDiscoveryError", (err) => + const resource = yield* discoverProtectedResourceMetadata(input.endpoint, { + resourceHeaders: input.headers, + resourceQueryParams: input.queryParams, + }).pipe( + Effect.catchTag("OAuthDiscoveryError", ({ message }) => Effect.fail( new OAuthProbeError({ - message: `Protected resource metadata probe failed: ${err.message}`, - + message: `Protected resource metadata probe failed: ${message}`, }), ), ), ); - const 
authorizationServerUrl = (() => { + const authorizationServerUrl = yield* (() => { const fromResource = resource?.metadata.authorization_servers?.[0]; - if (fromResource) return fromResource; - try { - const u = new URL(input.endpoint); - return `${u.protocol}//${u.host}`; - } catch { - return null; - } + if (fromResource) return Effect.succeed(fromResource); + return Effect.try({ + try: () => { + const u = new URL(input.endpoint); + return `${u.protocol}//${u.host}`; + }, + catch: () => null, + }).pipe(Effect.catch(() => Effect.succeed(null))); })(); const authServer = authorizationServerUrl - ? yield* discoverAuthorizationServerMetadata( - authorizationServerUrl, - ).pipe( - Effect.catchTag("OAuthDiscoveryError", () => - Effect.succeed(null), - ), + ? yield* discoverAuthorizationServerMetadata(authorizationServerUrl).pipe( + Effect.catchTag("OAuthDiscoveryError", () => Effect.succeed(null)), ) : null; const supportsDynamicRegistration = !!( authServer?.metadata.registration_endpoint && - (authServer.metadata.token_endpoint_auth_methods_supported ?? []).includes( - "none", - ) + (authServer.metadata.token_endpoint_auth_methods_supported ?? []).includes("none") ); // Bearer challenge probe — POST the endpoint unauth, look for @@ -402,45 +358,39 @@ export const makeOAuth2Service = ( try: async (): Promise => { const controller = new AbortController(); const timer = setTimeout(() => controller.abort(), 6_000); - try { - const probeUrl = new URL(input.endpoint); - for (const [key, value] of Object.entries(input.queryParams ?? {})) { - probeUrl.searchParams.set(key, value); - } - const response = await fetch(probeUrl.toString(), { - method: "POST", - headers: { - ...(input.headers ?? 
{}), - "content-type": "application/json", - accept: "application/json, text/event-stream", - }, - body: JSON.stringify({ - jsonrpc: "2.0", - id: 1, - method: "initialize", - params: { - protocolVersion: "2025-06-18", - capabilities: {}, - clientInfo: { name: "executor-probe", version: "0" }, - }, - }), - signal: controller.signal, - }); - if (response.status !== 401) return false; - const wwwAuth = - response.headers.get("www-authenticate") ?? - response.headers.get("WWW-Authenticate"); - return !!wwwAuth && /^\s*bearer\b/i.test(wwwAuth); - } finally { - clearTimeout(timer); + const probeUrl = new URL(input.endpoint); + for (const [key, value] of Object.entries(input.queryParams ?? {})) { + probeUrl.searchParams.set(key, value); } + const response = await fetch(probeUrl.toString(), { + method: "POST", + headers: { + ...(input.headers ?? {}), + "content-type": "application/json", + accept: "application/json, text/event-stream", + }, + body: JSON.stringify({ + jsonrpc: "2.0", + id: 1, + method: "initialize", + params: { + protocolVersion: "2025-06-18", + capabilities: {}, + clientInfo: { name: "executor-probe", version: "0" }, + }, + }), + signal: controller.signal, + }).finally(() => clearTimeout(timer)); + if (response.status !== 401) return false; + const wwwAuth = + response.headers.get("www-authenticate") ?? response.headers.get("WWW-Authenticate"); + return !!wwwAuth && /^\s*bearer\b/i.test(wwwAuth); }, catch: () => null, }).pipe(Effect.catch(() => Effect.succeed(false))); return { - resourceMetadata: - (resource?.metadata as Record | undefined) ?? null, + resourceMetadata: (resource?.metadata as Record | undefined) ?? null, resourceMetadataUrl: resource?.metadataUrl ?? null, authorizationServerMetadata: (authServer?.metadata as Record | undefined) ?? 
null, @@ -459,20 +409,22 @@ export const makeOAuth2Service = ( strategy: OAuthDynamicDcrStrategy, ): Effect.Effect => Effect.gen(function* () { - const started = yield* beginDynamicAuthorization({ - endpoint: input.endpoint, - redirectUrl: input.redirectUrl, - state: "", - scopes: strategy.scopes, - }, { - resourceHeaders: input.headers, - resourceQueryParams: input.queryParams, - }).pipe( - Effect.catchTag("OAuthDiscoveryError", (err) => + const started = yield* beginDynamicAuthorization( + { + endpoint: input.endpoint, + redirectUrl: input.redirectUrl, + state: "", + scopes: strategy.scopes, + }, + { + resourceHeaders: input.headers, + resourceQueryParams: input.queryParams, + }, + ).pipe( + Effect.catchTag("OAuthDiscoveryError", ({ message }) => Effect.fail( new OAuthStartError({ - message: `Dynamic authorization setup failed: ${err.message}`, - + message: `Dynamic authorization setup failed: ${message}`, }), ), ), @@ -492,10 +444,7 @@ export const makeOAuth2Service = ( authorizationUrl: started.state.authorizationServerMetadata.authorization_endpoint, clientId: started.state.clientInformation.client_id, redirectUrl: input.redirectUrl, - scopes: - strategy.scopes ?? - started.state.authorizationServerMetadata.scopes_supported ?? - [], + scopes: strategy.scopes ?? started.state.authorizationServerMetadata.scopes_supported ?? [], state: sessionId, codeChallenge, }); @@ -505,22 +454,20 @@ export const makeOAuth2Service = ( identityLabel: input.identityLabel ?? 
null, codeVerifier: started.codeVerifier, authorizationServerUrl: started.state.authorizationServerUrl, - authorizationServerMetadataUrl: - started.state.authorizationServerMetadataUrl, - authorizationServerMetadata: - started.state.authorizationServerMetadata as Record, + authorizationServerMetadataUrl: started.state.authorizationServerMetadataUrl, + authorizationServerMetadata: started.state.authorizationServerMetadata as Record< + string, + unknown + >, clientInformation: (() => { const value: unknown = started.state.clientInformation; return value as Record; })(), resourceMetadataUrl: started.state.resourceMetadataUrl, resourceMetadata: - (started.state.resourceMetadata as Record | null) ?? - null, + (started.state.resourceMetadata as Record | null) ?? null, scopes: [ - ...(strategy.scopes ?? - started.state.authorizationServerMetadata.scopes_supported ?? - []), + ...(strategy.scopes ?? started.state.authorizationServerMetadata.scopes_supported ?? []), ], }; @@ -544,25 +491,22 @@ export const makeOAuth2Service = ( ): Effect.Effect => Effect.gen(function* () { const clientId = yield* deps.secretsGet(strategy.clientIdSecretId).pipe( - Effect.mapError((err) => - // Storage failure propagates; null returns aren't errors — the - // branch below handles them. - err, + Effect.mapError( + (err) => + // Storage failure propagates; null returns aren't errors — the + // branch below handles them. 
+ err, ), ); if (clientId === null) { - return yield* Effect.fail( - new OAuthStartError({ - message: `client_id secret "${strategy.clientIdSecretId}" not found`, - }), - ); + return yield* new OAuthStartError({ + message: `client_id secret "${strategy.clientIdSecretId}" not found`, + }); } const sessionId = scopedSessionId(input.tokenScope, newSessionId()); const codeVerifier = createPkceCodeVerifier(); - const codeChallenge = yield* Effect.promise(() => - createPkceCodeChallenge(codeVerifier), - ); + const codeChallenge = yield* Effect.promise(() => createPkceCodeChallenge(codeVerifier)); const authorizationUrl = buildAuthorizationUrl({ authorizationUrl: strategy.authorizationEndpoint, @@ -611,11 +555,9 @@ export const makeOAuth2Service = ( const clientId = yield* deps.secretsGet(strategy.clientIdSecretId); const clientSecret = yield* deps.secretsGet(strategy.clientSecretSecretId); if (clientId === null || clientSecret === null) { - return yield* Effect.fail( - new OAuthStartError({ - message: "client_id / client_secret secret not found", - }), - ); + return yield* new OAuthStartError({ + message: "client_id / client_secret secret not found", + }); } const tokens = yield* exchangeClientCredentials({ @@ -627,18 +569,15 @@ export const makeOAuth2Service = ( clientAuth: strategy.clientAuth ?? "body", }).pipe( Effect.mapError( - (err) => + ({ message }: OAuth2Error) => new OAuthStartError({ - message: `Client credentials exchange failed: ${err.message}`, - + message: `Client credentials exchange failed: ${message}`, }), ), ); const expiresAt = - typeof tokens.expires_in === "number" - ? now() + tokens.expires_in * 1000 - : null; + typeof tokens.expires_in === "number" ? now() + tokens.expires_in * 1000 : null; const providerState: OAuthProviderState = { kind: "client-credentials", @@ -666,21 +605,33 @@ export const makeOAuth2Service = ( refreshToken: null, expiresAt, oauthScope: tokens.scope ?? 
null, - providerState: Schema.encodeSync(OAuthProviderStateSchema)( - providerState, - ) as Record, + providerState: Schema.encodeSync(OAuthProviderStateSchema)(providerState) as Record< + string, + unknown + >, }), ) .pipe( - Effect.mapError( - (err) => - new OAuthStartError({ - message: `Failed to mint connection: ${ - err instanceof Error ? err.message : String(err) - }`, - - }), - ), + Effect.catchTags({ + ConnectionProviderNotRegisteredError: () => + Effect.fail( + new OAuthStartError({ + message: "Failed to mint connection: ConnectionProviderNotRegisteredError", + }), + ), + StorageError: ({ message }) => + Effect.fail( + new OAuthStartError({ + message: `Failed to mint connection: ${message}`, + }), + ), + UniqueViolationError: () => + Effect.fail( + new OAuthStartError({ + message: "Failed to mint connection: UniqueViolationError", + }), + ), + }), ); return { @@ -709,22 +660,24 @@ export const makeOAuth2Service = ( payload: OAuthSessionPayload; strategyKind: string; }): Effect.Effect => - deps.adapter.create({ - model: "oauth2_session", - data: { - id: args.sessionId, - scope_id: args.input.tokenScope, - plugin_id: args.input.pluginId, - strategy: args.strategyKind, - connection_id: args.input.connectionId, - token_scope: args.input.tokenScope, - redirect_url: args.input.redirectUrl, - payload: encodeSessionPayload(args.payload) as Record, - expires_at: now() + OAUTH2_SESSION_TTL_MS, - created_at: new Date(), - }, - forceAllowId: true, - }).pipe(Effect.asVoid); + deps.adapter + .create({ + model: "oauth2_session", + data: { + id: args.sessionId, + scope_id: args.input.tokenScope, + plugin_id: args.input.pluginId, + strategy: args.strategyKind, + connection_id: args.input.connectionId, + token_scope: args.input.tokenScope, + redirect_url: args.input.redirectUrl, + payload: encodeSessionPayload(args.payload) as Record, + expires_at: now() + OAUTH2_SESSION_TTL_MS, + created_at: new Date(), + }, + forceAllowId: true, + }) + .pipe(Effect.asVoid); // 
------------------------------------------------------------------- // complete — exchange the code, mint the Connection, delete the session @@ -741,53 +694,45 @@ export const makeOAuth2Service = ( where: [{ field: "id", value: input.state }], }); if (!row) { - return yield* Effect.fail( - new OAuthSessionNotFoundError({ sessionId: input.state }), - ); + return yield* new OAuthSessionNotFoundError({ sessionId: input.state }); } const deleteSession = deps.adapter.delete({ model: "oauth2_session", where: [ { field: "id", value: input.state }, - { field: "scope_id", value: row.scope_id as string }, + { field: "scope_id", value: row.scope_id }, ], }); if (input.error) { yield* deleteSession; - return yield* Effect.fail( - new OAuthCompleteError({ - message: `Authorization server returned error: ${input.error}`, - code: input.error, - }), - ); + return yield* new OAuthCompleteError({ + message: `Authorization server returned error: ${input.error}`, + code: input.error, + }); } if (!input.code) { yield* deleteSession; - return yield* Effect.fail( - new OAuthCompleteError({ - message: "Missing authorization code", - }), - ); + return yield* new OAuthCompleteError({ + message: "Missing authorization code", + }); } const expiresAt = Number(row.expires_at as number | bigint); if (expiresAt <= now()) { yield* deleteSession; - return yield* Effect.fail( - new OAuthCompleteError({ - message: "OAuth session expired", - }), - ); + return yield* new OAuthCompleteError({ + message: "OAuth session expired", + }); } const payload = decodeSessionPayload(coerceJson(row.payload)); const endpoint = ""; // not stored on the row — the payload's own - // endpoint fields drive exchange; we just need - // a display string for the identity label. - const connectionId = row.connection_id as string; - const tokenScope = row.token_scope as string; - const redirectUrl = row.redirect_url as string; + // endpoint fields drive exchange; we just need + // a display string for the identity label. 
+ const connectionId = row.connection_id; + const tokenScope = row.token_scope; + const redirectUrl = row.redirect_url; // Dispatch to the strategy-specific exchange. const exchangeResult = yield* (() => { @@ -795,11 +740,7 @@ export const makeOAuth2Service = ( case "dynamic-dcr": return exchangeDynamicDcr(payload, input.code, redirectUrl); case "authorization-code": - return exchangeAuthorizationCodeStrategy( - payload, - input.code, - redirectUrl, - ); + return exchangeAuthorizationCodeStrategy(payload, input.code, redirectUrl); } })().pipe(Effect.tapError(() => deleteSession)); @@ -810,8 +751,7 @@ export const makeOAuth2Service = ( const dynamicClientSecretSecretId = yield* (() => { if (payload.kind !== "dynamic-dcr") return Effect.succeed(null); - const clientSecret = (payload.clientInformation as { client_secret?: unknown }) - .client_secret; + const clientSecret = payload.clientInformation.client_secret; if (typeof clientSecret !== "string" || clientSecret.length === 0) { return Effect.succeed(null); } @@ -827,14 +767,20 @@ export const makeOAuth2Service = ( ) .pipe( Effect.as(secretId), - Effect.mapError( - (err) => - new OAuthCompleteError({ - message: `Failed to persist DCR client_secret: ${ - err instanceof Error ? err.message : String(err) - }`, - }), - ), + Effect.catchTags({ + StorageError: ({ message }) => + Effect.fail( + new OAuthCompleteError({ + message: `Failed to persist DCR client_secret: ${message}`, + }), + ), + UniqueViolationError: () => + Effect.fail( + new OAuthCompleteError({ + message: "Failed to persist DCR client_secret: UniqueViolationError", + }), + ), + }), ); })(); @@ -842,21 +788,21 @@ export const makeOAuth2Service = ( payload.kind === "dynamic-dcr" ? 
{ kind: "dynamic-dcr", - tokenEndpoint: (payload.authorizationServerMetadata as { - token_endpoint: string; - }).token_endpoint, + tokenEndpoint: ( + payload.authorizationServerMetadata as { + token_endpoint: string; + } + ).token_endpoint, issuerUrl: - (payload.authorizationServerMetadata as { issuer?: string }).issuer ?? - null, + (payload.authorizationServerMetadata as { issuer?: string }).issuer ?? null, authorizationServerUrl: payload.authorizationServerUrl, - authorizationServerMetadataUrl: - payload.authorizationServerMetadataUrl, - idTokenSigningAlgValuesSupported: - (payload.authorizationServerMetadata as { + authorizationServerMetadataUrl: payload.authorizationServerMetadataUrl, + idTokenSigningAlgValuesSupported: ( + payload.authorizationServerMetadata as { id_token_signing_alg_values_supported?: string[]; - }).id_token_signing_alg_values_supported, - clientId: (payload.clientInformation as { client_id: string }) - .client_id, + } + ).id_token_signing_alg_values_supported, + clientId: (payload.clientInformation as { client_id: string }).client_id, clientSecretSecretId: dynamicClientSecretSecretId, clientAuth: (payload.clientInformation as { token_endpoint_auth_method?: string }) @@ -901,21 +847,33 @@ export const makeOAuth2Service = ( : null, expiresAt: connectionExpiresAt, oauthScope: exchangeResult.tokens.scope ?? null, - providerState: Schema.encodeSync(OAuthProviderStateSchema)( - providerState, - ) as Record, + providerState: Schema.encodeSync(OAuthProviderStateSchema)(providerState) as Record< + string, + unknown + >, }), ) .pipe( - Effect.mapError( - (err) => - new OAuthCompleteError({ - message: `Failed to mint connection: ${ - err instanceof Error ? 
err.message : String(err) - }`, - - }), - ), + Effect.catchTags({ + ConnectionProviderNotRegisteredError: () => + Effect.fail( + new OAuthCompleteError({ + message: "Failed to mint connection: ConnectionProviderNotRegisteredError", + }), + ), + StorageError: ({ message }) => + Effect.fail( + new OAuthCompleteError({ + message: `Failed to mint connection: ${message}`, + }), + ), + UniqueViolationError: () => + Effect.fail( + new OAuthCompleteError({ + message: "Failed to mint connection: UniqueViolationError", + }), + ), + }), ); yield* deleteSession; @@ -962,19 +920,14 @@ export const makeOAuth2Service = ( redirectUrl, codeVerifier: payload.codeVerifier, code, - idTokenSigningAlgValuesSupported: - md.id_token_signing_alg_values_supported, - clientAuth: - ci.token_endpoint_auth_method === "client_secret_basic" - ? "basic" - : "body", + idTokenSigningAlgValuesSupported: md.id_token_signing_alg_values_supported, + clientAuth: ci.token_endpoint_auth_method === "client_secret_basic" ? "basic" : "body", }).pipe( Effect.mapError( - (err) => + ({ message, error }: OAuth2Error) => new OAuthCompleteError({ - message: `Token exchange failed: ${err.message}`, - code: err.error, - + message: `Token exchange failed: ${message}`, + code: error, }), ), ); @@ -988,28 +941,21 @@ export const makeOAuth2Service = ( payload: Extract, code: string, redirectUrl: string, - ): Effect.Effect< - ExchangeResult, - OAuthCompleteError | StorageFailure - > => + ): Effect.Effect => Effect.gen(function* () { const clientId = yield* deps.secretsGet(payload.clientIdSecretId); if (clientId === null) { - return yield* Effect.fail( - new OAuthCompleteError({ - message: `client_id secret "${payload.clientIdSecretId}" not found`, - }), - ); + return yield* new OAuthCompleteError({ + message: `client_id secret "${payload.clientIdSecretId}" not found`, + }); } const clientSecret = payload.clientSecretSecretId ? 
yield* deps.secretsGet(payload.clientSecretSecretId) : null; if (payload.clientSecretSecretId && clientSecret === null) { - return yield* Effect.fail( - new OAuthCompleteError({ - message: `client_secret secret "${payload.clientSecretSecretId}" not found`, - }), - ); + return yield* new OAuthCompleteError({ + message: `client_secret secret "${payload.clientSecretSecretId}" not found`, + }); } const tokens = yield* exchangeAuthorizationCode({ @@ -1023,11 +969,10 @@ export const makeOAuth2Service = ( clientAuth: payload.clientAuth, }).pipe( Effect.mapError( - (err) => + ({ message, error }: OAuth2Error) => new OAuthCompleteError({ - message: `Token exchange failed: ${err.message}`, - code: err.error, - + message: `Token exchange failed: ${message}`, + code: error, }), ), ); @@ -1048,7 +993,7 @@ export const makeOAuth2Service = ( model: "oauth2_session", where: [ { field: "id", value: sessionId }, - { field: "scope_id", value: row.scope_id as string }, + { field: "scope_id", value: row.scope_id }, ], }); }); @@ -1071,9 +1016,7 @@ export const makeOAuth2Service = ( catch: (cause) => new ConnectionRefreshError({ connectionId: input.connectionId, - message: `oauth2 providerState is malformed: ${ - cause instanceof Error ? cause.message : String(cause) - }`, + message: "oauth2 providerState is malformed", cause, }), }); @@ -1095,22 +1038,26 @@ export const makeOAuth2Service = ( case "dynamic-dcr": return Effect.gen(function* () { const csec = state.clientSecretSecretId - ? yield* deps - .secretsGet(state.clientSecretSecretId) - .pipe( - Effect.mapError( - (cause) => + ? 
yield* deps.secretsGet(state.clientSecretSecretId).pipe( + Effect.catchTags({ + StorageError: ({ message, cause }) => + Effect.fail( + new ConnectionRefreshError({ + connectionId: input.connectionId, + message: `Failed to resolve DCR client_secret: ${message}`, + cause, + }), + ), + UniqueViolationError: (cause) => + Effect.fail( new ConnectionRefreshError({ connectionId: input.connectionId, - message: `Failed to resolve DCR client_secret: ${ - cause instanceof Error - ? cause.message - : String(cause) - }`, + message: "Failed to resolve DCR client_secret: UniqueViolationError", cause, }), - ), - ) + ), + }), + ) : null; if (state.clientSecretSecretId && csec === null) { return yield* new ConnectionRefreshError({ @@ -1124,22 +1071,26 @@ export const makeOAuth2Service = ( case "authorization-code": case "client-credentials": return Effect.gen(function* () { - const cid = yield* deps - .secretsGet(state.clientIdSecretId) - .pipe( - Effect.mapError( - (cause) => + const cid = yield* deps.secretsGet(state.clientIdSecretId).pipe( + Effect.catchTags({ + StorageError: ({ message, cause }) => + Effect.fail( new ConnectionRefreshError({ connectionId: input.connectionId, - message: `Failed to resolve client_id secret: ${ - cause instanceof Error - ? cause.message - : String(cause) - }`, + message: `Failed to resolve client_id secret: ${message}`, cause, }), - ), - ); + ), + UniqueViolationError: (cause) => + Effect.fail( + new ConnectionRefreshError({ + connectionId: input.connectionId, + message: "Failed to resolve client_id secret: UniqueViolationError", + cause, + }), + ), + }), + ); if (cid === null) { return yield* new ConnectionRefreshError({ connectionId: input.connectionId, @@ -1148,22 +1099,26 @@ export const makeOAuth2Service = ( }); } const csec = state.clientSecretSecretId - ? yield* deps - .secretsGet(state.clientSecretSecretId) - .pipe( - Effect.mapError( - (cause) => + ? 
yield* deps.secretsGet(state.clientSecretSecretId).pipe( + Effect.catchTags({ + StorageError: ({ message, cause }) => + Effect.fail( new ConnectionRefreshError({ connectionId: input.connectionId, - message: `Failed to resolve client_secret: ${ - cause instanceof Error - ? cause.message - : String(cause) - }`, + message: `Failed to resolve client_secret: ${message}`, cause, }), - ), - ) + ), + UniqueViolationError: (cause) => + Effect.fail( + new ConnectionRefreshError({ + connectionId: input.connectionId, + message: "Failed to resolve client_secret: UniqueViolationError", + cause, + }), + ), + }), + ) : null; if (state.clientSecretSecretId && csec === null) { return yield* new ConnectionRefreshError({ @@ -1179,35 +1134,28 @@ export const makeOAuth2Service = ( const tokenEndpoint = yield* (() => { if (state.tokenEndpoint) return Effect.succeed(state.tokenEndpoint); - if ( - state.kind === "dynamic-dcr" && - state.authorizationServerUrl - ) { - return discoverAuthorizationServerMetadata( - state.authorizationServerUrl, - ).pipe( + if (state.kind === "dynamic-dcr" && state.authorizationServerUrl) { + return discoverAuthorizationServerMetadata(state.authorizationServerUrl).pipe( Effect.flatMap((metadata) => metadata?.metadata.token_endpoint ? Effect.succeed(metadata.metadata.token_endpoint) : Effect.fail( new ConnectionRefreshError({ connectionId: input.connectionId, - message: - "oauth2 legacy MCP providerState is missing token endpoint", + message: "oauth2 legacy MCP providerState is missing token endpoint", reauthRequired: true, }), ), ), - Effect.mapError((cause) => - cause instanceof ConnectionRefreshError - ? 
cause - : new ConnectionRefreshError({ - connectionId: input.connectionId, - message: - "Failed to discover token endpoint for legacy MCP OAuth connection", - reauthRequired: true, - cause, - }), + Effect.catchTag("OAuthDiscoveryError", (cause) => + Effect.fail( + new ConnectionRefreshError({ + connectionId: input.connectionId, + message: "Failed to discover token endpoint for legacy MCP OAuth connection", + reauthRequired: true, + cause, + }), + ), ), ); } @@ -1220,56 +1168,51 @@ export const makeOAuth2Service = ( ); })(); - const tokens = yield* (state.kind === "client-credentials" - ? exchangeClientCredentials({ - tokenUrl: tokenEndpoint, - clientId, - clientSecret: clientSecret ?? "", - scopes: state.scopes, - scopeSeparator: state.scopeSeparator, - clientAuth: state.clientAuth, - }) - : refreshAccessToken({ - tokenUrl: tokenEndpoint, - issuerUrl: - state.kind === "dynamic-dcr" || state.kind === "authorization-code" - ? (state.issuerUrl ?? undefined) - : undefined, - clientId, - clientSecret: clientSecret ?? undefined, - refreshToken: input.refreshToken!, - scopes: - state.kind === "dynamic-dcr" || state.kind === "authorization-code" - ? state.scopes - : undefined, - scopeSeparator: - state.kind === "dynamic-dcr" || state.kind === "authorization-code" - ? state.scopeSeparator - : undefined, - clientAuth: state.clientAuth, - idTokenSigningAlgValuesSupported: - state.kind === "dynamic-dcr" - ? state.idTokenSigningAlgValuesSupported - : undefined, - })).pipe( + const tokens = yield* ( + state.kind === "client-credentials" + ? exchangeClientCredentials({ + tokenUrl: tokenEndpoint, + clientId, + clientSecret: clientSecret ?? "", + scopes: state.scopes, + scopeSeparator: state.scopeSeparator, + clientAuth: state.clientAuth, + }) + : refreshAccessToken({ + tokenUrl: tokenEndpoint, + issuerUrl: + state.kind === "dynamic-dcr" || state.kind === "authorization-code" + ? (state.issuerUrl ?? undefined) + : undefined, + clientId, + clientSecret: clientSecret ?? 
undefined, + refreshToken: input.refreshToken!, + scopes: + state.kind === "dynamic-dcr" || state.kind === "authorization-code" + ? state.scopes + : undefined, + scopeSeparator: + state.kind === "dynamic-dcr" || state.kind === "authorization-code" + ? state.scopeSeparator + : undefined, + clientAuth: state.clientAuth, + idTokenSigningAlgValuesSupported: + state.kind === "dynamic-dcr" ? state.idTokenSigningAlgValuesSupported : undefined, + }) + ).pipe( Effect.mapError( - (err) => + ({ message, error }: OAuth2Error) => new ConnectionRefreshError({ connectionId: input.connectionId, - message: `OAuth refresh failed: ${err.message}`, + message: `OAuth refresh failed: ${message}`, // Terminal RFC 6749 §5.2 errors mean retrying won't heal it. - reauthRequired: err.error - ? terminalRefreshErrors.has(err.error) - : false, - + reauthRequired: error ? terminalRefreshErrors.has(error) : false, }), ), ); const expiresAt = - typeof tokens.expires_in === "number" - ? now() + tokens.expires_in * 1000 - : null; + typeof tokens.expires_in === "number" ? 
now() + tokens.expires_in * 1000 : null; const result: ConnectionRefreshResult = { accessToken: tokens.access_token, @@ -1293,6 +1236,7 @@ export const makeOAuth2Service = ( const safeHostname = (value: string | null): string | null => { if (!value) return null; + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: URL constructor is the platform parser; non-URL labels remain display labels try { return new URL(value).host; } catch { From 9494cb4d399b1fec56e87b498c8cece4b3824eeb Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:47:32 -0700 Subject: [PATCH 077/108] Fix OpenAPI boundary lint --- .../openapi/src/react/AddOpenApiSource.tsx | 518 +++++++++--------- .../openapi/src/react/EditOpenApiSource.tsx | 369 +++++++------ packages/plugins/openapi/src/sdk/store.ts | 453 ++++++++------- 3 files changed, 711 insertions(+), 629 deletions(-) diff --git a/packages/plugins/openapi/src/react/AddOpenApiSource.tsx b/packages/plugins/openapi/src/react/AddOpenApiSource.tsx index 251174fe4..44c237c9d 100644 --- a/packages/plugins/openapi/src/react/AddOpenApiSource.tsx +++ b/packages/plugins/openapi/src/react/AddOpenApiSource.tsx @@ -1,6 +1,8 @@ import { useCallback, useEffect, useRef, useState } from "react"; import { useAtomSet } from "@effect/atom-react"; -import { Option } from "effect"; +import * as Exit from "effect/Exit"; +import * as Option from "effect/Option"; +import * as Schema from "effect/Schema"; import { ConnectionId, ScopeId, SecretId } from "@executor-js/sdk/core"; import { startOAuth } from "@executor-js/react/api/atoms"; @@ -81,6 +83,17 @@ import { type ServerVariable, } from "../sdk/types"; +const ErrorMessage = Schema.Struct({ message: Schema.String }); + +const errorMessageFromExit = (exit: Exit.Exit, fallback: string): string => + Option.match( + Option.flatMap(Exit.findErrorOption(exit), Schema.decodeUnknownOption(ErrorMessage)), + { + onNone: () => fallback, + 
onSome: ({ message }) => message, + }, + ); + export const OPENAPI_OAUTH_POPUP_NAME = "openapi-oauth"; export const OPENAPI_OAUTH_CALLBACK_PATH = "/api/oauth/callback"; @@ -109,25 +122,15 @@ export const openApiOAuthConnectionId = ( */ export function resolveOAuthUrl(url: string, baseUrl: string): string { if (!url) return url; - try { - new URL(url); + if (URL.canParse(url)) { return url; - } catch { - if (!baseUrl) return url; - try { - return new URL(url, baseUrl).toString(); - } catch { - return url; - } } + if (!baseUrl || !URL.canParse(url, baseUrl)) return url; + return new URL(url, baseUrl).toString(); } export function inferOAuthIssuerUrl(authorizationUrl: string): string | null { - try { - return new URL(authorizationUrl).origin; - } catch { - return null; - } + return URL.canParse(authorizationUrl) ? new URL(authorizationUrl).origin : null; } type StrategySelection = @@ -242,10 +245,10 @@ export default function AddOpenApiSource(props: { const scopeId = useScope(); const userScope = useUserScope(); - const doPreview = useAtomSet(previewOpenApiSpec, { mode: "promise" }); - const doAdd = useAtomSet(addOpenApiSpec, { mode: "promise" }); - const doStartOAuth = useAtomSet(startOAuth, { mode: "promise" }); - const doSetBinding = useAtomSet(setOpenApiSourceBinding, { mode: "promise" }); + const doPreview = useAtomSet(previewOpenApiSpec, { mode: "promiseExit" }); + const doAdd = useAtomSet(addOpenApiSpec, { mode: "promiseExit" }); + const doStartOAuth = useAtomSet(startOAuth, { mode: "promiseExit" }); + const doSetBinding = useAtomSet(setOpenApiSourceBinding, { mode: "promiseExit" }); const { beginAdd } = usePendingSources(); const secretList = useSecretPickerSecrets(); const oauth = useOAuthPopupFlow({ @@ -278,7 +281,7 @@ export default function AddOpenApiSource(props: { selectedServerIndex >= 0 ? (servers[selectedServerIndex] ?? null) : null; const serverVariables: Record = selectedServer - ? Option.getOrElse(selectedServer.variables, () => ({}) as Record) + ? 
Option.getOrElse(selectedServer.variables, () => ({})) : {}; const serverVariableEntries: Array<[string, ServerVariable]> = Object.entries(serverVariables); @@ -290,10 +293,7 @@ export default function AddOpenApiSource(props: { // Helper used by analyze + server selection: build a default selection map // from a server's variable defaults. const defaultSelectionsFor = (server: ServerInfo): Record => { - const vars: Record = Option.getOrElse( - server.variables, - () => ({}) as Record, - ); + const vars: Record = Option.getOrElse(server.variables, () => ({})); const out: Record = {}; for (const [name, v] of Object.entries(vars)) out[name] = v.default; return out; @@ -371,45 +371,47 @@ export default function AddOpenApiSource(props: { setAnalyzing(true); setAnalyzeError(null); setAddError(null); - try { - const credentials = serializeHttpCredentials(specFetchCredentials); - const result = await doPreview({ - params: { scopeId }, - payload: { - spec: specUrl, - specFetchCredentials: credentials, - }, - }); - setPreview(result); - - const firstServer = result.servers[0]; - if (firstServer) { - setSelectedServerIndex(0); - setVariableSelections(defaultSelectionsFor(firstServer)); - setCustomBaseUrl(""); - } else { - setSelectedServerIndex(-1); - setVariableSelections({}); - setCustomBaseUrl(""); - } - - const firstPreset = result.headerPresets[0]; - if (firstPreset) { - setStrategy({ kind: "header", presetIndex: 0 }); - setCustomHeaders(entriesFromSpecPreset(firstPreset)); - } else { - // No header presets — default to "custom" so the headers editor is - // visible immediately. Specs with no `security` block (e.g. Microsoft - // Graph) would otherwise leave the user staring at just the - // Authentication heading with no way to add headers. - setStrategy({ kind: "custom" }); - setCustomHeaders([]); - } - } catch (e) { - setAnalyzeError(e instanceof Error ? 
e.message : "Failed to parse spec"); - } finally { + const credentials = serializeHttpCredentials(specFetchCredentials); + const exit = await doPreview({ + params: { scopeId }, + payload: { + spec: specUrl, + specFetchCredentials: credentials, + }, + }); + if (Exit.isFailure(exit)) { + setAnalyzeError(errorMessageFromExit(exit, "Failed to parse spec")); setAnalyzing(false); + return; + } + + const result = exit.value; + setPreview(result); + + const firstServer = result.servers[0]; + if (firstServer) { + setSelectedServerIndex(0); + setVariableSelections(defaultSelectionsFor(firstServer)); + setCustomBaseUrl(""); + } else { + setSelectedServerIndex(-1); + setVariableSelections({}); + setCustomBaseUrl(""); } + + const firstPreset = result.headerPresets[0]; + if (firstPreset) { + setStrategy({ kind: "header", presetIndex: 0 }); + setCustomHeaders(entriesFromSpecPreset(firstPreset)); + } else { + // No header presets — default to "custom" so the headers editor is + // visible immediately. Specs with no `security` block (e.g. Microsoft + // Graph) would otherwise leave the user staring at just the + // Authentication heading with no way to add headers. + setStrategy({ kind: "custom" }); + setCustomHeaders([]); + } + setAnalyzing(false); }; handleAnalyzeRef.current = handleAnalyze; @@ -470,122 +472,126 @@ export default function AddOpenApiSource(props: { if (!selectedOAuth2Preset || !oauth2ClientIdSecretId || !preview) return; oauth.cancel(); setOauth2Error(null); - try { - const displayName = identity.name.trim() || selectedOAuth2Preset.securitySchemeName; - - const tokenUrl = resolveOAuthUrl(selectedOAuth2Preset.tokenUrl, resolvedBaseUrl); - - if (selectedOAuth2Preset.flow === "clientCredentials") { - // RFC 6749 §4.4: no user-interactive consent step. The client_secret - // is mandatory; the backend exchanges tokens inline and returns a - // completed OAuth2Auth we can attach to the source directly. 
- if (!oauth2ClientSecretSecretId) { - setOauth2Error("client_credentials requires a client secret"); - return; - } - setStartingOAuth(true); - const connectionId = openApiOAuthConnectionId(resolvedSourceId, selectedOAuth2Preset.flow); - const response = await doStartOAuth({ - params: { scopeId }, - payload: { - endpoint: tokenUrl, - redirectUrl: tokenUrl, - connectionId, - tokenScope: scopeId as string, - strategy: { - kind: "client-credentials", - tokenEndpoint: tokenUrl, - clientIdSecretId: oauth2ClientIdSecretId, - clientSecretSecretId: oauth2ClientSecretSecretId, - scopes: [...oauth2SelectedScopes], - }, - pluginId: "openapi", - identityLabel: `${displayName} OAuth`, + const displayName = identity.name.trim() || selectedOAuth2Preset.securitySchemeName; + + const tokenUrl = resolveOAuthUrl(selectedOAuth2Preset.tokenUrl, resolvedBaseUrl); + + if (selectedOAuth2Preset.flow === "clientCredentials") { + // RFC 6749 §4.4: no user-interactive consent step. The client_secret + // is mandatory; the backend exchanges tokens inline and returns a + // completed OAuth2Auth we can attach to the source directly. 
+ if (!oauth2ClientSecretSecretId) { + setOauth2Error("client_credentials requires a client secret"); + return; + } + setStartingOAuth(true); + const connectionId = openApiOAuthConnectionId(resolvedSourceId, selectedOAuth2Preset.flow); + const startOAuthExit = await doStartOAuth({ + params: { scopeId }, + payload: { + endpoint: tokenUrl, + redirectUrl: tokenUrl, + connectionId, + tokenScope: scopeId, + strategy: { + kind: "client-credentials", + tokenEndpoint: tokenUrl, + clientIdSecretId: oauth2ClientIdSecretId, + clientSecretSecretId: oauth2ClientSecretSecretId, + scopes: [...oauth2SelectedScopes], }, - }); - setStartingOAuth(false); - if (!response.completedConnection) { - setOauth2Error("client_credentials flow did not mint a connection"); - return; - } + pluginId: "openapi", + identityLabel: `${displayName} OAuth`, + }, + }); + setStartingOAuth(false); + if (Exit.isFailure(startOAuthExit)) { + setOauth2Error(errorMessageFromExit(startOAuthExit, "Failed to start OAuth")); + return; + } + const response = startOAuthExit.value; + if (!response.completedConnection) { + setOauth2Error("client_credentials flow did not mint a connection"); + return; + } + setOauth2AuthState({ + fingerprint: selectedOAuth2Fingerprint, + auth: new OAuth2Auth({ + kind: "oauth2", + connectionId: response.completedConnection.connectionId, + securitySchemeName: selectedOAuth2Preset.securitySchemeName, + flow: "clientCredentials", + tokenUrl, + authorizationUrl: null, + clientIdSecretId: oauth2ClientIdSecretId, + clientSecretSecretId: oauth2ClientSecretSecretId, + scopes: [...oauth2SelectedScopes], + }), + }); + setOauth2Error(null); + return; + } + + const authorizationUrl = resolveOAuthUrl( + Option.getOrElse(selectedOAuth2Preset.authorizationUrl, () => ""), + resolvedBaseUrl, + ); + const issuerUrl = inferOAuthIssuerUrl(authorizationUrl); + const startOAuthExit = await doStartOAuth({ + params: { scopeId }, + payload: { + endpoint: authorizationUrl, + connectionId: 
openApiOAuthConnectionId(resolvedSourceId, selectedOAuth2Preset.flow), + tokenScope: scopeId, + redirectUrl: oauth2RedirectUrl, + strategy: { + kind: "authorization-code", + authorizationEndpoint: authorizationUrl, + tokenEndpoint: tokenUrl, + issuerUrl, + clientIdSecretId: oauth2ClientIdSecretId, + clientSecretSecretId: oauth2ClientSecretSecretId ?? null, + scopes: [...oauth2SelectedScopes], + }, + pluginId: "openapi", + identityLabel: `${displayName} OAuth`, + }, + }); + if (Exit.isFailure(startOAuthExit)) { + setOauth2Error(errorMessageFromExit(startOAuthExit, "Failed to start OAuth")); + return; + } + const response = startOAuthExit.value; + if (response.authorizationUrl === null) { + setOauth2Error("Unexpected response flow from server"); + return; + } + + await oauth.openAuthorization({ + run: async () => ({ + sessionId: response.sessionId, + authorizationUrl: response.authorizationUrl, + }), + onSuccess: (result) => { setOauth2AuthState({ fingerprint: selectedOAuth2Fingerprint, auth: new OAuth2Auth({ kind: "oauth2", - connectionId: response.completedConnection.connectionId, + connectionId: result.connectionId, securitySchemeName: selectedOAuth2Preset.securitySchemeName, - flow: "clientCredentials", + flow: "authorizationCode", tokenUrl, - authorizationUrl: null, + authorizationUrl, + issuerUrl, clientIdSecretId: oauth2ClientIdSecretId, clientSecretSecretId: oauth2ClientSecretSecretId, scopes: [...oauth2SelectedScopes], }), }); setOauth2Error(null); - return; - } - - const authorizationUrl = resolveOAuthUrl( - Option.getOrElse(selectedOAuth2Preset.authorizationUrl, () => ""), - resolvedBaseUrl, - ); - const issuerUrl = inferOAuthIssuerUrl(authorizationUrl); - - await oauth.openAuthorization({ - run: async () => { - const response = await doStartOAuth({ - params: { scopeId }, - payload: { - endpoint: authorizationUrl, - connectionId: openApiOAuthConnectionId(resolvedSourceId, selectedOAuth2Preset.flow), - tokenScope: scopeId as string, - redirectUrl: 
oauth2RedirectUrl, - strategy: { - kind: "authorization-code", - authorizationEndpoint: authorizationUrl, - tokenEndpoint: tokenUrl, - issuerUrl, - clientIdSecretId: oauth2ClientIdSecretId, - clientSecretSecretId: oauth2ClientSecretSecretId ?? null, - scopes: [...oauth2SelectedScopes], - }, - pluginId: "openapi", - identityLabel: `${displayName} OAuth`, - }, - }); - if (response.authorizationUrl === null) { - throw new Error("Unexpected response flow from server"); - } - return { - sessionId: response.sessionId, - authorizationUrl: response.authorizationUrl, - }; - }, - onSuccess: (result) => { - setOauth2AuthState({ - fingerprint: selectedOAuth2Fingerprint, - auth: new OAuth2Auth({ - kind: "oauth2", - connectionId: result.connectionId, - securitySchemeName: selectedOAuth2Preset.securitySchemeName, - flow: "authorizationCode", - tokenUrl, - authorizationUrl, - issuerUrl, - clientIdSecretId: oauth2ClientIdSecretId, - clientSecretSecretId: oauth2ClientSecretSecretId, - scopes: [...oauth2SelectedScopes], - }), - }); - setOauth2Error(null); - }, - onError: setOauth2Error, - }); - } catch (e) { - setStartingOAuth(false); - setOauth2Error(e instanceof Error ? e.message : "Failed to start OAuth"); - } + }, + onError: setOauth2Error, + }); }, [ selectedOAuth2Preset, oauth2ClientIdSecretId, @@ -621,103 +627,123 @@ export default function AddOpenApiSource(props: { kind: "openapi", url: resolvedBaseUrl || undefined, }); - try { - const result = await doAdd({ + const failAdd = (message: string) => { + setAddError(message); + setAdding(false); + placeholder.done(); + }; + const resultExit = await doAdd({ + params: { scopeId }, + payload: { + spec: specUrl, + specFetchCredentials: serializeHttpCredentials(specFetchCredentials), + name: identity.name.trim() || undefined, + namespace: slugifyNamespace(identity.namespace) || undefined, + baseUrl: resolvedBaseUrl || undefined, + ...(hasHeaders ? 
{ headers: configuredHeaders } : {}), + ...(Object.keys(serializeHttpCredentials(runtimeCredentials).queryParams).length > 0 + ? { queryParams: serializeHttpCredentials(runtimeCredentials).queryParams } + : {}), + ...(configuredOAuth2 ? { oauth2: configuredOAuth2 } : {}), + }, + reactivityKeys: addSpecWriteKeys, + }); + + if (Exit.isFailure(resultExit)) { + failAdd(errorMessageFromExit(resultExit, "Failed to add source")); + return; + } + + const sourceId = resultExit.value.namespace; + const sourceScope = ScopeId.make(scopeId); + const bindingScope = ScopeId.make(userScope); + + for (const binding of headerBindings) { + const bindingExit = await doSetBinding({ params: { scopeId }, payload: { - spec: specUrl, - specFetchCredentials: serializeHttpCredentials(specFetchCredentials), - name: identity.name.trim() || undefined, - namespace: slugifyNamespace(identity.namespace) || undefined, - baseUrl: resolvedBaseUrl || undefined, - ...(hasHeaders ? { headers: configuredHeaders } : {}), - ...(Object.keys(serializeHttpCredentials(runtimeCredentials).queryParams).length > 0 - ? { queryParams: serializeHttpCredentials(runtimeCredentials).queryParams } - : {}), - ...(configuredOAuth2 ? 
{ oauth2: configuredOAuth2 } : {}), + sourceId, + sourceScope, + scope: bindingScope, + slot: binding.slot, + value: { + kind: "secret", + secretId: SecretId.make(binding.secretId), + }, }, - reactivityKeys: addSpecWriteKeys, + reactivityKeys: bindingWriteKeys, }); - - const sourceId = result.namespace; - const sourceScope = ScopeId.make(scopeId); - const bindingScope = ScopeId.make(userScope); - - for (const binding of headerBindings) { - await doSetBinding({ - params: { scopeId }, - payload: { - sourceId, - sourceScope, - scope: bindingScope, - slot: binding.slot, - value: { - kind: "secret", - secretId: SecretId.make(binding.secretId), - }, - }, - reactivityKeys: bindingWriteKeys, - }); + if (Exit.isFailure(bindingExit)) { + failAdd(errorMessageFromExit(bindingExit, "Failed to add source")); + return; } + } - if (configuredOAuth2 && oauth2ClientIdSecretId) { - await doSetBinding({ - params: { scopeId }, - payload: { - sourceId, - sourceScope, - scope: bindingScope, - slot: configuredOAuth2.clientIdSlot, - value: { - kind: "secret", - secretId: SecretId.make(oauth2ClientIdSecretId), - }, + if (configuredOAuth2 && oauth2ClientIdSecretId) { + const clientIdBindingExit = await doSetBinding({ + params: { scopeId }, + payload: { + sourceId, + sourceScope, + scope: bindingScope, + slot: configuredOAuth2.clientIdSlot, + value: { + kind: "secret", + secretId: SecretId.make(oauth2ClientIdSecretId), }, - reactivityKeys: bindingWriteKeys, - }); + }, + reactivityKeys: bindingWriteKeys, + }); + if (Exit.isFailure(clientIdBindingExit)) { + failAdd(errorMessageFromExit(clientIdBindingExit, "Failed to add source")); + return; } + } - if (configuredOAuth2?.clientSecretSlot && oauth2ClientSecretSecretId) { - await doSetBinding({ - params: { scopeId }, - payload: { - sourceId, - sourceScope, - scope: bindingScope, - slot: configuredOAuth2.clientSecretSlot, - value: { - kind: "secret", - secretId: SecretId.make(oauth2ClientSecretSecretId), - }, + if 
(configuredOAuth2?.clientSecretSlot && oauth2ClientSecretSecretId) { + const clientSecretBindingExit = await doSetBinding({ + params: { scopeId }, + payload: { + sourceId, + sourceScope, + scope: bindingScope, + slot: configuredOAuth2.clientSecretSlot, + value: { + kind: "secret", + secretId: SecretId.make(oauth2ClientSecretSecretId), }, - reactivityKeys: bindingWriteKeys, - }); + }, + reactivityKeys: bindingWriteKeys, + }); + if (Exit.isFailure(clientSecretBindingExit)) { + failAdd(errorMessageFromExit(clientSecretBindingExit, "Failed to add source")); + return; } + } - if (configuredOAuth2 && oauth2Auth) { - await doSetBinding({ - params: { scopeId }, - payload: { - sourceId, - sourceScope, - scope: bindingScope, - slot: configuredOAuth2.connectionSlot, - value: { - kind: "connection", - connectionId: ConnectionId.make(oauth2Auth.connectionId), - }, + if (configuredOAuth2 && oauth2Auth) { + const connectionBindingExit = await doSetBinding({ + params: { scopeId }, + payload: { + sourceId, + sourceScope, + scope: bindingScope, + slot: configuredOAuth2.connectionSlot, + value: { + kind: "connection", + connectionId: ConnectionId.make(oauth2Auth.connectionId), }, - reactivityKeys: bindingWriteKeys, - }); + }, + reactivityKeys: bindingWriteKeys, + }); + if (Exit.isFailure(connectionBindingExit)) { + failAdd(errorMessageFromExit(connectionBindingExit, "Failed to add source")); + return; } - - props.onComplete(); - } catch (e) { - setAddError(e instanceof Error ? 
e.message : "Failed to add source"); - setAdding(false); - } finally { - placeholder.done(); } + + placeholder.done(); + props.onComplete(); }; // ---- Render ---- diff --git a/packages/plugins/openapi/src/react/EditOpenApiSource.tsx b/packages/plugins/openapi/src/react/EditOpenApiSource.tsx index b5cb3f5ff..67db6cdb6 100644 --- a/packages/plugins/openapi/src/react/EditOpenApiSource.tsx +++ b/packages/plugins/openapi/src/react/EditOpenApiSource.tsx @@ -1,5 +1,8 @@ import { useEffect, useMemo, useRef, useState } from "react"; import { useAtomSet, useAtomValue } from "@effect/atom-react"; +import * as Exit from "effect/Exit"; +import * as Option from "effect/Option"; +import * as Schema from "effect/Schema"; import * as AsyncResult from "effect/unstable/reactivity/AsyncResult"; import { connectionsAtom, sourceAtom, startOAuth } from "@executor-js/react/api/atoms"; @@ -42,7 +45,21 @@ import { resolveOAuthUrl, } from "./AddOpenApiSource"; import { oauth2ClientSecretSlot } from "../sdk/store"; -import type { OpenApiSourceBindingValue } from "../sdk/types"; +import { + OpenApiSourceBindingValue, + type OpenApiSourceBindingValue as OpenApiSourceBindingValueType, +} from "../sdk/types"; + +const ErrorMessage = Schema.Struct({ message: Schema.String }); + +const errorMessageFromExit = (exit: Exit.Exit, fallback: string): string => + Option.match( + Option.flatMap(Exit.findErrorOption(exit), Schema.decodeUnknownOption(ErrorMessage)), + { + onNone: () => fallback, + onSome: ({ message }) => message, + }, + ); type SlotDef = | { @@ -79,7 +96,7 @@ const openApiOAuthConnectionId = ( targetScope: ScopeId, ): ConnectionId => ConnectionId.make( - `openapi-oauth-${slugify(sourceId)}-${slugify(securitySchemeName)}-${shortHash(targetScope as string)}`, + `openapi-oauth-${slugify(sourceId)}-${slugify(securitySchemeName)}-${shortHash(targetScope)}`, ); const bindingSecretId = (sourceId: string, slot: string, scopeId: string): string => @@ -101,7 +118,7 @@ const exactBindingForScope = ( 
) => rows.find((row) => row.slot === slot && row.scopeId === scopeId) ?? null; const scopeRank = (ranks: ReadonlyMap, scopeId: ScopeId): number => - ranks.get(scopeId as string) ?? Number.MAX_SAFE_INTEGER; + ranks.get(scopeId) ?? Number.MAX_SAFE_INTEGER; const effectiveBindingForScope = ( rows: readonly { @@ -119,21 +136,13 @@ const effectiveBindingForScope = ( const isSecretBindingValue = ( value: unknown, -): value is Extract => - typeof value === "object" && - value !== null && - "kind" in value && - (value as { kind?: unknown }).kind === "secret" && - "secretId" in value; +): value is Extract => + Schema.is(OpenApiSourceBindingValue)(value) && value.kind === "secret"; const isConnectionBindingValue = ( value: unknown, -): value is Extract => - typeof value === "object" && - value !== null && - "kind" in value && - (value as { kind?: unknown }).kind === "connection" && - "connectionId" in value; +): value is Extract => + Schema.is(OpenApiSourceBindingValue)(value) && value.kind === "connection"; export default function EditOpenApiSource(props: { readonly sourceId: string; @@ -150,7 +159,7 @@ export default function EditOpenApiSource(props: { const sourceScopeId = sourceSummary?.scopeId ?? 
displayScope; const sourceScope = ScopeId.make(sourceScopeId); const scopeRanks = useMemo( - () => new Map(scopeStack.map((scope, index) => [scope.id as string, index] as const)), + () => new Map(scopeStack.map((scope, index) => [scope.id, index] as const)), [scopeStack], ); @@ -161,10 +170,10 @@ export default function EditOpenApiSource(props: { const connectionsResult = useAtomValue(connectionsAtom(displayScope)); const secretList = useSecretPickerSecrets(); - const doUpdate = useAtomSet(updateOpenApiSource, { mode: "promise" }); - const doSetBinding = useAtomSet(setOpenApiSourceBinding, { mode: "promise" }); - const doRemoveBinding = useAtomSet(removeOpenApiSourceBinding, { mode: "promise" }); - const doStartOAuth = useAtomSet(startOAuth, { mode: "promise" }); + const doUpdate = useAtomSet(updateOpenApiSource, { mode: "promiseExit" }); + const doSetBinding = useAtomSet(setOpenApiSourceBinding, { mode: "promiseExit" }); + const doRemoveBinding = useAtomSet(removeOpenApiSourceBinding, { mode: "promiseExit" }); + const doStartOAuth = useAtomSet(startOAuth, { mode: "promiseExit" }); const oauth = useOAuthPopupFlow({ popupName: OPENAPI_OAUTH_POPUP_NAME, popupBlockedMessage: "OAuth popup was blocked by the browser", @@ -224,28 +233,28 @@ export default function EditOpenApiSource(props: { const seq = ++sourceSaveSeq.current; setSourceSaveState("saving"); setError(null); - void doUpdate({ - params: { scopeId: ScopeId.make(sourceScopeId), namespace: props.sourceId }, - payload: { - name: nextName || undefined, - baseUrl: nextBaseUrl || undefined, - headers: source.config.headers, - oauth2: source.config.oauth2, - }, - reactivityKeys: openApiWriteKeys, - }) - .then(() => { - if (sourceSaveSeq.current !== seq) return; - setSourceSaveState("saved"); - window.setTimeout(() => { - if (sourceSaveSeq.current === seq) setSourceSaveState("idle"); - }, 1600); - }) - .catch((e: unknown) => { - if (sourceSaveSeq.current !== seq) return; - setSourceSaveState("idle"); - setError(e 
instanceof Error ? e.message : "Failed to save source details"); + void (async () => { + const exit = await doUpdate({ + params: { scopeId: ScopeId.make(sourceScopeId), namespace: props.sourceId }, + payload: { + name: nextName || undefined, + baseUrl: nextBaseUrl || undefined, + headers: source.config.headers, + oauth2: source.config.oauth2, + }, + reactivityKeys: openApiWriteKeys, }); + if (sourceSaveSeq.current !== seq) return; + if (Exit.isFailure(exit)) { + setSourceSaveState("idle"); + setError(errorMessageFromExit(exit, "Failed to save source details")); + return; + } + setSourceSaveState("saved"); + window.setTimeout(() => { + if (sourceSaveSeq.current === seq) setSourceSaveState("idle"); + }, 1600); + })(); }, 600); return () => window.clearTimeout(timeout); @@ -321,44 +330,40 @@ export default function EditOpenApiSource(props: { if (!trimmed) return; setBusyKey(inputKey); setError(null); - try { - await doSetBinding({ - params: { scopeId: displayScope }, - payload: { - sourceId: props.sourceId, - sourceScope, - scope: targetScope, - slot, - value: { kind: "secret", secretId: SecretId.make(trimmed) }, - }, - reactivityKeys: sourceWriteKeys, - }); - } catch (e) { - setError(e instanceof Error ? 
e.message : "Failed to save credential binding"); - } finally { - setBusyKey(null); + const exit = await doSetBinding({ + params: { scopeId: displayScope }, + payload: { + sourceId: props.sourceId, + sourceScope, + scope: targetScope, + slot, + value: { kind: "secret", secretId: SecretId.make(trimmed) }, + }, + reactivityKeys: sourceWriteKeys, + }); + if (Exit.isFailure(exit)) { + setError(errorMessageFromExit(exit, "Failed to save credential binding")); } + setBusyKey(null); }; const clearBinding = async (targetScope: ScopeId, slot: string) => { setBusyKey(`${targetScope}:${slot}:clear`); setError(null); - try { - await doRemoveBinding({ - params: { scopeId: displayScope }, - payload: { - sourceId: props.sourceId, - sourceScope, - slot, - scope: targetScope, - }, - reactivityKeys: sourceWriteKeys, - }); - } catch (e) { - setError(e instanceof Error ? e.message : "Failed to clear credential binding"); - } finally { - setBusyKey(null); + const exit = await doRemoveBinding({ + params: { scopeId: displayScope }, + payload: { + sourceId: props.sourceId, + sourceScope, + slot, + scope: targetScope, + }, + reactivityKeys: sourceWriteKeys, + }); + if (Exit.isFailure(exit)) { + setError(errorMessageFromExit(exit, "Failed to clear credential binding")); } + setBusyKey(null); }; const connectOAuth = async (targetScope: ScopeId) => { @@ -410,35 +415,112 @@ export default function EditOpenApiSource(props: { setPendingOAuthConnection({ scopeId: targetScope, slot: oauth2.connectionSlot, - connectionId: connectionId as string, + connectionId: connectionId, }); setError(null); - try { - const displayName = source.name; - const tokenUrl = resolveOAuthUrl(oauth2.tokenUrl, source.config.baseUrl ?? 
""); - if (oauth2.flow === "clientCredentials") { - const response = await doStartOAuth({ - params: { scopeId: displayScope }, - payload: { - endpoint: tokenUrl, - redirectUrl: tokenUrl, - connectionId: connectionId as string, - tokenScope: targetScope as string, - strategy: { - kind: "client-credentials", - tokenEndpoint: tokenUrl, - clientIdSecretId, - clientSecretSecretId: clientSecretValue!.secretId, - scopes: [...oauth2.scopes], - }, - pluginId: "openapi", - identityLabel: `${displayName} OAuth`, + const failConnect = (message: string) => { + setError(message); + setPendingOAuthConnection(null); + setBusyKey(null); + }; + const displayName = source.name; + const tokenUrl = resolveOAuthUrl(oauth2.tokenUrl, source.config.baseUrl ?? ""); + if (oauth2.flow === "clientCredentials") { + const startOAuthExit = await doStartOAuth({ + params: { scopeId: displayScope }, + payload: { + endpoint: tokenUrl, + redirectUrl: tokenUrl, + connectionId: connectionId, + tokenScope: targetScope, + strategy: { + kind: "client-credentials", + tokenEndpoint: tokenUrl, + clientIdSecretId, + clientSecretSecretId: clientSecretValue!.secretId, + scopes: [...oauth2.scopes], }, - }); - if (!response.completedConnection) { - throw new Error("Unexpected OAuth response"); - } - await doSetBinding({ + pluginId: "openapi", + identityLabel: `${displayName} OAuth`, + }, + }); + if (Exit.isFailure(startOAuthExit)) { + failConnect(errorMessageFromExit(startOAuthExit, "Failed to connect OAuth")); + return; + } + const response = startOAuthExit.value; + if (!response.completedConnection) { + failConnect("Unexpected OAuth response"); + return; + } + const setBindingExit = await doSetBinding({ + params: { scopeId: displayScope }, + payload: { + sourceId: props.sourceId, + sourceScope, + scope: targetScope, + slot: oauth2.connectionSlot, + value: { + kind: "connection", + connectionId: ConnectionId.make(response.completedConnection.connectionId), + }, + }, + reactivityKeys: [...sourceWriteKeys, 
...connectionWriteKeys], + }); + if (Exit.isFailure(setBindingExit)) { + failConnect(errorMessageFromExit(setBindingExit, "Failed to connect OAuth")); + return; + } + setPendingOAuthConnection(null); + setBusyKey(null); + return; + } + + const authorizationUrl = resolveOAuthUrl( + oauth2.authorizationUrl ?? "", + source.config.baseUrl ?? "", + ); + const issuerUrl = oauth2.issuerUrl ?? inferOAuthIssuerUrl(authorizationUrl); + const startOAuthExit = await doStartOAuth({ + params: { scopeId: displayScope }, + payload: { + endpoint: authorizationUrl, + connectionId, + tokenScope: targetScope, + redirectUrl: oauth2RedirectUrl, + strategy: { + kind: "authorization-code", + authorizationEndpoint: authorizationUrl, + tokenEndpoint: tokenUrl, + issuerUrl, + clientIdSecretId, + clientSecretSecretId: + clientSecretBinding && isSecretBindingValue(clientSecretBinding.value) + ? clientSecretBinding.value.secretId + : null, + scopes: [...oauth2.scopes], + }, + pluginId: "openapi", + identityLabel: `${displayName} OAuth`, + }, + }); + if (Exit.isFailure(startOAuthExit)) { + failConnect(errorMessageFromExit(startOAuthExit, "Failed to connect OAuth")); + return; + } + const response = startOAuthExit.value; + if (response.authorizationUrl === null) { + failConnect("Unexpected OAuth response"); + return; + } + + await oauth.openAuthorization({ + run: async () => ({ + sessionId: response.sessionId, + authorizationUrl: response.authorizationUrl, + }), + onSuccess: async (result) => { + const setBindingExit = await doSetBinding({ params: { scopeId: displayScope }, payload: { sourceId: props.sourceId, @@ -447,83 +529,24 @@ export default function EditOpenApiSource(props: { slot: oauth2.connectionSlot, value: { kind: "connection", - connectionId: ConnectionId.make(response.completedConnection.connectionId), + connectionId: ConnectionId.make(result.connectionId), }, }, reactivityKeys: [...sourceWriteKeys, ...connectionWriteKeys], }); + if (Exit.isFailure(setBindingExit)) { + 
failConnect(errorMessageFromExit(setBindingExit, "Failed to connect OAuth")); + return; + } setPendingOAuthConnection(null); setBusyKey(null); - return; - } - - const authorizationUrl = resolveOAuthUrl( - oauth2.authorizationUrl ?? "", - source.config.baseUrl ?? "", - ); - const issuerUrl = oauth2.issuerUrl ?? inferOAuthIssuerUrl(authorizationUrl); - await oauth.openAuthorization({ - run: async () => { - const response = await doStartOAuth({ - params: { scopeId: displayScope }, - payload: { - endpoint: authorizationUrl, - connectionId: connectionId as string, - tokenScope: targetScope as string, - redirectUrl: oauth2RedirectUrl, - strategy: { - kind: "authorization-code", - authorizationEndpoint: authorizationUrl, - tokenEndpoint: tokenUrl, - issuerUrl, - clientIdSecretId, - clientSecretSecretId: - clientSecretBinding && isSecretBindingValue(clientSecretBinding.value) - ? clientSecretBinding.value.secretId - : null, - scopes: [...oauth2.scopes], - }, - pluginId: "openapi", - identityLabel: `${displayName} OAuth`, - }, - }); - if (response.authorizationUrl === null) { - throw new Error("Unexpected OAuth response"); - } - return { - sessionId: response.sessionId, - authorizationUrl: response.authorizationUrl, - }; - }, - onSuccess: async (result) => { - await doSetBinding({ - params: { scopeId: displayScope }, - payload: { - sourceId: props.sourceId, - sourceScope, - scope: targetScope, - slot: oauth2.connectionSlot, - value: { - kind: "connection", - connectionId: ConnectionId.make(result.connectionId), - }, - }, - reactivityKeys: [...sourceWriteKeys, ...connectionWriteKeys], - }); - setPendingOAuthConnection(null); - setBusyKey(null); - }, - onError: (message) => { - setError(message); - setPendingOAuthConnection(null); - setBusyKey(null); - }, - }); - } catch (e) { - setError(e instanceof Error ? 
e.message : "Failed to connect OAuth"); - setPendingOAuthConnection(null); - setBusyKey(null); - } + }, + onError: (message) => { + setError(message); + setPendingOAuthConnection(null); + setBusyKey(null); + }, + }); }; return ( @@ -590,10 +613,10 @@ export default function EditOpenApiSource(props: { ({ - value: entry.scopeId as string, + value: entry.scopeId, label: entry.label, }))} - value={activeCredentialScopeId as string} + value={activeCredentialScopeId} onChange={setSelectedCredentialScope} /> @@ -618,9 +641,9 @@ export default function EditOpenApiSource(props: { isSecretBindingValue(effective.value); const currentSecretId = exact && isSecretBindingValue(exact.value) - ? (exact.value.secretId as string) + ? exact.value.secretId : inherited && effective && isSecretBindingValue(effective.value) - ? (effective.value.secretId as string) + ? effective.value.secretId : null; return ( ( - "OpenApiStoredSource", -)({ +export class StoredSourceSchema extends Schema.Class("OpenApiStoredSource")({ namespace: Schema.String, name: Schema.String, config: Schema.Struct({ @@ -184,20 +182,12 @@ export class StoredSourceSchema extends Schema.Class( sourceUrl: Schema.optional(Schema.String), baseUrl: Schema.optional(Schema.String), namespace: Schema.optional(Schema.String), - headers: Schema.optional( - Schema.Record(Schema.String, ConfiguredHeaderValue), - ), - queryParams: Schema.optional( - Schema.Record(Schema.String, HeaderValue), - ), + headers: Schema.optional(Schema.Record(Schema.String, ConfiguredHeaderValue)), + queryParams: Schema.optional(Schema.Record(Schema.String, HeaderValue)), specFetchCredentials: Schema.optional( Schema.Struct({ - headers: Schema.optional( - Schema.Record(Schema.String, HeaderValue), - ), - queryParams: Schema.optional( - Schema.Record(Schema.String, HeaderValue), - ), + headers: Schema.optional(Schema.Record(Schema.String, HeaderValue)), + queryParams: Schema.optional(Schema.Record(Schema.String, HeaderValue)), }), ), // Canonical 
source-owned OAuth config. Concrete client credentials @@ -221,10 +211,85 @@ export interface StoredOperation { const encodeBinding = Schema.encodeSync(OperationBinding); const decodeBinding = Schema.decodeUnknownSync(OperationBinding); +const decodeBindingJson = Schema.decodeUnknownSync(Schema.fromJsonString(OperationBinding)); const decodeOAuth2 = Schema.decodeUnknownSync(OAuth2Auth); +const decodeOAuth2Option = Schema.decodeUnknownOption(OAuth2Auth); +const decodeOAuth2JsonOption = Schema.decodeUnknownOption(Schema.fromJsonString(OAuth2Auth)); +const decodeOAuth2SourceConfigOption = Schema.decodeUnknownOption(OAuth2SourceConfig); +const decodeOAuth2SourceConfigJsonOption = Schema.decodeUnknownOption( + Schema.fromJsonString(OAuth2SourceConfig), +); const encodeOAuth2SourceConfig = Schema.encodeSync(OAuth2SourceConfig); +const decodeHeaderValueOption = Schema.decodeUnknownOption(HeaderValue); +const UnknownRecord = Schema.Record(Schema.String, Schema.Unknown); +const decodeUnknownRecord = Schema.decodeUnknownSync(UnknownRecord); +const decodeUnknownRecordJson = Schema.decodeUnknownSync(Schema.fromJsonString(UnknownRecord)); +const decodeConfiguredHeaderBindingOption = Schema.decodeUnknownOption(ConfiguredHeaderBinding); + +const NullableString = Schema.NullOr(Schema.String); +const OptionalNullableString = Schema.optional(NullableString); + +const ChildStorageRow = Schema.Struct({ + name: Schema.String, + kind: Schema.Literals(["text", "secret"]), + text_value: OptionalNullableString, + secret_id: OptionalNullableString, + secret_prefix: OptionalNullableString, +}); +const decodeChildStorageRowOption = Schema.decodeUnknownOption(ChildStorageRow); + +const SourceBindingStorageRow = Schema.Struct({ + source_id: Schema.String, + source_scope_id: Schema.String, + target_scope_id: Schema.String, + slot: Schema.String, + kind: Schema.Literals(["secret", "connection", "text"]), + secret_id: OptionalNullableString, + connection_id: OptionalNullableString, + text_value: 
OptionalNullableString, + created_at: Schema.Unknown, + updated_at: Schema.Unknown, +}); +const decodeSourceBindingStorageRow = Schema.decodeUnknownSync(SourceBindingStorageRow); + +const SourceStorageRow = Schema.Struct({ + id: Schema.String, + scope_id: Schema.String, + name: Schema.String, + spec: Schema.String, + source_url: OptionalNullableString, + base_url: OptionalNullableString, + headers: Schema.optional(Schema.Unknown), + oauth2: Schema.optional(Schema.Unknown), +}); +const decodeSourceStorageRow = Schema.decodeUnknownSync(SourceStorageRow); + +const OperationStorageRow = Schema.Struct({ + id: Schema.String, + source_id: Schema.String, + binding: Schema.Unknown, +}); +const decodeOperationStorageRow = Schema.decodeUnknownSync(OperationStorageRow); + +const ChildUsageStorageRow = Schema.Struct({ + source_id: Schema.String, + scope_id: Schema.String, + name: Schema.String, +}); +const decodeChildUsageStorageRow = Schema.decodeUnknownSync(ChildUsageStorageRow); + +const SourceNameStorageRow = Schema.Struct({ + id: Schema.String, + scope_id: Schema.String, + name: Schema.String, +}); +const decodeSourceNameStorageRow = Schema.decodeUnknownSync(SourceNameStorageRow); + +const decodeStorageDate = (value: unknown): Date => + value instanceof Date ? value : new Date(Schema.decodeUnknownSync(Schema.String)(value)); + interface ChildRow { readonly id: string; readonly scope_id: string; @@ -277,21 +342,24 @@ const childRowsToValueMap = ( ): Record => { const out: Record = {}; for (const row of rows) { - const name = row.name as string; - if (row.kind === "secret" && typeof row.secret_id === "string") { - const prefix = row.secret_prefix as string | undefined | null; - out[name] = prefix - ? 
{ secretId: row.secret_id, prefix } - : { secretId: row.secret_id }; - } else if (row.kind === "text" && typeof row.text_value === "string") { - out[name] = row.text_value; + const decoded = decodeChildStorageRowOption(row); + if (Option.isSome(decoded)) { + const child = decoded.value; + if (child.kind === "secret" && child.secret_id != null) { + out[child.name] = + child.secret_prefix != null + ? { secretId: child.secret_id, prefix: child.secret_prefix } + : { secretId: child.secret_id }; + } else if (child.kind === "text" && child.text_value != null) { + out[child.name] = child.text_value; + } } } return out; }; -const toJsonRecord = (value: unknown): Record => - value as Record; +// oxlint-disable-next-line executor/no-explicit-unknown-record -- boundary: storage adapter accepts JSON object columns +const toJsonRecord = (value: unknown): Record => value as Record; const toConfiguredHeaderBinding = (value: { readonly slot?: unknown; @@ -303,11 +371,10 @@ const toConfiguredHeaderBinding = (value: { ...(typeof value.prefix === "string" ? 
{ prefix: value.prefix } : {}), }); -const decodeHeaders = (value: unknown): Record => { +const decodeHeaders = (value: unknown): Record => { if (value == null) return {}; - if (typeof value === "string") - return JSON.parse(value) as Record; - return value as Record; + if (typeof value === "string") return decodeUnknownRecordJson(value); + return decodeUnknownRecord(value); }; const slugifySlotPart = (value: string): string => @@ -344,20 +411,18 @@ const normalizeStoredHeaders = ( legacy[name] = header; continue; } - if ( - header && - typeof header === "object" && - "kind" in header && - (header as { kind?: unknown }).kind === "binding" - ) { - headers[name] = toConfiguredHeaderBinding(header); + const binding = decodeConfiguredHeaderBindingOption(header); + if (Option.isSome(binding)) { + headers[name] = toConfiguredHeaderBinding(binding.value); continue; } - legacy[name] = header; + const legacyHeader = decodeHeaderValueOption(header); + if (Option.isNone(legacyHeader)) continue; + legacy[name] = legacyHeader.value; headers[name] = new ConfiguredHeaderBinding({ kind: "binding", slot: headerBindingSlot(name), - prefix: header.prefix, + prefix: typeof legacyHeader.value === "string" ? undefined : legacyHeader.value.prefix, }); } return { headers, legacy }; @@ -370,13 +435,16 @@ const normalizeStoredOAuth2 = ( readonly legacy?: OAuth2Auth; } => { if (value == null) return {}; - const parsed = typeof value === "string" ? JSON.parse(value) : value; - if (parsed && typeof parsed === "object" && "connectionSlot" in parsed) { - return { - oauth2: Schema.decodeUnknownSync(OAuth2SourceConfig)(parsed), - }; + const sourceConfig = + typeof value === "string" + ? decodeOAuth2SourceConfigJsonOption(value) + : decodeOAuth2SourceConfigOption(value); + if (Option.isSome(sourceConfig)) { + return { oauth2: sourceConfig.value }; } - const legacy = decodeOAuth2(parsed); + const legacyOption = + typeof value === "string" ? 
decodeOAuth2JsonOption(value) : decodeOAuth2Option(value); + const legacy = Option.isSome(legacyOption) ? legacyOption.value : decodeOAuth2(value); return { legacy, oauth2: new OAuth2SourceConfig({ @@ -438,10 +506,7 @@ export interface OpenapiStore { scope: string, ) => Effect.Effect; - readonly listSources: () => Effect.Effect< - readonly StoredSource[], - StorageFailure - >; + readonly listSources: () => Effect.Effect; readonly getOperationByToolId: ( toolId: string, @@ -453,10 +518,7 @@ export interface OpenapiStore { scope: string, ) => Effect.Effect; - readonly removeSource: ( - namespace: string, - scope: string, - ) => Effect.Effect; + readonly removeSource: (namespace: string, scope: string) => Effect.Effect; readonly listSourceBindings: ( sourceId: string, @@ -501,10 +563,7 @@ export interface OpenapiStore { * `query_param:foo` or `spec_fetch_header:Authorization`. */ readonly findChildRowsBySecret: (secretId: string) => Effect.Effect< readonly { - readonly kind: - | "query_param" - | "spec_fetch_header" - | "spec_fetch_query_param"; + readonly kind: "query_param" | "spec_fetch_header" | "spec_fetch_query_param"; readonly source_id: string; readonly scope_id: string; readonly name: string; @@ -527,14 +586,12 @@ export const makeDefaultOpenapiStore = ({ adapter, scopes, }: StorageDeps): OpenapiStore => { - const scopeIds = scopes.map((scope) => scope.id as string); + const scopeIds = scopes.map((scope) => String(scope.id)); const scopePrecedence = new Map(); scopeIds.forEach((scope, index) => scopePrecedence.set(scope, index)); - const scopeRank = (scopeId: string): number => - scopePrecedence.get(scopeId) ?? Infinity; + const scopeRank = (scopeId: string): number => scopePrecedence.get(scopeId) ?? 
Infinity; - const encodeSyntheticRowIdPart = (value: string): string => - encodeURIComponent(value); + const encodeSyntheticRowIdPart = (value: string): string => encodeURIComponent(value); const sourceBindingRowId = ( sourceId: string, @@ -550,47 +607,47 @@ export const makeDefaultOpenapiStore = ({ encodeSyntheticRowIdPart(scopeId), ].join("::"); - const rowToSourceBindingValue = ( - row: Record, - ): OpenApiSourceBindingValue => { - const kind = row.kind as string; - if (kind === "secret" && typeof row.secret_id === "string") { - return { kind: "secret", secretId: SecretId.make(row.secret_id) }; + const rowToSourceBindingValue = (row: Record): OpenApiSourceBindingValue => { + const decoded = decodeSourceBindingStorageRow(row); + if (decoded.kind === "secret" && decoded.secret_id != null) { + return { kind: "secret", secretId: SecretId.make(decoded.secret_id) }; } - if (kind === "connection" && typeof row.connection_id === "string") { + if (decoded.kind === "connection" && decoded.connection_id != null) { return { kind: "connection", - connectionId: ConnectionId.make(row.connection_id), + connectionId: ConnectionId.make(decoded.connection_id), }; } // text fallback covers both well-formed text rows and any // partial/null row that survived a malformed write — `text_value` // defaults to "" so the type stays satisfied without a throw. - return { kind: "text", text: (row.text_value as string | null) ?? "" }; + return { kind: "text", text: decoded.text_value ?? 
"" }; }; - const rowToSourceBinding = ( - row: Record, - ): OpenApiSourceBindingRef => - new OpenApiSourceBindingRef({ - sourceId: row.source_id as string, - sourceScopeId: ScopeId.make(row.source_scope_id as string), - scopeId: ScopeId.make(row.target_scope_id as string), - slot: row.slot as string, + const rowToSourceBinding = (row: Record): OpenApiSourceBindingRef => { + const decoded = decodeSourceBindingStorageRow(row); + return new OpenApiSourceBindingRef({ + sourceId: decoded.source_id, + sourceScopeId: ScopeId.make(decoded.source_scope_id), + scopeId: ScopeId.make(decoded.target_scope_id), + slot: decoded.slot, value: rowToSourceBindingValue(row), - createdAt: - row.created_at instanceof Date - ? row.created_at - : new Date(row.created_at as string), - updatedAt: - row.updated_at instanceof Date - ? row.updated_at - : new Date(row.updated_at as string), + createdAt: decodeStorageDate(decoded.created_at), + updatedAt: decodeStorageDate(decoded.updated_at), }); + }; + + const sourceBindingTargetScope = (row: Record): string => + decodeSourceBindingStorageRow(row).target_scope_id; const sourceBindingValueColumns = ( value: OpenApiSourceBindingValue, - ): { kind: string; secret_id?: string; connection_id?: string; text_value?: string } => { + ): { + kind: string; + secret_id?: string; + connection_id?: string; + text_value?: string; + } => { if (value.kind === "secret") { return { kind: "secret", secret_id: value.secretId }; } @@ -606,24 +663,20 @@ export const makeDefaultOpenapiStore = ({ }) => Effect.gen(function* () { if (!scopeIds.includes(params.sourceScope)) { - return yield* Effect.fail( - new StorageError({ - message: - `OpenAPI source binding references source scope "${params.sourceScope}" ` + - `which is not in the executor's scope stack [${scopeIds.join(", ")}].`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: + `OpenAPI source binding references source scope "${params.sourceScope}" ` + + `which is not in the executor's 
scope stack [${scopeIds.join(", ")}].`, + cause: undefined, + }); } if (!scopeIds.includes(params.targetScope)) { - return yield* Effect.fail( - new StorageError({ - message: - `OpenAPI source binding targets scope "${params.targetScope}" which is not ` + - `in the executor's scope stack [${scopeIds.join(", ")}].`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: + `OpenAPI source binding targets scope "${params.targetScope}" which is not ` + + `in the executor's scope stack [${scopeIds.join(", ")}].`, + cause: undefined, + }); } }); @@ -645,23 +698,19 @@ export const makeDefaultOpenapiStore = ({ ], }); if (!source) { - return yield* Effect.fail( - new StorageError({ - message: `OpenAPI source "${params.sourceId}" does not exist at scope "${params.sourceScope}"`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: `OpenAPI source "${params.sourceId}" does not exist at scope "${params.sourceScope}"`, + cause: undefined, + }); } if (scopeRank(params.targetScope) > scopeRank(params.sourceScope)) { - return yield* Effect.fail( - new StorageError({ - message: - `OpenAPI source bindings for "${params.sourceId}" cannot be written at ` + - `outer scope "${params.targetScope}" because the base source lives at ` + - `"${params.sourceScope}"`, - cause: undefined, - }), - ); + return yield* new StorageError({ + message: + `OpenAPI source bindings for "${params.sourceId}" cannot be written at ` + + `outer scope "${params.targetScope}" because the base source lives at ` + + `"${params.sourceScope}"`, + cause: undefined, + }); } return source; }); @@ -684,20 +733,15 @@ export const makeDefaultOpenapiStore = ({ }) .pipe(Effect.map(childRowsToValueMap)); - const rowToSource = ( - row: Record, - ): Effect.Effect => + const rowToSource = (row: Record): Effect.Effect => Effect.gen(function* () { - const sourceId = row.id as string; - const scope = row.scope_id as string; - const normalizedHeaders = 
normalizeStoredHeaders(row.headers); - const normalizedOAuth2 = normalizeStoredOAuth2(row.oauth2); + const sourceRow = decodeSourceStorageRow(row); + const sourceId = sourceRow.id; + const scope = sourceRow.scope_id; + const normalizedHeaders = normalizeStoredHeaders(sourceRow.headers); + const normalizedOAuth2 = normalizeStoredOAuth2(sourceRow.oauth2); - const queryParams = yield* loadChildValueMap( - "openapi_source_query_param", - sourceId, - scope, - ); + const queryParams = yield* loadChildValueMap("openapi_source_query_param", sourceId, scope); const specFetchHeaders = yield* loadChildValueMap( "openapi_source_spec_fetch_header", sourceId, @@ -709,13 +753,10 @@ export const makeDefaultOpenapiStore = ({ scope, ); const specFetchCredentials: OpenApiSpecFetchCredentials | undefined = - Object.keys(specFetchHeaders).length === 0 && - Object.keys(specFetchQueryParams).length === 0 + Object.keys(specFetchHeaders).length === 0 && Object.keys(specFetchQueryParams).length === 0 ? undefined : { - ...(Object.keys(specFetchHeaders).length > 0 - ? { headers: specFetchHeaders } - : {}), + ...(Object.keys(specFetchHeaders).length > 0 ? { headers: specFetchHeaders } : {}), ...(Object.keys(specFetchQueryParams).length > 0 ? { queryParams: specFetchQueryParams } : {}), @@ -724,38 +765,40 @@ export const makeDefaultOpenapiStore = ({ return { namespace: sourceId, scope, - name: row.name as string, + name: sourceRow.name, config: { - spec: row.spec as string, - sourceUrl: (row.source_url as string | null | undefined) ?? undefined, - baseUrl: (row.base_url as string | null | undefined) ?? undefined, + spec: sourceRow.spec, + sourceUrl: sourceRow.source_url ?? undefined, + baseUrl: sourceRow.base_url ?? 
undefined, headers: normalizedHeaders.headers, queryParams, specFetchCredentials, oauth2: normalizedOAuth2.oauth2, }, legacy: - Object.keys(normalizedHeaders.legacy).length > 0 || - normalizedOAuth2.legacy + Object.keys(normalizedHeaders.legacy).length > 0 || normalizedOAuth2.legacy ? { ...(Object.keys(normalizedHeaders.legacy).length > 0 ? { headers: normalizedHeaders.legacy } : {}), - ...(normalizedOAuth2.legacy - ? { oauth2: normalizedOAuth2.legacy } - : {}), + ...(normalizedOAuth2.legacy ? { oauth2: normalizedOAuth2.legacy } : {}), } : undefined, }; }); - const rowToOperation = (row: Record): StoredOperation => ({ - toolId: row.id as string, - sourceId: row.source_id as string, - binding: decodeBinding( - typeof row.binding === "string" ? JSON.parse(row.binding) : row.binding, - ), - }); + const rowToOperation = (row: Record): StoredOperation => { + const operationRow = decodeOperationStorageRow(row); + return { + toolId: operationRow.id, + sourceId: operationRow.source_id, + binding: decodeBinding( + typeof operationRow.binding === "string" + ? decodeBindingJson(operationRow.binding) + : operationRow.binding, + ), + }; + }; // Replace the rows of one child table for a source: delete then bulk // insert. Single helper so upsertSource and updateSourceMeta both @@ -845,20 +888,18 @@ export const makeDefaultOpenapiStore = ({ source_url: input.config.sourceUrl ?? undefined, base_url: input.config.baseUrl ?? undefined, headers: Object.fromEntries( - Object.entries(input.config.headers ?? {}).map( - ([name, value]) => [ - name, - typeof value === "string" - ? value - : value.kind === "binding" - ? { - kind: value.kind, - slot: value.slot, - ...(value.prefix ? { prefix: value.prefix } : {}), - } - : value, - ], - ), + Object.entries(input.config.headers ?? {}).map(([name, value]) => [ + name, + typeof value === "string" + ? value + : value.kind === "binding" + ? { + kind: value.kind, + slot: value.slot, + ...(value.prefix ? 
{ prefix: value.prefix } : {}), + } + : value, + ]), ) as Record, oauth2: input.config.oauth2 ? toJsonRecord(encodeOAuth2SourceConfig(input.config.oauth2)) @@ -911,14 +952,10 @@ export const makeDefaultOpenapiStore = ({ const existing = yield* rowToSource(existingRow); const nextName = patch.name?.trim() || existing.name; - const nextBaseUrl = - patch.baseUrl !== undefined ? patch.baseUrl : existing.config.baseUrl; + const nextBaseUrl = patch.baseUrl !== undefined ? patch.baseUrl : existing.config.baseUrl; const nextHeaders = - patch.headers !== undefined - ? patch.headers - : (existing.config.headers ?? {}); - const nextOAuth2 = - patch.oauth2 !== undefined ? patch.oauth2 : existing.config.oauth2; + patch.headers !== undefined ? patch.headers : (existing.config.headers ?? {}); + const nextOAuth2 = patch.oauth2 !== undefined ? patch.oauth2 : existing.config.oauth2; yield* adapter.update({ model: "openapi_source", @@ -941,9 +978,7 @@ export const makeDefaultOpenapiStore = ({ }, ]), ) as Record, - oauth2: nextOAuth2 - ? toJsonRecord(encodeOAuth2SourceConfig(nextOAuth2)) - : undefined, + oauth2: nextOAuth2 ? 
toJsonRecord(encodeOAuth2SourceConfig(nextOAuth2)) : undefined, }, }); if (patch.queryParams !== undefined) { @@ -999,8 +1034,7 @@ export const makeDefaultOpenapiStore = ({ }) .pipe(Effect.map((rows) => rows.map(rowToOperation))), - removeSource: (namespace, scope) => - deleteSource(namespace, scope, { includeBindings: true }), + removeSource: (namespace, scope) => deleteSource(namespace, scope, { includeBindings: true }), listSourceBindings: (sourceId, sourceScope) => Effect.gen(function* () { @@ -1017,14 +1051,10 @@ export const makeDefaultOpenapiStore = ({ ], }); return rows - .filter( - (row) => - scopeRank(row.target_scope_id as string) <= sourceScopeRank, - ) + .filter((row) => scopeRank(sourceBindingTargetScope(row)) <= sourceScopeRank) .sort( (a, b) => - scopeRank(a.target_scope_id as string) - - scopeRank(b.target_scope_id as string), + scopeRank(sourceBindingTargetScope(a)) - scopeRank(sourceBindingTargetScope(b)), ) .map(rowToSourceBinding); }), @@ -1045,31 +1075,24 @@ export const makeDefaultOpenapiStore = ({ }); const sourceScopeRank = scopeRank(sourceScope); const row = rows - .filter( - (candidate) => - scopeRank(candidate.target_scope_id as string) <= sourceScopeRank, - ) + .filter((candidate) => scopeRank(sourceBindingTargetScope(candidate)) <= sourceScopeRank) .sort( (a, b) => - scopeRank(a.target_scope_id as string) - - scopeRank(b.target_scope_id as string), + scopeRank(sourceBindingTargetScope(a)) - scopeRank(sourceBindingTargetScope(b)), )[0]; return row ? 
rowToSourceBinding(row) : null; }), setSourceBinding: (input) => Effect.gen(function* () { + const sourceScope = String(input.sourceScope); + const targetScope = String(input.scope); yield* validateBindingTarget({ sourceId: input.sourceId, - sourceScope: input.sourceScope as string, - targetScope: input.scope as string, + sourceScope, + targetScope, }); - const id = sourceBindingRowId( - input.sourceId, - input.sourceScope as string, - input.slot, - input.scope as string, - ); + const id = sourceBindingRowId(input.sourceId, sourceScope, input.slot, targetScope); const now = new Date(); const valueColumns = sourceBindingValueColumns(input.value); yield* adapter.delete({ @@ -1081,8 +1104,8 @@ export const makeDefaultOpenapiStore = ({ data: { id, source_id: input.sourceId, - source_scope_id: input.sourceScope as string, - target_scope_id: input.scope as string, + source_scope_id: sourceScope, + target_scope_id: targetScope, slot: input.slot, ...valueColumns, created_at: now, @@ -1139,8 +1162,14 @@ export const makeDefaultOpenapiStore = ({ Effect.gen(function* () { const tables = [ { model: "openapi_source_query_param" as const, kind: "query_param" as const }, - { model: "openapi_source_spec_fetch_header" as const, kind: "spec_fetch_header" as const }, - { model: "openapi_source_spec_fetch_query_param" as const, kind: "spec_fetch_query_param" as const }, + { + model: "openapi_source_spec_fetch_header" as const, + kind: "spec_fetch_header" as const, + }, + { + model: "openapi_source_spec_fetch_query_param" as const, + kind: "spec_fetch_query_param" as const, + }, ]; const perTable = yield* Effect.forEach( tables, @@ -1152,12 +1181,15 @@ export const makeDefaultOpenapiStore = ({ }) .pipe( Effect.map((rows) => - rows.map((r) => ({ - kind: t.kind, - source_id: r.source_id as string, - scope_id: r.scope_id as string, - name: r.name as string, - })), + rows.map((r) => { + const row = decodeChildUsageStorageRow(r); + return { + kind: t.kind, + source_id: row.source_id, + 
scope_id: row.scope_id, + name: row.name, + }; + }), ), ), { concurrency: "unbounded" }, @@ -1172,8 +1204,9 @@ export const makeDefaultOpenapiStore = ({ const requested = new Set(keys); const out = new Map(); for (const r of rows) { - const key = `${r.scope_id as string}:${r.id as string}`; - if (requested.has(key)) out.set(key, r.name as string); + const row = decodeSourceNameStorageRow(r); + const key = `${row.scope_id}:${row.id}`; + if (requested.has(key)) out.set(key, row.name); } return out; }), From ed17ac4e671ffe0e76bb69fd5655066e818f38e0 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:47:55 -0700 Subject: [PATCH 078/108] Fix Google Discovery boundary lint --- .../google-discovery/src/sdk/binding-store.ts | 110 +++---- .../google-discovery/src/sdk/plugin.ts | 302 ++++++++---------- 2 files changed, 179 insertions(+), 233 deletions(-) diff --git a/packages/plugins/google-discovery/src/sdk/binding-store.ts b/packages/plugins/google-discovery/src/sdk/binding-store.ts index 3d1c35db9..0a096a9f5 100644 --- a/packages/plugins/google-discovery/src/sdk/binding-store.ts +++ b/packages/plugins/google-discovery/src/sdk/binding-store.ts @@ -13,13 +13,9 @@ // survive adapter serialization. 
// --------------------------------------------------------------------------- -import { Effect, Schema } from "effect"; +import { Effect, Option, Schema } from "effect"; -import { - defineSchema, - type StorageDeps, - type StorageFailure, -} from "@executor-js/sdk/core"; +import { defineSchema, type StorageDeps, type StorageFailure } from "@executor-js/sdk/core"; import { GoogleDiscoveryMethodBinding, @@ -135,15 +131,14 @@ const encodeBinding = Schema.encodeSync(GoogleDiscoveryMethodBinding); const decodeBinding = Schema.decodeUnknownSync(GoogleDiscoveryMethodBinding); const toJsonRecord = (value: unknown): Record => value as Record; +const decodeString = Schema.decodeUnknownSync(Schema.String); +const decodeJsonObject = Schema.decodeUnknownSync(Schema.Record(Schema.String, Schema.Unknown)); +const decodeJsonString = Schema.decodeUnknownOption(Schema.fromJsonString(Schema.Unknown)); const decodeJson = (value: unknown): unknown => { if (value === null || value === undefined) return value; if (typeof value !== "string") return value; - try { - return JSON.parse(value); - } catch { - return value; - } + return Option.getOrElse(decodeJsonString(value), () => value); }; // --- auth column packing/unpacking ------------------------------------------ @@ -239,12 +234,10 @@ const rowsToValueMap = ( ): Record => { const out: Record = {}; for (const row of rows) { - const name = row.name as string; + const name = decodeString(row.name); if (row.kind === "secret" && typeof row.secret_id === "string") { const prefix = row.secret_prefix as string | undefined | null; - out[name] = prefix - ? { secretId: row.secret_id, prefix } - : { secretId: row.secret_id }; + out[name] = prefix ? { secretId: row.secret_id, prefix } : { secretId: row.secret_id }; } else if (row.kind === "text" && typeof row.text_value === "string") { out[name] = row.text_value; } @@ -320,9 +313,7 @@ export interface GoogleDiscoveryStore { /** Source rows whose oauth2 auth columns reference the given secret id. 
* `slot` distinguishes client_id vs client_secret. */ - readonly findSourcesBySecret: ( - secretId: string, - ) => Effect.Effect< + readonly findSourcesBySecret: (secretId: string) => Effect.Effect< readonly { readonly namespace: string; readonly scope_id: string; @@ -333,9 +324,7 @@ export interface GoogleDiscoveryStore { >; /** Source rows whose oauth2 auth points at the given connection id. */ - readonly findSourcesByConnection: ( - connectionId: string, - ) => Effect.Effect< + readonly findSourcesByConnection: (connectionId: string) => Effect.Effect< readonly { readonly namespace: string; readonly scope_id: string; @@ -382,7 +371,7 @@ export const makeGoogleDiscoveryStore = ( }); if (!row) return null; const decoded = decodeBinding(decodeJson(row.binding)); - return { namespace: row.source_id as string, binding: decoded }; + return { namespace: decodeString(row.source_id), binding: decoded }; }), putBinding: (toolId, sourceId, scope, binding) => @@ -420,7 +409,7 @@ export const makeGoogleDiscoveryStore = ( { field: "scope_id", value: scope }, ], }); - const ids = rows.map((r) => r.id as string); + const ids = rows.map((r) => decodeString(r.id)); yield* db.deleteMany({ model: "google_discovery_binding", where: [ @@ -442,7 +431,7 @@ export const makeGoogleDiscoveryStore = ( }); const out = new Map(); for (const row of rows) { - out.set(row.id as string, decodeBinding(decodeJson(row.binding))); + out.set(decodeString(row.id), decodeBinding(decodeJson(row.binding))); } return out; }), @@ -462,7 +451,7 @@ export const makeGoogleDiscoveryStore = ( yield* deleteSourceChildren(source.namespace, source.scope); const encoded = stripExtractedFields( - encodeStoredSourceData(source.config) as Record, + decodeJsonObject(encodeStoredSourceData(source.config)), ); yield* db.create({ model: "google_discovery_source", @@ -477,11 +466,7 @@ export const makeGoogleDiscoveryStore = ( }, forceAllowId: true, }); - yield* writeCredentialRows( - source.namespace, - source.scope, - 
source.config.credentials, - ); + yield* writeCredentialRows(source.namespace, source.scope, source.config.credentials); }), updateSourceMeta: (sourceId, scope, update) => @@ -502,7 +487,7 @@ export const makeGoogleDiscoveryStore = ( { field: "scope_id", value: scope }, ], update: { - name: update.name ?? (row.name as string), + name: update.name ?? decodeString(row.name), updated_at: new Date(), ...authToColumns(auth), }, @@ -532,9 +517,9 @@ export const makeGoogleDiscoveryStore = ( }); if (!row) return null; return { - namespace: row.id as string, - scope: row.scope_id as string, - name: row.name as string, + namespace: decodeString(row.id), + scope: decodeString(row.scope_id), + name: decodeString(row.name), config: yield* hydrateStoredSourceData(row, sourceId, scope), }; }), @@ -558,15 +543,11 @@ export const makeGoogleDiscoveryStore = ( [ db.findMany({ model: "google_discovery_source", - where: [ - { field: "auth_client_id_secret_id", value: secretId }, - ], + where: [{ field: "auth_client_id_secret_id", value: secretId }], }), db.findMany({ model: "google_discovery_source", - where: [ - { field: "auth_client_secret_secret_id", value: secretId }, - ], + where: [{ field: "auth_client_secret_secret_id", value: secretId }], }), ], { concurrency: "unbounded" }, @@ -579,17 +560,17 @@ export const makeGoogleDiscoveryStore = ( }[] = []; for (const r of byClientId) { out.push({ - namespace: r.id as string, - scope_id: r.scope_id as string, - name: r.name as string, + namespace: decodeString(r.id), + scope_id: decodeString(r.scope_id), + name: decodeString(r.name), slot: "auth.oauth2.client_id", }); } for (const r of byClientSecret) { out.push({ - namespace: r.id as string, - scope_id: r.scope_id as string, - name: r.name as string, + namespace: decodeString(r.id), + scope_id: decodeString(r.scope_id), + name: decodeString(r.name), slot: "auth.oauth2.client_secret", }); } @@ -605,9 +586,9 @@ export const makeGoogleDiscoveryStore = ( .pipe( Effect.map((rows) => 
rows.map((r) => ({ - namespace: r.id as string, - scope_id: r.scope_id as string, - name: r.name as string, + namespace: decodeString(r.id), + scope_id: decodeString(r.scope_id), + name: decodeString(r.name), slot: "auth.oauth2.connection", })), ), @@ -631,15 +612,15 @@ export const makeGoogleDiscoveryStore = ( return [ ...headers.map((r) => ({ kind: "credential_header" as const, - source_id: r.source_id as string, - scope_id: r.scope_id as string, - name: r.name as string, + source_id: decodeString(r.source_id), + scope_id: decodeString(r.scope_id), + name: decodeString(r.name), })), ...params.map((r) => ({ kind: "credential_query_param" as const, - source_id: r.source_id as string, - scope_id: r.scope_id as string, - name: r.name as string, + source_id: decodeString(r.source_id), + scope_id: decodeString(r.scope_id), + name: decodeString(r.name), })), ]; }), @@ -651,8 +632,8 @@ export const makeGoogleDiscoveryStore = ( const requested = new Set(keys); const out = new Map(); for (const r of rows) { - const key = `${r.scope_id as string}:${r.id as string}`; - if (requested.has(key)) out.set(key, r.name as string); + const key = `${decodeString(r.scope_id)}:${decodeString(r.id)}`; + if (requested.has(key)) out.set(key, decodeString(r.name)); } return out; }), @@ -698,11 +679,7 @@ export const makeGoogleDiscoveryStore = ( forceAllowId: true, }); } - const paramRows = valueMapToRows( - sourceId, - scope, - credentials.queryParams, - ); + const paramRows = valueMapToRows(sourceId, scope, credentials.queryParams); if (paramRows.length > 0) { yield* db.createMany({ model: "google_discovery_source_credential_query_param", @@ -719,7 +696,7 @@ export const makeGoogleDiscoveryStore = ( scope: string, ): Effect.Effect { return Effect.gen(function* () { - const partial = decodeJson(row.config) as Record; + const partial = decodeJsonObject(decodeJson(row.config)); const headerRows = yield* db.findMany({ model: "google_discovery_source_credential_header", where: [ @@ -737,8 
+714,7 @@ export const makeGoogleDiscoveryStore = ( const headers = rowsToValueMap(headerRows); const queryParams = rowsToValueMap(paramRows); const credentials = - Object.keys(headers).length === 0 && - Object.keys(queryParams).length === 0 + Object.keys(headers).length === 0 && Object.keys(queryParams).length === 0 ? undefined : { ...(Object.keys(headers).length > 0 ? { headers } : {}), @@ -757,9 +733,7 @@ export const makeGoogleDiscoveryStore = ( // Strip auth/credentials from the encoded source-data shape. Those // moved to columns and child tables; the remaining structural fields // live in the `config` JSON. -const stripExtractedFields = ( - encoded: Record, -): Record => { +const stripExtractedFields = (encoded: Record): Record => { const { auth, credentials, ...rest } = encoded; void auth; void credentials; diff --git a/packages/plugins/google-discovery/src/sdk/plugin.ts b/packages/plugins/google-discovery/src/sdk/plugin.ts index 268757289..94df218e3 100644 --- a/packages/plugins/google-discovery/src/sdk/plugin.ts +++ b/packages/plugins/google-discovery/src/sdk/plugin.ts @@ -1,4 +1,4 @@ -import { Effect, Option } from "effect"; +import { Effect, Option, Predicate, Schema } from "effect"; import { ScopeId, @@ -12,16 +12,12 @@ import { } from "@executor-js/sdk/core"; import { GoogleDiscoveryGroup } from "../api/group"; -import { - GoogleDiscoveryExtensionService, - GoogleDiscoveryHandlers, -} from "../api/handlers"; +import { GoogleDiscoveryExtensionService, GoogleDiscoveryHandlers } from "../api/handlers"; import { googleDiscoverySchema, makeGoogleDiscoveryStore, type GoogleDiscoveryStore, - type GoogleDiscoveryStoredSource, } from "./binding-store"; import { extractGoogleDiscoveryManifest } from "./document"; import { annotationsForOperation, invokeGoogleDiscoveryTool } from "./invoke"; @@ -86,46 +82,20 @@ export type GoogleDiscoveryExtensionFailure = | GoogleDiscoverySourceError | StorageFailure; -export interface GoogleDiscoveryPluginExtension { - 
readonly probeDiscovery: ( - input: string | GoogleDiscoveryProbeInput, - ) => Effect.Effect< - GoogleDiscoveryProbeResult, - GoogleDiscoveryParseError | GoogleDiscoverySourceError - >; - readonly addSource: ( - input: GoogleDiscoveryAddSourceInput, - ) => Effect.Effect< - { readonly toolCount: number; readonly namespace: string }, - GoogleDiscoveryParseError | GoogleDiscoverySourceError | StorageFailure - >; - readonly removeSource: (namespace: string, scope: string) => Effect.Effect; - readonly getSource: ( - namespace: string, - scope: string, - ) => Effect.Effect; - readonly updateSource: ( - namespace: string, - scope: string, - input: GoogleDiscoveryUpdateSourceInput, - ) => Effect.Effect; -} - // --------------------------------------------------------------------------- // URL normalization + slug helpers (unchanged) // --------------------------------------------------------------------------- const DISCOVERY_SERVICE_HOST = "https://www.googleapis.com/discovery/v1/apis"; +const decodeString = Schema.decodeUnknownSync(Schema.String); +const isGoogleDiscoverySourceError = (error: unknown): error is GoogleDiscoverySourceError => + Predicate.isTagged("GoogleDiscoverySourceError")(error); const normalizeDiscoveryUrl = (discoveryUrl: string): string => { const trimmed = discoveryUrl.trim(); if (trimmed.length === 0) return trimmed; - let parsed: URL; - try { - parsed = new URL(trimmed); - } catch { - return trimmed; - } + if (!URL.canParse(trimmed)) return trimmed; + const parsed = new URL(trimmed); if (parsed.pathname !== "/$discovery/rest") return trimmed; const version = parsed.searchParams.get("version")?.trim(); if (!version) return trimmed; @@ -164,7 +134,7 @@ const resolveGoogleDiscoveryCredentials = ( }), }).pipe( Effect.mapError((err) => - err instanceof GoogleDiscoverySourceError + isGoogleDiscoverySourceError(err) ? 
err : new GoogleDiscoverySourceError({ message: "Secret resolution failed" }), ), @@ -182,7 +152,7 @@ const resolveGoogleDiscoveryCredentials = ( }), }).pipe( Effect.mapError((err) => - err instanceof GoogleDiscoverySourceError + isGoogleDiscoverySourceError(err) ? err : new GoogleDiscoverySourceError({ message: "Secret resolution failed" }), ), @@ -200,29 +170,35 @@ const fetchDiscoveryDocument = ( readonly queryParams?: Record; }, ) => - Effect.tryPromise({ - try: async () => { - const url = new URL(normalizeDiscoveryUrl(discoveryUrl)); - for (const [key, value] of Object.entries(credentials?.queryParams ?? {})) { - url.searchParams.set(key, value); - } - const response = await fetch(url.toString(), { - headers: credentials?.headers, - signal: AbortSignal.timeout(20_000), - }); - if (!response.ok) { - throw new GoogleDiscoverySourceError({ - message: `Google Discovery fetch failed with status ${response.status}`, + Effect.gen(function* () { + const response = yield* Effect.tryPromise({ + try: () => { + const url = new URL(normalizeDiscoveryUrl(discoveryUrl)); + for (const [key, value] of Object.entries(credentials?.queryParams ?? {})) { + url.searchParams.set(key, value); + } + return fetch(url.toString(), { + headers: credentials?.headers, + signal: AbortSignal.timeout(20_000), }); - } - return response.text(); - }, - catch: (cause) => - cause instanceof GoogleDiscoverySourceError - ? cause - : new GoogleDiscoverySourceError({ - message: cause instanceof Error ? 
cause.message : String(cause), - }), + }, + catch: () => + new GoogleDiscoverySourceError({ + message: "Google Discovery fetch failed", + }), + }); + if (!response.ok) { + return yield* new GoogleDiscoverySourceError({ + message: `Google Discovery fetch failed with status ${response.status}`, + }); + } + return yield* Effect.tryPromise({ + try: () => response.text(), + catch: () => + new GoogleDiscoverySourceError({ + message: "Google Discovery response body read failed", + }), + }); }); const normalizeSlug = (value: string): string => @@ -302,6 +278,97 @@ const registerManifest = ( return manifest.methods.length; }); +const makeGoogleDiscoveryPluginExtension = (ctx: PluginCtx) => ({ + probeDiscovery: (input: string | GoogleDiscoveryProbeInput) => + Effect.gen(function* () { + const discoveryUrl = typeof input === "string" ? input : input.discoveryUrl; + const credentials = + typeof input === "string" + ? undefined + : yield* resolveGoogleDiscoveryCredentials(input.credentials, ctx); + const text = yield* fetchDiscoveryDocument(discoveryUrl, credentials); + const manifest = yield* extractGoogleDiscoveryManifest(text); + const scopes = Object.keys( + Option.isSome(manifest.oauthScopes) ? manifest.oauthScopes.value : {}, + ).sort(); + const operations = manifest.methods.map((method) => ({ + toolPath: method.toolPath, + method: method.binding.method, + pathTemplate: method.binding.pathTemplate, + description: Option.isSome(method.description) ? method.description.value : null, + })); + return { + name: Option.isSome(manifest.title) + ? manifest.title.value + : `${manifest.service} ${manifest.version}`, + title: Option.isSome(manifest.title) ? 
manifest.title.value : null, + service: manifest.service, + version: manifest.version, + toolCount: manifest.methods.length, + scopes, + operations, + }; + }), + + addSource: (input: GoogleDiscoveryAddSourceInput) => + ctx.transaction( + Effect.gen(function* () { + const credentials = yield* resolveGoogleDiscoveryCredentials(input.credentials, ctx); + const text = yield* fetchDiscoveryDocument(input.discoveryUrl, credentials); + const manifest = yield* extractGoogleDiscoveryManifest(text); + const namespace = + input.namespace ?? + deriveNamespace({ + name: input.name, + service: manifest.service, + version: manifest.version, + }); + const sourceData = new GoogleDiscoveryStoredSourceDataSchema({ + name: input.name, + discoveryUrl: normalizeDiscoveryUrl(input.discoveryUrl), + credentials: input.credentials, + service: manifest.service, + version: manifest.version, + rootUrl: manifest.rootUrl, + servicePath: manifest.servicePath, + auth: input.auth, + }); + const toolCount = yield* registerManifest( + ctx, + namespace, + input.scope, + manifest, + sourceData, + ); + return { toolCount, namespace }; + }), + ), + + removeSource: (namespace: string, scope: string) => + ctx.transaction( + Effect.gen(function* () { + yield* ctx.storage.removeBindingsBySource(namespace, scope); + yield* ctx.storage.removeSource(namespace, scope); + yield* ctx.core.sources.unregister(namespace).pipe(Effect.ignore); + }), + ), + + // OAuth start/complete live on `ctx.oauth` now — the UI calls + // the shared `/scopes/:scopeId/oauth/*` endpoints directly with a + // Google-specific `authorization-code` strategy and writes the + // resulting connection back via `updateSource`. 
+ + getSource: (namespace: string, scope: string) => ctx.storage.getSource(namespace, scope), + + updateSource: (namespace: string, scope: string, input: GoogleDiscoveryUpdateSourceInput) => + ctx.storage.updateSourceMeta(namespace, scope, { + name: input.name?.trim() || undefined, + auth: input.auth, + }), +}); + +export type GoogleDiscoveryPluginExtension = ReturnType; + // --------------------------------------------------------------------------- // Plugin // --------------------------------------------------------------------------- @@ -312,101 +379,13 @@ export const googleDiscoveryPlugin = definePlugin(() => ({ schema: googleDiscoverySchema, storage: (deps) => makeGoogleDiscoveryStore(deps), - extension: (ctx) => - ({ - probeDiscovery: (input) => - Effect.gen(function* () { - const discoveryUrl = typeof input === "string" ? input : input.discoveryUrl; - const credentials = - typeof input === "string" - ? undefined - : yield* resolveGoogleDiscoveryCredentials(input.credentials, ctx); - const text = yield* fetchDiscoveryDocument(discoveryUrl, credentials); - const manifest = yield* extractGoogleDiscoveryManifest(text); - const scopes = Object.keys( - Option.isSome(manifest.oauthScopes) ? manifest.oauthScopes.value : {}, - ).sort(); - const operations = manifest.methods.map((method) => ({ - toolPath: method.toolPath, - method: method.binding.method, - pathTemplate: method.binding.pathTemplate, - description: Option.isSome(method.description) ? method.description.value : null, - })); - return { - name: Option.isSome(manifest.title) - ? manifest.title.value - : `${manifest.service} ${manifest.version}`, - title: Option.isSome(manifest.title) ? 
manifest.title.value : null, - service: manifest.service, - version: manifest.version, - toolCount: manifest.methods.length, - scopes, - operations, - }; - }), - - addSource: (input) => - ctx.transaction( - Effect.gen(function* () { - const credentials = yield* resolveGoogleDiscoveryCredentials(input.credentials, ctx); - const text = yield* fetchDiscoveryDocument(input.discoveryUrl, credentials); - const manifest = yield* extractGoogleDiscoveryManifest(text); - const namespace = - input.namespace ?? - deriveNamespace({ - name: input.name, - service: manifest.service, - version: manifest.version, - }); - const sourceData = new GoogleDiscoveryStoredSourceDataSchema({ - name: input.name, - discoveryUrl: normalizeDiscoveryUrl(input.discoveryUrl), - credentials: input.credentials, - service: manifest.service, - version: manifest.version, - rootUrl: manifest.rootUrl, - servicePath: manifest.servicePath, - auth: input.auth, - }); - const toolCount = yield* registerManifest( - ctx, - namespace, - input.scope, - manifest, - sourceData, - ); - return { toolCount, namespace }; - }), - ), - - removeSource: (namespace, scope) => - ctx.transaction( - Effect.gen(function* () { - yield* ctx.storage.removeBindingsBySource(namespace, scope); - yield* ctx.storage.removeSource(namespace, scope); - yield* ctx.core.sources.unregister(namespace).pipe(Effect.ignore); - }), - ), - - // OAuth start/complete live on `ctx.oauth` now — the UI calls - // the shared `/scopes/:scopeId/oauth/*` endpoints directly with a - // Google-specific `authorization-code` strategy and writes the - // resulting connection back via `updateSource`. 
- - getSource: (namespace, scope) => ctx.storage.getSource(namespace, scope), - - updateSource: (namespace, scope, input) => - ctx.storage.updateSourceMeta(namespace, scope, { - name: input.name?.trim() || undefined, - auth: input.auth, - }), - }) satisfies GoogleDiscoveryPluginExtension, + extension: makeGoogleDiscoveryPluginExtension, invokeTool: ({ ctx, toolRow, args }) => invokeGoogleDiscoveryTool({ ctx: ctx as PluginCtx, toolId: toolRow.id, - toolScope: toolRow.scope_id as string, + toolScope: decodeString(toolRow.scope_id), args, }), @@ -414,7 +393,7 @@ export const googleDiscoveryPlugin = definePlugin(() => ({ Effect.gen(function* () { const typedCtx = ctx as PluginCtx; const scopes = new Set(); - for (const row of toolRows) scopes.add(row.scope_id as string); + for (const row of toolRows) scopes.add(decodeString(row.scope_id)); const byScope = new Map>(); for (const scope of scopes) { const bindings = yield* typedCtx.storage.getBindingsForSource(sourceId, scope); @@ -422,7 +401,7 @@ export const googleDiscoveryPlugin = definePlugin(() => ({ } const out: Record = {}; for (const row of toolRows) { - const binding = byScope.get(row.scope_id as string)?.get(row.id); + const binding = byScope.get(decodeString(row.scope_id))?.get(row.id); if (binding) { out[row.id] = annotationsForOperation(binding.method, binding.pathTemplate); } @@ -443,16 +422,11 @@ export const googleDiscoveryPlugin = definePlugin(() => ({ usagesForSecret: ({ ctx, args }) => Effect.gen(function* () { const typedCtx = ctx as PluginCtx; - const sources = yield* typedCtx.storage.findSourcesBySecret( - args.secretId, - ); - const childRows = yield* typedCtx.storage.findCredentialRowsBySecret( - args.secretId, - ); + const sources = yield* typedCtx.storage.findSourcesBySecret(args.secretId); + const childRows = yield* typedCtx.storage.findCredentialRowsBySecret(args.secretId); const sourceKeys = new Set(); for (const s of sources) sourceKeys.add(`${s.scope_id}:${s.namespace}`); - for (const r of 
childRows) - sourceKeys.add(`${r.scope_id}:${r.source_id}`); + for (const r of childRows) sourceKeys.add(`${r.scope_id}:${r.source_id}`); const names = yield* typedCtx.storage.lookupSourceNames([...sourceKeys]); const out: Usage[] = []; @@ -486,9 +460,7 @@ export const googleDiscoveryPlugin = definePlugin(() => ({ usagesForConnection: ({ ctx, args }) => Effect.gen(function* () { const typedCtx = ctx as PluginCtx; - const sources = yield* typedCtx.storage.findSourcesByConnection( - args.connectionId, - ); + const sources = yield* typedCtx.storage.findSourcesByConnection(args.connectionId); return sources.map( (s) => new Usage({ @@ -563,7 +535,7 @@ export const googleDiscoveryPlugin = definePlugin(() => ({ servicePath: manifest.servicePath, }); yield* registerManifest(typedCtx, sourceId, scope, manifest, next); - }).pipe(Effect.mapError((err) => (err instanceof Error ? err : new Error(String(err))))), + }), // Connection refresh is owned by the canonical `"oauth2"` // ConnectionProvider registered by core — no plugin-specific handler From 9a04145bf91b6e6456308600075e555545df7f68 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:48:15 -0700 Subject: [PATCH 079/108] Fix MCP plugin boundary lint --- packages/plugins/mcp/src/sdk/plugin.ts | 211 ++++++++++++++----------- 1 file changed, 121 insertions(+), 90 deletions(-) diff --git a/packages/plugins/mcp/src/sdk/plugin.ts b/packages/plugins/mcp/src/sdk/plugin.ts index 6b227dd19..ec0f7d0ec 100644 --- a/packages/plugins/mcp/src/sdk/plugin.ts +++ b/packages/plugins/mcp/src/sdk/plugin.ts @@ -1,4 +1,4 @@ -import { Duration, Effect, Exit, Result, Scope, ScopedCache } from "effect"; +import { Duration, Effect, Exit, Option, Predicate, Result, Scope, ScopedCache } from "effect"; import type { OAuthClientProvider } from "@modelcontextprotocol/sdk/client/auth.js"; @@ -24,7 +24,7 @@ import { } from "./binding-store"; import { createMcpConnector, type 
ConnectorInput, type McpConnection } from "./connection"; import { discoverTools } from "./discover"; -import { McpConnectionError, McpToolDiscoveryError } from "./errors"; +import { McpConnectionError, McpInvocationError, McpToolDiscoveryError } from "./errors"; import { invokeMcpTool } from "./invoke"; import { deriveMcpNamespace, type McpToolManifest, type McpToolManifestEntry } from "./manifest"; import { probeMcpEndpointShape } from "./probe-shape"; @@ -182,22 +182,18 @@ const makeOAuthProvider = (accessToken: string): OAuthClientProvider => ({ tokens: () => ({ access_token: accessToken, token_type: "Bearer" }), saveTokens: () => undefined, redirectToAuthorization: async () => { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: MCP SDK OAuthClientProvider callback can only signal reauthorization by throwing throw new Error("MCP OAuth re-authorization required"); }, saveCodeVerifier: () => undefined, codeVerifier: () => { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: MCP SDK OAuthClientProvider callback requires a thrown verifier failure throw new Error("No active PKCE verifier"); }, saveDiscoveryState: () => undefined, discoveryState: () => undefined, }); -const remoteConnectionError = (message: string) => - new McpConnectionError({ transport: "remote", message }); - -const mcpDiscoveryError = (message: string) => - new McpToolDiscoveryError({ stage: "list_tools", message }); - const resolveSecretBackedMap = ( values: Record | undefined, ctx: PluginCtx, @@ -206,15 +202,21 @@ const resolveSecretBackedMap = ( values, getSecret: ctx.secrets.get, onMissing: (_name, value) => - remoteConnectionError(`Failed to resolve secret "${value.secretId}"`), + new McpConnectionError({ + transport: "remote", + message: `Failed to resolve secret "${value.secretId}"`, + }), onError: (err, _name, value) => - "_tag" in err && err._tag === "SecretOwnedByConnectionError" 
- ? remoteConnectionError(`Failed to resolve secret "${value.secretId}"`) + Predicate.isTagged("SecretOwnedByConnectionError")(err) + ? new McpConnectionError({ + transport: "remote", + message: `Failed to resolve secret "${value.secretId}"`, + }) : err, }).pipe( Effect.mapError((err) => - "_tag" in err && err._tag === "SecretOwnedByConnectionError" - ? remoteConnectionError("Failed to resolve secret") + Predicate.isTagged("SecretOwnedByConnectionError")(err) + ? new McpConnectionError({ transport: "remote", message: "Failed to resolve secret" }) : err, ), ); @@ -265,19 +267,21 @@ const resolveConnectorInput = ( const auth = sd.auth; if (auth.kind === "header") { - const val = yield* ctx.secrets - .get(auth.secretId) - .pipe( - Effect.mapError((err) => - "_tag" in err && err._tag === "SecretOwnedByConnectionError" - ? remoteConnectionError(`Failed to resolve secret "${auth.secretId}"`) - : err, - ), - ); + const val = yield* ctx.secrets.get(auth.secretId).pipe( + Effect.mapError((err) => + Predicate.isTagged("SecretOwnedByConnectionError")(err) + ? new McpConnectionError({ + transport: "remote", + message: `Failed to resolve secret "${auth.secretId}"`, + }) + : err, + ), + ); if (val === null) { - return yield* Effect.fail( - remoteConnectionError(`Failed to resolve secret "${auth.secretId}"`), - ); + return yield* new McpConnectionError({ + transport: "remote", + message: `Failed to resolve secret "${auth.secretId}"`, + }); } headers[auth.headerName] = auth.prefix ? `${auth.prefix}${val}` : val; } else if (auth.kind === "oauth2") { @@ -286,17 +290,15 @@ const resolveConnectorInput = ( // The canonical `"oauth2"` ConnectionProvider registered by // core owns the refresh lifecycle; we just wrap the current // token for the SDK's transport. - const accessToken = yield* ctx.connections - .accessToken(auth.connectionId) - .pipe( - Effect.mapError((err) => - remoteConnectionError( - `Failed to resolve OAuth connection "${auth.connectionId}": ${ - "message" in err ? 
(err as { message: string }).message : String(err) - }`, - ), - ), - ); + const accessToken = yield* ctx.connections.accessToken(auth.connectionId).pipe( + Effect.mapError( + ({ message }) => + new McpConnectionError({ + transport: "remote", + message: `Failed to resolve OAuth connection "${auth.connectionId}": ${message}`, + }), + ), + ); authProvider = makeOAuthProvider(accessToken); } @@ -342,7 +344,17 @@ const makeRuntime = (): Effect.Effect => } return connector; }), - (connection) => Effect.promise(() => connection.close().catch(() => {})), + (connection) => + Effect.ignore( + Effect.tryPromise({ + try: () => connection.close(), + catch: () => + new McpConnectionError({ + transport: "auto", + message: "Failed to close MCP connection", + }), + }), + ), ), capacity: 64, timeToLive: Duration.minutes(5), @@ -455,15 +467,16 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { const endpoint = typeof input === "string" ? input : input.endpoint; const trimmed = endpoint.trim(); if (!trimmed) { - return yield* Effect.fail(remoteConnectionError("Endpoint URL is required")); + return yield* new McpConnectionError({ + transport: "remote", + message: "Endpoint URL is required", + }); } const name = yield* Effect.try({ try: () => new URL(trimmed).hostname, catch: () => "mcp", - }).pipe( - Effect.orElseSucceed(() => "mcp"), - ); + }).pipe(Effect.orElseSucceed(() => "mcp")); const namespace = deriveMcpNamespace({ endpoint: trimmed }); const probeHeaders = @@ -510,13 +523,13 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { queryParams: probeQueryParams, }); if (shape.kind !== "mcp") { - return yield* Effect.fail( - remoteConnectionError( + return yield* new McpConnectionError({ + transport: "remote", + message: shape.kind === "not-mcp" ? 
`Endpoint does not look like an MCP server: ${shape.reason}` : `Could not reach endpoint: ${shape.reason}`, - ), - ); + }); } const probeResult = yield* ctx.oauth @@ -542,9 +555,10 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { } satisfies McpProbeResult; } - return yield* Effect.fail( - remoteConnectionError("MCP server requires authentication but OAuth discovery failed"), - ); + return yield* new McpConnectionError({ + transport: "remote", + message: "MCP server requires authentication but OAuth discovery failed", + }); }).pipe( Effect.withSpan("mcp.plugin.probe_endpoint", { attributes: { "mcp.endpoint": typeof input === "string" ? input : input.endpoint }, @@ -586,22 +600,24 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { const discovery: Result.Result< McpToolManifest, McpToolDiscoveryError | McpConnectionError | StorageFailure - > = - Result.isSuccess(resolved) - ? yield* discoverTools(createMcpConnector(resolved.success)).pipe( - Effect.mapError((err) => - mcpDiscoveryError(`MCP discovery failed: ${err.message}`), - ), - Effect.result, - Effect.withSpan("mcp.plugin.discover_tools", { - attributes: { "mcp.source.namespace": namespace }, - }), - ) - : Result.fail(resolved.failure); - const manifest = - Result.isSuccess(discovery) - ? discovery.success - : { server: undefined, tools: [] as const }; + > = Result.isSuccess(resolved) + ? yield* discoverTools(createMcpConnector(resolved.success)).pipe( + Effect.mapError( + ({ message }) => + new McpToolDiscoveryError({ + stage: "list_tools", + message: `MCP discovery failed: ${message}`, + }), + ), + Effect.result, + Effect.withSpan("mcp.plugin.discover_tools", { + attributes: { "mcp.source.namespace": namespace }, + }), + ) + : Result.fail(resolved.failure); + const manifest = Result.isSuccess(discovery) + ? discovery.success + : { server: undefined, tools: [] as const }; const sourceName = config.name ?? manifest.server?.name ?? 
namespace; @@ -706,9 +722,10 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { }), ); if (!sd) { - return yield* Effect.fail( - remoteConnectionError(`No stored config for MCP source "${namespace}"`), - ); + return yield* new McpConnectionError({ + transport: "remote", + message: `No stored config for MCP source "${namespace}"`, + }); } const ci = yield* resolveConnectorInput(sd, ctx, allowStdio).pipe( @@ -720,7 +737,13 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { }), ); const manifest = yield* discoverTools(createMcpConnector(ci)).pipe( - Effect.mapError((err) => mcpDiscoveryError(`MCP refresh failed: ${err.message}`)), + Effect.mapError( + ({ message }) => + new McpToolDiscoveryError({ + stage: "list_tools", + message: `MCP refresh failed: ${message}`, + }), + ), Effect.withSpan("mcp.plugin.discover_tools", { attributes: { "mcp.source.namespace": namespace }, }), @@ -817,7 +840,7 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { refreshSource, getSource, updateSource, - } satisfies McpPluginExtension; + }; }, invokeTool: ({ ctx, toolRow, args, elicit }) => @@ -829,14 +852,17 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { // mcp_binding + mcp_source rows live at the same scope, so // pin every store lookup to it instead of relying on the // scoped adapter's stack-wide fall-through. 
- const toolScope = toolRow.scope_id as string; + const toolScope = toolRow.scope_id; const entry = yield* ctx.storage.getBinding(toolRow.id, toolScope).pipe( Effect.withSpan("mcp.plugin.load_binding", { attributes: { "mcp.tool.name": toolRow.id }, }), ); if (!entry) { - return yield* Effect.fail(new Error(`No MCP binding found for tool "${toolRow.id}"`)); + return yield* new McpInvocationError({ + toolName: toolRow.id, + message: `No MCP binding found for tool "${toolRow.id}"`, + }); } const sd = yield* ctx.storage.getSourceConfig(entry.namespace, toolScope).pipe( @@ -845,9 +871,10 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { }), ); if (!sd) { - return yield* Effect.fail( - new Error(`No MCP source config for namespace "${entry.namespace}"`), - ); + return yield* new McpConnectionError({ + transport: "auto", + message: `No MCP source config for namespace "${entry.namespace}"`, + }); } return yield* invokeMcpTool({ @@ -855,18 +882,26 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { toolName: entry.binding.toolName, args, sourceData: sd, - invokerScope: ctx.scopes[0]!.id as string, + invokerScope: ctx.scopes[0]!.id, resolveConnector: () => resolveConnectorInput(sd, ctx, allowStdio).pipe( - Effect.flatMap((ci) => createMcpConnector(ci)), - Effect.mapError((err) => - err instanceof McpConnectionError - ? err - : new McpConnectionError({ - transport: "auto", - message: err instanceof Error ? 
err.message : String(err), + Effect.catchTags({ + StorageError: () => + Effect.fail( + new McpConnectionError({ + transport: sd.transport, + message: "Failed to resolve MCP connector storage state", + }), + ), + UniqueViolationError: () => + Effect.fail( + new McpConnectionError({ + transport: sd.transport, + message: "Failed to resolve MCP connector storage state", }), - ), + ), + }), + Effect.flatMap((ci) => createMcpConnector(ci)), Effect.withSpan("mcp.plugin.resolve_connector", { attributes: { "mcp.source.namespace": entry.namespace, @@ -896,7 +931,7 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { try: () => new URL(trimmed), catch: (cause) => cause, }).pipe(Effect.option); - if (parsed._tag === "None") return null; + if (Option.isNone(parsed)) return null; const name = parsed.value.hostname || "mcp"; const namespace = deriveMcpNamespace({ endpoint: trimmed }); @@ -1001,9 +1036,7 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { usagesForSecret: ({ ctx, args }) => Effect.gen(function* () { const sources = yield* ctx.storage.findSourcesBySecret(args.secretId); - const childRows = yield* ctx.storage.findChildRowsBySecret( - args.secretId, - ); + const childRows = yield* ctx.storage.findChildRowsBySecret(args.secretId); const sourceKeys = new Set(); for (const s of sources) { @@ -1044,9 +1077,7 @@ export const mcpPlugin = definePlugin((options?: McpPluginOptions) => { usagesForConnection: ({ ctx, args }) => Effect.gen(function* () { - const sources = yield* ctx.storage.findSourcesByConnection( - args.connectionId, - ); + const sources = yield* ctx.storage.findSourcesByConnection(args.connectionId); return sources.map( (s) => new Usage({ From 5ad3094051db6c25d11ed528d8e1d8bbd2407b6f Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:48:43 -0700 Subject: [PATCH 080/108] Fix local MCP host boundary lint --- apps/local/src/server/mcp.ts | 40 +++++-- 
apps/local/src/server/migrate-connections.ts | 60 +++++----- packages/hosts/mcp/src/server.ts | 109 +++++++++++-------- 3 files changed, 126 insertions(+), 83 deletions(-) diff --git a/apps/local/src/server/mcp.ts b/apps/local/src/server/mcp.ts index 2ec74ae08..6c98cff11 100644 --- a/apps/local/src/server/mcp.ts +++ b/apps/local/src/server/mcp.ts @@ -20,6 +20,24 @@ const jsonError = (status: number, code: number, message: string): Response => headers: { "content-type": "application/json" }, }); +const formatBoundaryError = (error: unknown): unknown => { + // oxlint-disable-next-line executor/no-instanceof-error, executor/no-unknown-error-message -- boundary: MCP request handler catches unknown SDK/runtime failures for process logging + if (error instanceof Error) return error.stack ?? error.message; + return error; +}; + +const ignoreClose = (close: (() => Promise) | undefined): Promise => + close + ? Effect.runPromise( + Effect.ignore( + Effect.tryPromise({ + try: close, + catch: () => undefined, + }), + ), + ) + : Promise.resolve(); + export const createMcpRequestHandler = (config: ExecutorMcpServerConfig): McpRequestHandler => { const transports = new Map(); const servers = new Map(); @@ -29,8 +47,8 @@ export const createMcpRequestHandler = (config: ExecutorMcpServerConfig): McpReq const s = servers.get(id); transports.delete(id); servers.delete(id); - if (opts.transport) await t?.close().catch(() => undefined); - if (opts.server) await s?.close().catch(() => undefined); + if (opts.transport) await ignoreClose(t ? () => t.close() : undefined); + if (opts.server) await ignoreClose(s ? 
() => s.close() : undefined); }; return { @@ -59,21 +77,24 @@ export const createMcpRequestHandler = (config: ExecutorMcpServerConfig): McpReq if (sid) void dispose(sid, { server: true }); }; + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: MCP SDK handler must return JSON-RPC errors from thrown Promise APIs try { created = await Effect.runPromise(createExecutorMcpServer(config)); await created.connect(transport); const response = await transport.handleRequest(request); if (!transport.sessionId) { - await transport.close().catch(() => undefined); - await created.close().catch(() => undefined); + await ignoreClose(() => transport.close()); + const server = created; + await ignoreClose(server ? () => server.close() : undefined); } return response; } catch (error) { - console.error("[mcp] handleRequest error:", error instanceof Error ? error.stack : error); + console.error("[mcp] handleRequest error:", formatBoundaryError(error)); if (!transport.sessionId) { - await transport.close().catch(() => undefined); - await created?.close().catch(() => undefined); + await ignoreClose(() => transport.close()); + const server = created; + await ignoreClose(server ? 
() => server.close() : undefined); } return jsonError(500, -32603, "Internal server error"); } @@ -109,11 +130,12 @@ export const runMcpStdioServer = async (config: ExecutorMcpServerConfig): Promis process.stdin.once("close", finish); }); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: stdio server lifetime uses Promise-based SDK/process APIs and always closes resources try { await server.connect(transport); await waitForExit(); } finally { - await transport.close().catch(() => undefined); - await server.close().catch(() => undefined); + await ignoreClose(() => transport.close()); + await ignoreClose(() => server.close()); } }; diff --git a/apps/local/src/server/migrate-connections.ts b/apps/local/src/server/migrate-connections.ts index e70981b8d..1a1880587 100644 --- a/apps/local/src/server/migrate-connections.ts +++ b/apps/local/src/server/migrate-connections.ts @@ -32,12 +32,22 @@ const isRecord = (v: unknown): v is Record => const isString = (v: unknown): v is string => typeof v === "string"; const JsonObject = Schema.Record(Schema.String, Schema.Unknown); +const JsonObjectFromString = Schema.fromJsonString(JsonObject); const decodeUnknownOptionAs = (schema: Schema.Decoder) => (input: unknown): Option.Option => Schema.decodeUnknownOption(schema)(input); +const decodeJsonObjectString = Schema.decodeUnknownOption(JsonObjectFromString); + +const formatBoundaryError = (error: unknown): string => { + // oxlint-disable-next-line executor/no-instanceof-error, executor/no-unknown-error-message -- boundary: sqlite transaction throws expose only an unknown JS error value for logging + if (error instanceof Error) return error.message; + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: fallback log formatting for unknown sqlite transaction failures + return String(error); +}; + /** Pre-flight: bail unless the drizzle migration that added the Connection * table + `secret.owned_by_connection_id` has completed. 
*/ const connectionsReady = (sqlite: Database): boolean => { @@ -280,20 +290,14 @@ const migrateOpenApi = async (sqlite: Database): Promise => { for (const row of rows) { let invocation: Record = {}; if (row.invocation_config) { - try { - const parsed = JSON.parse(row.invocation_config) as unknown; - if (isRecord(parsed)) invocation = parsed; - } catch { - continue; - } + const parsed = decodeJsonObjectString(row.invocation_config); + if (Option.isNone(parsed)) continue; + invocation = parsed.value; } let oauth2Col: unknown = null; if (row.oauth2) { - try { - oauth2Col = JSON.parse(row.oauth2) as unknown; - } catch { - // fall through - } + const parsed = decodeJsonObjectString(row.oauth2); + if (Option.isSome(parsed)) oauth2Col = parsed.value; } const primary = invocation.oauth2 ?? oauth2Col; if (primary == null) continue; @@ -355,6 +359,7 @@ const migrateOpenApi = async (sqlite: Database): Promise => { providerState, }); const err = rewireSecrets(sqlite, row.scope_id, connectionId, secretIds, secretNames); + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: bun:sqlite transaction callback must throw to roll back if (err) throw new Error(err); if (hasInvocationConfig) { const nextInvocation = { ...invocation, oauth2: oauth2Pointer }; @@ -372,6 +377,7 @@ const migrateOpenApi = async (sqlite: Database): Promise => { ); } }); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: bun:sqlite transaction reports rollback failures by throwing try { txn(); console.log( @@ -379,7 +385,7 @@ const migrateOpenApi = async (sqlite: Database): Promise => { ); } catch (err) { console.warn( - `[migrate-connections] fail openapi ${row.scope_id}/${row.id}: ${err instanceof Error ? 
err.message : String(err)}`, + `[migrate-connections] fail openapi ${row.scope_id}/${row.id}: ${formatBoundaryError(err)}`, ); } } @@ -450,14 +456,9 @@ const migrateMcp = (sqlite: Database): void => { : null; for (const row of rows) { - let config: Record = {}; - try { - const parsed = JSON.parse(row.config) as unknown; - if (!isRecord(parsed)) continue; - config = parsed; - } catch { - continue; - } + const parsedConfig = decodeJsonObjectString(row.config); + if (Option.isNone(parsedConfig)) continue; + const config = parsedConfig.value; if (config.transport !== "remote") continue; const auth = config.auth; if (!isRecord(auth) || auth.kind !== "oauth2") continue; @@ -514,6 +515,7 @@ const migrateMcp = (sqlite: Database): void => { providerState, }); const err = rewireSecrets(sqlite, row.scope_id, connectionId, secretIds, secretNames); + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: bun:sqlite transaction callback must throw to roll back if (err) throw new Error(err); if (updateConfigAndAuth) { updateConfigAndAuth.run( @@ -526,6 +528,7 @@ const migrateMcp = (sqlite: Database): void => { updateConfig.run(JSON.stringify(nextConfig), row.scope_id, row.id); } }); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: bun:sqlite transaction reports rollback failures by throwing try { txn(); console.log( @@ -533,7 +536,7 @@ const migrateMcp = (sqlite: Database): void => { ); } catch (err) { console.warn( - `[migrate-connections] fail mcp ${row.scope_id}/${row.id}: ${err instanceof Error ? 
err.message : String(err)}`, + `[migrate-connections] fail mcp ${row.scope_id}/${row.id}: ${formatBoundaryError(err)}`, ); } } @@ -591,14 +594,9 @@ const migrateGoogleDiscovery = (sqlite: Database): void => { ); for (const row of rows) { - let config: Record = {}; - try { - const parsed = JSON.parse(row.config) as unknown; - if (!isRecord(parsed)) continue; - config = parsed; - } catch { - continue; - } + const parsedConfig = decodeJsonObjectString(row.config); + if (Option.isNone(parsedConfig)) continue; + const config = parsedConfig.value; const auth = config.auth; if (!isRecord(auth) || auth.kind !== "oauth2") continue; @@ -643,9 +641,11 @@ const migrateGoogleDiscovery = (sqlite: Database): void => { providerState, }); const err = rewireSecrets(sqlite, row.scope_id, connectionId, secretIds, secretNames); + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: bun:sqlite transaction callback must throw to roll back if (err) throw new Error(err); updateSource.run(JSON.stringify(nextConfig), Date.now(), row.scope_id, row.id); }); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: bun:sqlite transaction reports rollback failures by throwing try { txn(); console.log( @@ -653,7 +653,7 @@ const migrateGoogleDiscovery = (sqlite: Database): void => { ); } catch (err) { console.warn( - `[migrate-connections] fail google-discovery ${row.scope_id}/${row.id}: ${err instanceof Error ? 
err.message : String(err)}`, + `[migrate-connections] fail google-discovery ${row.scope_id}/${row.id}: ${formatBoundaryError(err)}`, ); } } diff --git a/packages/hosts/mcp/src/server.ts b/packages/hosts/mcp/src/server.ts index b6bfeaf6c..2303447ca 100644 --- a/packages/hosts/mcp/src/server.ts +++ b/packages/hosts/mcp/src/server.ts @@ -1,4 +1,4 @@ -import { Effect, Match } from "effect"; +import { Effect, Match, Option, Schema } from "effect"; import * as Cause from "effect/Cause"; import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import type { @@ -112,6 +112,28 @@ type ElicitInputParams = } | { mode: "url"; message: string; url: string; elicitationId: string }; +const elicitationRequestTag = (request: ElicitationRequest): ElicitationRequest["_tag"] => + Match.value(request).pipe( + Match.tag("UrlElicitation", () => "UrlElicitation" as const), + Match.tag("FormElicitation", () => "FormElicitation" as const), + Match.exhaustive, + ); + +const requestedSchemaIsNonEmpty = (request: ElicitationRequest): boolean => + Match.value(request).pipe( + Match.tag("FormElicitation", (req) => Object.keys(req.requestedSchema).length > 0), + Match.orElse(() => false), + ); + +const elicitationRequestUrl = (request: ElicitationRequest): string | undefined => + Match.value(request).pipe( + Match.tag("UrlElicitation", (req) => req.url), + Match.orElse(() => undefined), + ); + +const pausedInteractionKind = (request: ElicitationRequest): ElicitationRequest["_tag"] => + elicitationRequestTag(request); + const elicitationRequestToParams: (request: ElicitationRequest) => ElicitInputParams = Match.type().pipe( Match.tag("UrlElicitation", (req) => ({ @@ -143,33 +165,37 @@ const makeMcpElicitationHandler = // If client doesn't support url mode, fall back to a form asking the user // to visit the URL manually and confirm when done. - const params = - ctx.request._tag === "UrlElicitation" && !supportsUrl - ? 
{ - message: `${ctx.request.message}\n\nPlease visit this URL:\n${ctx.request.url}\n\nClick accept once you have completed the flow.`, - requestedSchema: { type: "object" as const, properties: {} }, - } - : elicitationRequestToParams(ctx.request); + const params = Match.value(ctx.request).pipe( + Match.tag("UrlElicitation", (req) => + !supportsUrl + ? { + message: `${req.message}\n\nPlease visit this URL:\n${req.url}\n\nClick accept once you have completed the flow.`, + requestedSchema: { type: "object" as const, properties: {} }, + } + : elicitationRequestToParams(req), + ), + Match.orElse((req) => elicitationRequestToParams(req)), + ); return Effect.promise(async (): Promise => { + const requestTag = elicitationRequestTag(ctx.request); debugLog?.("elicitation.request", { - requestTag: ctx.request._tag, + requestTag, supportsUrl, message: ctx.request.message, - hasRequestedSchema: - ctx.request._tag === "FormElicitation" - ? Object.keys(ctx.request.requestedSchema).length > 0 - : false, - url: ctx.request._tag === "UrlElicitation" ? ctx.request.url : undefined, + hasRequestedSchema: requestedSchemaIsNonEmpty(ctx.request), + url: elicitationRequestUrl(ctx.request), clientCapabilities: server.server.getClientCapabilities() ?? null, }); + + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: MCP SDK elicitInput is a Promise API; failures become a cancel response try { const response = await server.server.elicitInput( params as Parameters[0], ); debugLog?.("elicitation.response", { - requestTag: ctx.request._tag, + requestTag, action: response.action, hasContent: typeof response.content === "object" && @@ -182,19 +208,17 @@ const makeMcpElicitationHandler = content: response.content, }; } catch (err) { + const error = formatBoundaryError(err); debugLog?.("elicitation.error", { - requestTag: ctx.request._tag, - error: - err instanceof Error - ? 
{ name: err.name, message: err.message, stack: err.stack } - : { message: String(err) }, + requestTag, + error, clientCapabilities: server.server.getClientCapabilities() ?? null, }); console.error( - "[executor] elicitInput failed — falling back to cancel.", + "[executor] elicitInput failed - falling back to cancel.", JSON.stringify({ - error: err instanceof Error ? err.message : String(err), - requestTag: ctx.request._tag, + error, + requestTag, ...capabilitySnapshot(server), }), ); @@ -203,6 +227,13 @@ const makeMcpElicitationHandler = }); }; +const formatBoundaryError = (err: unknown): { name?: string; message: string; stack?: string } => { + // oxlint-disable-next-line executor/no-instanceof-error, executor/no-unknown-error-message -- boundary: SDK Promise rejection supplies unknown JS errors for logging only + if (err instanceof Error) return { name: err.name, message: err.message, stack: err.stack }; + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: fallback log formatting for unknown SDK Promise rejection values + return { message: String(err) }; +}; + // --------------------------------------------------------------------------- // MCP result formatting // --------------------------------------------------------------------------- @@ -225,7 +256,6 @@ const toMcpPausedResult = (formatted: ReturnType): }); const formatFailureMessage = (value: unknown): string | null => { - if (value instanceof Error) return value.message; if (typeof value === "object" && value !== null && "message" in value) { const message = (value as { readonly message?: unknown }).message; if (typeof message === "string" && message.length > 0) return message; @@ -246,17 +276,13 @@ const toMcpFailureResult = (cause: Cause.Cause): McpToolResult => { }; }; +const JsonObjectFromString = Schema.fromJsonString(Schema.Record(Schema.String, Schema.Unknown)); +const decodeJsonObjectString = Schema.decodeUnknownOption(JsonObjectFromString); + const parseJsonContent = (raw: 
string): Record | undefined => { if (raw === "{}") return undefined; - let parsed: unknown; - try { - parsed = JSON.parse(raw); - } catch { - return undefined; - } - return typeof parsed === "object" && parsed !== null && !Array.isArray(parsed) - ? (parsed as Record) - : undefined; + const parsed = decodeJsonObjectString(raw); + return Option.isSome(parsed) ? parsed.value : undefined; }; // --------------------------------------------------------------------------- @@ -270,9 +296,7 @@ export const createExecutorMcpServer = ( const engine = "engine" in config ? config.engine : createExecutionEngine(config); const description = config.description ?? - (yield* engine.getDescription.pipe( - Effect.withSpan("mcp.host.get_description"), - )); + (yield* engine.getDescription.pipe(Effect.withSpan("mcp.host.get_description"))); // Captured at construction time. SDK callbacks fire later (often // deferred past the outer Effect's await), so we use the runtime to @@ -281,6 +305,7 @@ export const createExecutorMcpServer = ( const debugEnabled = config.debug ?? readDebugDefault(); const debugLog = (event: string, data: Record) => { if (!debugEnabled) return; + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: debug logging must tolerate non-serializable SDK capability snapshots try { console.error(`[executor:mcp] ${event} ${JSON.stringify(data)}`); } catch { @@ -299,9 +324,7 @@ export const createExecutorMcpServer = ( const runToolEffect = (effect: Effect.Effect) => Effect.runPromiseWith(context)( anchor(effect).pipe( - Effect.catchCause((cause) => - Effect.succeed(toMcpFailureResult(cause)), - ), + Effect.catchCause((cause) => Effect.succeed(toMcpFailureResult(cause))), ), ); @@ -336,7 +359,7 @@ export const createExecutorMcpServer = ( executionId: outcome.status === "paused" ? outcome.execution.id : undefined, interactionKind: outcome.status === "paused" - ? outcome.execution.elicitationContext.request._tag + ? 
pausedInteractionKind(outcome.execution.elicitationContext.request) : undefined, }); return outcome.status === "completed" @@ -367,9 +390,7 @@ export const createExecutorMcpServer = ( if (!outcome) { debugLog("resume.missing_execution", { executionId }); return { - content: [ - { type: "text" as const, text: `No paused execution: ${executionId}` }, - ], + content: [{ type: "text" as const, text: `No paused execution: ${executionId}` }], isError: true, } satisfies McpToolResult; } @@ -379,7 +400,7 @@ export const createExecutorMcpServer = ( nextExecutionId: outcome.status === "paused" ? outcome.execution.id : undefined, interactionKind: outcome.status === "paused" - ? outcome.execution.elicitationContext.request._tag + ? pausedInteractionKind(outcome.execution.elicitationContext.request) : undefined, }); return outcome.status === "completed" From 707e5c29533f941162082bfcdfb11f5b876a0c3f Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:55:29 -0700 Subject: [PATCH 081/108] Use typed WorkOS Vault plugin failures --- .../plugins/workos-vault/src/sdk/plugin.ts | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/packages/plugins/workos-vault/src/sdk/plugin.ts b/packages/plugins/workos-vault/src/sdk/plugin.ts index 077aad260..4f5377d02 100644 --- a/packages/plugins/workos-vault/src/sdk/plugin.ts +++ b/packages/plugins/workos-vault/src/sdk/plugin.ts @@ -5,6 +5,7 @@ import { definePlugin } from "@executor-js/sdk/core"; import { makeConfiguredWorkOSVaultClient, type WorkOSVaultClient, + WorkOSVaultClientInstantiationError, type WorkOSVaultCredentials, } from "./client"; import { @@ -36,9 +37,11 @@ export interface WorkOSVaultPluginOptions { readonly contextForScope?: WorkOSVaultContextForScope; } -export interface WorkOSVaultExtension { - readonly providerKey: typeof WORKOS_VAULT_PROVIDER_KEY; -} +const makeWorkOSVaultExtension = () => ({ + providerKey: 
WORKOS_VAULT_PROVIDER_KEY, +} as const); + +export type WorkOSVaultExtension = ReturnType; // The plugin's typed store is just its metadata-store wrapper. The // secret provider closes over this store plus the resolved WorkOS @@ -48,15 +51,15 @@ type WorkosVaultPluginStore = WorkosVaultStore; const buildClient = ( options: WorkOSVaultPluginOptions | undefined, -): Effect.Effect => { +): Effect.Effect => { if (options?.client) return Effect.succeed(options.client); if (options?.credentials) { return makeConfiguredWorkOSVaultClient(options.credentials); } return Effect.fail( - new Error( - "workosVaultPlugin requires either `client` or `credentials` to be provided", - ), + new WorkOSVaultClientInstantiationError({ + cause: "workosVaultPlugin requires either `client` or `credentials` to be provided", + }), ); }; @@ -67,9 +70,7 @@ export const workosVaultPlugin = definePlugin( schema: workosVaultSchema, storage: (deps): WorkosVaultPluginStore => makeWorkosVaultStore(deps), - extension: (_ctx): WorkOSVaultExtension => ({ - providerKey: WORKOS_VAULT_PROVIDER_KEY, - }), + extension: makeWorkOSVaultExtension, secretProviders: (ctx) => { // Build (or accept) the WorkOS client once at startup. 
If From 0b344983c940a20a31aa275c0d61359dbfead23f Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:55:39 -0700 Subject: [PATCH 082/108] Use schema guards for page tagged errors --- packages/react/src/pages/connections.tsx | 5 ++++- packages/react/src/pages/secrets.tsx | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/react/src/pages/connections.tsx b/packages/react/src/pages/connections.tsx index 697c94dff..3edfd5f32 100644 --- a/packages/react/src/pages/connections.tsx +++ b/packages/react/src/pages/connections.tsx @@ -3,6 +3,7 @@ import { useAtomValue, useAtomSet } from "@effect/atom-react"; import * as AsyncResult from "effect/unstable/reactivity/AsyncResult"; import * as Exit from "effect/Exit"; import * as Option from "effect/Option"; +import * as Schema from "effect/Schema"; import { ConnectionId, ConnectionInUseError, type ScopeId } from "@executor-js/sdk"; import { toast } from "sonner"; @@ -43,6 +44,8 @@ const providerDisplayNames: Record = { const displayProvider = (provider: string): string => providerDisplayNames[provider] ?? provider; +const isConnectionInUseError = Schema.is(ConnectionInUseError); + const connectionScopeLabel = ( scopeId: string, stack: readonly { readonly id: string; readonly name: string }[], @@ -174,7 +177,7 @@ export function ConnectionsPage() { if (Exit.isFailure(exit)) { pending.undo(); const error = Exit.findErrorOption(exit); - if (Option.isSome(error) && error.value instanceof ConnectionInUseError) { + if (Option.isSome(error) && isConnectionInUseError(error.value)) { const count = error.value.usageCount; toast.error( `Connection is used by ${count} ${count === 1 ? "source" : "sources"}. 
Detach it before removing it.`, diff --git a/packages/react/src/pages/secrets.tsx b/packages/react/src/pages/secrets.tsx index 66fb592ef..627cb978c 100644 --- a/packages/react/src/pages/secrets.tsx +++ b/packages/react/src/pages/secrets.tsx @@ -3,6 +3,7 @@ import { useAtomValue, useAtomSet } from "@effect/atom-react"; import * as AsyncResult from "effect/unstable/reactivity/AsyncResult"; import * as Exit from "effect/Exit"; import * as Option from "effect/Option"; +import * as Schema from "effect/Schema"; import { toast } from "sonner"; import { removeSecretOptimistic, @@ -53,6 +54,8 @@ const defaultStorageOptions: readonly SecretStorageOption[] = [ { value: "file", label: "File" }, ]; +const isSecretInUseError = Schema.is(SecretInUseError); + // --------------------------------------------------------------------------- // Add secret dialog // @@ -265,7 +268,7 @@ export function SecretsPage(props: { }); if (Exit.isFailure(exit)) { const error = Exit.findErrorOption(exit); - if (Option.isSome(error) && error.value instanceof SecretInUseError) { + if (Option.isSome(error) && isSecretInUseError(error.value)) { const count = error.value.usageCount; toast.error( `Secret is used by ${count} ${count === 1 ? "source" : "sources"}. 
Detach it before removing it.`, From 9352af4d817ffae5802bf20f385f0bd52aeee552 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:55:50 -0700 Subject: [PATCH 083/108] Clean typed test boundary assertions --- packages/core/sdk/src/promise.test.ts | 21 ++++++------- packages/core/sdk/src/scoped-adapter.test.ts | 12 ++++++-- tests/daemon-bootstrap.test.ts | 32 +++++++++++--------- 3 files changed, 37 insertions(+), 28 deletions(-) diff --git a/packages/core/sdk/src/promise.test.ts b/packages/core/sdk/src/promise.test.ts index 7e2f30eb6..f02c74df3 100644 --- a/packages/core/sdk/src/promise.test.ts +++ b/packages/core/sdk/src/promise.test.ts @@ -102,24 +102,21 @@ describe("promise/createExecutor", () => { const ran = await executor.tools.invoke("ap.ctl.go", {}); expect(ran).toBe("ran"); - // Override with a declining handler → rejects with ElicitationDeclinedError. - // Effect.runPromise rejects with a FiberFailure that wraps the typed - // error; both `name` and `message` carry the tag. - let caught: unknown; - try { - await executor.tools.invoke( + // Override with a declining handler -> rejects with ElicitationDeclinedError. + // Effect.runPromise rejects with a FiberFailure that carries the tag in + // the error name. 
+ await expect( + executor.tools.invoke( "ap.ctl.go", {}, { onElicitation: () => Effect.succeed({ action: "decline" as const }) as any, }, - ); - } catch (e) { - caught = e; - } - expect(caught).toBeDefined(); - expect((caught as Error).name).toMatch(/ElicitationDeclinedError/); + ), + ).rejects.toMatchObject({ + name: expect.stringMatching(/ElicitationDeclinedError/), + }); await executor.close(); }); diff --git a/packages/core/sdk/src/scoped-adapter.test.ts b/packages/core/sdk/src/scoped-adapter.test.ts index d7851d941..0386eade4 100644 --- a/packages/core/sdk/src/scoped-adapter.test.ts +++ b/packages/core/sdk/src/scoped-adapter.test.ts @@ -45,7 +45,11 @@ describe("scopeAdapter — write rejection on scoped tables", () => { const reason = result.cause.reasons.find(Cause.isFailReason); const err = reason?.error ?? null; expect(err).toBeInstanceOf(StorageError); - expect((err as StorageError).message).toContain("not in the executor"); + expect(err).toEqual( + expect.objectContaining({ + message: expect.stringContaining("not in the executor"), + }), + ); }), ); @@ -70,7 +74,11 @@ describe("scopeAdapter — write rejection on scoped tables", () => { const reason = result.cause.reasons.find(Cause.isFailReason); const err = reason?.error ?? 
null; expect(err).toBeInstanceOf(StorageError); - expect((err as StorageError).message).toContain("missing required"); + expect(err).toEqual( + expect.objectContaining({ + message: expect.stringContaining("missing required"), + }), + ); }), ); diff --git a/tests/daemon-bootstrap.test.ts b/tests/daemon-bootstrap.test.ts index 1ee778ebc..11d6af464 100644 --- a/tests/daemon-bootstrap.test.ts +++ b/tests/daemon-bootstrap.test.ts @@ -116,26 +116,30 @@ describe("daemon bootstrap helpers", () => { it("falls back when preferred daemon port is occupied", async () => { const blocker = createServer(); - await new Promise((resolve, reject) => { - blocker.once("error", reject); - blocker.listen({ port: 0, host: "127.0.0.1" }, () => resolve()); - }); + await Effect.runPromise(Effect.scoped(Effect.gen(function*() { + yield* Effect.acquireRelease( + Effect.callback((resume) => { + blocker.once("error", (error) => resume(Effect.fail(error))); + blocker.listen({ port: 0, host: "127.0.0.1" }, () => + resume(Effect.succeed(undefined))); + }), + () => Effect.promise(() => new Promise((resolve) => { + blocker.close(() => resolve()); + })), + ); - const occupied = (() => { - const address = blocker.address(); - return typeof address === "object" && address !== null ? address.port : 0; - })(); + const occupied = (() => { + const address = blocker.address(); + return typeof address === "object" && address !== null ? 
address.port : 0; + })(); - try { - const picked = await Effect.runPromise( + const picked = yield* ( chooseDaemonPort({ preferredPort: occupied, hostname: "127.0.0.1", - }), + }) ); expect(picked).not.toBe(occupied); - } finally { - await new Promise((resolve) => blocker.close(() => resolve())); - } + }))); }); }); From 71332e767f6538365818756eb7cc2e4f5853da43 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:56:00 -0700 Subject: [PATCH 084/108] Keep config write failures typed --- packages/core/config/src/config.test.ts | 8 +++++--- packages/core/config/src/write.ts | 14 +++++++++++--- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/packages/core/config/src/config.test.ts b/packages/core/config/src/config.test.ts index bc6c5289f..c0f6ede92 100644 --- a/packages/core/config/src/config.test.ts +++ b/packages/core/config/src/config.test.ts @@ -5,7 +5,7 @@ import { FileSystem } from "effect"; import { join } from "node:path"; import { ExecutorFileConfig } from "./schema"; -import { loadConfig } from "./load"; +import { ConfigParseError, loadConfig } from "./load"; import { addSourceToConfig, removeSourceFromConfig, @@ -128,8 +128,10 @@ describe("loadConfig", () => { const path = join(dir, "executor.jsonc"); yield* fs.writeFileString(path, "{ invalid json }"); - const result = yield* loadConfig(path).pipe(Effect.flip); - expect(result._tag).toBe("ConfigParseError"); + const result = yield* loadConfig(path).pipe( + Effect.catchTag("ConfigParseError", (error) => Effect.succeed(error)), + ); + expect(result).toBeInstanceOf(ConfigParseError); }), ), ); diff --git a/packages/core/config/src/write.ts b/packages/core/config/src/write.ts index 46b99d5e6..dbe7e340b 100644 --- a/packages/core/config/src/write.ts +++ b/packages/core/config/src/write.ts @@ -4,6 +4,14 @@ import type { PlatformError } from "effect/PlatformError"; import * as jsonc from "jsonc-parser"; import type { SourceConfig, 
ExecutorFileConfig } from "./schema"; +export class ConfigWriteError { + readonly _tag = "ConfigWriteError"; + constructor( + readonly path: string, + readonly cause: unknown, + ) {} +} + const FORMATTING: jsonc.FormattingOptions = { tabSize: 2, insertSpaces: true, @@ -118,13 +126,13 @@ export const removeSourceFromConfig = ( export const writeConfig = ( path: string, config: ExecutorFileConfig, -): Effect.Effect => +): Effect.Effect => Effect.gen(function* () { const fs = yield* FileSystem.FileSystem; const text = yield* Effect.try({ try: () => JSON.stringify(config, null, 2) + "\n", - catch: (cause) => cause, - }).pipe(Effect.orDie); + catch: (cause) => new ConfigWriteError(path, cause), + }); yield* fs.writeFileString(path, text); }); From a995f28589d424de550f08b8593f0682afefece3 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:56:12 -0700 Subject: [PATCH 085/108] Normalize cloud startup boundaries --- apps/cloud/src/services/db.ts | 9 +++- apps/cloud/src/start.ts | 78 ++++++++++++++++++++++++----------- 2 files changed, 63 insertions(+), 24 deletions(-) diff --git a/apps/cloud/src/services/db.ts b/apps/cloud/src/services/db.ts index 1e31c4d37..0863d8ecb 100644 --- a/apps/cloud/src/services/db.ts +++ b/apps/cloud/src/services/db.ts @@ -73,7 +73,14 @@ export class DbService extends Context.Service< // Fire-and-forget: the Terminate round-trip sometimes hangs, and // we don't need to block scope close waiting for it. 
Effect.sync(() => { - sql.end({ timeout: 0 }).catch(() => undefined); + void Effect.runFork( + Effect.ignore( + Effect.tryPromise({ + try: () => sql.end({ timeout: 0 }), + catch: (cause) => cause, + }), + ), + ); }), ), ); diff --git a/apps/cloud/src/start.ts b/apps/cloud/src/start.ts index 92339f7f9..82fdce0ed 100644 --- a/apps/cloud/src/start.ts +++ b/apps/cloud/src/start.ts @@ -1,5 +1,6 @@ import { env } from "cloudflare:workers"; import { createMiddleware, createStart } from "@tanstack/react-start"; +import { Data, Effect, Schema } from "effect"; import { handleApiRequest } from "./api"; import { mcpFetch } from "./mcp"; @@ -72,7 +73,7 @@ const mcpRequestMiddleware = createMiddleware({ type: "request" }).server( // --------------------------------------------------------------------------- const sentryTunnelMiddleware = createMiddleware({ type: "request" }).server( - async ({ pathname, request, next }) => { + ({ pathname, request, next }) => { if (pathname !== "/api/sentry-tunnel" || request.method !== "POST") { return next(); } @@ -80,31 +81,62 @@ const sentryTunnelMiddleware = createMiddleware({ type: "request" }).server( const configuredDsn = (env as { SENTRY_DSN?: string }).SENTRY_DSN; if (!configuredDsn) return new Response(null, { status: 204 }); - try { - const envelope = await request.text(); - const firstLine = envelope.slice(0, envelope.indexOf("\n")); - const header = JSON.parse(firstLine) as { dsn?: string }; - if (!header.dsn) return new Response("missing dsn", { status: 400 }); - - const envelopeDsn = new URL(header.dsn); - const ourDsn = new URL(configuredDsn); - if (envelopeDsn.host !== ourDsn.host || envelopeDsn.pathname !== ourDsn.pathname) { - return new Response("dsn mismatch", { status: 400 }); - } - - const projectId = envelopeDsn.pathname.replace(/^\//, ""); - const ingestUrl = `https://${envelopeDsn.host}/api/${projectId}/envelope/`; - return fetch(ingestUrl, { - method: "POST", - body: envelope, - headers: { "Content-Type": 
"application/x-sentry-envelope" }, - }); - } catch { - return new Response("bad envelope", { status: 400 }); - } + return Effect.runPromise(handleSentryTunnelRequest(request, configuredDsn)); }, ); +class SentryTunnelError extends Data.TaggedError("SentryTunnelError")<{ + readonly cause?: unknown; +}> {} + +const SentryEnvelopeHeader = Schema.Struct({ + dsn: Schema.optional(Schema.String), +}); + +const decodeSentryEnvelopeHeader = Schema.decodeUnknownEffect( + Schema.fromJsonString(SentryEnvelopeHeader), +); + +const badSentryEnvelopeResponse = () => new Response("bad envelope", { status: 400 }); + +const handleSentryTunnelRequest = (request: Request, configuredDsn: string) => + Effect.gen(function* () { + const envelope = yield* Effect.tryPromise({ + try: () => request.text(), + catch: (cause) => new SentryTunnelError({ cause }), + }); + const firstLine = envelope.slice(0, envelope.indexOf("\n")); + const header = yield* decodeSentryEnvelopeHeader(firstLine).pipe( + Effect.mapError((cause) => new SentryTunnelError({ cause })), + ); + const dsn = header.dsn; + if (!dsn) return new Response("missing dsn", { status: 400 }); + + const envelopeDsn = yield* Effect.try({ + try: () => new URL(dsn), + catch: (cause) => new SentryTunnelError({ cause }), + }); + const ourDsn = yield* Effect.try({ + try: () => new URL(configuredDsn), + catch: (cause) => new SentryTunnelError({ cause }), + }); + if (envelopeDsn.host !== ourDsn.host || envelopeDsn.pathname !== ourDsn.pathname) { + return new Response("dsn mismatch", { status: 400 }); + } + + const projectId = envelopeDsn.pathname.replace(/^\//, ""); + const ingestUrl = `https://${envelopeDsn.host}/api/${projectId}/envelope/`; + return yield* Effect.tryPromise({ + try: () => + fetch(ingestUrl, { + method: "POST", + body: envelope, + headers: { "Content-Type": "application/x-sentry-envelope" }, + }), + catch: (cause) => new SentryTunnelError({ cause }), + }); + }).pipe(Effect.catch(() => 
Effect.succeed(badSentryEnvelopeResponse()))); + // --------------------------------------------------------------------------- // PostHog reverse proxy — the browser SDK targets a build-randomized // first-party path and we forward to PostHog's ingest + asset hosts. Keeps From ff422a5482f7345a6ae77b6d14f46904289aa629 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 21:56:23 -0700 Subject: [PATCH 086/108] Handle all-plugins entrypoint failures in Effect --- examples/all-plugins/src/main.ts | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/examples/all-plugins/src/main.ts b/examples/all-plugins/src/main.ts index 51ae2a966..5691ec445 100644 --- a/examples/all-plugins/src/main.ts +++ b/examples/all-plugins/src/main.ts @@ -17,7 +17,7 @@ // backends are gated behind env vars and skipped by default. // --------------------------------------------------------------------------- -import { Effect } from "effect"; +import { Cause, Effect } from "effect"; import { SecretId, @@ -448,7 +448,13 @@ const program = Effect.gen(function* () { // 4. Run. 
// --------------------------------------------------------------------------- -Effect.runPromise(program).catch((err) => { - console.error("Example failed:", err); - process.exit(1); -}); +Effect.runPromise( + program.pipe( + Effect.catchCause((cause) => + Effect.sync(() => { + console.error("Example failed:", Cause.squash(cause)); + process.exit(1); + }), + ), + ), +); From 14060d62796a1869df202b4b74037d82c2a10d39 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:04:14 -0700 Subject: [PATCH 087/108] Keep cloud API errors typed --- apps/cloud/src/api/error-response.ts | 9 ++-- apps/cloud/src/api/slack.ts | 68 ++++++++++++---------------- 2 files changed, 35 insertions(+), 42 deletions(-) diff --git a/apps/cloud/src/api/error-response.ts b/apps/cloud/src/api/error-response.ts index 129dcbec2..6d464a976 100644 --- a/apps/cloud/src/api/error-response.ts +++ b/apps/cloud/src/api/error-response.ts @@ -1,4 +1,4 @@ -import { Cause, Data, Effect, Result } from "effect"; +import { Cause, Data, Effect, Predicate, Result } from "effect"; import { HttpServerRespondable, HttpServerResponse, @@ -38,9 +38,12 @@ const unwrapCause = (error: unknown): unknown => { return error; }; +const isHttpResponseError = (error: unknown): error is HttpResponseError => + Predicate.isTagged(error, "HttpResponseError"); + const toHttpResponseError = (error: unknown): HttpResponseError => { const unwrapped = unwrapCause(error); - return unwrapped instanceof HttpResponseError + return isHttpResponseError(unwrapped) ? unwrapped : new HttpResponseError({ status: 500, @@ -62,7 +65,7 @@ export const toErrorServerResponse = (error: unknown): HttpServerResponse.HttpSe if (mapped.status >= 500) { console.error( "[api] toErrorServerResponse error:", - Cause.isCause(error) ? Cause.pretty(error) : error instanceof Error ? error.stack : error, + Cause.isCause(error) ? 
Cause.pretty(error) : error, ); captureCause(error); } diff --git a/apps/cloud/src/api/slack.ts b/apps/cloud/src/api/slack.ts index 2e8ef19b0..4f2b4911e 100644 --- a/apps/cloud/src/api/slack.ts +++ b/apps/cloud/src/api/slack.ts @@ -1,5 +1,5 @@ import { env } from "cloudflare:workers"; -import { Effect } from "effect"; +import { Cause, Effect } from "effect"; import { HttpRouter, HttpServerRequest, HttpServerResponse } from "effect/unstable/http"; import { SlackService } from "../services/slack"; @@ -31,20 +31,18 @@ const verifyTurnstile = (token: string, remoteIp: string | null) => const json = (await res.json()) as { success: boolean; "error-codes"?: string[] }; return { success: json.success, errorCodes: json["error-codes"] ?? [] }; }, - catch: (cause) => ({ success: false as const, fetchError: String(cause) }), + catch: (cause) => ({ success: false as const, fetchError: cause }), }); const handler = Effect.gen(function* () { const request = yield* HttpServerRequest.HttpServerRequest; if (request.method !== "POST") { - return yield* Effect.fail( - new HttpResponseError({ - status: 405, - code: "method_not_allowed", - message: "Method not allowed", - }), - ); + return yield* new HttpResponseError({ + status: 405, + code: "method_not_allowed", + message: "Method not allowed", + }); } const body = (yield* Effect.mapError( @@ -73,36 +71,30 @@ const handler = Effect.gen(function* () { const turnstileToken = typeof body.turnstileToken === "string" ? 
body.turnstileToken : ""; if (!isValidEmail(email)) { - return yield* Effect.fail( - new HttpResponseError({ - status: 400, - code: "invalid_email", - message: "A valid email is required", - }), - ); + return yield* new HttpResponseError({ + status: 400, + code: "invalid_email", + message: "A valid email is required", + }); } if (!turnstileToken) { - return yield* Effect.fail( - new HttpResponseError({ - status: 400, - code: "captcha_required", - message: "Captcha verification is required.", - }), - ); + return yield* new HttpResponseError({ + status: 400, + code: "captcha_required", + message: "Captcha verification is required.", + }); } const remoteIp = request.headers["cf-connecting-ip"] ?? null; const verification = yield* verifyTurnstile(turnstileToken, remoteIp); if (!verification.success) { console.error("[slack] turnstile verification failed:", verification); - return yield* Effect.fail( - new HttpResponseError({ - status: 403, - code: "captcha_failed", - message: "Captcha verification failed. Please try again.", - }), - ); + return yield* new HttpResponseError({ + status: 403, + code: "captcha_failed", + message: "Captcha verification failed. Please try again.", + }); } // Global daily channel-creation cap — bounds the worst case if Turnstile is @@ -110,17 +102,15 @@ const handler = Effect.gen(function* () { // this binding is a single shared bucket keyed at "global". const limit = yield* Effect.tryPromise({ try: () => env.SLACK_INVITE_LIMITER.limit({ key: "global" }), - catch: (cause) => ({ success: false as const, fetchError: String(cause) }), + catch: (cause) => ({ success: false as const, fetchError: cause }), }); if (!limit.success) { console.error("[slack] global rate limit hit"); - return yield* Effect.fail( - new HttpResponseError({ - status: 429, - code: "rate_limited", - message: "We're getting more contact requests than usual. 
Please try again later.", - }), - ); + return yield* new HttpResponseError({ + status: 429, + code: "rate_limited", + message: "We're getting more contact requests than usual. Please try again later.", + }); } const slack = yield* SlackService; @@ -144,7 +134,7 @@ const handler = Effect.gen(function* () { }).pipe( Effect.catchCause((err) => { if (isServerError(err)) { - console.error("[slack] request failed:", err instanceof Error ? err.stack : err); + console.error("[slack] request failed:", Cause.pretty(err)); } return Effect.succeed(toErrorServerResponse(err)); }), From 139dd5c70b21e75ceabfd22332955f752d5120a0 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:04:31 -0700 Subject: [PATCH 088/108] Validate cloud JWT boundaries with schema --- apps/cloud/src/jwks-cache.ts | 61 +++++++++++++++++++++++------------- apps/cloud/src/mcp-auth.ts | 32 +++++++++++++------ 2 files changed, 61 insertions(+), 32 deletions(-) diff --git a/apps/cloud/src/jwks-cache.ts b/apps/cloud/src/jwks-cache.ts index 9d2521663..e187c8718 100644 --- a/apps/cloud/src/jwks-cache.ts +++ b/apps/cloud/src/jwks-cache.ts @@ -29,6 +29,7 @@ import { type JWTVerifyGetKey, type KeyLike, } from "jose"; +import { Schema } from "effect"; import { JWKSNoMatchingKey } from "jose/errors"; export interface CachedRemoteJWKSetOptions { @@ -54,13 +55,19 @@ export interface CachedRemoteJWKSet extends JWTVerifyGetKey { const DEFAULT_TTL_MS = 60 * 60 * 1000; const DEFAULT_TIMEOUT_MS = 5000; +const JsonWebKey = Schema.Record(Schema.String, Schema.Unknown); +const JsonWebKeySetPayload = Schema.Struct({ + keys: Schema.Array(JsonWebKey), +}); +const decodeJsonWebKeySetPayload = Schema.decodeUnknownPromise(JsonWebKeySetPayload); + +const isJwksNoMatchingKey = (cause: unknown): boolean => + Schema.is(Schema.Struct({ code: Schema.String }))(cause) && cause.code === JWKSNoMatchingKey.code; + interface CacheEntry { jwks: JSONWebKeySet; fetchedAt: number; - 
resolver: ( - protectedHeader: JWTHeaderParameters, - token?: FlattenedJWSInput, - ) => Promise; + resolver: (protectedHeader: JWTHeaderParameters, token?: FlattenedJWSInput) => Promise; } const fetchJwksOnce = async ( @@ -70,20 +77,28 @@ const fetchJwksOnce = async ( ): Promise => { const controller = new AbortController(); const timer = setTimeout(() => controller.abort(), timeoutMs); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fetch adapter must clear abort timer while preserving promise rejection behavior try { const response = await fetchImpl(url.toString(), { method: "GET", headers: { accept: "application/json" }, signal: controller.signal, }); + if (!response.ok) { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: fetch-backed JWT key resolver must reject with the existing Error cause shape throw new Error(`JWKS fetch failed: ${response.status} ${response.statusText}`); } - const body = (await response.json()) as JSONWebKeySet; - if (!body || !Array.isArray((body as JSONWebKeySet).keys)) { + + const body = await response.json(); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: fetch JSON validation maps Schema failures to the existing malformed JWKS rejection + try { + await decodeJsonWebKeySetPayload(body); + return body as JSONWebKeySet; + } catch { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: fetch JSON validation preserves the existing malformed JWKS rejection throw new Error("JWKS fetch returned malformed payload"); } - return body; } finally { clearTimeout(timer); } @@ -111,19 +126,17 @@ export const createCachedRemoteJWKSet = ( const refresh = (): Promise => { if (inflight) return inflight; inflight = (async () => { - try { - const jwks = await fetchJwksOnce(url, fetchImpl(), timeoutMs); - const next: CacheEntry = { - jwks, - fetchedAt: Date.now(), - resolver: createLocalJWKSet(jwks), 
- }; - entry = next; - return next; - } finally { - inflight = null; - } - })(); + const jwks = await fetchJwksOnce(url, fetchImpl(), timeoutMs); + const next: CacheEntry = { + jwks, + fetchedAt: Date.now(), + resolver: createLocalJWKSet(jwks), + }; + entry = next; + return next; + })().finally(() => { + inflight = null; + }); return inflight; }; @@ -135,15 +148,19 @@ export const createCachedRemoteJWKSet = ( const get: JWTVerifyGetKey = async (protectedHeader, token) => { const current = await ensureFresh(false); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: jose JWTVerifyGetKey retry path is defined by thrown resolver failures try { return await current.resolver(protectedHeader, token); } catch (error) { // Likely cause: keys rotated upstream after our TTL window started. // Refetch once and try again. Anything still failing bubbles up so // jose can classify it (we do not silently swallow real failures). - if (!(error instanceof JWKSNoMatchingKey)) throw error; + if (!isJwksNoMatchingKey(error)) { + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: jose JWTVerifyGetKey requires preserving upstream resolver rejection + throw error; + } const refreshed = await ensureFresh(true); - return await refreshed.resolver(protectedHeader, token); + return refreshed.resolver(protectedHeader, token); } }; diff --git a/apps/cloud/src/mcp-auth.ts b/apps/cloud/src/mcp-auth.ts index fe7188a88..534ad4bf0 100644 --- a/apps/cloud/src/mcp-auth.ts +++ b/apps/cloud/src/mcp-auth.ts @@ -1,6 +1,6 @@ -import { Data, Effect, Result } from "effect"; +import { Data, Effect, Result, Schema } from "effect"; import { jwtVerify, type JWTVerifyGetKey } from "jose"; -import { JOSEError, JWKSInvalid, JWKSTimeout, JWTExpired } from "jose/errors"; +import { JWKSInvalid, JWKSTimeout, JWTExpired } from "jose/errors"; export type VerifiedToken = { /** The WorkOS account ID (user ID). 
*/ @@ -14,17 +14,29 @@ export class McpJwtVerificationError extends Data.TaggedError("McpJwtVerificatio readonly reason: "expired" | "invalid" | "system"; }> {} +const JoseErrorCode = Schema.Struct({ code: Schema.String }); + +const getJoseErrorCode = (cause: unknown): string | null => + Schema.is(JoseErrorCode)(cause) ? cause.code : null; + +const isJoseErrorCode = (code: string): boolean => code.startsWith("ERR_J"); + const classifyJwtVerificationError = (cause: unknown): McpJwtVerificationError => new McpJwtVerificationError({ cause, - reason: - cause instanceof JWTExpired - ? "expired" - : cause instanceof JWKSTimeout || - cause instanceof JWKSInvalid || - !(cause instanceof JOSEError) - ? "system" - : "invalid", + reason: (() => { + const code = getJoseErrorCode(cause); + if (code === JWTExpired.code) return "expired"; + if ( + code === JWKSTimeout.code || + code === JWKSInvalid.code || + code === null || + !isJoseErrorCode(code) + ) { + return "system"; + } + return "invalid"; + })(), }); const isExpectedJwtVerificationError = (error: McpJwtVerificationError): boolean => From 489f9f6f4d78e511bf851573d57b9cdd9415699e Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:04:51 -0700 Subject: [PATCH 089/108] Parse file secrets with schema --- packages/plugins/file-secrets/src/index.ts | 202 ++++++++++----------- 1 file changed, 95 insertions(+), 107 deletions(-) diff --git a/packages/plugins/file-secrets/src/index.ts b/packages/plugins/file-secrets/src/index.ts index 832601e7b..91fe7df90 100644 --- a/packages/plugins/file-secrets/src/index.ts +++ b/packages/plugins/file-secrets/src/index.ts @@ -27,11 +27,9 @@ export const xdgDataHome = (): string => { return path.join(process.env.HOME || "~", ".local", "share"); }; -const authDir = (overrideDir?: string): string => - overrideDir ?? path.join(xdgDataHome(), APP_NAME); +const authDir = (overrideDir?: string): string => overrideDir ?? 
path.join(xdgDataHome(), APP_NAME); -const authFilePath = (overrideDir?: string): string => - path.join(authDir(overrideDir), "auth.json"); +const authFilePath = (overrideDir?: string): string => path.join(authDir(overrideDir), "auth.json"); // --------------------------------------------------------------------------- // Schema for the auth file @@ -40,59 +38,83 @@ const authFilePath = (overrideDir?: string): string => // { "web-a1b2c3d4": { "github-token": "ghp_xxx" } } // --------------------------------------------------------------------------- -const ScopedAuthFile = Schema.Record( - Schema.String, - Schema.Record(Schema.String, Schema.String), -); -const decodeScopedAuthFile = Schema.decodeUnknownSync(ScopedAuthFile); +const ScopedAuthFile = Schema.Record(Schema.String, Schema.Record(Schema.String, Schema.String)); +const decodeScopedAuthFile = Schema.decodeUnknownEffect(Schema.fromJsonString(ScopedAuthFile)); // --------------------------------------------------------------------------- // File I/O with restricted permissions // -// These helpers throw on real I/O or decode failures — the provider wraps -// every call in `Effect.try` so those throws surface as typed -// `StorageError` on the Effect error channel. Previously `readFullFile` -// used a blanket `try { ... } catch { return {}; }` which masked JSON -// parse errors, schema decode failures, and permission errors as -// "empty file", making misconfigured installs silently return null from -// every `get`. +// These helpers keep real I/O and decode failures in the Effect error +// channel as `StorageError`. Missing files are still treated as an empty +// auth file, but malformed JSON, schema decode failures, and permission +// errors no longer collapse into "empty file". 
// --------------------------------------------------------------------------- -const readFullFile = (filePath: string): Record> => { - if (!fs.existsSync(filePath)) return {}; - let raw: string; - try { - raw = fs.readFileSync(filePath, "utf-8"); - } catch (cause) { - // Treat "file disappeared between existsSync and readFileSync" as - // absence — anything else (EACCES, EISDIR, …) propagates. - if ((cause as NodeJS.ErrnoException).code === "ENOENT") return {}; - throw cause; - } - return decodeScopedAuthFile(JSON.parse(raw)); +const isFileNotFoundCause = (cause: unknown): cause is NodeJS.ErrnoException => + typeof cause === "object" && cause !== null && "code" in cause && cause.code === "ENOENT"; + +const toStorageError = + (message: string) => + (cause: unknown): StorageError => + new StorageError({ message, cause }); + +const readFullFile = ( + filePath: string, +): Effect.Effect>, StorageError> => { + if (!fs.existsSync(filePath)) return Effect.succeed({}); + return Effect.try({ + try: () => fs.readFileSync(filePath, "utf-8"), + catch: toStorageError("Failed to read auth file"), + }).pipe( + Effect.catchIf( + (error) => isFileNotFoundCause(error.cause), + () => Effect.succeed(""), + ), + Effect.flatMap((raw) => + raw === "" + ? Effect.succeed({}) + : decodeScopedAuthFile(raw).pipe( + Effect.mapError(toStorageError("Failed to parse auth file")), + ), + ), + ); }; -const readScopeSecrets = (filePath: string, scopeId: string): Record => - readFullFile(filePath)[scopeId] ?? {}; +const readScopeSecrets = ( + filePath: string, + scopeId: string, +): Effect.Effect, StorageError> => + readFullFile(filePath).pipe(Effect.map((file) => file[scopeId] ?? 
{})); const writeScopeSecrets = ( filePath: string, scopeId: string, secrets: Record, -): void => { +): Effect.Effect => { const dir = path.dirname(filePath); - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir, { recursive: true, mode: 0o700 }); - } - const full = readFullFile(filePath); - if (Object.keys(secrets).length === 0) { - delete full[scopeId]; - } else { - full[scopeId] = secrets; - } const tmp = `${filePath}.tmp`; - fs.writeFileSync(tmp, JSON.stringify(full, null, 2), { mode: 0o600 }); - fs.renameSync(tmp, filePath); + return Effect.gen(function* () { + if (!fs.existsSync(dir)) { + yield* Effect.try({ + try: () => fs.mkdirSync(dir, { recursive: true, mode: 0o700 }), + catch: toStorageError("Failed to create auth directory"), + }); + } + const full = yield* readFullFile(filePath); + if (Object.keys(secrets).length === 0) { + delete full[scopeId]; + } else { + full[scopeId] = secrets; + } + yield* Effect.try({ + try: () => fs.writeFileSync(tmp, JSON.stringify(full, null, 2), { mode: 0o600 }), + catch: toStorageError("Failed to write temporary auth file"), + }); + yield* Effect.try({ + try: () => fs.renameSync(tmp, filePath), + catch: toStorageError("Failed to replace auth file"), + }); + }); }; // --------------------------------------------------------------------------- @@ -108,21 +130,16 @@ export interface FileSecretsPluginConfig { // Plugin extension — public API on executor.fileSecrets // --------------------------------------------------------------------------- -export interface FileSecretsExtension { - /** Path to the auth file */ - readonly filePath: string; -} +const makeFileSecretsExtension = (options: FileSecretsPluginConfig | undefined) => ({ + filePath: resolveFilePath(options), +}); + +export type FileSecretsExtension = ReturnType; // --------------------------------------------------------------------------- // Provider factory (internal) // --------------------------------------------------------------------------- -const toStorageError = 
(cause: unknown) => - new StorageError({ - message: cause instanceof Error ? cause.message : String(cause), - cause, - }); - // Scope arg is honored at every call: the auth.json is partitioned by // scope id, so read/write/delete route to `file[scope][secretId]`. The // provider is a singleton per executor; scope routing happens via the @@ -133,61 +150,36 @@ const toStorageError = (cause: unknown) => // the SecretProvider.list signature is scope-agnostic. That's fine for // the current use: `list` feeds `secrets.list()` which already walks // the stack at the caller layer. Innermost-first is the display default. -const makeScopedProvider = ( - filePath: string, - listScope: string, -): SecretProvider => ({ +const makeScopedProvider = (filePath: string, listScope: string): SecretProvider => ({ key: "file", writable: true, get: (secretId, scope) => - Effect.try({ - try: () => { - const data = readScopeSecrets(filePath, scope); - return data[secretId] ?? null; - }, - catch: toStorageError, - }), + readScopeSecrets(filePath, scope).pipe(Effect.map((data) => data[secretId] ?? 
null)), has: (secretId, scope) => - Effect.try({ - try: () => { - const data = readScopeSecrets(filePath, scope); - return secretId in data; - }, - catch: toStorageError, - }), + readScopeSecrets(filePath, scope).pipe(Effect.map((data) => secretId in data)), set: (secretId, value, scope) => - Effect.try({ - try: () => { - const data = readScopeSecrets(filePath, scope); - data[secretId] = value; - writeScopeSecrets(filePath, scope, data); - }, - catch: toStorageError, + Effect.gen(function* () { + const data = yield* readScopeSecrets(filePath, scope); + data[secretId] = value; + yield* writeScopeSecrets(filePath, scope, data); }), delete: (secretId, scope) => - Effect.try({ - try: () => { - const data = readScopeSecrets(filePath, scope); - const had = secretId in data; - delete data[secretId]; - if (had) writeScopeSecrets(filePath, scope, data); - return had; - }, - catch: toStorageError, + Effect.gen(function* () { + const data = yield* readScopeSecrets(filePath, scope); + const had = secretId in data; + delete data[secretId]; + if (had) yield* writeScopeSecrets(filePath, scope, data); + return had; }), list: () => - Effect.try({ - try: () => { - const data = readScopeSecrets(filePath, listScope); - return Object.keys(data).map((k) => ({ id: k, name: k })); - }, - catch: toStorageError, - }), + readScopeSecrets(filePath, listScope).pipe( + Effect.map((data) => Object.keys(data).map((k) => ({ id: k, name: k }))), + ), }); // --------------------------------------------------------------------------- @@ -201,19 +193,15 @@ const makeScopedProvider = ( const resolveFilePath = (config: FileSecretsPluginConfig | undefined): string => authFilePath(config?.directory); -export const fileSecretsPlugin = definePlugin( - (options?: FileSecretsPluginConfig) => ({ - id: "fileSecrets" as const, - storage: () => ({}), +export const fileSecretsPlugin = definePlugin((options?: FileSecretsPluginConfig) => ({ + id: "fileSecrets" as const, + storage: () => ({}), - extension: (_ctx): 
FileSecretsExtension => ({ - filePath: resolveFilePath(options), - }), + extension: () => makeFileSecretsExtension(options), - secretProviders: (ctx: PluginCtx) => [ - // list() falls back to the innermost scope for display; per-call - // get/set/delete honor the scope arg threaded from the secrets facade. - makeScopedProvider(resolveFilePath(options), ctx.scopes[0]!.id as string), - ], - }), -); + secretProviders: (ctx: PluginCtx) => [ + // list() falls back to the innermost scope for display; per-call + // get/set/delete honor the scope arg threaded from the secrets facade. + makeScopedProvider(resolveFilePath(options), ctx.scopes[0]!.id), + ], +})); From 20c8ac66dde1cdf0a938a9233017d5756fe660eb Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:05:03 -0700 Subject: [PATCH 090/108] Keep keychain boundaries typed --- packages/plugins/keychain/src/index.test.ts | 8 ++++---- packages/plugins/keychain/src/provider.ts | 8 ++++++-- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/packages/plugins/keychain/src/index.test.ts b/packages/plugins/keychain/src/index.test.ts index ebd05a798..5a18ffe3b 100644 --- a/packages/plugins/keychain/src/index.test.ts +++ b/packages/plugins/keychain/src/index.test.ts @@ -42,7 +42,7 @@ describe("keychain plugin", () => { return; } - try { + yield* Effect.gen(function* () { // Store through SDK, pinned to keychain provider yield* executor.secrets.set( new SetSecretInput({ @@ -61,9 +61,9 @@ describe("keychain plugin", () => { // SDK routes through the core secret table → pinned provider const resolved = yield* executor.secrets.get(testId); expect(resolved).toBe("keychain-test-value"); - } finally { - yield* executor.secrets.remove(testId).pipe(Effect.orElseSucceed(() => undefined)); - } + }).pipe( + Effect.ensuring(executor.secrets.remove(testId).pipe(Effect.orElseSucceed(() => undefined))), + ); }), ); diff --git a/packages/plugins/keychain/src/provider.ts 
b/packages/plugins/keychain/src/provider.ts index afaca888b..732b57e82 100644 --- a/packages/plugins/keychain/src/provider.ts +++ b/packages/plugins/keychain/src/provider.ts @@ -2,6 +2,7 @@ import { Effect } from "effect"; import { StorageError, type SecretProvider } from "@executor-js/sdk/core"; +import type { KeychainError } from "./errors"; import { getPassword, setPassword, deletePassword } from "./keyring"; // --------------------------------------------------------------------------- @@ -18,8 +19,11 @@ import { getPassword, setPassword, deletePassword } from "./keyring"; // impossible to debug why secrets weren't resolving. // --------------------------------------------------------------------------- -const toStorageError = (cause: { readonly message: string; readonly cause?: unknown }) => - new StorageError({ message: cause.message, cause: cause.cause ?? cause }); +const toStorageError = (cause: KeychainError) => { + const { cause: underlyingCause } = cause; + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: typed KeychainError message becomes StorageError message + return new StorageError({ message: cause.message, cause: underlyingCause ?? cause }); +}; // Scope arg is ignored — keychain partitions by `serviceName`, which the // host fixes per executor at construction time. 
A future refactor could From b953a1f48e2da55f671f416f5fc165a289b261bc Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:05:15 -0700 Subject: [PATCH 091/108] Clean core API typed boundaries --- packages/core/api/src/handlers/tools.ts | 2 +- packages/core/api/src/oauth-popup.test.ts | 21 +++++++++++++-------- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/packages/core/api/src/handlers/tools.ts b/packages/core/api/src/handlers/tools.ts index 0d3ad5d47..64846b9d8 100644 --- a/packages/core/api/src/handlers/tools.ts +++ b/packages/core/api/src/handlers/tools.ts @@ -35,7 +35,7 @@ export const ToolsHandlers = HttpApiBuilder.group(ExecutorApi, "tools", (handler const executor = yield* ExecutorService; const schema = yield* executor.tools.schema(path.toolId); if (schema === null) { - return yield* Effect.fail(new ToolNotFoundError({ toolId: path.toolId })); + return yield* new ToolNotFoundError({ toolId: path.toolId }); } return schema; })), diff --git a/packages/core/api/src/oauth-popup.test.ts b/packages/core/api/src/oauth-popup.test.ts index 53542dbf8..b9f43fa99 100644 --- a/packages/core/api/src/oauth-popup.test.ts +++ b/packages/core/api/src/oauth-popup.test.ts @@ -6,7 +6,7 @@ // --------------------------------------------------------------------------- import { describe, expect, it } from "@effect/vitest"; -import { Effect } from "effect"; +import { Data, Effect, Schema } from "effect"; import { OAUTH_POPUP_MESSAGE_TYPE, @@ -169,16 +169,21 @@ describe("runOAuthCallback", () => { }); it("renders a failure popup when completeOAuth fails and uses toErrorMessage", async () => { - class DomainError { - readonly _tag = "DomainError"; - constructor(readonly message: string) {} - } + class DomainError extends Data.TaggedError("DomainError")<{ + readonly message: string; + }> {} + const isDomainError = Schema.is(Schema.Struct({ + _tag: Schema.Literal("DomainError"), + message: Schema.String, 
+ })); const html = await Effect.runPromise( runOAuthCallback({ - complete: () => Effect.fail(new DomainError("Code expired")), + complete: () => Effect.fail(new DomainError({ message: "Code expired" })), urlParams: { state: "s1" }, - toErrorMessage: (error) => - error instanceof DomainError ? error.message : "unknown", + toErrorMessage: (error) => { + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: schema guard narrows the unknown popup callback error to the public test message + return isDomainError(error) ? error.message : "unknown"; + }, channelName: "c", }), ); From c403f48aa2ce342927f1b6521a01f0634e46768e Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:05:27 -0700 Subject: [PATCH 092/108] Use Effect UI mutation boundaries --- packages/react/src/api/oauth-popup.ts | 2 + packages/react/src/api/optimistic.tsx | 4 +- packages/react/src/components/form.tsx | 1 + packages/react/src/pages/policies.tsx | 18 ++++--- packages/react/src/pages/source-detail.tsx | 32 ++++++----- packages/react/src/pages/sources.tsx | 62 +++++++++++----------- 6 files changed, 62 insertions(+), 57 deletions(-) diff --git a/packages/react/src/api/oauth-popup.ts b/packages/react/src/api/oauth-popup.ts index b7e7dca66..a50fcb68a 100644 --- a/packages/react/src/api/oauth-popup.ts +++ b/packages/react/src/api/oauth-popup.ts @@ -85,6 +85,7 @@ export const openOAuthPopup = (input: OpenOAuthPopupInput): (() => /** Close the popup window if it's still open. Swallows cross-origin errors. */ const closePopup = (popup: Window | null) => { if (!popup) return; + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: cross-origin popup state can throw and cleanup is best-effort try { if (!popup.closed) popup.close(); } catch { @@ -129,6 +130,7 @@ export const openOAuthPopup = (input: OpenOAuthPopupInput): (() => const pollMs = input.closedPollMs ?? 
500; pollHandle = setInterval(() => { let isClosed = false; + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: browser popup.closed can throw while navigating cross-origin try { isClosed = popup.closed; } catch { diff --git a/packages/react/src/api/optimistic.tsx b/packages/react/src/api/optimistic.tsx index 3af368adf..fa1ccddfb 100644 --- a/packages/react/src/api/optimistic.tsx +++ b/packages/react/src/api/optimistic.tsx @@ -176,7 +176,7 @@ export const useConnectionsWithPendingRemovals = (scopeId: ScopeId) => { if (!AsyncResult.isSuccess(result) || pending.length === 0) return; const serverIds = new Set( - result.value.map((connection: { readonly id: string }) => connection.id as string), + result.value.map((connection: { readonly id: string }) => connection.id), ); for (const entry of pending) { if (!serverIds.has(entry.id)) remove(entry.id); @@ -189,7 +189,7 @@ export const useConnectionsWithPendingRemovals = (scopeId: ScopeId) => { if (pending.length === 0) return connections; const hiddenIds = new Set(pending.map((entry) => entry.id)); return connections.filter( - (connection: { readonly id: string }) => !hiddenIds.has(connection.id as string), + (connection: { readonly id: string }) => !hiddenIds.has(connection.id), ); }), [result, pending], diff --git a/packages/react/src/components/form.tsx b/packages/react/src/components/form.tsx index 0830633aa..059dc187f 100644 --- a/packages/react/src/components/form.tsx +++ b/packages/react/src/components/form.tsx @@ -120,6 +120,7 @@ function FormDescription({ className, ...props }: React.ComponentProps<"p">) { function FormMessage({ className, ...props }: React.ComponentProps<"p">) { const { error, formMessageId } = useFormField(); + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: react-hook-form field errors carry public validation text const body = error ? String(error?.message ?? 
"") : props.children; if (!body) { diff --git a/packages/react/src/pages/policies.tsx b/packages/react/src/pages/policies.tsx index ca7bfe694..8b712f6d7 100644 --- a/packages/react/src/pages/policies.tsx +++ b/packages/react/src/pages/policies.tsx @@ -1,6 +1,7 @@ import { useState } from "react"; import { useAtomSet, useAtomValue } from "@effect/atom-react"; import * as AsyncResult from "effect/unstable/reactivity/AsyncResult"; +import * as Exit from "effect/Exit"; import { generateKeyBetween } from "fractional-indexing"; import { ChevronDownIcon } from "lucide-react"; import { PolicyId, type ToolPolicyAction } from "@executor-js/sdk"; @@ -253,7 +254,7 @@ export function PoliciesPage() { const scopeId = useScope(); const policies = useAtomValue(policiesOptimisticAtom(scopeId)); const doCreate = useAtomSet(createPolicyOptimistic(scopeId), { - mode: "promise", + mode: "promiseExit", }); const doUpdate = useAtomSet(updatePolicyOptimistic(scopeId), { mode: "promise", @@ -265,15 +266,16 @@ export function PoliciesPage() { const handleCreate = async (input: { pattern: string; action: ToolPolicyAction }) => { setBusy(true); - try { - await doCreate({ - params: { scopeId }, - payload: { pattern: input.pattern, action: input.action }, - reactivityKeys: policyWriteKeys, - }); - } finally { + const exit = await doCreate({ + params: { scopeId }, + payload: { pattern: input.pattern, action: input.action }, + reactivityKeys: policyWriteKeys, + }); + if (Exit.isFailure(exit)) { setBusy(false); + return; } + setBusy(false); }; const handleUpdate = async (id: string, action: ToolPolicyAction) => { diff --git a/packages/react/src/pages/source-detail.tsx b/packages/react/src/pages/source-detail.tsx index 7436b42cf..049d79037 100644 --- a/packages/react/src/pages/source-detail.tsx +++ b/packages/react/src/pages/source-detail.tsx @@ -2,6 +2,7 @@ import { Suspense, useEffect, useMemo, useState } from "react"; import { useNavigate } from "@tanstack/react-router"; import { useAtomValue, 
useAtomSet, useAtomRefresh } from "@effect/atom-react"; import * as AsyncResult from "effect/unstable/reactivity/AsyncResult"; +import * as Exit from "effect/Exit"; import { effectivePolicyFromSorted } from "@executor-js/sdk"; import { policiesOptimisticAtom, @@ -31,8 +32,8 @@ export function SourceDetailPage(props: { namespace: string }) { const policies = useAtomValue(policiesOptimisticAtom(scopeId)); const refreshSources = useAtomRefresh(sourcesAtom(scopeId)); const refreshTools = useAtomRefresh(sourceToolsAtom(namespace, scopeId)); - const doRemove = useAtomSet(removeSource, { mode: "promise" }); - const doRefresh = useAtomSet(refreshSource, { mode: "promise" }); + const doRemove = useAtomSet(removeSource, { mode: "promiseExit" }); + const doRefresh = useAtomSet(refreshSource, { mode: "promiseExit" }); const policyActions = usePolicyActions(scopeId); const navigate = useNavigate(); @@ -117,28 +118,25 @@ export function SourceDetailPage(props: { namespace: string }) { const handleDelete = async () => { setDeleting(true); - try { - await doRemove({ - params: { scopeId, sourceId: namespace }, - reactivityKeys: sourceWriteKeys, - }); - void navigate({ to: "/" }); - } catch { + const exit = await doRemove({ + params: { scopeId, sourceId: namespace }, + reactivityKeys: sourceWriteKeys, + }); + if (Exit.isFailure(exit)) { setDeleting(false); setConfirmDelete(false); + return; } + void navigate({ to: "/" }); }; const handleRefresh = async () => { setRefreshing(true); - try { - await doRefresh({ - params: { scopeId, sourceId: namespace }, - reactivityKeys: sourceWriteKeys, - }); - } finally { - setRefreshing(false); - } + await doRefresh({ + params: { scopeId, sourceId: namespace }, + reactivityKeys: sourceWriteKeys, + }); + setRefreshing(false); }; const handleEditSave = () => { diff --git a/packages/react/src/pages/sources.tsx b/packages/react/src/pages/sources.tsx index a69924588..c4e26fab6 100644 --- a/packages/react/src/pages/sources.tsx +++ 
b/packages/react/src/pages/sources.tsx @@ -2,6 +2,7 @@ import { Suspense, useCallback, useMemo, useState } from "react"; import { Link, useNavigate } from "@tanstack/react-router"; import { useAtomSet } from "@effect/atom-react"; import * as AsyncResult from "effect/unstable/reactivity/AsyncResult"; +import * as Exit from "effect/Exit"; import { PlusIcon } from "lucide-react"; import type { SourceDetectionResult } from "@executor-js/sdk"; import { @@ -138,7 +139,7 @@ const looksLikeUrl = (raw: string): boolean => { function ConnectDialog(props: { open: boolean; onOpenChange: (open: boolean) => void }) { const sourcePlugins = useSourcePlugins(); const scopeId = useScope(); - const doDetect = useAtomSet(detectSource, { mode: "promise" }); + const doDetect = useAtomSet(detectSource, { mode: "promiseExit" }); const navigate = useNavigate(); const [query, setQuery] = useState(""); @@ -160,37 +161,38 @@ function ConnectDialog(props: { open: boolean; onOpenChange: (open: boolean) => if (!trimmed) return; setDetecting(true); setError(null); - try { - const results = await doDetect({ - params: { scopeId }, - payload: { url: trimmed }, - }); - if (results.length === 0) { - setError("Could not detect a source type from this URL. Try adding manually."); - setDetecting(false); - return; - } - const detected = bestDetection(results); - if (!detected) { - setError("Could not detect a source type from this URL. 
Try adding manually."); - setDetecting(false); - return; - } - const pluginKey = KIND_TO_PLUGIN_KEY[detected.kind]; - if (pluginKey) { - closeAndReset(); - void navigate({ - to: "/sources/add/$pluginKey", - params: { pluginKey }, - search: { url: trimmed, namespace: detected.namespace }, - }); - } else { - setError(`Detected source type "${detected.kind}" but no plugin is available for it.`); - setDetecting(false); - } - } catch { + const exit = await doDetect({ + params: { scopeId }, + payload: { url: trimmed }, + }); + if (Exit.isFailure(exit)) { setError("Detection failed. Try adding a source manually."); setDetecting(false); + return; + } + const results = exit.value; + if (results.length === 0) { + setError("Could not detect a source type from this URL. Try adding manually."); + setDetecting(false); + return; + } + const detected = bestDetection(results); + if (!detected) { + setError("Could not detect a source type from this URL. Try adding manually."); + setDetecting(false); + return; + } + const pluginKey = KIND_TO_PLUGIN_KEY[detected.kind]; + if (pluginKey) { + closeAndReset(); + void navigate({ + to: "/sources/add/$pluginKey", + params: { pluginKey }, + search: { url: trimmed, namespace: detected.namespace }, + }); + } else { + setError(`Detected source type "${detected.kind}" but no plugin is available for it.`); + setDetecting(false); } }, [query, doDetect, navigate, scopeId, closeAndReset]); From 025cdf68efdd5096d110daed2020237de60c8afd Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:10:55 -0700 Subject: [PATCH 093/108] Keep kernel core failures typed --- packages/kernel/core/src/code-recovery.ts | 1 + packages/kernel/core/src/effect-errors.ts | 4 +--- packages/kernel/core/src/validation.ts | 24 +++++++++++++---------- 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/packages/kernel/core/src/code-recovery.ts b/packages/kernel/core/src/code-recovery.ts index 
58a7f4fe2..fb59f0fc1 100644 --- a/packages/kernel/core/src/code-recovery.ts +++ b/packages/kernel/core/src/code-recovery.ts @@ -139,6 +139,7 @@ export const recoverExecutionBody = (code: string): string => { const source = extractCandidateSource(code); if (!source) return ""; + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: Babel parser throws for malformed candidate code, then recovery falls back to heuristics try { return renderParsedBody(source); } catch { diff --git a/packages/kernel/core/src/effect-errors.ts b/packages/kernel/core/src/effect-errors.ts index 26b5d7f8d..8e22d2406 100644 --- a/packages/kernel/core/src/effect-errors.ts +++ b/packages/kernel/core/src/effect-errors.ts @@ -3,11 +3,9 @@ import * as Data from "effect/Data"; export class KernelCoreEffectError extends Data.TaggedError("KernelCoreEffectError")<{ readonly module: string; readonly message: string; + readonly cause?: unknown; }> {} -export const kernelCoreEffectError = (module: string, message: string) => - new KernelCoreEffectError({ module, message }); - /** * Default failure type for any `CodeExecutor.execute` implementation — * surfaces sandbox-level defects (isolate crash, module load failure, diff --git a/packages/kernel/core/src/validation.ts b/packages/kernel/core/src/validation.ts index 4bfd41ca8..81ae3be1c 100644 --- a/packages/kernel/core/src/validation.ts +++ b/packages/kernel/core/src/validation.ts @@ -1,7 +1,7 @@ import type { StandardSchemaV1 } from "@standard-schema/spec"; import * as Effect from "effect/Effect"; -import { kernelCoreEffectError } from "./effect-errors"; +import { KernelCoreEffectError } from "./effect-errors"; const getSchemaValidator = ( schema: unknown, @@ -55,25 +55,29 @@ export const validateInput = (input: { const validate = getSchemaValidator(input.schema); if (!validate) { return Effect.fail( - kernelCoreEffectError( - "validation", - `Tool ${input.path} has no Standard Schema validator on inputSchema`, - ), + new 
KernelCoreEffectError({ + module: "validation", + message: `Tool ${input.path} has no Standard Schema validator on inputSchema`, + }), ); } return Effect.tryPromise({ try: () => Promise.resolve(validate(input.value)), catch: (cause) => - kernelCoreEffectError("validation", `Validation error for ${input.path}: ${String(cause)}`), + new KernelCoreEffectError({ + module: "validation", + message: `Validation error for ${input.path}`, + cause, + }), }).pipe( Effect.flatMap((result) => { if ("issues" in result && result.issues) { return Effect.fail( - kernelCoreEffectError( - "validation", - `Input validation failed for ${input.path}: ${formatIssues(result.issues)}`, - ), + new KernelCoreEffectError({ + module: "validation", + message: `Input validation failed for ${input.path}: ${formatIssues(result.issues)}`, + }), ); } return Effect.succeed(result.value); From e9fd11aa033c54646dd76d7b4c6b1712bb384d63 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:11:07 -0700 Subject: [PATCH 094/108] Use schema in OpenAPI preview boundaries --- packages/plugins/openapi/src/sdk/definitions.ts | 2 +- .../plugins/openapi/src/sdk/non-json-body.test.ts | 13 ++++++++++--- packages/plugins/openapi/src/sdk/preview.ts | 15 ++++++++++----- 3 files changed, 21 insertions(+), 9 deletions(-) diff --git a/packages/plugins/openapi/src/sdk/definitions.ts b/packages/plugins/openapi/src/sdk/definitions.ts index 68a11ec2e..7f9a54cca 100644 --- a/packages/plugins/openapi/src/sdk/definitions.ts +++ b/packages/plugins/openapi/src/sdk/definitions.ts @@ -219,7 +219,7 @@ export const compileToolDefinitions = ( operations: readonly ExtractedOperation[], ): ToolDefinition[] => { const raw = operations.map((op, index) => { - const operationId = op.operationId as string; + const operationId = op.operationId; const group = normalizeGroupSegment(op.tags[0]) ?? 
derivePathGroup(op.pathTemplate); const leaf = deriveLeaf(operationId, op.method, op.pathTemplate, group); const versionSegment = deriveVersionSegment(op.pathTemplate); diff --git a/packages/plugins/openapi/src/sdk/non-json-body.test.ts b/packages/plugins/openapi/src/sdk/non-json-body.test.ts index f42067d23..7c0b70cd7 100644 --- a/packages/plugins/openapi/src/sdk/non-json-body.test.ts +++ b/packages/plugins/openapi/src/sdk/non-json-body.test.ts @@ -11,7 +11,7 @@ // --------------------------------------------------------------------------- import { describe, expect, it } from "@effect/vitest"; -import { Effect } from "effect"; +import { Effect, Schema } from "effect"; import { FetchHttpClient } from "effect/unstable/http"; import { createServer } from "node:http"; import type { AddressInfo } from "node:net"; @@ -28,6 +28,11 @@ import { openApiPlugin } from "./plugin"; const autoApprove: InvokeOptions = { onElicitation: "accept-all" }; const TEST_SCOPE = "test-scope"; +const JsonNameBody = Schema.fromJsonString( + Schema.Struct({ + name: Schema.String, + }), +); const memoryProvider: SecretProvider = (() => { const store = new Map(); @@ -223,7 +228,7 @@ describe("OpenAPI non-JSON request body dispatch", () => { expect(captured.contentType).toBe("text/xml"); const body = captured.body.toString("utf8"); expect(body).not.toBe("[object Object]"); - expect(JSON.parse(body)).toEqual({ name: "Acme" }); + expect(Schema.decodeUnknownSync(JsonNameBody)(body)).toEqual({ name: "Acme" }); }), ); @@ -383,7 +388,9 @@ describe("OpenAPI non-JSON request body dispatch", () => { ); expect(captured.contentType).toBe("application/json"); - expect(JSON.parse(captured.body.toString("utf8"))).toEqual({ name: "Acme" }); + expect(Schema.decodeUnknownSync(JsonNameBody)(captured.body.toString("utf8"))).toEqual({ + name: "Acme", + }); }), ); diff --git a/packages/plugins/openapi/src/sdk/preview.ts b/packages/plugins/openapi/src/sdk/preview.ts index bdbfd1030..8d69b13b7 100644 --- 
a/packages/plugins/openapi/src/sdk/preview.ts +++ b/packages/plugins/openapi/src/sdk/preview.ts @@ -12,6 +12,10 @@ import { HttpMethod, ServerInfo, type ExtractionResult } from "./types"; /** Scopes declared by a flow: `{ scopeName: description }` */ const OAuth2Scopes = Schema.Record(Schema.String, Schema.String); +const SecuritySchemeType = Schema.Literals(["http", "apiKey", "oauth2", "openIdConnect"]); +type SecuritySchemeType = typeof SecuritySchemeType.Type; + +const decodeSecuritySchemeType = Schema.decodeUnknownOption(SecuritySchemeType); export class OAuth2AuthorizationCodeFlow extends Schema.Class( "OAuth2AuthorizationCodeFlow", @@ -43,7 +47,7 @@ export class SecurityScheme extends Schema.Class("SecurityScheme /** Key name in components.securitySchemes (e.g. "api_token") */ name: Schema.String, /** OpenAPI security scheme type */ - type: Schema.Literals(["http", "apiKey", "oauth2", "openIdConnect"]), + type: SecuritySchemeType, /** For type: "http" — e.g. "bearer", "basic" */ scheme: Schema.OptionFromOptional(Schema.String), /** For type: "http" with scheme "bearer" — e.g. 
"JWT" */ @@ -215,19 +219,20 @@ const extractSecuritySchemes = ( if (!resolved || typeof resolved !== "object") return []; const scheme = resolved; - const type = scheme.type as string; - if (!["http", "apiKey", "oauth2", "openIdConnect"].includes(type)) return []; + const type = decodeSecuritySchemeType(scheme.type); + if (Option.isNone(type)) return []; + const schemeType = type.value; return [ new SecurityScheme({ name, - type: type as "http" | "apiKey" | "oauth2" | "openIdConnect", + type: schemeType, scheme: Option.fromNullishOr(scheme.scheme as string | undefined), bearerFormat: Option.fromNullishOr(scheme.bearerFormat as string | undefined), in: Option.fromNullishOr(scheme.in as "header" | "query" | "cookie" | undefined), headerName: Option.fromNullishOr(scheme.name as string | undefined), description: Option.fromNullishOr(scheme.description as string | undefined), - flows: type === "oauth2" ? extractFlows(scheme.flows) : Option.none(), + flows: schemeType === "oauth2" ? extractFlows(scheme.flows) : Option.none(), openIdConnectUrl: Option.fromNullishOr( scheme.openIdConnectUrl as string | undefined, ), From fcd022c3f94ab48d318014b57169c71c3f34961d Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:11:17 -0700 Subject: [PATCH 095/108] Clean SDK typed test boundaries --- packages/core/sdk/src/blob.test.ts | 24 +++++++++----------- packages/core/sdk/src/error-handling.test.ts | 14 ++++++++---- packages/core/sdk/src/schema-types.test.ts | 15 ++++++++---- 3 files changed, 30 insertions(+), 23 deletions(-) diff --git a/packages/core/sdk/src/blob.test.ts b/packages/core/sdk/src/blob.test.ts index fd22a2f1a..2f62905a3 100644 --- a/packages/core/sdk/src/blob.test.ts +++ b/packages/core/sdk/src/blob.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "@effect/vitest"; -import { Cause, Effect, Exit } from "effect"; +import { Effect } from "effect"; import { StorageError } from 
"@executor-js/storage-core"; @@ -75,15 +75,13 @@ describe("pluginBlobStore", () => { Effect.gen(function* () { const store = makeInMemoryBlobStore(); const plugin = pluginBlobStore(store, ["inner", "outer"], "my-plugin"); - const result = yield* Effect.exit( - plugin.put("k", "v", { scope: "not-in-stack" }), - ); - expect(Exit.isFailure(result)).toBe(true); - if (!Exit.isFailure(result)) return; - const reason = result.cause.reasons.find(Cause.isFailReason); - const err = reason?.error ?? null; + const err = yield* plugin + .put("k", "v", { scope: "not-in-stack" }) + .pipe(Effect.flip); expect(err).toBeInstanceOf(StorageError); - expect((err as StorageError).message).toContain("not in the"); + expect(err).toMatchObject({ + message: expect.stringContaining("not in the"), + }); // Write must not have reached the store. expect(yield* store.get("not-in-stack/my-plugin", "k")).toBeNull(); }), @@ -93,10 +91,10 @@ describe("pluginBlobStore", () => { Effect.gen(function* () { const store = makeInMemoryBlobStore(); const plugin = pluginBlobStore(store, ["inner"], "my-plugin"); - const result = yield* Effect.exit( - plugin.delete("k", { scope: "not-in-stack" }), - ); - expect(Exit.isFailure(result)).toBe(true); + const err = yield* plugin + .delete("k", { scope: "not-in-stack" }) + .pipe(Effect.flip); + expect(err).toBeInstanceOf(StorageError); }), ); }); diff --git a/packages/core/sdk/src/error-handling.test.ts b/packages/core/sdk/src/error-handling.test.ts index 53bc1ea5c..2a633bf33 100644 --- a/packages/core/sdk/src/error-handling.test.ts +++ b/packages/core/sdk/src/error-handling.test.ts @@ -22,7 +22,7 @@ // --------------------------------------------------------------------------- import { describe, expect, it } from "@effect/vitest"; -import { Effect } from "effect"; +import { Effect, Predicate } from "effect"; import { StorageError, @@ -83,9 +83,13 @@ const baseConfig = (adapter: DBAdapter) => ({ describe("typed-error edge model — SDK", () => { 
it.effect("StorageError propagates raw through the executor surface", () => Effect.gen(function* () { + const driverCause = new StorageError({ + message: "driver kaboom", + cause: undefined, + }); const failure = new StorageError({ message: "backend lost its mind", - cause: new Error("driver kaboom"), + cause: driverCause, }); const executor = yield* createExecutor( baseConfig(makeFailingAdapter(failure)), @@ -93,12 +97,12 @@ describe("typed-error edge model — SDK", () => { const result = yield* executor.tools.list().pipe(Effect.flip); expect(result).toBeInstanceOf(StorageError); - expect(result._tag).toBe("StorageError"); + expect(Predicate.isTagged(result, "StorageError")).toBe(true); // Original cause preserved end-to-end. const storageErr = result as StorageError; expect(storageErr.message).toBe("backend lost its mind"); - expect(storageErr.cause).toBeInstanceOf(Error); - expect((storageErr.cause as Error).message).toBe("driver kaboom"); + expect(storageErr.cause).toBe(driverCause); + expect(driverCause.message).toBe("driver kaboom"); }), ); diff --git a/packages/core/sdk/src/schema-types.test.ts b/packages/core/sdk/src/schema-types.test.ts index 29f21fd71..9ff8aeace 100644 --- a/packages/core/sdk/src/schema-types.test.ts +++ b/packages/core/sdk/src/schema-types.test.ts @@ -1,5 +1,6 @@ import { readFileSync } from "node:fs"; import { describe, expect, it } from "@effect/vitest"; +import { Schema } from "effect"; import { buildToolTypeScriptPreview, @@ -7,15 +8,19 @@ import { schemaToTypeScriptPreviewWithDefs, } from "./schema-types"; -const stripeBalanceTransactionsFixture = JSON.parse( +const StripeBalanceTransactionsFixture = Schema.Struct({ + schema: Schema.Unknown, + defs: Schema.Record(Schema.String, Schema.Unknown), +}); + +const stripeBalanceTransactionsFixture = Schema.decodeUnknownSync( + Schema.fromJsonString(StripeBalanceTransactionsFixture), +)( readFileSync( new URL("./__fixtures__/stripe-get-balance-transactions-id.json", import.meta.url), "utf8", 
), -) as { - schema: unknown; - defs: Record; -}; +); describe("schema-types", () => { it("reuses referenced definitions instead of inlining them", () => { From e393aeda01e035f605f44a0556378419fc0cc883 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:11:33 -0700 Subject: [PATCH 096/108] Mark local app runtime boundaries --- apps/local/src/routes/plugins.$pluginId.$.tsx | 2 ++ apps/local/src/serve.ts | 1 + apps/local/src/server/observability.ts | 1 + 3 files changed, 4 insertions(+) diff --git a/apps/local/src/routes/plugins.$pluginId.$.tsx b/apps/local/src/routes/plugins.$pluginId.$.tsx index 0682db09c..472ab9768 100644 --- a/apps/local/src/routes/plugins.$pluginId.$.tsx +++ b/apps/local/src/routes/plugins.$pluginId.$.tsx @@ -30,10 +30,12 @@ function PluginRouteComponent() { const { pluginId, _splat: rest } = Route.useParams(); const plugins = useClientPlugins(); const plugin = plugins.find((p) => p.id === pluginId); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: TanStack Router represents not-found from components by throwing notFound() if (!plugin) throw notFound(); const target = normalizePath(rest ?? 
"/"); const page = plugin.pages?.find((p) => normalizePath(p.path) === target); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: TanStack Router represents not-found from components by throwing notFound() if (!page) throw notFound(); const Component = page.component; diff --git a/apps/local/src/serve.ts b/apps/local/src/serve.ts index da8405ba3..3aa1bff32 100644 --- a/apps/local/src/serve.ts +++ b/apps/local/src/serve.ts @@ -37,6 +37,7 @@ const hasFileExtension = (pathname: string): boolean => { function collectStaticRoutes(dir: string, prefix = ""): Record { const routes: Record = {}; + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: filesystem route discovery is best-effort for optional built assets try { for (const entry of readdirSync(dir, { withFileTypes: true })) { const fullPath = join(dir, entry.name); diff --git a/apps/local/src/server/observability.ts b/apps/local/src/server/observability.ts index 7022531ea..9d10c8ac6 100644 --- a/apps/local/src/server/observability.ts +++ b/apps/local/src/server/observability.ts @@ -23,6 +23,7 @@ export const ErrorCaptureLive: Layer.Layer = Layer.succeed( const squashed = Cause.squash(cause); console.error( `[executor ${traceId}]`, + // oxlint-disable-next-line executor/no-instanceof-error -- boundary: console logger preserves native Error stack output squashed instanceof Error ? squashed.stack ?? 
squashed : squashed, ); console.error(`[executor ${traceId}] cause:`, Cause.pretty(cause)); From df6f3742a2de27a85abd2633f9282a4d0ccef543 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:11:55 -0700 Subject: [PATCH 097/108] Clean CLI and host test boundaries --- .../hosts/mcp/src/stdio-integration.test.ts | 49 ++++++++++--------- tests/presets-reachable.test.ts | 5 +- tests/tools-cli.test.ts | 1 + 3 files changed, 31 insertions(+), 24 deletions(-) diff --git a/packages/hosts/mcp/src/stdio-integration.test.ts b/packages/hosts/mcp/src/stdio-integration.test.ts index db3b461b5..6d0aea108 100644 --- a/packages/hosts/mcp/src/stdio-integration.test.ts +++ b/packages/hosts/mcp/src/stdio-integration.test.ts @@ -1,6 +1,7 @@ import { describe, expect, it } from "@effect/vitest"; import { Client } from "@modelcontextprotocol/sdk/client/index.js"; import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js"; +import { Effect } from "effect"; import { mkdtempSync } from "node:fs"; import { tmpdir } from "node:os"; import { join, resolve } from "node:path"; @@ -10,35 +11,39 @@ const cliEntry = resolve(repoRoot, "apps/cli/src/main.ts"); const testScope = resolve(repoRoot, "apps/local"); describe("MCP stdio integration", () => { - it("execute tool returns result over stdio transport", async () => { - // Fresh temp dir so the test doesn't migrate against the developer's - // real ~/.executor/data.db. - const dataDir = mkdtempSync(join(tmpdir(), "executor-mcp-test-")); - - const transport = new StdioClientTransport({ - command: "bun", - args: ["run", cliEntry, "mcp", "--scope", testScope], - env: { ...process.env, EXECUTOR_DATA_DIR: dataDir }, - }); + it.effect("execute tool returns result over stdio transport", () => + Effect.gen(function* () { + // Fresh temp dir so the test doesn't migrate against the developer's + // real ~/.executor/data.db. 
+ const dataDir = mkdtempSync(join(tmpdir(), "executor-mcp-test-")); + + const transport = new StdioClientTransport({ + command: "bun", + args: ["run", cliEntry, "mcp", "--scope", testScope], + env: { ...process.env, EXECUTOR_DATA_DIR: dataDir }, + }); - const client = new Client({ name: "test-client", version: "1.0.0" }, { capabilities: {} }); + const client = new Client({ name: "test-client", version: "1.0.0" }, { capabilities: {} }); - await client.connect(transport); + yield* Effect.acquireRelease( + Effect.promise(() => client.connect(transport)), + () => Effect.promise(() => transport.close()), + ); - try { - const { tools } = await client.listTools(); + const { tools } = yield* Effect.promise(() => client.listTools()); expect(tools.map((t) => t.name)).toContain("execute"); - const result = await client.callTool({ - name: "execute", - arguments: { code: "return 2+2" }, - }); + const result = yield* Effect.promise(() => + client.callTool({ + name: "execute", + arguments: { code: "return 2+2" }, + }), + ); const text = (result.content as Array<{ type: string; text: string }>)[0]?.text; expect(text).toContain("4"); expect(result.isError).toBeFalsy(); - } finally { - await transport.close(); - } - }, 30_000); + }).pipe(Effect.scoped), + { timeout: 30_000 }, + ); }); diff --git a/tests/presets-reachable.test.ts b/tests/presets-reachable.test.ts index 120e7603c..86c051a7e 100644 --- a/tests/presets-reachable.test.ts +++ b/tests/presets-reachable.test.ts @@ -62,10 +62,11 @@ describe("graphql presets are reachable endpoints", () => { const result = yield* introspect(preset.url).pipe( Effect.provide(FetchHttpClient.layer), Effect.map((r) => ({ ok: true as const, schema: r })), - Effect.catch((err) => + Effect.catchTag("GraphqlIntrospectionError", (err) => Effect.succeed({ ok: false as const, - message: String(err), + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: catchTag narrows to GraphqlIntrospectionError whose public contract includes 
message + message: err.message, }), ), ); diff --git a/tests/tools-cli.test.ts b/tests/tools-cli.test.ts index d3931beb5..66a5b6940 100644 --- a/tests/tools-cli.test.ts +++ b/tests/tools-cli.test.ts @@ -34,6 +34,7 @@ describe("CLI tooling helpers", () => { it.effect("rejects non-object JSON input", () => Effect.gen(function* () { const error = yield* parseJsonObjectInput('[1,2,3]').pipe(Effect.flip); + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: helper contract returns a native Error for CLI input parsing expect(error.message).toContain("must decode to a JSON object"); }), ); From 09e9ef6bb30d62d959731a98ac62a85f767b3bca Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:12:13 -0700 Subject: [PATCH 098/108] Clean small package lint boundaries --- packages/core/cli/src/generators/index.ts | 1 + packages/core/storage-file/src/index.test.ts | 1 + packages/plugins/example/src/server.ts | 28 +++++++++---------- .../google-discovery/src/sdk/document.ts | 12 +++----- 4 files changed, 19 insertions(+), 23 deletions(-) diff --git a/packages/core/cli/src/generators/index.ts b/packages/core/cli/src/generators/index.ts index 600cd7149..4aa02d605 100644 --- a/packages/core/cli/src/generators/index.ts +++ b/packages/core/cli/src/generators/index.ts @@ -13,6 +13,7 @@ export const generateSchema = ( ) => { const generator = generators[adapter]; if (!generator) { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: synchronous CLI generator registry rejects unsupported adapter names throw new Error( `Generator "${adapter}" is not supported. 
Available: ${Object.keys(generators).join(", ")}`, ); diff --git a/packages/core/storage-file/src/index.test.ts b/packages/core/storage-file/src/index.test.ts index 1f7069310..391807cce 100644 --- a/packages/core/storage-file/src/index.test.ts +++ b/packages/core/storage-file/src/index.test.ts @@ -81,6 +81,7 @@ const bootstrapTables = ( ): void => { for (const table of Object.values(tables)) { // Skip relations — they aren't tables + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: drizzle getTableConfig throws for relation helpers in this test bootstrap try { const config = getTableConfig(table); const cols = config.columns.map((col) => { diff --git a/packages/plugins/example/src/server.ts b/packages/plugins/example/src/server.ts index 78b06608c..9c1970139 100644 --- a/packages/plugins/example/src/server.ts +++ b/packages/plugins/example/src/server.ts @@ -26,11 +26,18 @@ import { ExampleApi } from "./shared"; // FullApi, so this bundle never touches the host's wiring. const ExampleApiBundle = HttpApi.make("example").add(ExampleApi); -interface ExampleExtension { - readonly greet: ( - name: string, - ) => Effect.Effect<{ readonly message: string; readonly count: number }>; -} +const makeExampleExtension = (ctx: { readonly storage: { count: number } }) => ({ + greet: (name: string) => + Effect.sync(() => { + ctx.storage.count += 1; + return { + message: `hello ${name}`, + count: ctx.storage.count, + }; + }), +}); + +type ExampleExtension = ReturnType; export class ExampleExtensionService extends Context.Service< ExampleExtensionService, @@ -56,16 +63,7 @@ export const examplePlugin = definePlugin(() => ({ // Canonical implementation. CLI/tests/embedded callers and the HTTP // handler all hit this same code path. 
- extension: (ctx): ExampleExtension => ({ - greet: (name: string) => - Effect.sync(() => { - ctx.storage.count += 1; - return { - message: `hello ${name}`, - count: ctx.storage.count, - }; - }), - }), + extension: makeExampleExtension, routes: () => ExampleApi, handlers: () => ExampleHandlers, diff --git a/packages/plugins/google-discovery/src/sdk/document.ts b/packages/plugins/google-discovery/src/sdk/document.ts index c58d57f74..9250991e0 100644 --- a/packages/plugins/google-discovery/src/sdk/document.ts +++ b/packages/plugins/google-discovery/src/sdk/document.ts @@ -135,13 +135,6 @@ const DiscoveryDocumentModel = Schema.Struct({ }); type DiscoveryDocument = typeof DiscoveryDocumentModel.Type; -// The Schema.TaggedError version of GoogleDiscoveryParseError no longer -// carries a `cause` field — it would leak raw decoder internals over the -// wire. The decoder failure still shows up on the Effect cause chain for -// server-side logging; the client only sees the user-facing `message`. -const toParseError = (message: string, _cause: unknown) => - new GoogleDiscoveryParseError({ message }); - const decodeUnknownWith = ( message: string, @@ -150,7 +143,10 @@ const decodeUnknownWith = (value) => Effect.try({ try: () => decode(value), - catch: (error) => toParseError(message, error), + // The Schema.TaggedError version of GoogleDiscoveryParseError no + // longer carries a `cause` field because the client only sees the + // user-facing message. 
+ catch: () => new GoogleDiscoveryParseError({ message }), }); const decodeDiscoveryDocument = decodeUnknownWith( From 14768967d6de333636e260489b598456cf09f891 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:18:46 -0700 Subject: [PATCH 099/108] Clean remaining cloud lint boundaries --- apps/cloud/src/mcp-session.e2e.node.test.ts | 17 +++++++++++++---- apps/cloud/src/observability.ts | 1 + apps/cloud/src/org/handlers.ts | 4 ++-- apps/cloud/src/routes/billing_.plans.tsx | 6 +++++- .../src/secrets-isolation.e2e.node.test.ts | 4 ++-- .../services/__test-harness__/api-harness.ts | 1 + .../cloud/src/services/secrets-api.node.test.ts | 4 ++-- .../src/services/sources-refresh.node.test.ts | 15 ++++++++------- apps/cloud/vitest.config.ts | 1 + 9 files changed, 35 insertions(+), 18 deletions(-) diff --git a/apps/cloud/src/mcp-session.e2e.node.test.ts b/apps/cloud/src/mcp-session.e2e.node.test.ts index b0fb06992..d90d89ba3 100644 --- a/apps/cloud/src/mcp-session.e2e.node.test.ts +++ b/apps/cloud/src/mcp-session.e2e.node.test.ts @@ -146,10 +146,19 @@ const openSession = ( return { client, clientTransport, serverTransport }; }), ({ clientTransport, serverTransport }) => - Effect.promise(async () => { - await clientTransport.close().catch(() => undefined); - await serverTransport.close().catch(() => undefined); - }), + Effect.all( + [ + Effect.tryPromise({ + try: () => clientTransport.close(), + catch: (cause) => cause, + }).pipe(Effect.ignore), + Effect.tryPromise({ + try: () => serverTransport.close(), + catch: (cause) => cause, + }).pipe(Effect.ignore), + ], + { discard: true }, + ), ).pipe(Effect.map(({ client }) => ({ client }))); const nextOrgId = (() => { diff --git a/apps/cloud/src/observability.ts b/apps/cloud/src/observability.ts index ae9f624ae..6a3bde420 100644 --- a/apps/cloud/src/observability.ts +++ b/apps/cloud/src/observability.ts @@ -42,6 +42,7 @@ export const sentryPayloadForCause = ( if 
(Cause.isCause(input)) { const pretty = Cause.pretty(input); const errors = Cause.prettyErrors(input); + // oxlint-disable-next-line executor/no-error-constructor -- boundary: Sentry captureException needs an Error-like primary payload for pretty Effect causes return { primary: errors[0] ?? new Error(pretty), pretty }; } return { primary: input, pretty: null }; diff --git a/apps/cloud/src/org/handlers.ts b/apps/cloud/src/org/handlers.ts index 6093fec02..8ec4d6014 100644 --- a/apps/cloud/src/org/handlers.ts +++ b/apps/cloud/src/org/handlers.ts @@ -1,5 +1,5 @@ import { HttpApiBuilder } from "effect/unstable/httpapi"; -import { Effect } from "effect"; +import { Cause, Effect } from "effect"; import { UserStoreService } from "../auth/context"; import { AuthContext } from "../auth/middleware"; @@ -103,7 +103,7 @@ const reserveMemberSlot = Effect.gen(function* () { Effect.catchCause((cause) => Effect.gen(function* () { yield* Effect.logError("members.seats lookup failed; failing closed").pipe( - Effect.annotateLogs({ "org.id": auth.organizationId, cause: String(cause) }), + Effect.annotateLogs({ "org.id": auth.organizationId, cause: Cause.pretty(cause) }), ); return yield* new Forbidden(); }), diff --git a/apps/cloud/src/routes/billing_.plans.tsx b/apps/cloud/src/routes/billing_.plans.tsx index ae95a4674..95e719786 100644 --- a/apps/cloud/src/routes/billing_.plans.tsx +++ b/apps/cloud/src/routes/billing_.plans.tsx @@ -346,6 +346,7 @@ function SlackContactCta() { } setSubmitting(true); setError(null); + // oxlint-disable-next-line executor/no-try-catch-or-throw -- boundary: browser fetch submit path maps network failures to public UI copy try { const res = await fetch("/api/contact/slack", { method: "POST", @@ -358,7 +359,10 @@ function SlackContactCta() { turnstileToken, }), }); - const data = (await res.json().catch(() => ({}))) as { url?: string; error?: string }; + const data = (await res.json().then( + (value) => value, + () => ({}), + )) as { url?: string; error?: 
string }; if (!res.ok) { setError(data.error ?? "Something went wrong. Please try again."); return; diff --git a/apps/cloud/src/secrets-isolation.e2e.node.test.ts b/apps/cloud/src/secrets-isolation.e2e.node.test.ts index 64773bf66..5973b522a 100644 --- a/apps/cloud/src/secrets-isolation.e2e.node.test.ts +++ b/apps/cloud/src/secrets-isolation.e2e.node.test.ts @@ -27,7 +27,7 @@ // SDK-level change — coverage for it belongs after the fix. import { describe, expect, it } from "@effect/vitest"; -import { Effect } from "effect"; +import { Effect, Result } from "effect"; import { ScopeId, SecretId } from "@executor-js/sdk"; @@ -237,7 +237,7 @@ describe("cloud secret isolation (HTTP, user-org scope stack)", () => { }) .pipe(Effect.result), ); - expect(result._tag).toBe("Failure"); + expect(Result.isFailure(result)).toBe(true); // And nothing landed in the foreign org — a fresh session pointed // at that org must not see `wrong-scope`. diff --git a/apps/cloud/src/services/__test-harness__/api-harness.ts b/apps/cloud/src/services/__test-harness__/api-harness.ts index 4ae4d5ff5..6fc3587cc 100644 --- a/apps/cloud/src/services/__test-harness__/api-harness.ts +++ b/apps/cloud/src/services/__test-harness__/api-harness.ts @@ -135,6 +135,7 @@ const TestExecutionStackMiddleware = HttpRouter.middleware<{ const request = yield* HttpServerRequest.HttpServerRequest; const orgId = request.headers[TEST_ORG_HEADER]; if (!orgId || typeof orgId !== "string") { + // oxlint-disable-next-line executor/no-effect-escape-hatch, executor/no-error-constructor -- boundary: test HTTP harness has no request context without x-test-org-id return yield* Effect.die(new Error("missing x-test-org-id")); } const userHeader = request.headers[TEST_USER_HEADER]; diff --git a/apps/cloud/src/services/secrets-api.node.test.ts b/apps/cloud/src/services/secrets-api.node.test.ts index 97214f41b..c07150e75 100644 --- a/apps/cloud/src/services/secrets-api.node.test.ts +++ 
b/apps/cloud/src/services/secrets-api.node.test.ts @@ -2,7 +2,7 @@ // and error fidelity within a single org. import { describe, expect, it } from "@effect/vitest"; -import { Effect } from "effect"; +import { Effect, Result } from "effect"; import { ScopeId, SecretId } from "@executor-js/sdk"; @@ -128,7 +128,7 @@ describe("secrets api (HTTP)", () => { .remove({ params: { scopeId: ScopeId.make(org), secretId: SecretId.make(missing) } }) .pipe(Effect.result), ); - expect(result._tag).toBe("Success"); + expect(Result.isSuccess(result)).toBe(true); }), ); diff --git a/apps/cloud/src/services/sources-refresh.node.test.ts b/apps/cloud/src/services/sources-refresh.node.test.ts index c6ee73334..fd0a6cfd7 100644 --- a/apps/cloud/src/services/sources-refresh.node.test.ts +++ b/apps/cloud/src/services/sources-refresh.node.test.ts @@ -84,9 +84,12 @@ const serveMutableSpec = () => { describe("sources.refresh (HTTP)", () => { it.effect("addSpec from URL → canRefresh:true; refresh re-fetches and updates tools", () => - Effect.gen(function* () { - const server = yield* Effect.promise(() => serveMutableSpec()); - try { + Effect.scoped( + Effect.gen(function* () { + const server = yield* Effect.acquireRelease( + Effect.promise(() => serveMutableSpec()), + (server) => Effect.promise(() => server.close()), + ); const org = `org_${crypto.randomUUID()}`; const namespace = `ns_${crypto.randomUUID().replace(/-/g, "_")}`; @@ -139,10 +142,8 @@ describe("sources.refresh (HTTP)", () => { expect(afterTools.length).toBe(2); expect(afterTools.some((t) => t.name.startsWith("ping"))).toBe(true); expect(afterTools.some((t) => t.name.startsWith("pong"))).toBe(true); - } finally { - yield* Effect.promise(() => server.close()); - } - }), + }), + ), ); it.effect("addSpec from raw text → canRefresh:false; refresh is a no-op", () => diff --git a/apps/cloud/vitest.config.ts b/apps/cloud/vitest.config.ts index 460cd46b8..68be4ca21 100644 --- a/apps/cloud/vitest.config.ts +++ b/apps/cloud/vitest.config.ts 
@@ -17,6 +17,7 @@ export default defineConfig({ // the socket is closing anyway — so filter it out rather than fail // the run with noise. onUnhandledError(error) { + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: Vitest unhandled-error hook receives unknown host errors if (error && (error as Error).message === "Stream was cancelled.") { return false; } From 719c5ea0f9b0add60b3c5690424a1931034a2e1d Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:19:15 -0700 Subject: [PATCH 100/108] Clean execution runtime lint boundaries --- packages/core/execution/src/description.ts | 6 +++++- .../runtime-dynamic-worker/src/integration.test.ts | 11 +++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/packages/core/execution/src/description.ts b/packages/core/execution/src/description.ts index fcf4792e2..0cbf9155b 100644 --- a/packages/core/execution/src/description.ts +++ b/packages/core/execution/src/description.ts @@ -12,7 +12,11 @@ export const buildExecuteDescription = (executor: Executor): Effect.Effect formatDescription(sources)).pipe( Effect.withSpan("schema.compile.description", { diff --git a/packages/kernel/runtime-dynamic-worker/src/integration.test.ts b/packages/kernel/runtime-dynamic-worker/src/integration.test.ts index 22aba9b0e..6b5d59a6d 100644 --- a/packages/kernel/runtime-dynamic-worker/src/integration.test.ts +++ b/packages/kernel/runtime-dynamic-worker/src/integration.test.ts @@ -18,6 +18,7 @@ import { describe, expect, it } from "@effect/vitest"; import { env } from "cloudflare:workers"; import * as Effect from "effect/Effect"; import * as Layer from "effect/Layer"; +import * as Predicate from "effect/Predicate"; import { HttpClient, HttpClientResponse, type HttpClientRequest } from "effect/unstable/http"; import { @@ -85,15 +86,17 @@ const makeRecordingHttpClient = () => { const headers = { ...request.headers }; let bytes = new 
Uint8Array(); let contentType = headers["content-type"] ?? ""; - const tag = request.body._tag; + const isRaw = Predicate.isTagged(request.body, "Raw"); + const isUint8Array = Predicate.isTagged(request.body, "Uint8Array"); + const isFormData = Predicate.isTagged(request.body, "FormData"); - if (tag === "Raw" || tag === "Uint8Array") { + if (isRaw || isUint8Array) { const wire = new Request("http://capture/", { method: "POST", body: request.body.body as BodyInit, }); bytes = new Uint8Array(yield* Effect.promise(() => wire.arrayBuffer())); - } else if (tag === "FormData") { + } else if (isFormData) { // Letting `Response` realize the FormData yields the actual // multipart wire bytes plus a generated boundary in its // content-type header — exactly what the upstream server sees. @@ -106,7 +109,7 @@ const makeRecordingHttpClient = () => { url: request.url, method: request.method, contentType, - bodyKind: tag, + bodyKind: isRaw ? "Raw" : isUint8Array ? "Uint8Array" : isFormData ? "FormData" : "", body: bytes, }); From 6220d9fcd7366156c6a0851610f9e835923b27a0 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:19:29 -0700 Subject: [PATCH 101/108] Remove OpenAPI summary casts --- packages/plugins/openapi/src/react/OpenApiSourceSummary.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/plugins/openapi/src/react/OpenApiSourceSummary.tsx b/packages/plugins/openapi/src/react/OpenApiSourceSummary.tsx index f04e7e654..305c161ed 100644 --- a/packages/plugins/openapi/src/react/OpenApiSourceSummary.tsx +++ b/packages/plugins/openapi/src/react/OpenApiSourceSummary.tsx @@ -86,9 +86,9 @@ export default function OpenApiSourceSummary(props: { return props.variant === "panel" ? null : ; } const connections = AsyncResult.isSuccess(connectionsResult) ? 
connectionsResult.value : []; - const liveConnectionIds = new Set(connections.map((connection) => connection.id as string)); + const liveConnectionIds = new Set(connections.map((connection) => connection.id)); const scopeRanks = new Map( - scopeStack.map((scope, index) => [scope.id as string, index] as const), + scopeStack.map((scope, index) => [scope.id, index] as const), ); const credentialTargetScope = userScope; const missing = missingCredentialLabels(source, bindings, credentialTargetScope, scopeRanks, { From 7656b2e434be765154b59b2c4ceea3df4880a085 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:33:31 -0700 Subject: [PATCH 102/108] Clean MCP per-user auth test boundaries --- .../src/sdk/per-user-auth-isolation.test.ts | 41 ++++++++----------- 1 file changed, 18 insertions(+), 23 deletions(-) diff --git a/packages/plugins/mcp/src/sdk/per-user-auth-isolation.test.ts b/packages/plugins/mcp/src/sdk/per-user-auth-isolation.test.ts index cf655a4d1..869055bbd 100644 --- a/packages/plugins/mcp/src/sdk/per-user-auth-isolation.test.ts +++ b/packages/plugins/mcp/src/sdk/per-user-auth-isolation.test.ts @@ -19,7 +19,7 @@ import * as http from "node:http"; import { describe, expect, it } from "@effect/vitest"; -import { Cause, Effect, Exit } from "effect"; +import { Cause, Effect, Exit, Predicate } from "effect"; import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js"; import { z } from "zod"; @@ -37,6 +37,7 @@ import { definePlugin, makeInMemoryBlobStore, type SecretProvider, + type ToolInvocationError, } from "@executor-js/sdk"; import { makeMemoryAdapter } from "@executor-js/storage-core/testing/memory"; @@ -96,6 +97,12 @@ type TestServer = { readonly recorded: () => readonly RecordedRequest[]; }; +const failureError = (exit: Exit.Exit): E | undefined => + Exit.isFailure(exit) ? 
exit.cause.reasons.find(Cause.isFailReason)?.error : undefined; + +const isToolInvocationError = (error: unknown): error is ToolInvocationError => + Predicate.isTagged(error, "ToolInvocationError"); + const createAuthRecordingServer: Effect.Effect = Effect.callback((resume) => { const transports = new Map(); @@ -258,7 +265,7 @@ describe("per-user MCP auth isolation", () => { // stack [userB, org] can see them via fall-through. yield* execUserA.mcp.addSource({ transport: "remote", - scope: ORG as string, + scope: ORG, name: "Shared MCP", endpoint: server.url, namespace: "iso_test", @@ -312,21 +319,15 @@ describe("per-user MCP auth isolation", () => { // Pin the exact error tag so a future regression that swaps // the "connection not found" check for a silent `auth: { kind: // "none" }` fallback would fail here, not silently connect. - if (!Exit.isFailure(userBResult)) return; // tools.invoke wraps plugin failures in ToolInvocationError // with the original error carried on `cause`. Pin the exact // inner tag — a regression that swapped the "no connection // found" check for a silent no-auth fallback would either // succeed outright (leaking) or surface a different tag here. - const failure = userBResult.cause.reasons.find(Cause.isFailReason); - const outer = failure?.error as - | { - _tag?: string; - cause?: { _tag?: string }; - } - | undefined; - expect(outer?._tag).toBe("ToolInvocationError"); - expect(outer?.cause?._tag).toBe("McpConnectionError"); + const outer = failureError(userBResult); + expect(isToolInvocationError(outer)).toBe(true); + const inner = isToolInvocationError(outer) ? outer.cause : undefined; + expect(Predicate.isTagged(inner, "McpConnectionError")).toBe(true); // CRITICAL: no outbound MCP request was made on user B's behalf // carrying user A's bearer token. 
Auth resolution must have @@ -366,7 +367,7 @@ describe("per-user MCP auth isolation", () => { yield* execUserA.mcp.addSource({ transport: "remote", - scope: ORG as string, + scope: ORG, name: "Shared MCP (header)", endpoint: server.url, namespace: "iso_header", @@ -418,21 +419,15 @@ describe("per-user MCP auth isolation", () => { ); expect(Exit.isFailure(userBResult)).toBe(true); - if (!Exit.isFailure(userBResult)) return; // tools.invoke wraps plugin failures in ToolInvocationError // with the original error carried on `cause`. Pin the exact // inner tag — a regression that swapped the "no connection // found" check for a silent no-auth fallback would either // succeed outright (leaking) or surface a different tag here. - const failure = userBResult.cause.reasons.find(Cause.isFailReason); - const outer = failure?.error as - | { - _tag?: string; - cause?: { _tag?: string }; - } - | undefined; - expect(outer?._tag).toBe("ToolInvocationError"); - expect(outer?.cause?._tag).toBe("McpConnectionError"); + const outer = failureError(userBResult); + expect(isToolInvocationError(outer)).toBe(true); + const inner = isToolInvocationError(outer) ? 
outer.cause : undefined; + expect(Predicate.isTagged(inner, "McpConnectionError")).toBe(true); const afterUserB = server.recorded().slice(recordedBeforeUserB); for (const req of afterUserB) { From de10d6de2578789fbc26572f7edb052612ef4c39 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:37:54 -0700 Subject: [PATCH 103/108] Use promiseExit in MCP edit source --- .../plugins/mcp/src/react/EditMcpSource.tsx | 35 ++++++++++--------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/packages/plugins/mcp/src/react/EditMcpSource.tsx b/packages/plugins/mcp/src/react/EditMcpSource.tsx index 4ab5e6d07..e138b79d2 100644 --- a/packages/plugins/mcp/src/react/EditMcpSource.tsx +++ b/packages/plugins/mcp/src/react/EditMcpSource.tsx @@ -1,6 +1,7 @@ import { useState } from "react"; import { useAtomValue, useAtomSet } from "@effect/atom-react"; import * as AsyncResult from "effect/unstable/reactivity/AsyncResult"; +import * as Exit from "effect/Exit"; import { mcpSourceAtom, updateMcpSource } from "./atoms"; import { useScope } from "@executor-js/react/api/scope-context"; import { sourceWriteKeys } from "@executor-js/react/api/reactivity-keys"; @@ -35,7 +36,7 @@ function RemoteEditForm(props: { onSave: () => void; }) { const scopeId = useScope(); - const doUpdate = useAtomSet(updateMcpSource, { mode: "promise" }); + const doUpdate = useAtomSet(updateMcpSource, { mode: "promiseExit" }); const secretList = useSecretPickerSecrets(); const identity = useSourceIdentity({ @@ -64,24 +65,24 @@ function RemoteEditForm(props: { setSaving(true); setError(null); const { headers, queryParams } = serializeHttpCredentials(credentials); - try { - await doUpdate({ - params: { scopeId, namespace: props.sourceId }, - payload: { - name: identity.name.trim() || undefined, - endpoint: endpoint.trim() || undefined, - headers, - queryParams, - }, - reactivityKeys: sourceWriteKeys, - }); - setDirty(false); - props.onSave(); - 
} catch (e) { - setError(e instanceof Error ? e.message : "Failed to update source"); - } finally { + const exit = await doUpdate({ + params: { scopeId, namespace: props.sourceId }, + payload: { + name: identity.name.trim() || undefined, + endpoint: endpoint.trim() || undefined, + headers, + queryParams, + }, + reactivityKeys: sourceWriteKeys, + }); + if (Exit.isFailure(exit)) { + setError("Failed to update source"); setSaving(false); + return; } + setDirty(false); + setSaving(false); + props.onSave(); }; return ( From e8422d276a51f63899b820fc9f78c833701819e2 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:38:19 -0700 Subject: [PATCH 104/108] Document React scope context boundaries --- packages/react/src/api/scope-context.tsx | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/react/src/api/scope-context.tsx b/packages/react/src/api/scope-context.tsx index b44681287..d3b6b9edc 100644 --- a/packages/react/src/api/scope-context.tsx +++ b/packages/react/src/api/scope-context.tsx @@ -41,6 +41,7 @@ export function ScopeProvider(props: React.PropsWithChildren<{ fallback?: React. 
export function useScope(): ScopeId { const scope = React.useContext(ScopeContext); if (scope === null) { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: React hook invariant throw new Error("useScope must be used inside a ScopeProvider"); } return scope.id; @@ -53,6 +54,7 @@ export function useScope(): ScopeId { export function useScopeInfo(): ScopeInfo { const scope = React.useContext(ScopeContext); if (scope === null) { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: React hook invariant throw new Error("useScopeInfo must be used inside a ScopeProvider"); } return scope; @@ -66,6 +68,7 @@ export function useUserScope(): ScopeId { const stack = useScopeStack(); const innermost = stack[0]; if (!innermost) { + // oxlint-disable-next-line executor/no-try-catch-or-throw, executor/no-error-constructor -- boundary: React hook invariant throw new Error("useUserScope requires a non-empty scope stack"); } return innermost.id; From 83b84eb70882dbe0715c211c4b58f22edfffe2a7 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:38:48 -0700 Subject: [PATCH 105/108] Use typed GraphQL invocation boundaries --- .../plugins/graphql/src/sdk/introspect.ts | 175 +++++++++++------- packages/plugins/graphql/src/sdk/invoke.ts | 11 +- 2 files changed, 114 insertions(+), 72 deletions(-) diff --git a/packages/plugins/graphql/src/sdk/introspect.ts b/packages/plugins/graphql/src/sdk/introspect.ts index 589ceafd7..0ccbe6a56 100644 --- a/packages/plugins/graphql/src/sdk/introspect.ts +++ b/packages/plugins/graphql/src/sdk/introspect.ts @@ -1,4 +1,4 @@ -import { Effect } from "effect"; +import { Effect, Schema } from "effect"; import { HttpClient, HttpClientRequest } from "effect/unstable/http"; import { GraphqlIntrospectionError } from "./errors"; @@ -81,49 +81,99 @@ const INTROSPECTION_QUERY = ` // Introspection 
result types // --------------------------------------------------------------------------- -export interface IntrospectionTypeRef { - readonly kind: string; - readonly name: string | null; - readonly ofType: IntrospectionTypeRef | null; -} - -export interface IntrospectionInputValue { - readonly name: string; - readonly description: string | null; - readonly type: IntrospectionTypeRef; - readonly defaultValue: string | null; -} - -export interface IntrospectionField { - readonly name: string; - readonly description: string | null; - readonly args: readonly IntrospectionInputValue[]; - readonly type: IntrospectionTypeRef; -} - -export interface IntrospectionEnumValue { - readonly name: string; - readonly description: string | null; -} - -export interface IntrospectionType { - readonly kind: string; - readonly name: string; - readonly description: string | null; - readonly fields: readonly IntrospectionField[] | null; - readonly inputFields: readonly IntrospectionInputValue[] | null; - readonly enumValues: readonly IntrospectionEnumValue[] | null; -} - -export interface IntrospectionSchema { - readonly queryType: { readonly name: string } | null; - readonly mutationType: { readonly name: string } | null; - readonly types: readonly IntrospectionType[]; -} - -export interface IntrospectionResult { - readonly __schema: IntrospectionSchema; -} +const IntrospectionTypeRefLeaf = Schema.Struct({ + kind: Schema.String, + name: Schema.NullOr(Schema.String), + ofType: Schema.Null, +}); + +const IntrospectionTypeRef5 = Schema.Struct({ + kind: Schema.String, + name: Schema.NullOr(Schema.String), + ofType: Schema.NullOr(IntrospectionTypeRefLeaf), +}); + +const IntrospectionTypeRef4 = Schema.Struct({ + kind: Schema.String, + name: Schema.NullOr(Schema.String), + ofType: Schema.NullOr(IntrospectionTypeRef5), +}); + +const IntrospectionTypeRef3 = Schema.Struct({ + kind: Schema.String, + name: Schema.NullOr(Schema.String), + ofType: Schema.NullOr(IntrospectionTypeRef4), +}); + 
+const IntrospectionTypeRef2 = Schema.Struct({ + kind: Schema.String, + name: Schema.NullOr(Schema.String), + ofType: Schema.NullOr(IntrospectionTypeRef3), +}); + +const IntrospectionTypeRefSchema = Schema.Struct({ + kind: Schema.String, + name: Schema.NullOr(Schema.String), + ofType: Schema.NullOr(IntrospectionTypeRef2), +}); + +const IntrospectionInputValueSchema = Schema.Struct({ + name: Schema.String, + description: Schema.NullOr(Schema.String), + type: IntrospectionTypeRefSchema, + defaultValue: Schema.NullOr(Schema.String), +}); + +const IntrospectionFieldSchema = Schema.Struct({ + name: Schema.String, + description: Schema.NullOr(Schema.String), + args: Schema.Array(IntrospectionInputValueSchema), + type: IntrospectionTypeRefSchema, +}); + +const IntrospectionTypeSchema = Schema.Struct({ + kind: Schema.String, + name: Schema.String, + description: Schema.NullOr(Schema.String), + fields: Schema.NullOr(Schema.Array(IntrospectionFieldSchema)), + inputFields: Schema.NullOr(Schema.Array(IntrospectionInputValueSchema)), + enumValues: Schema.NullOr( + Schema.Array( + Schema.Struct({ + name: Schema.String, + description: Schema.NullOr(Schema.String), + }), + ), + ), +}); + +const IntrospectionResultSchema = Schema.Struct({ + __schema: Schema.Struct({ + queryType: Schema.NullOr(Schema.Struct({ name: Schema.String })), + mutationType: Schema.NullOr(Schema.Struct({ name: Schema.String })), + types: Schema.Array(IntrospectionTypeSchema), + }), +}); + +const IntrospectionResponseSchema = Schema.Struct({ + data: Schema.optional(IntrospectionResultSchema), + errors: Schema.optional(Schema.Array(Schema.Unknown)), +}); + +const IntrospectionJsonSchema = Schema.Union([ + Schema.Struct({ data: IntrospectionResultSchema }), + IntrospectionResultSchema, +]); + +export type IntrospectionTypeRef = typeof IntrospectionTypeRefSchema.Type; +export type IntrospectionInputValue = typeof IntrospectionInputValueSchema.Type; +export type IntrospectionField = typeof 
IntrospectionFieldSchema.Type; +export type IntrospectionEnumValue = NonNullable< + (typeof IntrospectionTypeSchema.Type)["enumValues"] +>[number]; +export type IntrospectionType = typeof IntrospectionTypeSchema.Type; +export type IntrospectionSchema = (typeof IntrospectionResultSchema.Type)["__schema"]; +export type IntrospectionResult = typeof IntrospectionResultSchema.Type; // --------------------------------------------------------------------------- // Introspect a GraphQL endpoint @@ -162,9 +212,9 @@ export const introspect = Effect.fn("GraphQL.introspect")(function* ( const response = yield* client.execute(request).pipe( Effect.tapCause((cause) => Effect.logError("graphql introspection request failed", cause)), Effect.mapError( - (err) => + () => new GraphqlIntrospectionError({ - message: `Failed to reach GraphQL endpoint: ${err.message}`, + message: "Failed to reach GraphQL endpoint", }), ), ); @@ -187,7 +237,14 @@ export const introspect = Effect.fn("GraphQL.introspect")(function* ( ), ); - const json = raw as { data?: IntrospectionResult; errors?: unknown[] }; + const json = yield* Schema.decodeUnknownEffect(IntrospectionResponseSchema)(raw).pipe( + Effect.mapError( + () => + new GraphqlIntrospectionError({ + message: "Introspection response has an invalid shape", + }), + ), + ); if (json.errors && Array.isArray(json.errors) && json.errors.length > 0) { return yield* new GraphqlIntrospectionError({ @@ -211,18 +268,12 @@ export const introspect = Effect.fn("GraphQL.introspect")(function* ( export const parseIntrospectionJson = ( text: string, ): Effect.Effect => - Effect.try({ - try: () => { - const parsed = JSON.parse(text); - // Accept both { data: { __schema } } and { __schema } formats - const result = parsed.data ?? 
parsed; - if (!result.__schema) { - throw new Error("Missing __schema in introspection JSON"); - } - return result as IntrospectionResult; - }, - catch: (err) => - new GraphqlIntrospectionError({ - message: `Failed to parse introspection JSON: ${err instanceof Error ? err.message : String(err)}`, - }), - }); + Schema.decodeUnknownEffect(Schema.fromJsonString(IntrospectionJsonSchema))(text).pipe( + Effect.map((parsed) => ("data" in parsed ? parsed.data : parsed)), + Effect.mapError( + () => + new GraphqlIntrospectionError({ + message: "Failed to parse introspection JSON", + }), + ), + ); diff --git a/packages/plugins/graphql/src/sdk/invoke.ts b/packages/plugins/graphql/src/sdk/invoke.ts index c8242453e..eb1fa3ee4 100644 --- a/packages/plugins/graphql/src/sdk/invoke.ts +++ b/packages/plugins/graphql/src/sdk/invoke.ts @@ -114,7 +114,7 @@ export const invoke = Effect.fn("GraphQL.invoke")(function* ( Effect.mapError( (err) => new GraphqlInvocationError({ - message: `GraphQL request failed: ${err.message}`, + message: "GraphQL request failed", statusCode: Option.none(), cause: err, }), @@ -159,15 +159,6 @@ export const invokeWithLayer = ( ) => invoke(operation, args, endpoint, resolvedHeaders, resolvedQueryParams).pipe( Effect.provide(httpClientLayer), - Effect.mapError((err) => - err instanceof GraphqlInvocationError - ? err - : new GraphqlInvocationError({ - message: err instanceof Error ? 
err.message : String(err), - statusCode: Option.none(), - cause: err, - }), - ), Effect.withSpan("plugin.graphql.invoke", { attributes: { "plugin.graphql.endpoint": endpoint, From 72e83f39619f6bdc57a856b0e0bebc42f9e1f41d Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:39:14 -0700 Subject: [PATCH 106/108] Use typed Google Discovery invocation boundaries --- .../google-discovery/src/sdk/invoke.ts | 74 +++++++++++-------- 1 file changed, 44 insertions(+), 30 deletions(-) diff --git a/packages/plugins/google-discovery/src/sdk/invoke.ts b/packages/plugins/google-discovery/src/sdk/invoke.ts index cac95144d..a3d1aa16c 100644 --- a/packages/plugins/google-discovery/src/sdk/invoke.ts +++ b/packages/plugins/google-discovery/src/sdk/invoke.ts @@ -1,4 +1,4 @@ -import { Effect, Layer, Option } from "effect"; +import { Effect, Layer, Option, Schema } from "effect"; import { FetchHttpClient, HttpClient, HttpClientRequest } from "effect/unstable/http"; import type { PluginCtx, StorageFailure } from "@executor-js/sdk/core"; @@ -13,6 +13,16 @@ import { const SAFE_METHODS = new Set(["get", "head", "options"]); +const UnknownErrorMessage = Schema.Struct({ message: Schema.String }); +const decodeUnknownErrorMessage = Schema.decodeUnknownOption(UnknownErrorMessage); + +const errorMessageFromUnknown = (cause: unknown): string => { + const decoded = decodeUnknownErrorMessage(cause); + if (Option.isSome(decoded)) return decoded.value.message; + // oxlint-disable-next-line executor/no-unknown-error-message -- boundary: preserves existing fallback text for HTTP client errors + return String(cause); +}; + export const annotationsForOperation = ( method: string, pathTemplate: string, @@ -46,19 +56,27 @@ const replacePathParameters = (input: { pathTemplate: string; args: Record; parameters: readonly GoogleDiscoveryParameter[]; -}): string => - input.pathTemplate.replaceAll(/\{([^}]+)\}/g, (_, name: string) => { - const 
parameter = input.parameters.find( - (entry) => entry.location === "path" && entry.name === name, - ); - const values = stringValuesFromParameter(input.args[name], false); - if (values.length === 0) { - if (parameter?.required) { - throw new Error(`Missing required path parameter: ${name}`); +}): Effect.Effect => + Effect.gen(function* () { + let failure: GoogleDiscoveryInvocationError | undefined; + const resolved = input.pathTemplate.replaceAll(/\{([^}]+)\}/g, (_, name: string) => { + const parameter = input.parameters.find( + (entry) => entry.location === "path" && entry.name === name, + ); + const values = stringValuesFromParameter(input.args[name], false); + if (values.length === 0) { + if (parameter?.required) { + failure = new GoogleDiscoveryInvocationError({ + message: `Missing required path parameter: ${name}`, + statusCode: Option.none(), + }); + } + return ""; } - return ""; - } - return encodeURIComponent(values[0]!); + return encodeURIComponent(values[0]!); + }); + if (failure) return yield* failure; + return resolved; }); const resolveBaseUrl = (source: GoogleDiscoveryStoredSourceData): string => @@ -87,7 +105,7 @@ const performRequest = Effect.fn("GoogleDiscovery.invoke")(function* (input: { }) { const client = yield* HttpClient.HttpClient; - const resolvedPath = replacePathParameters({ + const resolvedPath = yield* replacePathParameters({ pathTemplate: input.pathTemplate, args: input.args, parameters: input.parameters, @@ -138,7 +156,7 @@ const performRequest = Effect.fn("GoogleDiscovery.invoke")(function* (input: { Effect.mapError( (err) => new GoogleDiscoveryInvocationError({ - message: `HTTP request failed: ${err.message}`, + message: `HTTP request failed: ${errorMessageFromUnknown(err)}`, statusCode: Option.none(), cause: err, }), @@ -147,9 +165,9 @@ const performRequest = Effect.fn("GoogleDiscovery.invoke")(function* (input: { const contentType = response.headers["content-type"] ?? 
null; const mapBodyError = Effect.mapError( - (err: { readonly message?: string }) => + (err: unknown) => new GoogleDiscoveryInvocationError({ - message: `Failed to read response body: ${err.message ?? String(err)}`, + message: `Failed to read response body: ${errorMessageFromUnknown(err)}`, statusCode: Option.some(response.status), cause: err, }), @@ -191,21 +209,17 @@ export const invokeGoogleDiscoveryTool = (input: { Effect.gen(function* () { const entry = yield* input.ctx.storage.getBinding(input.toolId, input.toolScope); if (!entry) { - return yield* Effect.fail( - new GoogleDiscoveryInvocationError({ - message: `No Google Discovery operation found for tool "${input.toolId}"`, - statusCode: Option.none(), - }), - ); + return yield* new GoogleDiscoveryInvocationError({ + message: `No Google Discovery operation found for tool "${input.toolId}"`, + statusCode: Option.none(), + }); } const stored = yield* input.ctx.storage.getSource(entry.namespace, input.toolScope); if (!stored) { - return yield* Effect.fail( - new GoogleDiscoveryInvocationError({ - message: `No Google Discovery source found for "${entry.namespace}"`, - statusCode: Option.none(), - }), - ); + return yield* new GoogleDiscoveryInvocationError({ + message: `No Google Discovery source found for "${entry.namespace}"`, + statusCode: Option.none(), + }); } const source = stored.config; @@ -215,7 +229,7 @@ export const invokeGoogleDiscoveryTool = (input: { Effect.mapError( (err) => new GoogleDiscoveryOAuthError({ - message: "message" in err ? 
(err as { message: string }).message : String(err), + message: errorMessageFromUnknown(err), }), ), )}` From 8f916068fc54ddf54ba801ba0fe1686f4deb941c Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:40:57 -0700 Subject: [PATCH 107/108] Add OpenAPI parse boundary tests --- .../plugins/openapi/src/sdk/parse.test.ts | 53 +++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 packages/plugins/openapi/src/sdk/parse.test.ts diff --git a/packages/plugins/openapi/src/sdk/parse.test.ts b/packages/plugins/openapi/src/sdk/parse.test.ts new file mode 100644 index 000000000..c48248d73 --- /dev/null +++ b/packages/plugins/openapi/src/sdk/parse.test.ts @@ -0,0 +1,53 @@ +import { describe, expect, it } from "@effect/vitest"; +import { Effect } from "effect"; + +import { OpenApiParseError } from "./errors"; +import { parse } from "./parse"; + +describe("OpenAPI parse", () => { + it.effect("parses JSON OpenAPI documents", () => + Effect.gen(function* () { + const doc = yield* parse( + JSON.stringify({ + openapi: "3.1.0", + info: { title: "Test", version: "1.0.0" }, + paths: {}, + }), + ); + + expect(doc.openapi).toBe("3.1.0"); + }), + ); + + it.effect("parses YAML OpenAPI documents", () => + Effect.gen(function* () { + const doc = yield* parse(` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +paths: {} +`); + + expect(doc.openapi).toBe("3.0.0"); + }), + ); + + it.effect("returns a stable parse error for empty documents", () => + Effect.gen(function* () { + const error = yield* parse("").pipe(Effect.flip); + + expect(error).toBeInstanceOf(OpenApiParseError); + expect(error).toHaveProperty("message", "OpenAPI document is empty"); + }), + ); + + it.effect("returns a stable parse error for non-object documents", () => + Effect.gen(function* () { + const error = yield* parse("[]").pipe(Effect.flip); + + expect(error).toBeInstanceOf(OpenApiParseError); + expect(error).toHaveProperty("message", 
"OpenAPI document must parse to an object"); + }), + ); +}); From eddcffcdf1722052a65c4e719ea837e840901b60 Mon Sep 17 00:00:00 2001 From: Rhys Sullivan <39114868+RhysSullivan@users.noreply.github.com> Date: Tue, 5 May 2026 22:51:57 -0700 Subject: [PATCH 108/108] Resolve OAuth refresh integration assertion --- packages/plugins/openapi/src/sdk/oauth-refresh.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts b/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts index 62e963e32..fc6fd06fd 100644 --- a/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts +++ b/packages/plugins/openapi/src/sdk/oauth-refresh.test.ts @@ -391,7 +391,7 @@ layer(TestLayer)("OpenAPI oauth refresh", (it) => { ), ); expect(flipped.provider).toBe("openapi:oauth2"); - expect(flipped.message).toBe("OAuth refresh failed"); + expect(flipped.message).toMatch(/OAuth refresh failed: .*revoked/i); }), ); });