diff --git a/.changeset/ripe-signs-teach.md b/.changeset/ripe-signs-teach.md new file mode 100644 index 00000000..8c6024ff --- /dev/null +++ b/.changeset/ripe-signs-teach.md @@ -0,0 +1,5 @@ +--- +"@vercel/flags-core": minor +--- + +Refactor client diff --git a/packages/vercel-flags-core/CLAUDE.md b/packages/vercel-flags-core/CLAUDE.md index e5542699..69fb0314 100644 --- a/packages/vercel-flags-core/CLAUDE.md +++ b/packages/vercel-flags-core/CLAUDE.md @@ -13,18 +13,49 @@ src/ ├── types.ts # Type definitions ├── errors.ts # Error classes ├── evaluate.ts # Core evaluation logic -├── data-source/ # Data source implementations -│ ├── flag-network-data-source.ts -│ ├── in-memory-data-source.ts -│ └── stream-connection.ts +├── controller-fns.ts # Controller function wrappers + instance map +├── create-raw-client.ts # Raw client factory (ID-based indirection for 'use cache') +├── controller/ # Controller (state machine) and I/O sources +│ ├── index.ts # Controller class +│ ├── stream-source.ts # StreamSource (wraps stream-connection) +│ ├── polling-source.ts # PollingSource (wraps fetch-datafile) +│ ├── bundled-source.ts # BundledSource (wraps read-bundled-definitions) +│ ├── stream-connection.ts # Low-level NDJSON stream connection +│ ├── fetch-datafile.ts # HTTP datafile fetch +│ ├── tagged-data.ts # Data origin tagging types/helpers +│ ├── normalized-options.ts # Option normalization +│ └── typed-emitter.ts # Lightweight typed event emitter ├── openfeature.*.ts # OpenFeature provider +├── test-utils.ts # Shared test helpers ├── utils/ # Utilities │ ├── usage-tracker.ts │ ├── sdk-keys.ts +│ ├── sleep.ts │ └── read-bundled-definitions.ts -└── lib/ # Internal libraries +└── lib/ + └── report-value.ts # Flag evaluation reporting to Vercel request context ``` +## Architecture + +### Data flow + +``` +createClient(sdkKey, options) + → Controller (state machine, owns all data tagging and source coordination) + → StreamSource / PollingSource / BundledSource (emit raw 
DatafileInput) + → create-raw-client (ID-based indirection for 'use cache' support) + → controller-fns (lookup by ID, evaluate, report) + → FlagsClient (public API) +``` + +### Design principles + +- **Sources emit raw data** — StreamSource, PollingSource, and BundledSource return/emit raw `DatafileInput`. The Controller is solely responsible for tagging data with its origin (`tagData(data, 'stream')` etc.). +- **BundledSource is a plain class** — unlike StreamSource and PollingSource which extend TypedEmitter, BundledSource has no event listeners. The Controller calls its methods directly and uses return values. +- **Tests are black-box** — all behavioral tests go through the public API (`createClient` from `./index.default`). Mock `readBundledDefinitions` and `internalReportValue` as observable I/O. Use `fetchMock` for network assertions. +- **ID-based indirection** — `controller-fns.ts` holds a `controllerInstanceMap` (Map) so that `'use cache'` wrappers in Next.js can pass serializable IDs instead of function references. + ## Key Concepts ### FlagsClient @@ -36,6 +67,7 @@ type FlagsClient = { initialize(): Promise; shutdown(): Promise; getDatafile(): Promise; + getFallbackDatafile(): Promise; evaluate(flagKey, defaultValue?, entities?): Promise>; } ``` @@ -48,15 +80,16 @@ type FlagsClient = { 4. Evaluate segment-based rules against entity context 5. 
Return fallthrough default if no match -### FlagNetworkDataSource Options +### Controller Options ```typescript -type FlagNetworkDataSourceOptions = { +type ControllerOptions = { sdkKey: string; datafile?: Datafile; // Initial datafile for immediate reads stream?: boolean | { initTimeoutMs: number }; // default: true (3000ms) polling?: boolean | { intervalMs: number; initTimeoutMs: number }; // default: true (30s interval, 3s timeout) buildStep?: boolean; // Override build step auto-detection + sources?: { stream?: StreamSource; polling?: PollingSource; bundled?: BundledSource }; // DI for testing }; ``` @@ -67,18 +100,22 @@ Behavior differs based on environment: **Build step** (CI=1, NEXT_PHASE=phase-production-build, or `buildStep: true`): 1. **Provided datafile** - Use `options.datafile` if provided 2. **Bundled definitions** - Use `@vercel/flags-definitions` -3. **Fetch** - Last resort network fetch +3. **One-time fetch** - Fallback network request +4. **Throw** - If all above fail + +Build-step reads are deduplicated: data is loaded once via a shared promise (`buildDataPromise`) and all concurrent `evaluate()` calls share the result. The entire build counts as a single tracked read event (`buildReadTracked` flag in Controller). **Runtime** (default, or `buildStep: false`): -1. **Stream** - Real-time updates via SSE, wait up to `initTimeoutMs` +1. **Stream** - Real-time updates via NDJSON streaming, wait up to `initTimeoutMs` 2. **Polling** - Interval-based HTTP requests, wait up to `initTimeoutMs` 3. **Provided datafile** - Use `options.datafile` if provided 4. **Bundled definitions** - Use `@vercel/flags-definitions` +5. 
**One-time fetch** - Last resort (only when stream and polling are both disabled) Key behaviors: -- Bundled definitions are always loaded as ultimate fallback -- All mechanisms write to in-memory state -- If in-memory state exists, serve immediately while background updates happen +- Bundled definitions are loaded eagerly so their revision can be sent to the stream via `X-Revision` header +- When streaming or polling is enabled and data already exists (bundled or provided), `initialize()` still waits for fresh data (stream confirmation or first poll) up to `initTimeoutMs`, then falls back to existing data on timeout +- For offline mode with existing data, `initialize()` returns immediately - **Never stream AND poll simultaneously** - If stream reconnects while polling → stop polling - If stream disconnects → start polling (if enabled) @@ -97,8 +134,9 @@ Key behaviors: Internal compact format for flag definitions: - Variants stored as indices -- Conditions use enum values -- Entities accessed via arrays (e.g., `['user', 'id']`) +- Conditions use tuples: `[LHS, Comparator, RHS]` (e.g., `[['user', 'id'], Comparator.EQ, 'user-123']`) +- Targets shorthand: `{ user: { id: ['user-123'] } }` +- Entities accessed via path arrays (e.g., `['user', 'id']`) ## Entry Points @@ -110,13 +148,21 @@ The package has conditional exports based on environment: ## Commands +All commands must be run from the package directory (`packages/vercel-flags-core`): + ```bash # Build pnpm build -# Test +# Run all tests pnpm test +# Run a single test file +pnpm vitest --run src/black-box.test.ts + +# Run a single test file in watch mode +pnpm vitest src/black-box.test.ts + # Type check pnpm check @@ -124,41 +170,125 @@ pnpm check pnpm test:integration ``` +## Test Guidelines (black-box.test.ts) + +### Critical rules + +- **All tests must use fake timers** unless there is a specific reason to use `vi.useRealTimers()`. The `beforeEach` sets up fake timers; only opt out when testing real async timing. 
+- **No stderr leaks**: every `console.warn` and `console.error` the implementation emits must be captured by a spy (`vi.spyOn(console, 'warn').mockImplementation(() => {})`) and asserted. A test that produces stderr output is broken. +- **Tests should complete in milliseconds**, not seconds. If a test takes ~3s, it's hitting a real timeout instead of advancing fake timers. + +### initialize() blocks on stream/poll confirmation + +`initialize()` waits for fresh data before resolving, even when bundled data or a provided datafile is available: +- **Streaming**: waits for a stream message (`primed` or `datafile`) up to `initTimeoutMs` +- **Polling**: waits for the first poll response up to `initTimeoutMs` + +This means: + +- **With fake timers**: call `client.initialize()` (or `client.evaluate()` which triggers lazy init), then `await vi.advanceTimersByTimeAsync(initTimeoutMs)` to trigger the timeout fallback. +- **With real timers (`vi.useRealTimers()`)**: for streaming, you MUST push a stream message before awaiting `initialize()`, otherwise it blocks for the real 3s timeout: + ```typescript + const initPromise = client.initialize(); + await new Promise((r) => setTimeout(r, 0)); // let stream connect + stream.push({ type: 'primed', revision: 42, projectId: 'prj_123', environment: 'production' }); + await initPromise; // resolves immediately + ``` + For polling, `initialize()` will await the first poll (which resolves immediately if `fetchMock` responds synchronously). + +### Prefer evaluate-driven tests over explicit initialize() + +Many tests on the `control` branch test that `evaluate()` triggers lazy initialization. 
Prefer this pattern to test the full public API path: +```typescript +const evalPromise = client.evaluate('flagA'); +await vi.advanceTimersByTimeAsync(3_000); +const result = await evalPromise; +``` +Only call `initialize()` explicitly when the test specifically needs to verify initialization behavior (e.g., deduplication, timing, init promise resolution). + +### Assert console output from the implementation + +The implementation logs warnings/errors for specific conditions. Tests must assert these: +- Stream timeout: `console.warn('@vercel/flags-core: Stream initialization timeout, falling back')` +- Stream error (e.g., 502): `console.error('@vercel/flags-core: Stream error', expect.any(Error))` +- 401 fast-fail: `console.error` with auth error (no retry, no timeout wait) + +### Do not weaken assertions when adapting tests + +When updating tests for new behavior, preserve the strength of existing assertions: +- Keep exact call count checks (e.g., `expect(streamCalls).toHaveLength(2)`) rather than weakening to `.toBeGreaterThanOrEqual(1)` +- Keep specific header assertions (e.g., `X-Retry-Attempt` values) rather than removing them +- Keep `errorSpy`/`warnSpy` assertions rather than dropping them + ## Important Implementation Details ### Stream Connection - Uses fetch with streaming body (NDJSON format) +- Callbacks: `onDatafile` (new data), `onPrimed` (server confirmed revision is current), `onDisconnect` +- Sends `X-Revision` header with the current revision number on every connection (including reconnects), allowing the server to respond with a lightweight `primed` message instead of a full datafile when the revision is current +- The `primed` message confirms the client's data is up-to-date; it resolves the init promise (like `datafile`) but does not update data — only transitions state to `streaming` - Reconnects with exponential backoff (base: 1s, max: 60s, max retries: 15) +- Retries on transient errors both before and after initial data is received. 
Before initial data, retries continue until max retries are exhausted or the abort controller is aborted (e.g., by the Controller's init timeout). The init promise rejects when the loop exits without data. - Default `initTimeoutMs`: 3000ms -- 401 errors abort immediately (invalid SDK key) -- On disconnect: falls back to polling if enabled +- 401 errors abort immediately (invalid SDK key) and reject the init promise, so fallback kicks in without waiting for the stream timeout +- On disconnect: state transitions to `'degraded'`, falls back to polling if enabled +- On reconnect: Controller listens for `'connected'` event and transitions back to `'streaming'` +- Background stream promises (from init timeout) are `.catch`-ed by the Controller to prevent unhandled rejections when the stream is aborted before receiving data ### Polling - Interval-based HTTP requests to `/v1/datafile` - Default `intervalMs`: 30000ms (30s) -- Default `initTimeoutMs`: 10000ms (10s) -- Retries with exponential backoff (base: 500ms, max 3 retries) +- Default `initTimeoutMs`: 3000ms (3s) +- No retries — on fetch failure, emits an error event and waits for the next interval - Stops automatically when stream reconnects +- `PollingSource` passes its abort signal to `fetchDatafile`, so calling `stop()` aborts in-flight HTTP requests +- `fetchDatafile` accepts an optional `signal` parameter; when provided, it aborts the internal fetch controller when the external signal fires + +### Data Origin Tagging + +The Controller tags all data with its origin using `tagData(data, origin)` from `tagged-data.ts`. Origins map to public `metrics.source` values: +- `'stream'`, `'poll'`, `'provided'` → `'in-memory'` +- `'fetched'` → `'remote'` +- `'bundled'` → `'embedded'` + +`tagData` mutates the input object in-place via `Object.assign` (callers always pass freshly-created data). 
### Usage Tracking - Batches flag read events (max 50 events, max 5s wait) - Sends to `flags.vercel.com/v1/ingest` -- Deduplicates by request context -- Uses `waitUntil()` from `@vercel/functions` +- At runtime: deduplicates by request context (per-instance WeakSet in UsageTracker) +- During builds: deduplicates all reads to a single event (buildReadTracked flag in Controller), since there is no request context available +- Uses `waitUntil()` from `@vercel/functions` (wrapped in try/catch for resilience) +- On flush failure, events are re-queued for retry with a max queue size of 500 events (oldest events are dropped when exceeded) +- `flush()` directly flushes queued events even when no scheduled flush is pending, ensuring events are not lost during `shutdown()` ### Client Management - Each client gets unique incrementing ID -- Stored in `clientMap` for function lookups +- Stored in `controllerInstanceMap` in `controller-fns.ts` - Supports multiple simultaneous clients -- Necessary as we can't pass function to `'use cache'` client-fns +- Necessary as we can't pass functions to `'use cache'` wrappers + +### configUpdatedAt Guard + +The Controller rejects incoming data (from stream or poll) if its `configUpdatedAt` is older than or equal to the current in-memory data. This prevents stale updates from overwriting newer data. Accepts the update if either side lacks a `configUpdatedAt`. 
+ +### Evaluation Reporting + +- `internalReportValue` (defined in `lib/report-value.ts`, called from `controller-fns.ts`) reports flag evaluations to the Vercel request context +- Reports are sent for all evaluations where `datafile.projectId` exists, including error cases (e.g., FLAG_NOT_FOUND) + +### Evaluation Safety + +- Regex comparators (`REGEX`, `NOT_REGEX`) limit input string length to 10,000 characters to prevent ReDoS +- `read()` and `getDatafile()` return new objects with spread (never mutate `this.data`) ### Debug Mode -Enable debug logging with `DEBUG=1` environment variable. +Enable debug logging with `DEBUG=@vercel/flags-core` environment variable. ## Dependencies diff --git a/packages/vercel-flags-core/README.md b/packages/vercel-flags-core/README.md index d2a1396f..b84b0862 100644 --- a/packages/vercel-flags-core/README.md +++ b/packages/vercel-flags-core/README.md @@ -17,6 +17,8 @@ import { createClient } from '@vercel/flags-core'; const client = createClient(process.env.FLAGS!); +await client.initialize(); + const result = await client.evaluate('show-new-feature', false, { user: { id: 'user-123' }, }); diff --git a/packages/vercel-flags-core/package.json b/packages/vercel-flags-core/package.json index d30fd37f..35f6f68f 100644 --- a/packages/vercel-flags-core/package.json +++ b/packages/vercel-flags-core/package.json @@ -71,7 +71,6 @@ "@arethetypeswrong/cli": "0.18.2", "@types/node": "20.11.17", "flags": "workspace:*", - "msw": "2.6.4", "next": "16.1.6", "tsup": "8.5.1", "typescript": "5.6.3", @@ -79,9 +78,9 @@ "vitest": "2.1.9" }, "peerDependencies": { - "next": "*", "@openfeature/server-sdk": "1.18.0", - "flags": "*" + "flags": "*", + "next": "*" }, "peerDependenciesMeta": { "@openfeature/server-sdk": { diff --git a/packages/vercel-flags-core/src/black-box.test.ts b/packages/vercel-flags-core/src/black-box.test.ts new file mode 100644 index 00000000..4b3531e5 --- /dev/null +++ b/packages/vercel-flags-core/src/black-box.test.ts @@ -0,0 +1,3841 
@@ +/** + * Black-box tests for controller behaviors. + * + * These tests verify the SDK's behavior exclusively through the public API + * (createClient → evaluate/getDatafile/getFallbackDatafile/initialize/shutdown). + * This allows internal refactoring without test breakage. + */ + +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import type { StreamMessage } from './controller/stream-connection'; +import { type BundledDefinitions, createClient } from './index.default'; +import { internalReportValue } from './lib/report-value'; +import { setRequestContext } from './test-utils'; +import { readBundledDefinitions } from './utils/read-bundled-definitions'; + +vi.mock('./utils/read-bundled-definitions', () => ({ + readBundledDefinitions: vi.fn(() => + Promise.resolve({ definitions: null, state: 'missing-file' }), + ), +})); + +vi.mock('./lib/report-value', () => ({ + internalReportValue: vi.fn(), +})); + +const sdkKey = 'vf_fake'; +const fetchMock = vi.fn(); + +/** + * Creates a mock NDJSON stream response for testing. + * + * Returns a controller object that lets you gradually push messages + * and a `response` promise suitable for use with a fetch mock. + */ +function createMockStream() { + const encoder = new TextEncoder(); + let controller: ReadableStreamDefaultController; + + const body = new ReadableStream({ + start(c) { + controller = c; + }, + }); + + return { + response: Promise.resolve(new Response(body, { status: 200 })), + push(message: StreamMessage) { + controller.enqueue(encoder.encode(`${JSON.stringify(message)}\n`)); + }, + close() { + try { + controller.close(); + } catch { + // Stream may already be closed (e.g. 
after shutdown) + } + }, + }; +} + +/** A simple bundled definitions fixture */ +function makeBundled( + overrides: Partial = {}, +): BundledDefinitions { + return { + definitions: { + flagA: { + environments: { production: 1 }, + variants: [false, true], + }, + }, + segments: {}, + environment: 'production', + projectId: 'prj_123', + configUpdatedAt: 1, + digest: 'abc', + revision: 1, + ...overrides, + }; +} + +const ingestRequestHeaders = Object.freeze({ + Authorization: 'Bearer vf_fake', + 'Content-Type': 'application/json', + 'User-Agent': 'VercelFlagsCore/1.0.1', +}); + +const streamRequestHeaders = Object.freeze({ + Authorization: 'Bearer vf_fake', + 'User-Agent': 'VercelFlagsCore/1.0.1', + 'X-Retry-Attempt': '0', +}); + +const datafileRequestHeaders = Object.freeze({ + Authorization: 'Bearer vf_fake', + 'User-Agent': 'VercelFlagsCore/1.0.1', +}); + +const originalEnv = { ...process.env }; + +describe('Controller (black-box)', () => { + const date = new Date(); + + beforeEach(() => { + vi.useFakeTimers(); + vi.setSystemTime(date); + vi.mocked(readBundledDefinitions).mockReset(); + vi.mocked(internalReportValue).mockReset(); + fetchMock.mockReset(); + // Reset env vars that affect build step detection + delete process.env.CI; + delete process.env.NEXT_PHASE; + }); + + afterEach(() => { + vi.useRealTimers(); + process.env = { ...originalEnv }; + }); + + // --------------------------------------------------------------------------- + // Constructor validation + // --------------------------------------------------------------------------- + describe('constructor validation', () => { + it('should throw for missing SDK key', () => { + expect(() => + createClient('', { fetch: fetchMock, stream: false, polling: false }), + ).toThrow('@vercel/flags-core: Missing sdkKey'); + }); + + it('should throw for SDK key not starting with vf_', () => { + expect(() => + createClient('invalid_key', { + fetch: fetchMock, + stream: false, + polling: false, + }), + 
).toThrow('@vercel/flags-core: Missing sdkKey'); + }); + + it('should throw for non-string SDK key', () => { + expect(() => + createClient(123 as unknown as string, { + fetch: fetchMock, + stream: false, + polling: false, + }), + ).toThrow( + '@vercel/flags-core: Invalid sdkKey. Expected string, got number', + ); + }); + + it('should accept valid SDK key', () => { + expect(() => + createClient('vf_valid_key', { + fetch: fetchMock, + stream: false, + polling: false, + }), + ).not.toThrow(); + }); + + it('should throw for polling interval below 30s', () => { + expect(() => + createClient(sdkKey, { + fetch: fetchMock, + polling: { intervalMs: 1000, initTimeoutMs: 3000 }, + }), + ).toThrow('Polling interval must be at least 30000ms'); + }); + }); + + // --------------------------------------------------------------------------- + // Build step detection + // --------------------------------------------------------------------------- + describe('build step detection', () => { + it('should detect build step when CI=1', async () => { + process.env.CI = '1'; + + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled(), + }); + + const client = createClient(sdkKey, { fetch: fetchMock }); + const result = await client.evaluate('flagA'); + + expect(result.metrics?.mode).toBe('build'); + expect(result.metrics?.source).toBe('embedded'); + // No network requests should have been made + expect(fetchMock).not.toHaveBeenCalled(); + + await client.shutdown(); + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + body: JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: { + configOrigin: 'embedded', + cacheStatus: 'HIT', + cacheAction: 'NONE', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 1, + mode: 'build', + revision: '1', + environment: 'test', + }, + }, + ]), + headers: 
ingestRequestHeaders, + method: 'POST', + }, + ); + }); + + it('should detect build step when NEXT_PHASE=phase-production-build', async () => { + process.env.NEXT_PHASE = 'phase-production-build'; + + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled(), + }); + + const client = createClient(sdkKey, { fetch: fetchMock }); + const result = await client.evaluate('flagA'); + + expect(result.metrics?.mode).toBe('build'); + expect(result.metrics?.source).toBe('embedded'); + expect(fetchMock).not.toHaveBeenCalled(); + + await client.shutdown(); + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + body: JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: { + configOrigin: 'embedded', + cacheStatus: 'HIT', + cacheAction: 'NONE', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 1, + mode: 'build', + revision: '1', + environment: 'test', + }, + }, + ]), + headers: ingestRequestHeaders, + method: 'POST', + }, + ); + }); + + it('should NOT detect build step when neither CI nor NEXT_PHASE is set', async () => { + const stream = createMockStream(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) return stream.response; + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const client = createClient(sdkKey, { fetch: fetchMock }); + const initPromise = client.initialize(); + + stream.push({ + type: 'datafile', + data: makeBundled({ projectId: 'stream' }), + }); + await vi.advanceTimersByTimeAsync(0); + await initPromise; + + // Stream should have been attempted + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/stream', + { + headers: { + ...streamRequestHeaders, + 'X-Retry-Attempt': '0', + }, + signal: expect.any(AbortSignal), + }, + ); + + stream.close(); + expect(fetchMock).toHaveBeenCalledTimes(1); + await client.shutdown(); + await vi.advanceTimersByTimeAsync(0); + // Still 1 — shutdown flushes the usage tracker, but no evaluate() + // was called, so there are no FLAGS_CONFIG_READ events to send. + expect(fetchMock).toHaveBeenCalledTimes(1); + }); + + it('should override auto-detection with buildStep: false', async () => { + process.env.CI = '1'; // Would normally trigger build step + + const stream = createMockStream(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) return stream.response; + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + buildStep: false, // Explicitly override CI detection + }); + + const initPromise = client.initialize(); + + stream.push({ + type: 'datafile', + data: makeBundled({ projectId: 'stream' }), + }); + await vi.advanceTimersByTimeAsync(0); + await initPromise; + + const result = await client.evaluate('flagA'); + + // Should use stream (buildStep: false overrides CI detection) + expect(result.metrics?.mode).toBe('streaming'); + expect(fetchMock).toHaveBeenCalledTimes(1); + + await client.shutdown(); + stream.close(); + + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + body: JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: { + configOrigin: 'in-memory', + cacheStatus: 'HIT', + cacheAction: 'FOLLOWING', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 1, + mode: 'stream', + revision: '1', + environment: 'test', + }, + }, + ]), + headers: ingestRequestHeaders, + method: 'POST', + }, + ); + }); + }); + + // --------------------------------------------------------------------------- + // Build step behavior + // --------------------------------------------------------------------------- + describe('build step behavior', () => { + it('should fall back to one-time fetch when bundled definitions missing during build', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'missing-file', + definitions: null, + }); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/datafile')) + return Promise.resolve(Response.json(makeBundled())); + return Promise.resolve(new Response('', { status: 200 })); + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + buildStep: true, + }); + + // run two in parallel to ensure we still only track one read + const [result] = await Promise.all([ + client.evaluate('flagA'), + client.evaluate('flagB'), + ]); + + expect(result.value).toBe(true); + expect(result.metrics?.mode).toBe('build'); + expect(result.metrics?.source).toBe('remote'); + + const fetchCall = fetchMock.mock.calls.find((call) => + call[0]?.toString().includes('/v1/datafile'), + ); + expect(fetchCall).toBeDefined(); + + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/datafile', + { + signal: expect.any(AbortSignal), + headers: datafileRequestHeaders, + }, + ); + + await client.shutdown(); + + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + body: JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: { + configOrigin: 'in-memory', + cacheStatus: 'HIT', + cacheAction: 'NONE', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 1, + mode: 'build', + revision: '1', + environment: 'test', + }, + }, + ]), + headers: ingestRequestHeaders, + method: 'POST', + }, + ); + }); + + it('should throw when bundled definitions missing and fetch fails during build (no defaultValue)', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'missing-file', + definitions: null, + }); + + fetchMock.mockRejectedValue(new Error('network error')); + + const client = createClient(sdkKey, { + fetch: fetchMock, + buildStep: true, + }); + + await expect(client.evaluate('flagA')).rejects.toThrow( + '@vercel/flags-core: No flag definitions available during 
build', + ); + }); + + it('should cache data after first build step read', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled(), + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + buildStep: true, + }); + + const first = await client.evaluate('flagA'); + expect(first.metrics?.cacheStatus).toBe('HIT'); + + const second = await client.evaluate('flagA'); + expect(second.metrics?.cacheStatus).toBe('HIT'); + + // readBundledDefinitions should only be called once + expect(readBundledDefinitions).toHaveBeenCalledTimes(1); + + await client.shutdown(); + }); + + it('should skip network when buildStep: true even if stream/polling configured', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled(), + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + buildStep: true, + stream: true, + polling: true, + }); + + const result = await client.evaluate('flagA'); + + expect(result.metrics?.source).toBe('embedded'); + expect(result.metrics?.mode).toBe('build'); + expect(fetchMock).not.toHaveBeenCalled(); + + await client.shutdown(); + }); + + it('should use datafile over bundled in build step', async () => { + const providedDatafile = makeBundled({ + configUpdatedAt: 2, + definitions: { + flagA: { + environments: { production: 1 }, + variants: [false, true], + }, + }, + }); + + const bundled = makeBundled({ + configUpdatedAt: 1, + definitions: { + flagA: { + environments: { production: 0 }, + variants: [false, true], + }, + }, + }); + + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: bundled, + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + buildStep: true, + datafile: providedDatafile, + }); + + const result = await client.evaluate('flagA'); + + // value true means variant index 1 (from provided datafile), not 0 (bundled) + expect(result.value).toBe(true); + 
expect(result.metrics?.source).toBe('in-memory'); + + await client.shutdown(); + }); + }); + + // --------------------------------------------------------------------------- + // Stream behavior + // --------------------------------------------------------------------------- + describe('stream behavior', () => { + it('should handle messages split across chunks', async () => { + const datafile = makeBundled({ projectId: 'test-project' }); + const fullMessage = JSON.stringify({ + type: 'datafile', + data: datafile, + }); + const part1 = fullMessage.slice(0, 20); + const part2 = `${fullMessage.slice(20)}\n`; + + const encoder = new TextEncoder(); + let streamController: ReadableStreamDefaultController; + + const body = new ReadableStream({ + start(c) { + streamController = c; + }, + }); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? input : input.toString(); + if (url.includes('/v1/stream')) { + return Promise.resolve(new Response(body, { status: 200 })); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const client = createClient(sdkKey, { fetch: fetchMock }); + const initPromise = client.initialize(); + + // Send chunks separately + streamController!.enqueue(encoder.encode(part1)); + await vi.advanceTimersByTimeAsync(10); + streamController!.enqueue(encoder.encode(part2)); + await vi.advanceTimersByTimeAsync(0); + + await initPromise; + + const result = await client.evaluate('flagA'); + expect(result.value).toBe(true); + expect(result.metrics?.source).toBe('in-memory'); + expect(result.metrics?.connectionState).toBe('connected'); + + streamController!.close(); + await client.shutdown(); + }); + + it('should update definitions when new datafile messages arrive', async () => { + const datafile1 = makeBundled({ + revision: 1, + configUpdatedAt: 1, + definitions: { + flagA: { + environments: { production: 0 }, + variants: [false, true], + }, + }, + }); + const datafile2 = makeBundled({ + revision: 2, 
+ configUpdatedAt: 2, + definitions: { + flagA: { + environments: { production: 1 }, + variants: [false, true], + }, + }, + }); + + const stream = createMockStream(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? input : input.toString(); + if (url.includes('/v1/stream')) return stream.response; + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const client = createClient(sdkKey, { fetch: fetchMock }); + const initPromise = client.initialize(); + + stream.push({ type: 'datafile', data: datafile1 }); + await vi.advanceTimersByTimeAsync(0); + await initPromise; + + // First evaluate returns variant 0 (false) + const result1 = await client.evaluate('flagA'); + expect(result1.value).toBe(false); + + // Push updated definitions + stream.push({ type: 'datafile', data: datafile2 }); + await vi.advanceTimersByTimeAsync(0); + + // Second evaluate returns variant 1 (true) + const result2 = await client.evaluate('flagA'); + expect(result2.value).toBe(true); + + stream.close(); + await client.shutdown(); + }); + + it('should fall back to bundled when stream times out', async () => { + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled(), + }); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) { + // Stream opens but never sends data + const body = new ReadableStream({ start() {} }); + return Promise.resolve(new Response(body, { status: 200 })); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + polling: false, + }); + + // initialize() now waits for the stream to confirm (primed/datafile) + // but falls back to bundled data after the init timeout + const initPromise = client.initialize(); + await vi.advanceTimersByTimeAsync(3000); + await initPromise; + + const result = await client.evaluate('flagA'); + expect(result.value).toBe(true); + expect(result.metrics?.source).toBe('embedded'); + expect(result.metrics?.connectionState).toBe('disconnected'); + + expect(warnSpy).toHaveBeenCalledWith( + '@vercel/flags-core: Stream initialization timeout, falling back', + ); + warnSpy.mockRestore(); + }); + + it('should use bundled definitions when stream errors (502) after init timeout', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled(), + }); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) { + return Promise.resolve(new Response(null, { status: 502 })); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + const client = createClient(sdkKey, { + fetch: fetchMock, + polling: false, + }); + + const evalPromise = client.evaluate('flagA'); + + // The 502 triggers stream error; init promise hangs until timeout + await vi.advanceTimersByTimeAsync(3_000); + + const result = await evalPromise; + expect(result.value).toBe(true); + expect(result.metrics?.source).toBe('embedded'); + + expect(errorSpy).toHaveBeenCalledWith( + '@vercel/flags-core: Stream error', + expect.any(Error), + ); + expect(warnSpy).toHaveBeenCalledWith( + '@vercel/flags-core: Stream initialization timeout, falling back', + ); + errorSpy.mockRestore(); + warnSpy.mockRestore(); + }); + + it('should fast-fail on 401 without waiting for stream timeout', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled(), + }); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? input : input.toString(); + if (url.includes('/v1/stream')) { + return Promise.resolve(new Response(null, { status: 401 })); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + + const client = createClient(sdkKey, { fetch: fetchMock }); + + const evalPromise = client.evaluate('flagA'); + + // Only advance a tiny amount — well under the 3s stream timeout. + // If the 401 fast-fail works, evaluate resolves without the full timeout. 
+ await vi.advanceTimersByTimeAsync(100); + + const result = await evalPromise; + expect(result.value).toBe(true); + expect(result.metrics?.source).toBe('embedded'); + + errorSpy.mockRestore(); + + // Only one stream call — 401 does not trigger retries + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/stream', + { + headers: { + ...streamRequestHeaders, + 'X-Revision': '1', + }, + signal: expect.any(AbortSignal), + }, + ); + + // Advance time to allow any potential retries (should not happen) + await vi.advanceTimersByTimeAsync(5_000); + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/stream', + { + headers: { ...streamRequestHeaders, 'X-Revision': '1' }, + signal: expect.any(AbortSignal), + }, + ); + + await client.shutdown(); + await vi.advanceTimersByTimeAsync(0); + // still only one call, no ingest calls + expect(fetchMock).toHaveBeenCalledTimes(1); + }); + + it('should use custom initTimeoutMs value', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled(), + }); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) { + const body = new ReadableStream({ start() {} }); + return Promise.resolve(new Response(body, { status: 200 })); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + const client = createClient(sdkKey, { + fetch: fetchMock, + stream: { initTimeoutMs: 500 }, + polling: false, + }); + + const initPromise = client.initialize(); + + // Advance only 500ms (custom timeout) + await vi.advanceTimersByTimeAsync(500); + await initPromise; + + const result = await client.evaluate('flagA'); + expect(result.metrics?.source).toBe('embedded'); + + expect(warnSpy).toHaveBeenCalledWith( + '@vercel/flags-core: Stream initialization timeout, falling back', + ); + warnSpy.mockRestore(); + }); + + it('should not spam the server when stream repeatedly connects then disconnects', async () => { + const datafile = makeBundled(); + let streamRequestCount = 0; + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) { + streamRequestCount++; + + // Each stream connection sends a datafile (resetting retryCount) + // then immediately closes — simulating a flapping connection + const encoder = new TextEncoder(); + const body = new ReadableStream({ + start(controller) { + controller.enqueue( + encoder.encode( + `${JSON.stringify({ type: 'datafile', data: datafile })}\n`, + ), + ); + controller.close(); + }, + }); + return Promise.resolve(new Response(body, { status: 200 })); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + polling: false, + }); + + await client.initialize(); + + // Advance 10 seconds — without the minimum gap protection this would + // cause an unbounded number of reconnections (retryCount resets to 0 + // after each datafile, and backoff(1)=0 gives immediate retry). + // With the fix, reconnections are spaced at least 1s apart. + await vi.advanceTimersByTimeAsync(10_000); + + // At most ~11 attempts in 10s (initial + 10 reconnections at 1s each) + expect(streamRequestCount).toBeLessThanOrEqual(12); + // But we should still see reconnection attempts happening + expect(streamRequestCount).toBeGreaterThanOrEqual(2); + + await client.shutdown(); + }); + + it('should disable stream when stream: false', async () => { + const datafile = makeBundled(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString();
+        if (url.includes('/v1/datafile')) {
+          return Promise.resolve(Response.json(datafile));
+        }
+        return Promise.reject(new Error(`Unexpected fetch: ${url}`));
+      });
+
+      const client = createClient(sdkKey, {
+        fetch: fetchMock,
+        stream: false,
+        polling: true,
+      });
+
+      await client.initialize();
+      await vi.advanceTimersByTimeAsync(0);
+
+      // No stream requests should have been made,
+      // the below check verifies only a datafile call was made
+      expect(fetchMock).toHaveBeenCalledTimes(1);
+      expect(fetchMock).toHaveBeenLastCalledWith(
+        'https://flags.vercel.com/v1/datafile',
+        {
+          headers: datafileRequestHeaders,
+          signal: expect.any(AbortSignal),
+        },
+      );
+
+      await client.shutdown();
+    });
+  });
+
+  // ---------------------------------------------------------------------------
+  // Polling behavior
+  // ---------------------------------------------------------------------------
+  describe('polling behavior', () => {
+    it('should use polling when enabled', async () => {
+      let pollCount = 0;
+      const datafile = makeBundled();
+
+      fetchMock.mockImplementation((input) => {
+        const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/datafile')) { + pollCount++; + return Promise.resolve(Response.json(datafile)); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + stream: false, + polling: { intervalMs: 30_000, initTimeoutMs: 5000 }, + }); + + await client.initialize(); + + expect(pollCount).toBeGreaterThanOrEqual(1); + + // Wait for a few poll intervals + await vi.advanceTimersByTimeAsync(90_000); + + expect(pollCount).toBeGreaterThanOrEqual(3); + + await client.shutdown(); + }); + + it('should disable polling when polling: false', async () => { + const datafile = makeBundled(); + + const client = createClient(sdkKey, { + fetch: fetchMock, + stream: false, + polling: false, + datafile, + }); + + await client.initialize(); + await vi.advanceTimersByTimeAsync(100); + + // No datafile fetch requests should have been made + const pollCalls = fetchMock.mock.calls.filter((call) => + call[0]?.toString().includes('/v1/datafile'), + ); + expect(pollCalls).toHaveLength(0); + + await client.shutdown(); + }); + }); + + // --------------------------------------------------------------------------- + // Datafile option + // --------------------------------------------------------------------------- + describe('datafile option', () => { + it('should use provided datafile after stream init timeout', async () => { + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + const stream = createMockStream(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) return stream.response; + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const datafile = makeBundled({ projectId: 'provided' }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + datafile, + }); + + // evaluate() triggers lazy initialize() which waits for stream + const evalPromise = client.evaluate('flagA'); + await vi.advanceTimersByTimeAsync(3000); + const result = await evalPromise; + + expect(result.value).toBe(true); + expect(result.metrics?.source).toBe('in-memory'); + + expect(warnSpy).toHaveBeenCalledWith( + '@vercel/flags-core: Stream initialization timeout, falling back', + ); + + warnSpy.mockRestore(); + stream.close(); + await client.shutdown(); + }); + + it('should resolve initialize() with provided datafile after stream init timeout', async () => { + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + // Stream that never sends data + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) { + const body = new ReadableStream({ start() {} }); + return Promise.resolve(new Response(body, { status: 200 })); + } + return Promise.resolve(new Response('', { status: 200 })); + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + datafile: makeBundled(), + }); + + // initialize() waits for stream, falls back after timeout + const initPromise = client.initialize(); + await vi.advanceTimersByTimeAsync(3000); + await initPromise; + + const result = await client.evaluate('flagA'); + expect(result.value).toBe(true); + expect(result.metrics?.source).toBe('in-memory'); + + expect(warnSpy).toHaveBeenCalledWith( + '@vercel/flags-core: Stream initialization timeout, falling back', + ); + + warnSpy.mockRestore(); + await client.shutdown(); + }); + + it('should use provided datafile then update from polling', async () => { + let pollCount = 0; + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/datafile')) { + pollCount++; + return Promise.resolve( + Response.json( + makeBundled({ + configUpdatedAt: 2, + definitions: { + flagA: { + environments: { production: 0 }, + variants: [false, true], + }, + }, + }), + ), + ); + } + return Promise.resolve(new Response('', { status: 200 })); + }); + + const providedDatafile = makeBundled({ + configUpdatedAt: 1, + definitions: { + flagA: { + environments: { production: 1 }, + variants: [false, true], + }, + }, + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + stream: false, + polling: { intervalMs: 30_000, initTimeoutMs: 5000 }, + datafile: providedDatafile, + }); + + // initialize() now waits for the first poll before resolving + await client.initialize(); + + // The initial poll during initialize() already fetched fresh data + expect(pollCount).toBe(1); + + // First evaluate uses polled data (variant 0 = false), since the + // poll during init returned newer data (configUpdatedAt: 2 > 1) + const result1 = await client.evaluate('flagA'); + expect(result1.value).toBe(false); + expect(result1.metrics?.source).toBe('in-memory'); + + // Advance past a poll interval to trigger another update + await vi.advanceTimersByTimeAsync(30_000); + + expect(pollCount).toBe(2); + + // Still uses polled data + const result2 = await client.evaluate('flagA'); + expect(result2.value).toBe(false); + + await client.shutdown(); + }); + + it('should work with datafile only (stream and polling disabled)', async () => { + const datafile = makeBundled(); + + const client = createClient(sdkKey, { + fetch: fetchMock, + stream: false, + polling: false, + datafile, + }); + + await client.initialize(); + const result = await client.evaluate('flagA'); + + expect(result.value).toBe(true); + expect(result.metrics?.source).toBe('in-memory'); + + // No network requests + const networkCalls = fetchMock.mock.calls.filter( + (call) => + call[0]?.toString().includes('/v1/stream') || + 
call[0]?.toString().includes('/v1/datafile'), + ); + expect(networkCalls).toHaveLength(0); + + await client.shutdown(); + }); + }); + + // --------------------------------------------------------------------------- + // Stream/polling coordination + // --------------------------------------------------------------------------- + describe('stream/polling coordination', () => { + it('should fall back to bundled when stream times out (skip polling)', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled({ projectId: 'bundled' }), + }); + + let pollCount = 0; + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? input : input.toString(); + if (url.includes('/v1/stream')) { + const body = new ReadableStream({ start() {} }); + return Promise.resolve(new Response(body, { status: 200 })); + } + if (url.includes('/v1/datafile')) { + pollCount++; + return Promise.resolve( + Response.json(makeBundled({ projectId: 'polled' })), + ); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + const client = createClient(sdkKey, { + fetch: fetchMock, + stream: { initTimeoutMs: 100 }, + polling: { intervalMs: 30_000, initTimeoutMs: 5000 }, + }); + + const initPromise = client.initialize(); + await vi.advanceTimersByTimeAsync(100); + await initPromise; + const after = new Date(); + + const result = await client.evaluate('flagA'); + expect(result.metrics?.source).toBe('embedded'); + expect(pollCount).toBe(0); + + warnSpy.mockRestore(); + + expect(fetchMock).toHaveBeenCalledTimes(1); + await client.shutdown(); + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + body: JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: after.getTime(), + payload: { + configOrigin: 'embedded', + cacheStatus: 'HIT', + cacheAction: 
'NONE', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 1, + mode: 'offline', + revision: '1', + environment: 'test', + }, + }, + ]), + headers: { + Authorization: 'Bearer vf_fake', + 'Content-Type': 'application/json', + 'User-Agent': 'VercelFlagsCore/1.0.1', + }, + method: 'POST', + }, + ); + }); + + it('should use bundled definitions when stream fails after init timeout (skip polling)', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled({ projectId: 'bundled' }), + }); + + let pollCount = 0; + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? input : input.toString(); + if (url.includes('/v1/stream')) { + return Promise.resolve(new Response(null, { status: 500 })); + } + if (url.includes('/v1/datafile')) { + pollCount++; + return Promise.resolve( + Response.json(makeBundled({ projectId: 'polled' })), + ); + } + if (url.includes('/v1/ingest')) return Promise.resolve(new Response()); + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + const initTimeoutMs = 1_500; + const client = createClient(sdkKey, { + fetch: fetchMock, + stream: { initTimeoutMs }, + polling: { intervalMs: 30_000, initTimeoutMs: 5000 }, + }); + + // initialize() waits for stream, falls back after 1.5s timeout + const initPromise = client.initialize(); + await vi.advanceTimersByTimeAsync(initTimeoutMs); + await initPromise; + const after = new Date(); + + const result = await client.evaluate('flagA'); + expect(result.metrics?.source).toBe('embedded'); + // No polling should have started + expect(pollCount).toBe(0); + + errorSpy.mockRestore(); + warnSpy.mockRestore(); + + await client.shutdown(); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + body: 
JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: after.getTime(), + payload: { + configOrigin: 'embedded', + cacheStatus: 'HIT', + cacheAction: 'NONE', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 1, + mode: 'offline', + revision: '1', + environment: 'test', + }, + }, + ]), + headers: ingestRequestHeaders, + method: 'POST', + }, + ); + }); + + it('should never stream and poll simultaneously when stream is connected', async () => { + const stream = createMockStream(); + let pollCount = 0; + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? input : input.toString(); + if (url.includes('/v1/stream')) return stream.response; + if (url.includes('/v1/datafile')) { + pollCount++; + return Promise.resolve(Response.json(makeBundled())); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const client = createClient(sdkKey, { fetch: fetchMock }); + + const initPromise = client.initialize(); + + stream.push({ type: 'datafile', data: makeBundled() }); + await vi.advanceTimersByTimeAsync(0); + await initPromise; + + // Wait to see if any polls happen + await vi.advanceTimersByTimeAsync(60_000); + + expect(pollCount).toBe(0); + + stream.close(); + await client.shutdown(); + }); + + it('should use datafile immediately while starting background stream', async () => { + vi.useRealTimers(); // Need real timers for delayed stream + + const stream = createMockStream(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) { + return stream.response; + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const providedDatafile = makeBundled({ + projectId: 'provided', + definitions: { + flagA: { + environments: { production: 0 }, + variants: [false, true], + }, + }, + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + datafile: providedDatafile, + }); + + // Initialize waits for stream confirmation; push primed so it resolves + const initPromise = client.initialize(); + await new Promise((r) => setTimeout(r, 0)); + stream.push({ + type: 'primed', + revision: 1, + projectId: 'provided', + environment: 'production', + }); + await initPromise; + + // First evaluate uses provided datafile immediately + const result1 = await client.evaluate('flagA'); + expect(result1.value).toBe(false); // variant 0 from provided + expect(result1.metrics?.source).toBe('in-memory'); + + // Now push stream data (with newer configUpdatedAt) + stream.push({ + type: 'datafile', + data: makeBundled({ + projectId: 'stream', + configUpdatedAt: 2, + definitions: { + flagA: { + environments: { production: 1 }, + variants: [false, true], + }, + }, + }), + }); + + // Wait for stream to deliver + await new Promise((r) => setTimeout(r, 0)); + + const result2 = await client.evaluate('flagA'); + expect(result2.value).toBe(true); // variant 1 from stream + + stream.close(); + await client.shutdown(); + }); + + it('should send X-Revision header when provided datafile has revision', async () => { + vi.useRealTimers(); + + const stream = createMockStream(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) { + return stream.response; + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const providedDatafile = makeBundled({ revision: 42 }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + datafile: providedDatafile, + }); + + // Push primed so initialize() resolves without waiting for timeout + const initPromise = client.initialize(); + await new Promise((r) => setTimeout(r, 0)); + stream.push({ + type: 'primed', + revision: 42, + projectId: 'prj_123', + environment: 'production', + }); + await initPromise; + + // The stream request should include the X-Revision header + const streamCall = fetchMock.mock.calls.find((call) => { + const url = typeof call[0] === 'string' ? call[0] : call[0]!.toString(); + return url.includes('/v1/stream'); + }); + expect(streamCall).toBeDefined(); + const headers = streamCall![1]!.headers as Record; + expect(headers['X-Revision']).toBe('42'); + + stream.close(); + await client.shutdown(); + }); + + it('should not send X-Revision header when provided datafile has no revision', async () => { + vi.useRealTimers(); + + const stream = createMockStream(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) { + return stream.response; + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + // DatafileInput without revision field + const providedDatafile = { + definitions: { + flagA: { + environments: { production: 1 }, + variants: [false, true], + }, + }, + environment: 'production', + projectId: 'prj_123', + configUpdatedAt: 1, + }; + + const client = createClient(sdkKey, { + fetch: fetchMock, + datafile: providedDatafile, + }); + + // Push a datafile so initialize() resolves without waiting for timeout + const initPromise = client.initialize(); + await new Promise((r) => setTimeout(r, 0)); + stream.push({ + type: 'datafile', + data: makeBundled({ configUpdatedAt: 1 }), + }); + await initPromise; + + const streamCall = fetchMock.mock.calls.find((call) => { + const url = typeof call[0] === 'string' ? call[0] : call[0]!.toString(); + return url.includes('/v1/stream'); + }); + expect(streamCall).toBeDefined(); + const headers = streamCall![1]!.headers as Record; + expect(headers['X-Revision']).toBeUndefined(); + + stream.close(); + await client.shutdown(); + }); + + it('should handle primed response and keep using provided datafile', async () => { + vi.useRealTimers(); + + const stream = createMockStream(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) { + return stream.response; + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const providedDatafile = makeBundled({ + revision: 33, + definitions: { + flagA: { + environments: { production: 0 }, + variants: [false, true], + }, + }, + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + datafile: providedDatafile, + }); + + // Server responds with primed (our revision is current), + // which resolves initialize() without sending a full datafile + const initPromise = client.initialize(); + await new Promise((r) => setTimeout(r, 0)); + stream.push({ + type: 'primed', + revision: 33, + projectId: 'prj_123', + environment: 'production', + }); + await initPromise; + + // Primed confirms the data is current — value is unchanged, + // state is connected and streaming + const result = await client.evaluate('flagA'); + expect(result.value).toBe(false); // variant 0 from provided + expect(result.metrics?.connectionState).toBe('connected'); + expect(result.metrics?.mode).toBe('streaming'); + + stream.close(); + await client.shutdown(); + }); + + it('should handle primed then subsequent datafile update', async () => { + vi.useRealTimers(); + + const stream = createMockStream(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) { + return stream.response; + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const providedDatafile = makeBundled({ + revision: 5, + definitions: { + flagA: { + environments: { production: 0 }, + variants: [false, true], + }, + }, + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + datafile: providedDatafile, + }); + + // Server responds with primed first, resolving initialize() + const initPromise = client.initialize(); + await new Promise((r) => setTimeout(r, 0)); + stream.push({ + type: 'primed', + revision: 5, + projectId: 'prj_123', + environment: 'production', + }); + await initPromise; + + // Then server pushes a new datafile (config changed) + stream.push({ + type: 'datafile', + data: makeBundled({ + configUpdatedAt: 2, + definitions: { + flagA: { + environments: { production: 1 }, + variants: [false, true], + }, + }, + }), + }); + await new Promise((r) => setTimeout(r, 0)); + + // Should use the updated data + const result = await client.evaluate('flagA'); + expect(result.value).toBe(true); // variant 1 from stream update + + stream.close(); + await client.shutdown(); + }); + + it('should not start polling from stream disconnect during initialization', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled(), + }); + + let pollCount = 0; + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/stream')) { + return Promise.resolve(new Response(null, { status: 500 })); + } + if (url.includes('/v1/datafile')) { + pollCount++; + return Promise.resolve(Response.json(makeBundled())); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + const client = createClient(sdkKey, { + fetch: fetchMock, + stream: { initTimeoutMs: 5000 }, + polling: { intervalMs: 30_000, initTimeoutMs: 5000 }, + }); + + // Stream retries with backoff; advance timers so the init timeout fires + const initPromise = client.initialize(); + await vi.advanceTimersByTimeAsync(5100); + await initPromise; + + expect(pollCount).toBe(0); + + await client.shutdown(); + errorSpy.mockRestore(); + warnSpy.mockRestore(); + }); + }); + + // --------------------------------------------------------------------------- + // Degraded connection scenarios + // --------------------------------------------------------------------------- + describe('degraded connection scenarios', () => { + it('should transition to degraded on disconnect and back to streaming on reconnect with newer data', async () => { + const datafile1 = makeBundled({ + configUpdatedAt: 1, + definitions: { + flagA: { + environments: { production: 0 }, + variants: [false, true], + }, + }, + }); + const datafile2 = makeBundled({ + configUpdatedAt: 2, + definitions: { + flagA: { + environments: { production: 1 }, + variants: [false, true], + }, + }, + }); + + let streamCount = 0; + const streams: ReturnType[] = []; + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString();
      if (url.includes('/v1/stream')) {
        streamCount++;
        const s = createMockStream();
        streams.push(s);
        return s.response;
      }
      if (url.includes('/v1/ingest')) return Promise.resolve(new Response());
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });
    const initPromise = client.initialize();

    // Allow the eager bundled load (returns undefined) to settle
    // so the stream connection is started
    await vi.advanceTimersByTimeAsync(0);

    // First stream sends datafile
    streams[0]!.push({ type: 'datafile', data: datafile1 });
    await vi.advanceTimersByTimeAsync(0);
    await initPromise;

    // Verify streaming state
    const result1 = await client.evaluate('flagA');
    expect(result1.value).toBe(false);
    expect(result1.metrics?.connectionState).toBe('connected');

    // Disconnect (server closes stream)
    streams[0]!.close();
    await vi.advanceTimersByTimeAsync(0);

    // Verify degraded state
    const result2 = await client.evaluate('flagA');
    expect(result2.value).toBe(false);
    expect(result2.metrics?.connectionState).toBe('disconnected');

    // Advance past reconnection backoff (minimum 1s gap)
    await vi.advanceTimersByTimeAsync(1_000);
    await vi.advanceTimersByTimeAsync(0);

    // Push newer data on reconnected stream
    expect(streamCount).toBeGreaterThanOrEqual(2);
    streams[1]!.push({ type: 'datafile', data: datafile2 });
    await vi.advanceTimersByTimeAsync(0);

    // Verify back to streaming with newer data
    const result3 = await client.evaluate('flagA');
    expect(result3.value).toBe(true);
    expect(result3.metrics?.connectionState).toBe('connected');

    await client.shutdown();
  });

  // Exercises the ping-based liveness check: a connection that stops
  // delivering pings must be treated as dead and re-established.
  it('should detect zombie connection when pings stop arriving', async () => {
    const datafile = makeBundled();

    let streamCount = 0;
    // NOTE(review): type argument appears stripped by the diff mangling —
    // presumably ReturnType<typeof createMockStream>[]; confirm against original.
    const streams: ReturnType[] = [];

    fetchMock.mockImplementation((input) => {
      const url =
typeof input === 'string' ? input : input.toString();
      if (url.includes('/v1/stream')) {
        streamCount++;
        const s = createMockStream();
        streams.push(s);
        return s.response;
      }
      if (url.includes('/v1/ingest')) return Promise.resolve(new Response());
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    // Silence the expected "stream error" console output from the forced timeout.
    const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });
    const initPromise = client.initialize();

    // Allow the eager bundled load to settle so the stream starts
    await vi.advanceTimersByTimeAsync(0);

    streams[0]!.push({ type: 'datafile', data: datafile });
    await vi.advanceTimersByTimeAsync(0);
    await initPromise;

    // Send pings for a while (proves connection is alive)
    streams[0]!.push({ type: 'ping' });
    await vi.advanceTimersByTimeAsync(30_000);
    streams[0]!.push({ type: 'ping' });
    await vi.advanceTimersByTimeAsync(30_000);

    // Verify still connected
    const result1 = await client.evaluate('flagA');
    expect(result1.metrics?.connectionState).toBe('connected');

    // Now stop sending pings, advance past timeout (90s)
    // NOTE(review): 90s is presumably the controller's ping timeout — confirm.
    await vi.advanceTimersByTimeAsync(90_000);
    await vi.advanceTimersByTimeAsync(0);

    // Should have transitioned to degraded
    const result2 = await client.evaluate('flagA');
    expect(result2.metrics?.connectionState).toBe('disconnected');

    // Should have attempted reconnection
    expect(streamCount).toBeGreaterThanOrEqual(2);

    await client.shutdown();
    errorSpy.mockRestore();
  });

  it('should skip malformed JSON in stream and continue processing', async () => {
    const encoder = new TextEncoder();
    let streamController: ReadableStreamDefaultController;

    // Hand-rolled body so the test can enqueue raw (possibly invalid) bytes.
    const body = new ReadableStream({
      start(c) {
        streamController = c;
      },
    });

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) {
        return Promise.resolve(new Response(body, { status: 200 }));
      }
      if (url.includes('/v1/ingest')) return Promise.resolve(new Response());
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });
    const initPromise = client.initialize();

    // Send malformed JSON first
    streamController!.enqueue(encoder.encode('not valid json\n'));
    await vi.advanceTimersByTimeAsync(0);

    // Then send valid datafile
    const datafile = makeBundled();
    streamController!.enqueue(
      encoder.encode(
        `${JSON.stringify({ type: 'datafile', data: datafile })}\n`,
      ),
    );
    await vi.advanceTimersByTimeAsync(0);
    await initPromise;

    const result = await client.evaluate('flagA');
    expect(result.value).toBe(true);
    expect(result.metrics?.connectionState).toBe('connected');

    expect(warnSpy).toHaveBeenCalledWith(
      '@vercel/flags-core: Failed to parse stream message, skipping',
    );

    streamController!.close();
    await client.shutdown();
    warnSpy.mockRestore();
  });

  it('should silently ignore empty lines in stream', async () => {
    const encoder = new TextEncoder();
    let streamController: ReadableStreamDefaultController;

    const body = new ReadableStream({
      start(c) {
        streamController = c;
      },
    });

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) {
        return Promise.resolve(new Response(body, { status: 200 }));
      }
      if (url.includes('/v1/ingest')) return Promise.resolve(new Response());
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });
    const initPromise = client.initialize();

    // Send empty lines
    streamController!.enqueue(encoder.encode('\n\n\n'));
    await vi.advanceTimersByTimeAsync(0);

    // Then valid datafile
    const datafile = makeBundled();
    streamController!.enqueue(
      encoder.encode(
        `${JSON.stringify({ type: 'datafile', data: datafile })}\n`,
      ),
    );
    await vi.advanceTimersByTimeAsync(0);
    await initPromise;

    const result = await client.evaluate('flagA');
    expect(result.value).toBe(true);

    // No warnings should have been logged for empty lines
    expect(warnSpy).not.toHaveBeenCalledWith(
      '@vercel/flags-core: Failed to parse stream message, skipping',
    );

    streamController!.close();
    await client.shutdown();
    warnSpy.mockRestore();
  });

  it('should handle 200 response with missing body', async () => {
    vi.mocked(readBundledDefinitions).mockResolvedValue({
      state: 'ok',
      definitions: makeBundled(),
    });

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) {
        // 200 with a null body — client must surface a stream error and
        // fall back to the bundled definitions.
        return Promise.resolve(new Response(null, { status: 200 }));
      }
      if (url.includes('/v1/ingest')) return Promise.resolve(new Response());
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
    // NOTE(review): warnSpy only suppresses console noise here — it is never
    // asserted on, only restored below.
    const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: { initTimeoutMs: 2000 },
      polling: false,
    });

    const initPromise = client.initialize();
    await vi.advanceTimersByTimeAsync(2_000);
    await initPromise;

    const result = await client.evaluate('flagA');
    expect(result.metrics?.source).toBe('embedded');
    expect(result.metrics?.connectionState).toBe('disconnected');

    expect(errorSpy).toHaveBeenCalledWith(
      '@vercel/flags-core: Stream error',
      expect.objectContaining({
        message: 'stream body was not present',
      }),
    );

    await client.shutdown();
    errorSpy.mockRestore();
    warnSpy.mockRestore();
  });

  it('should recover from network error mid-stream', async () => {
    const datafile = makeBundled();
    let streamCount = 0;
    // NOTE(review): type argument likely stripped by the diff mangling —
    // presumably ReturnType<typeof createMockStream>[].
    const streams: ReturnType[] = [];

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) {
        streamCount++;
        if (streamCount === 1) {
          // First stream: send datafile, then error
          const encoder = new TextEncoder();
          const body = new ReadableStream({
            start(controller) {
              controller.enqueue(
                encoder.encode(
                  `${JSON.stringify({ type: 'datafile', data: datafile })}\n`,
                ),
              );
              // Schedule error after a tick
              setTimeout(
                () => controller.error(new TypeError('network error')),
                0,
              );
            },
          });
          return Promise.resolve(new Response(body, { status: 200 }));
        }
        // Subsequent streams: normal
        const s = createMockStream();
        streams.push(s);
        return s.response;
      }
      if (url.includes('/v1/ingest')) return Promise.resolve(new Response());
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });
    const initPromise = client.initialize();
    await vi.advanceTimersByTimeAsync(0);
    await initPromise;

    // Should have received initial data
    const result1 = await client.evaluate('flagA');
    expect(result1.value).toBe(true);

    // Error fires, wait for disconnect
    await vi.advanceTimersByTimeAsync(0);

    // The error triggers reconnection. Advance past backoff.
    await vi.advanceTimersByTimeAsync(1_000);
    await vi.advanceTimersByTimeAsync(0);

    expect(streamCount).toBeGreaterThanOrEqual(2);

    // Reconnect with new data
    streams[0]!.push({
      type: 'datafile',
      data: makeBundled({ configUpdatedAt: 2 }),
    });
    await vi.advanceTimersByTimeAsync(0);

    const result2 = await client.evaluate('flagA');
    expect(result2.metrics?.connectionState).toBe('connected');

    await client.shutdown();
    errorSpy.mockRestore();
  });

  it('should reject older data on stream reconnection', async () => {
    const newerData = makeBundled({
      configUpdatedAt: 2000,
      definitions: {
        flagA: {
          environments: { production: 1 },
          variants: [false, true],
        },
      },
    });
    const olderData = makeBundled({
      configUpdatedAt: 1000,
      definitions: {
        flagA: {
          environments: { production: 0 },
          variants: [false, true],
        },
      },
    });

    let streamCount = 0;
    // NOTE(review): type argument likely stripped by the diff mangling.
    const streams: ReturnType[] = [];

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) {
        streamCount++;
        const s = createMockStream();
        streams.push(s);
        return s.response;
      }
      if (url.includes('/v1/ingest')) return Promise.resolve(new Response());
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });
    const initPromise = client.initialize();

    // Allow the eager bundled load to settle so the stream starts
    await vi.advanceTimersByTimeAsync(0);

    // First stream sends newer data
    streams[0]!.push({ type: 'datafile', data: newerData });
    await vi.advanceTimersByTimeAsync(0);
    await initPromise;

    const result1 = await client.evaluate('flagA');
    expect(result1.value).toBe(true); // variant 1 from newer data

    // Stream disconnects
    streams[0]!.close();
    await vi.advanceTimersByTimeAsync(1_000);
    await vi.advanceTimersByTimeAsync(0);

    // Reconnected stream sends older data
    expect(streamCount).toBeGreaterThanOrEqual(2);
    streams[1]!.push({ type: 'datafile', data: olderData });
    await vi.advanceTimersByTimeAsync(0);

    // Should still have newer data (configUpdatedAt guard rejected older)
    const result2 = await client.evaluate('flagA');
    expect(result2.value).toBe(true); // still variant 1
    expect(result2.metrics?.connectionState).toBe('connected');

    await client.shutdown();
  });

  it('should cleanly shut down mid-stream', async () => {
    const stream = createMockStream();

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) return stream.response;
      if (url.includes('/v1/ingest')) return Promise.resolve(new Response());
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });
    const initPromise = client.initialize();

    stream.push({ type: 'datafile', data: makeBundled() });
    await vi.advanceTimersByTimeAsync(0);
    await initPromise;

    // Verify connected
    const result = await client.evaluate('flagA');
    expect(result.metrics?.connectionState).toBe('connected');

    // Shutdown while stream is still open — should not throw
    await client.shutdown();
    await vi.advanceTimersByTimeAsync(0);

    // No stream requests should happen after shutdown, which
    // we verify by checking the calls that actually happened
    expect(fetchMock).toHaveBeenCalledTimes(2);
    expect(fetchMock.mock.calls).toEqual([
      [
        'https://flags.vercel.com/v1/stream',
        {
          headers: streamRequestHeaders,
          signal: expect.any(AbortSignal),
        },
      ],
      [
        'https://flags.vercel.com/v1/ingest',
        {
          headers: ingestRequestHeaders,
          method: 'POST',
          body: JSON.stringify([
            {
              type: 'FLAGS_CONFIG_READ',
              ts: date.getTime(),
              payload: {
                configOrigin: 'in-memory',
                cacheStatus: 'HIT',
                cacheAction: 'FOLLOWING',
                cacheIsFirstRead: true,
                cacheIsBlocking: false,
                duration: 0,
                configUpdatedAt: 1,
                mode: 'stream',
                revision: '1',
                environment: 'test',
              },
            },
          ]),
        },
      ],
    ]);

    await vi.advanceTimersByTimeAsync(5_000);

    // still no streaming calls, as the count has not changed from above
    expect(fetchMock).toHaveBeenCalledTimes(2);
  });
});

// ---------------------------------------------------------------------------
// getDatafile
// ---------------------------------------------------------------------------
describe('getDatafile', () => {
  it('should return bundled definitions when called without initialize', async ()
=> {
    vi.mocked(readBundledDefinitions).mockResolvedValue({
      state: 'ok',
      definitions: makeBundled(),
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
    });

    const result = await client.getDatafile();
    expect(result.metrics.source).toBe('embedded');
    expect(result.metrics.cacheStatus).toBe('MISS');
    expect(result.metrics.connectionState).toBe('disconnected');

    await client.shutdown();
  });

  it('should fetch datafile when called without initialize and no bundled definitions', async () => {
    vi.mocked(readBundledDefinitions).mockResolvedValue({
      state: 'missing-file',
      definitions: null,
    });

    const fetchedDatafile = makeBundled({ projectId: 'fetched' });

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ? input : input.toString();
      if (url.includes('/v1/datafile')) {
        return Promise.resolve(Response.json(fetchedDatafile));
      }
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
    });

    const result = await client.getDatafile();
    expect(result.metrics.source).toBe('remote');
    expect(result.metrics.cacheStatus).toBe('MISS');

    await client.shutdown();
    expect(fetchMock).toHaveBeenCalledTimes(1);
    expect(fetchMock).toHaveBeenLastCalledWith(
      'https://flags.vercel.com/v1/datafile',
      {
        headers: datafileRequestHeaders,
        signal: expect.any(AbortSignal),
      },
    );
  });

  it('should throw when called without initialize and all sources fail', async () => {
    vi.mocked(readBundledDefinitions).mockResolvedValue({
      state: 'missing-file',
      definitions: null,
    });

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/datafile')) {
        return Promise.resolve(new Response(null, { status: 500 }));
      }
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
    });

    await expect(client.getDatafile()).rejects.toThrow(
      '@vercel/flags-core: No flag definitions available',
    );

    await client.shutdown();
  });

  it('should return cached data when stream is connected', async () => {
    const stream = createMockStream();

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ? input : input.toString();
      if (url.includes('/v1/stream')) return stream.response;
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, { fetch: fetchMock });

    const initPromise = client.initialize();
    stream.push({ type: 'datafile', data: makeBundled() });
    await vi.advanceTimersByTimeAsync(0);
    await initPromise;

    const result = await client.getDatafile();
    expect(result.metrics.source).toBe('in-memory');
    expect(result.metrics.cacheStatus).toBe('HIT');
    expect(result.metrics.connectionState).toBe('connected');

    stream.close();
    await client.shutdown();

    // no evaluate call so no usage tracking
    expect(fetchMock).toHaveBeenCalledTimes(1);
    expect(fetchMock).toHaveBeenLastCalledWith(
      'https://flags.vercel.com/v1/stream',
      {
        headers: streamRequestHeaders,
        signal: expect.any(AbortSignal),
      },
    );
  });

  it('should use build step path when CI=1', async () => {
    process.env.CI = '1';

    vi.mocked(readBundledDefinitions).mockResolvedValue({
      state: 'ok',
      definitions: makeBundled(),
    });

    const client = createClient(sdkKey, { fetch: fetchMock });
    const result = await client.getDatafile();

    expect(result.metrics.source).toBe('embedded');
    expect(result.metrics.cacheStatus).toBe('MISS');

    await client.shutdown();
  });

  it('should return cached data on repeated calls', async () => {
    vi.mocked(readBundledDefinitions).mockResolvedValue({
      state: 'ok',
      definitions: makeBundled(),
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
    });

    const result1 = await client.getDatafile();
    expect(result1.metrics).toEqual({
      cacheStatus: 'MISS',
      connectionState: 'disconnected',
      mode: 'offline',
      readMs: 0,
      source: 'embedded',
    });

    const result2 = await client.getDatafile();
    expect(result2.metrics).toEqual({
      cacheStatus: 'STALE',
      connectionState: 'disconnected',
      mode: 'offline',
      readMs: 0,
      source: 'embedded',
    });

    await client.shutdown();
  });
});

// ---------------------------------------------------------------------------
// getFallbackDatafile
// ---------------------------------------------------------------------------
describe('getFallbackDatafile', () => {
  it('should return bundled definitions when available', async () => {
    const bundled = makeBundled();

    vi.mocked(readBundledDefinitions).mockResolvedValue({
      state: 'ok',
      definitions: bundled,
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
    });

    const result = await client.getFallbackDatafile();
    expect(result).toEqual(bundled);

    await client.shutdown();
    expect(fetchMock).not.toHaveBeenCalled();
  });

  it('should throw FallbackNotFoundError for missing-file state', async () => {
    vi.mocked(readBundledDefinitions).mockResolvedValue({
      state: 'missing-file',
      definitions: null,
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
    });

    await expect(client.getFallbackDatafile()).rejects.toThrow(
      'Bundled definitions file not found',
    );

    // Assert the error name via rejects.toMatchObject. The previous
    // try/catch pattern silently skipped the name assertion when the call
    // unexpectedly resolved, so a regression could pass unnoticed.
    await expect(client.getFallbackDatafile()).rejects.toMatchObject({
      name: 'FallbackNotFoundError',
    });

    await client.shutdown();
    expect(fetchMock).not.toHaveBeenCalled();
  });

  it('should throw FallbackEntryNotFoundError for missing-entry state', async () => {
    vi.mocked(readBundledDefinitions).mockResolvedValue({
      state: 'missing-entry',
      definitions: null,
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
    });

    await expect(client.getFallbackDatafile()).rejects.toThrow(
      '@vercel/flags-core: No bundled definitions found for SDK key',
    );

    // Same fix as above: rejects.toMatchObject cannot be silently skipped,
    // unlike an assertion inside a catch block.
    await expect(client.getFallbackDatafile()).rejects.toMatchObject({
      name: 'FallbackEntryNotFoundError',
    });

    await client.shutdown();
    expect(fetchMock).not.toHaveBeenCalled();
  });

  it('should throw for unexpected-error state', async () => {
    vi.mocked(readBundledDefinitions).mockResolvedValue({
      state: 'unexpected-error',
      definitions: null,
      error: new Error('Some error'),
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
    });

    await expect(client.getFallbackDatafile()).rejects.toThrow(
      '@vercel/flags-core: Failed to read bundled definitions',
    );

    await client.shutdown();
  });
});

// ---------------------------------------------------------------------------
// configUpdatedAt guard
// ---------------------------------------------------------------------------
describe('configUpdatedAt guard', () => {
  it('should not overwrite newer data with older stream message', async () => {
    vi.useRealTimers();

    const newerDatafile = makeBundled({
      configUpdatedAt: 2000,
      definitions: {
        flagA: {
          environments: { production: 1 },
          variants: [false, true],
        },
      },
    });

    const olderDatafile = makeBundled({
      configUpdatedAt: 1000,
      definitions: {
        flagA: {
          environments: { production: 0 },
          variants: [false, true],
        },
      },
    });

    const stream = createMockStream();

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) return stream.response;
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });

    const initPromise = client.initialize();

    // Send newer data first
    stream.push({ type: 'datafile', data: newerDatafile });
    await new Promise((r) => setTimeout(r, 10));
    await initPromise;

    // Then send older data
    stream.push({ type: 'datafile', data: olderDatafile });
    await new Promise((r) => setTimeout(r, 50));

    // Should still have newer data (older message was rejected)
    const result = await client.evaluate('flagA');
    const after = new Date();
    expect(result.value).toBe(true); // variant 1 = newer

    stream.close();
    expect(fetchMock).toHaveBeenCalledTimes(1);
    expect(fetchMock).toHaveBeenCalledWith(
      'https://flags.vercel.com/v1/stream',
      {
        headers: streamRequestHeaders,
        signal: expect.any(AbortSignal),
      },
    );
    await client.shutdown();

    expect(fetchMock).toHaveBeenCalledTimes(2);
    // NOTE(review): real timers are active in this test (vi.useRealTimers
    // above), so asserting ts === after.getTime() assumes the ingest event
    // timestamp matches `after` to the millisecond — potentially flaky.
    expect(fetchMock).toHaveBeenLastCalledWith(
      'https://flags.vercel.com/v1/ingest',
      {
        method: 'POST',
        headers: ingestRequestHeaders,
        body: JSON.stringify([
          {
            type: 'FLAGS_CONFIG_READ',
            ts: after.getTime(),
            payload: {
              configOrigin: 'in-memory',
              cacheStatus: 'HIT',
              cacheAction: 'FOLLOWING',
              cacheIsFirstRead: true,
              cacheIsBlocking: false,
              duration: 0,
              configUpdatedAt: 2000,
              mode: 'stream',
              revision: '1',
              environment: 'test',
            },
          },
        ]),
      },
    );
  });

  it('should skip stream data with equal configUpdatedAt', async () => {
    vi.useRealTimers();

    const data1 = makeBundled({
      configUpdatedAt: 1000,
      definitions: {
        flagA: {
          environments: { production: 0 },
          variants: [false, true],
        },
      },
    });

    const data2 = makeBundled({
      configUpdatedAt: 1000, // Same
      definitions: {
        flagA: {
          environments: { production: 1 },
          variants: [false, true],
        },
      },
    });

    const stream = createMockStream();

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ? input : input.toString();
      if (url.includes('/v1/stream')) return stream.response;
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });

    const initPromise = client.initialize();

    stream.push({ type: 'datafile', data: data1 });
    await new Promise((r) => setTimeout(r, 10));
    await initPromise;

    stream.push({ type: 'datafile', data: data2 });
    await new Promise((r) => setTimeout(r, 50));

    // Should have kept first data (equal configUpdatedAt is not newer)
    const result = await client.evaluate('flagA');
    expect(result.value).toBe(false); // variant 0 = data1

    stream.close();
    await client.shutdown();
  });

  it('should accept updates when current data has no configUpdatedAt', async () => {
    vi.useRealTimers();

    const providedDatafile = makeBundled({
      definitions: {
        flagA: {
          environments: { production: 0 },
          variants: [false, true],
        },
      },
    });
    // Remove configUpdatedAt to simulate a plain DatafileInput
    // NOTE(review): type argument appears stripped by the diff mangling —
    // presumably Record<string, unknown>; confirm against original.
    delete (providedDatafile as Record).configUpdatedAt;

    const streamData = makeBundled({
      configUpdatedAt: 1000,
      definitions: {
        flagA: {
          environments: { production: 1 },
          variants: [false, true],
        },
      },
    });

    const stream = createMockStream();

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) return stream.response;
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      datafile: providedDatafile,
      polling: false,
    });

    // Push stream data so initialize() resolves without waiting for timeout
    const initPromise = client.initialize();
    await new Promise((r) => setTimeout(r, 0));
    stream.push({ type: 'datafile', data: streamData });
    await initPromise;

    // The stream data replaced the provided datafile (which had no configUpdatedAt)
    const result = await client.evaluate('flagA');
    expect(result.value).toBe(true); // variant 1 = stream

    stream.close();
    await client.shutdown();
  });

  it('should handle configUpdatedAt as string', async () => {
    vi.useRealTimers();

    const newerDatafile = {
      ...makeBundled({
        definitions: {
          flagA: {
            environments: { production: 1 },
            variants: [false, true],
          },
        },
      }),
      configUpdatedAt: '2000' as unknown as number,
    };

    const olderDatafile = {
      ...makeBundled({
        definitions: {
          flagA: {
            environments: { production: 0 },
            variants: [false, true],
          },
        },
      }),
      configUpdatedAt: '1000' as unknown as number,
    };

    const stream = createMockStream();

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) return stream.response;
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });

    const initPromise = client.initialize();

    stream.push({ type: 'datafile', data: newerDatafile });
    await new Promise((r) => setTimeout(r, 10));
    await initPromise;

    stream.push({ type: 'datafile', data: olderDatafile });
    await new Promise((r) => setTimeout(r, 50));

    // Should still have newer data
    const result = await client.evaluate('flagA');
    expect(result.value).toBe(true); // variant 1 = newer

    stream.close();
    await client.shutdown();
  });

  it('should accept updates when configUpdatedAt is a non-numeric string', async () => {
    vi.useRealTimers();

    const currentData = {
      ...makeBundled({
        definitions: {
          flagA: {
            environments: { production: 0 },
            variants: [false, true],
          },
        },
      }),
      configUpdatedAt: 'not-a-number' as unknown as number,
    };

    const newData = makeBundled({
      configUpdatedAt: 1000,
      definitions: {
        flagA: {
          environments: { production: 1 },
          variants: [false, true],
        },
      },
    });

    const stream = createMockStream();

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) return stream.response;
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });

    const initPromise = client.initialize();

    stream.push({ type: 'datafile', data: currentData });
    await new Promise((r) => setTimeout(r, 10));
    await initPromise;

    stream.push({ type: 'datafile', data: newData });
    await new Promise((r) => setTimeout(r, 50));

    // Should accept update since current configUpdatedAt is unparseable
    const result = await client.evaluate('flagA');
    expect(result.value).toBe(true); // variant 1 = newData

    stream.close();
    await client.shutdown();
  });

  it('should not overwrite newer in-memory data via getDatafile', async () => {
    vi.useRealTimers();

    const newerDatafile = makeBundled({
      configUpdatedAt: 2000,
      definitions: {
        flagA: {
          environments: { production: 1 },
          variants: [false, true],
        },
      },
    });

    const stream = createMockStream();

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) return stream.response;
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      polling: false,
    });

    const initPromise = client.initialize();

    stream.push({ type: 'datafile', data: newerDatafile });
    await new Promise((r) => setTimeout(r, 10));
    await initPromise;

    // getDatafile and then evaluate — data should still be newer
    await client.getDatafile();

    const result = await client.evaluate('flagA');
    expect(result.value).toBe(true); // variant 1 = newer

    stream.close();
    await client.shutdown();
  });
});

// ---------------------------------------------------------------------------
// Evaluate behavior
// ---------------------------------------------------------------------------
describe('evaluate behavior', () => {
  it('should return FLAG_NOT_FOUND with defaultValue for missing flag', async () => {
    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
      datafile: makeBundled(),
      buildStep: true,
    });

    const result = await client.evaluate('nonexistent-flag', 'default');

    expect(result.value).toBe('default');
    expect(result.reason).toBe('error');
    expect(result.errorCode).toBe('FLAG_NOT_FOUND');
    expect(result.errorMessage).toContain(
      '@vercel/flags-core: Definition not found for flag "nonexistent-flag"',
    );
  });

  it('should evaluate existing paused flag', async () => {
    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
      datafile: makeBundled(),
      buildStep: true,
    });

    const result = await client.evaluate('flagA');

    expect(result.value).toBe(true);
    expect(result.reason).toBe('paused');
  });

  it('should pass entities for targeting evaluation', async () => {
    const datafile = makeBundled({
      definitions: {
        'targeted-flag': {
          environments: {
            production: {
              // targets is the packed shorthand for targeting rules
              targets: [{}, { user: { id: ['user-123'] } }],
              fallthrough: 0,
            },
          },
          variants: ['default', 'targeted'],
        },
      },
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
      datafile,
      buildStep: true,
    });

    const result = await client.evaluate('targeted-flag', 'default', {
      user: { id: 'user-123' },
    });

    expect(result.value).toBe('targeted');
    expect(result.reason).toBe('target_match');
  });

  it('should use empty entities when not provided', async () => {
    const datafile = makeBundled({
      definitions: {
        'targeted-flag': {
          environments: {
            production: {
              targets: [{}, { user: { id: ['user-123'] } }],
              fallthrough: 0,
            },
          },
          variants: ['default', 'targeted'],
        },
      },
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
      datafile,
      buildStep: true,
    });

    const result = await client.evaluate('targeted-flag');

    expect(result.value).toBe('default');
    expect(result.reason).toBe('fallthrough');
  });

  it('should work with different value types', async () => {
    const datafile = makeBundled({
      definitions: {
        boolFlag: {
          environments: { production: 0 },
          variants: [true],
        },
        stringFlag: {
          environments: { production: 0 },
          variants: ['hello'],
        },
        numberFlag: {
          environments: { production: 0 },
          variants: [42],
        },
      },
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
      datafile,
      buildStep: true,
    });

    expect((await client.evaluate('boolFlag')).value).toBe(true);
    expect((await client.evaluate('stringFlag')).value).toBe('hello');
    expect((await client.evaluate('numberFlag')).value).toBe(42);
  });

  it('should call internalReportValue when projectId exists', async () => {
    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
      datafile: makeBundled({ projectId: 'my-project-id' }),
      buildStep: true,
    });

    await client.evaluate('flagA');

    expect(internalReportValue).toHaveBeenCalledWith('flagA', true, {
      originProjectId: 'my-project-id',
      originProvider: 'vercel',
      reason: 'paused',
      outcomeType: 'value',
    });
  });

  it('should not call internalReportValue when projectId is missing', async () => {
    const datafile = makeBundled();
    // NOTE(review): type argument appears stripped by the diff mangling —
    // presumably Record<string, unknown>.
    delete (datafile as Record).projectId;

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
      datafile,
      buildStep: true,
    });

    await client.evaluate('flagA');

    expect(internalReportValue).not.toHaveBeenCalled();
  });

  it('should call internalReportValue with target_match reason', async () => {
    const datafile = makeBundled({
      projectId: 'my-project-id',
      definitions: {
        'targeted-flag': {
          environments: {
            production: {
              targets: [{}, { user: { id: ['user-123'] } }],
              fallthrough: 0,
            },
          },
          variants: ['default', 'targeted'],
        },
      },
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
      datafile,
      buildStep: true,
    });

    await client.evaluate('targeted-flag', 'default', {
      user: { id: 'user-123' },
    });

    expect(internalReportValue).toHaveBeenCalledWith(
      'targeted-flag',
      'targeted',
      {
        originProjectId: 'my-project-id',
        originProvider: 'vercel',
        reason: 'target_match',
        outcomeType: 'value',
      },
    );
  });

  it('should call internalReportValue with fallthrough reason', async () => {
    const datafile = makeBundled({
      projectId: 'my-project-id',
      definitions: {
        'targeted-flag': {
          environments: {
            production: {
              targets: [{}, { user: { id: ['user-123'] } }],
              fallthrough: 0,
            },
          },
          variants: ['default', 'targeted'],
        },
      },
    });

    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
      datafile,
      buildStep: true,
    });

    // No entities provided, so no target matches → fallthrough
    await client.evaluate('targeted-flag');

    expect(internalReportValue).toHaveBeenCalledWith(
      'targeted-flag',
      'default',
      {
        originProjectId: 'my-project-id',
        originProvider: 'vercel',
        reason: 'fallthrough',
        outcomeType: 'value',
      },
    );
  });

  it('should not include outcomeType for error reason in internalReportValue', async () => {
    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
      datafile: makeBundled({ projectId: 'my-project-id' }),
      buildStep: true,
    });

    await client.evaluate('nonexistent-flag', 'fallback');

    expect(internalReportValue).toHaveBeenCalledWith(
      'nonexistent-flag',
      'fallback',
      {
        originProjectId: 'my-project-id',
        originProvider: 'vercel',
        reason: 'error',
      },
    );
    // Verify outcomeType is NOT present in the call
    const callArgs = vi.mocked(internalReportValue).mock.calls[0];
    expect(callArgs?.[2]).not.toHaveProperty('outcomeType');
  });

  // NOTE(review): largely overlaps with the previous test (same setup, same
  // toHaveBeenCalledWith shape minus the outcomeType check) — consider merging.
  it('should call internalReportValue with error reason when flag is not found', async () => {
    const client = createClient(sdkKey, {
      fetch: fetchMock,
      stream: false,
      polling: false,
      datafile: makeBundled({ projectId: 'my-project-id' }),
      buildStep: true,
    });

    await client.evaluate('nonexistent-flag', 'default');

    expect(internalReportValue).toHaveBeenCalledWith(
      'nonexistent-flag',
      'default',
      {
        originProjectId: 'my-project-id',
        originProvider: 'vercel',
        reason: 'error',
      },
    );
  });
});

// ---------------------------------------------------------------------------
// Concurrent initialization
// ---------------------------------------------------------------------------
describe('concurrent initialization', () => {
  it('should deduplicate concurrent initialize() calls', async () => {
    const stream = createMockStream();

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) return stream.response;
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    const client = createClient(sdkKey, { fetch: fetchMock });

    // Call initialize three times concurrently
    const p1 = client.initialize();
    const p2 = client.initialize();
    const p3 = client.initialize();

    stream.push({ type: 'datafile', data: makeBundled() });
    await vi.advanceTimersByTimeAsync(0);

    await Promise.all([p1, p2, p3]);

    // Stream should have been fetched only once
    expect(fetchMock).toHaveBeenCalledTimes(1);
    expect(fetchMock).toHaveBeenLastCalledWith(
      'https://flags.vercel.com/v1/stream',
      {
        headers: streamRequestHeaders,
        signal: expect.any(AbortSignal),
      },
    );

    stream.close();
    await client.shutdown();
    await vi.advanceTimersByTimeAsync(0);

    // didn't evaluate any flags, so no config reads tracked
    expect(fetchMock).toHaveBeenCalledTimes(1);
  });

  it('should deduplicate concurrent evaluate() calls that trigger initialize, and only track one read when request context is set', async () => {
    const stream = createMockStream();

    fetchMock.mockImplementation((input) => {
      const url = typeof input === 'string' ?
input : input.toString();
      if (url.includes('/v1/stream')) return stream.response;
      return Promise.reject(new Error(`Unexpected fetch: ${url}`));
    });

    // Set up a fake request context so usage tracking deduplicates
    const cleanupContext = setRequestContext({
      'x-vercel-id': 'iad1::req-abc123',
      host: 'myapp.vercel.app',
    });

    const client = createClient(sdkKey, { fetch: fetchMock });

    // Three concurrent evaluates trigger lazy initialization
    const p1 = client.evaluate('flagA');
    const p2 = client.evaluate('flagA');
    const p3 = client.evaluate('flagA');

    stream.push({ type: 'datafile', data: makeBundled() });
    await vi.advanceTimersByTimeAsync(0);

    const [r1, r2, r3] = await Promise.all([p1, p2, p3]);

    // All should have the same value
    expect(r1.value).toBe(true);
    expect(r2.value).toBe(true);
    expect(r3.value).toBe(true);

    // Stream should have been fetched only once
    expect(fetchMock).toHaveBeenCalledTimes(1);
    expect(fetchMock).toHaveBeenLastCalledWith(
      'https://flags.vercel.com/v1/stream',
      {
        headers: streamRequestHeaders,
        signal: expect.any(AbortSignal),
      },
    );

    stream.close();
    await client.shutdown();

    cleanupContext();

    // Only a single config read should be tracked thanks to request context deduplication
    expect(fetchMock).toHaveBeenCalledTimes(2);
    expect(fetchMock).toHaveBeenLastCalledWith(
      'https://flags.vercel.com/v1/ingest',
      {
        body: JSON.stringify([
          {
            type: 'FLAGS_CONFIG_READ',
            ts: date.getTime(),
            payload: {
              vercelRequestId: 'iad1::req-abc123',
              invocationHost: 'myapp.vercel.app',
              configOrigin: 'in-memory',
              cacheStatus: 'HIT',
              cacheAction: 'FOLLOWING',
              cacheIsFirstRead: true,
              cacheIsBlocking: false,
              duration: 0,
              configUpdatedAt: 1,
              mode: 'stream',
              revision: '1',
              environment: 'test',
            },
          },
        ]),
        headers: ingestRequestHeaders,
        method: 'POST',
      },
    );
  });

  it('should deduplicate concurrent evaluate() calls that trigger initialize, and track
each read individually when request context is missing', async () => { + const stream = createMockStream(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? input : input.toString(); + if (url.includes('/v1/stream')) return stream.response; + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const client = createClient(sdkKey, { fetch: fetchMock }); + + // Three concurrent evaluates trigger lazy initialization + const p1 = client.evaluate('flagA'); + const p2 = client.evaluate('flagA'); + const p3 = client.evaluate('flagA'); + + stream.push({ type: 'datafile', data: makeBundled() }); + await vi.advanceTimersByTimeAsync(0); + + const [r1, r2, r3] = await Promise.all([p1, p2, p3]); + + // All should have the same value + expect(r1.value).toBe(true); + expect(r2.value).toBe(true); + expect(r3.value).toBe(true); + + // Stream should have been fetched only once + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/stream', + { + headers: streamRequestHeaders, + signal: expect.any(AbortSignal), + }, + ); + + stream.close(); + await client.shutdown(); + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + body: JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: { + configOrigin: 'in-memory', + cacheStatus: 'HIT', + cacheAction: 'FOLLOWING', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 1, + mode: 'stream', + revision: '1', + environment: 'test', + }, + }, + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: { + configOrigin: 'in-memory', + cacheStatus: 'HIT', + cacheAction: 'FOLLOWING', + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 1, + mode: 'stream', + revision: '1', + environment: 'test', + }, + }, + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: 
{ + configOrigin: 'in-memory', + cacheStatus: 'HIT', + cacheAction: 'FOLLOWING', + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 1, + mode: 'stream', + revision: '1', + environment: 'test', + }, + }, + ]), + headers: ingestRequestHeaders, + method: 'POST', + }, + ); + }); + + it('should start only one retry loop when concurrent evaluate() calls hit a failing stream', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled(), + }); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? input : input.toString(); + if (url.includes('/v1/stream')) { + // Stream returns 502 — triggers retry loop + return Promise.resolve(new Response(null, { status: 502 })); + } + return Promise.resolve(new Response('', { status: 200 })); + }); + + const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + const client = createClient(sdkKey, { + fetch: fetchMock, + stream: { initTimeoutMs: 1_500 }, + polling: false, + }); + + // Three concurrent evaluates all trigger lazy initialization + const p1 = client.evaluate('flagA'); + const p2 = client.evaluate('flagA'); + const p3 = client.evaluate('flagA'); + + // Advance past the stream init timeout. + // The minimum reconnection gap is 1s, so: attempt at t=0 (fail), + // retry at t=1000 (fail, backoff(2) >= 1s), timeout at t=1500. + await vi.advanceTimersByTimeAsync(1_500); + + const [r1, r2, r3] = await Promise.all([p1, p2, p3]); + + // All should resolve (falling back to bundled after stream timeout) + expect(r1.value).toBe(true); + expect(r2.value).toBe(true); + expect(r3.value).toBe(true); + + // Concurrent callers share the same init promise, so only one retry + // loop is started. 
With 1500ms timeout: attempt at retryCount=0 fails, + // minimum gap enforces 1s delay → retry at retryCount=1 fails at t=1000, + // backoff(2) >= 1s exceeds remaining timeout → falls back to bundled. + // So exactly 2 stream attempts (one loop, two iterations). + const streamCalls = fetchMock.mock.calls.filter((call) => + call[0]?.toString().includes('/v1/stream'), + ); + expect(streamCalls).toHaveLength(2); + // Verify only one retry loop: all stream calls should have sequential + // X-Retry-Attempt headers (0, 1) from a single loop + const h0 = streamCalls[0]?.[1]?.headers as Record; + const h1 = streamCalls[1]?.[1]?.headers as Record; + expect(h0['X-Retry-Attempt']).toBe('0'); + expect(h1['X-Retry-Attempt']).toBe('1'); + + expect(errorSpy).toHaveBeenCalledWith( + '@vercel/flags-core: Stream error', + expect.any(Error), + ); + expect(warnSpy).toHaveBeenCalledWith( + '@vercel/flags-core: Stream initialization timeout, falling back', + ); + errorSpy.mockRestore(); + warnSpy.mockRestore(); + + await client.shutdown(); + }); + + it('should allow re-initialization after failure', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'missing-file', + definitions: null, + }); + + let fetchCallCount = 0; + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? 
input : input.toString(); + if (url.includes('/v1/datafile')) { + fetchCallCount++; + if (fetchCallCount === 1) { + // First fetch fails + return Promise.resolve(new Response(null, { status: 500 })); + } + // Second fetch succeeds + return Promise.resolve(Response.json(makeBundled())); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + stream: false, + polling: false, + }); + + // First initialize fails (no bundled, fetch returns 500) + await expect(client.initialize()).rejects.toThrow(); + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchMock).toHaveBeenCalledWith( + 'https://flags.vercel.com/v1/datafile', + { + headers: datafileRequestHeaders, + signal: expect.any(AbortSignal), + }, + ); + + // Second initialize should retry — fetch now succeeds + await client.initialize(); + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(fetchMock).toHaveBeenCalledWith( + 'https://flags.vercel.com/v1/datafile', + { + headers: datafileRequestHeaders, + signal: expect.any(AbortSignal), + }, + ); + + const result = await client.evaluate('flagA'); + expect(result.value).toBe(true); + + expect(fetchMock).toHaveBeenCalledTimes(2); + await client.shutdown(); + expect(fetchMock).toHaveBeenCalledTimes(3); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + headers: ingestRequestHeaders, + method: 'POST', + body: JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: { + configOrigin: 'in-memory', + cacheStatus: 'HIT', + cacheAction: 'NONE', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 1, + mode: 'offline', + revision: '1', + environment: 'test', + }, + }, + ]), + }, + ); + }); + }); + + // --------------------------------------------------------------------------- + // Multiple clients + // --------------------------------------------------------------------------- + 
describe('multiple clients', () => { + it('should maintain independent state for each client', async () => { + const datafileA = makeBundled({ + definitions: { + flagA: { + environments: { production: 0 }, + variants: ['a-value', 'b-value'], + }, + }, + }); + + const datafileB = makeBundled({ + definitions: { + flagA: { + environments: { production: 1 }, + variants: ['a-value', 'b-value'], + }, + }, + }); + + const clientA = createClient(sdkKey, { + fetch: fetchMock, + stream: false, + polling: false, + datafile: datafileA, + buildStep: true, + }); + + const clientB = createClient(sdkKey, { + fetch: fetchMock, + stream: false, + polling: false, + datafile: datafileB, + buildStep: true, + }); + + const resultA = await clientA.evaluate('flagA'); + const resultB = await clientB.evaluate('flagA'); + + expect(resultA.value).toBe('a-value'); + expect(resultB.value).toBe('b-value'); + + // Shutdown one, other should still work + await clientA.shutdown(); + + const resultB2 = await clientB.evaluate('flagA'); + expect(resultB2.value).toBe('b-value'); + + await clientB.shutdown(); + }); + }); + + // --------------------------------------------------------------------------- + // Lazy initialization + // --------------------------------------------------------------------------- + describe('lazy initialization', () => { + it('should not load bundled definitions or stream or poll on creation', () => { + const client = createClient(sdkKey, { + buildStep: false, + fetch: fetchMock, + }); + + expect(client).toBeDefined(); + expect(fetchMock).not.toHaveBeenCalled(); + // Bundled definitions are loaded lazily, not at construction time + expect(readBundledDefinitions).not.toHaveBeenCalled(); + }); + }); + + // --------------------------------------------------------------------------- + // Failure behavior (no sources) + // --------------------------------------------------------------------------- + describe('failure behavior (no sources)', () => { + it('should return defaultValue 
when all data sources fail', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'missing-file', + definitions: null, + }); + + const client = createClient(sdkKey, { + buildStep: false, + fetch: fetchMock, + stream: false, + polling: false, + }); + + const result = await client.evaluate('flagA', false); + + expect(result).toEqual({ + value: false, + reason: 'error', + errorMessage: expect.stringContaining( + '@vercel/flags-core: No flag definitions available', + ), + }); + }); + + it('should throw when all data sources fail and no defaultValue provided', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'missing-file', + definitions: null, + }); + + const client = createClient(sdkKey, { + buildStep: false, + fetch: fetchMock, + stream: false, + polling: false, + }); + + await expect(client.evaluate('flagA')).rejects.toThrow( + '@vercel/flags-core: No flag definitions available', + ); + }); + + it('should use bundled definitions when stream and polling are disabled', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled(), + }); + + const client = createClient(sdkKey, { + buildStep: false, + fetch: fetchMock, + stream: false, + polling: false, + }); + + const result = await client.evaluate('flagA'); + + expect(result.value).toBe(true); + expect(result.reason).toBe('paused'); + expect(result.metrics?.source).toBe('embedded'); + }); + }); + + // --------------------------------------------------------------------------- + // Usage tracking + // --------------------------------------------------------------------------- + describe('usage tracking', () => { + it('should report FLAGS_CONFIG_READ when using provided datafile in build step', async () => { + const passedDatafile = makeBundled({ + configUpdatedAt: 2, + revision: 2, + definitions: { + flagA: { + environments: { production: 1 }, + variants: [false, true], + }, + }, + }); + + const bundledDatafile = 
makeBundled({ + configUpdatedAt: 1, + revision: 1, + definitions: { + flagA: { + environments: { production: 0 }, + variants: [false, true], + }, + }, + }); + + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: bundledDatafile, + }); + + const client = createClient(sdkKey, { + buildStep: true, + fetch: fetchMock, + datafile: passedDatafile, + }); + + await expect(client.evaluate('flagA')).resolves.toEqual({ + metrics: { + cacheStatus: 'HIT', + connectionState: 'disconnected', + mode: 'build', + evaluationMs: 0, + readMs: 0, + source: 'in-memory', + }, + outcomeType: 'value', + reason: 'paused', + value: true, + }); + + expect(fetchMock).not.toHaveBeenCalled(); + + await client.shutdown(); + + expect(fetchMock).toHaveBeenCalledOnce(); + expect(fetchMock).toHaveBeenCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + body: JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: { + configOrigin: 'in-memory', + cacheStatus: 'HIT', + cacheAction: 'NONE', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 2, + mode: 'build', + revision: '2', + environment: 'test', + }, + }, + ]), + headers: ingestRequestHeaders, + method: 'POST', + }, + ); + }); + + it('should only track one FLAGS_CONFIG_READ during build step', async () => { + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled({ configUpdatedAt: 1 }), + }); + + const client = createClient(sdkKey, { + buildStep: true, + fetch: fetchMock, + }); + + // Multiple evaluates during build + await Promise.all([client.evaluate('flagA'), client.evaluate('flagA')]); + await client.evaluate('flagA'); + + await client.shutdown(); + expect(fetchMock).toHaveBeenCalledOnce(); + expect(fetchMock).toHaveBeenCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + body: JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: { + configOrigin: 'embedded', + 
cacheStatus: 'HIT', + cacheAction: 'NONE', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 1, + mode: 'build', + revision: '1', + environment: 'test', + }, + }, + ]), + headers: ingestRequestHeaders, + method: 'POST', + }, + ); + }); + + it('should report FLAGS_CONFIG_READ with FOLLOWING cacheAction when streaming', async () => { + const stream = createMockStream(); + + fetchMock.mockImplementation((input) => { + const url = typeof input === 'string' ? input : input.toString(); + if (url.includes('/v1/stream')) return stream.response; + if (url.includes('/v1/ingest')) { + return Promise.resolve(new Response(null, { status: 200 })); + } + return Promise.reject(new Error(`Unexpected fetch: ${url}`)); + }); + + const client = createClient(sdkKey, { + fetch: fetchMock, + polling: false, + }); + + const initPromise = client.initialize(); + + stream.push({ + type: 'datafile', + data: makeBundled({ configUpdatedAt: 5 }), + }); + await vi.advanceTimersByTimeAsync(0); + await initPromise; + + // Evaluate while streaming + await client.evaluate('flagA'); + + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/stream', + { + headers: streamRequestHeaders, + signal: expect.any(AbortSignal), + }, + ); + await client.shutdown(); + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(fetchMock).toHaveBeenLastCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + body: JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: { + configOrigin: 'in-memory', + cacheStatus: 'HIT', + cacheAction: 'FOLLOWING', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 5, + mode: 'stream', + revision: '1', + environment: 'test', + }, + }, + ]), + headers: ingestRequestHeaders, + method: 'POST', + }, + ); + + stream.close(); + }); + + it('should report FLAGS_CONFIG_READ when using bundled definitions in build step', async () => 
{ + vi.mocked(readBundledDefinitions).mockResolvedValue({ + state: 'ok', + definitions: makeBundled({ configUpdatedAt: 2, revision: 2 }), + }); + + const client = createClient(sdkKey, { + buildStep: true, + fetch: fetchMock, + }); + + await expect(client.evaluate('flagA')).resolves.toEqual({ + metrics: { + cacheStatus: 'HIT', + connectionState: 'disconnected', + mode: 'build', + evaluationMs: 0, + readMs: 0, + source: 'embedded', + }, + outcomeType: 'value', + reason: 'paused', + value: true, + }); + + expect(fetchMock).not.toHaveBeenCalled(); + + await client.shutdown(); + + expect(fetchMock).toHaveBeenCalledOnce(); + expect(fetchMock).toHaveBeenCalledWith( + 'https://flags.vercel.com/v1/ingest', + { + body: JSON.stringify([ + { + type: 'FLAGS_CONFIG_READ', + ts: date.getTime(), + payload: { + configOrigin: 'embedded', + cacheStatus: 'HIT', + cacheAction: 'NONE', + cacheIsFirstRead: true, + cacheIsBlocking: false, + duration: 0, + configUpdatedAt: 2, + mode: 'build', + revision: '2', + environment: 'test', + }, + }, + ]), + headers: ingestRequestHeaders, + method: 'POST', + }, + ); + }); + }); +}); diff --git a/packages/vercel-flags-core/src/client-fns.test.ts b/packages/vercel-flags-core/src/client-fns.test.ts deleted file mode 100644 index 1a575277..00000000 --- a/packages/vercel-flags-core/src/client-fns.test.ts +++ /dev/null @@ -1,499 +0,0 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; -import { - evaluate, - getFallbackDatafile, - initialize, - shutdown, -} from './client-fns'; -import { clientMap } from './client-map'; -import type { BundledDefinitions, DataSource, Packed } from './types'; -import { ErrorCode, ResolutionReason } from './types'; - -// Mock the internalReportValue function -vi.mock('./lib/report-value', () => ({ - internalReportValue: vi.fn(), -})); - -import { internalReportValue } from './lib/report-value'; - -function createMockDataSource(overrides?: Partial): DataSource { - return { - read: 
vi.fn().mockResolvedValue({ - projectId: 'test-project', - definitions: {}, - segments: {}, - environment: 'production', - metrics: { - readMs: 0, - source: 'in-memory', - cacheStatus: 'HIT', - }, - }), - getDatafile: vi.fn().mockResolvedValue({ - projectId: 'test-project', - definitions: {}, - segments: {}, - environment: 'production', - metrics: { - readMs: 0, - source: 'in-memory', - cacheStatus: 'HIT', - }, - }), - initialize: vi.fn().mockResolvedValue(undefined), - shutdown: vi.fn().mockResolvedValue(undefined), - ...overrides, - }; -} - -function mockDatafile(data: { - projectId?: string; - definitions: Record; - segments: Record; - environment: string; -}) { - return { - ...data, - metrics: { - readMs: 0, - source: 'in-memory' as const, - cacheStatus: 'HIT' as const, - }, - }; -} - -describe('client-fns', () => { - const CLIENT_ID = 99; - - beforeEach(() => { - clientMap.clear(); - vi.clearAllMocks(); - }); - - afterEach(() => { - clientMap.clear(); - }); - - describe('initialize', () => { - it('should call dataSource.initialize()', async () => { - const dataSource = createMockDataSource(); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - await initialize(CLIENT_ID); - - expect(dataSource.initialize).toHaveBeenCalledTimes(1); - }); - - it('should return the result from dataSource.initialize()', async () => { - const dataSource = createMockDataSource({ - initialize: vi.fn().mockResolvedValue('init-result'), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - const result = await initialize(CLIENT_ID); - - expect(result).toBe('init-result'); - }); - - it('should throw if client ID is not in map', () => { - expect(() => initialize(999)).toThrow(); - }); - }); - - describe('shutdown', () => { - it('should call dataSource.shutdown()', async () => { - const dataSource = createMockDataSource(); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - 
initPromise: null, - }); - - await shutdown(CLIENT_ID); - - expect(dataSource.shutdown).toHaveBeenCalledTimes(1); - }); - - it('should return the result from dataSource.shutdown()', async () => { - const dataSource = createMockDataSource({ - shutdown: vi.fn().mockResolvedValue('shutdown-result'), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - const result = await shutdown(CLIENT_ID); - - expect(result).toBe('shutdown-result'); - }); - - it('should throw if client ID is not in map', () => { - expect(() => shutdown(999)).toThrow(); - }); - }); - - describe('getFallbackDatafile', () => { - it('should call dataSource.getFallbackDatafile() if it exists', async () => { - const mockFallback: BundledDefinitions = { - projectId: 'test', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - const getFallbackDatafileFn = vi.fn().mockResolvedValue(mockFallback); - const dataSource = createMockDataSource({ - getFallbackDatafile: getFallbackDatafileFn, - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - await getFallbackDatafile(CLIENT_ID); - - expect(getFallbackDatafileFn).toHaveBeenCalledTimes(1); - }); - - it('should return the result from dataSource.getFallbackDatafile()', async () => { - const mockFallback: BundledDefinitions = { - projectId: 'test', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - const dataSource = createMockDataSource({ - getFallbackDatafile: vi.fn().mockResolvedValue(mockFallback), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - const result = await getFallbackDatafile(CLIENT_ID); - - expect(result).toEqual(mockFallback); - }); - - it('should throw if dataSource does not have getFallbackDatafile', () => { - const dataSource = createMockDataSource(); - // Remove getFallbackDatafile - 
delete (dataSource as Partial).getFallbackDatafile; - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - expect(() => getFallbackDatafile(CLIENT_ID)).toThrow( - 'flags: This data source does not support fallbacks', - ); - }); - - it('should throw if client ID is not in map', () => { - expect(() => getFallbackDatafile(999)).toThrow(); - }); - }); - - describe('evaluate', () => { - it('should return FLAG_NOT_FOUND error when flag does not exist', async () => { - const dataSource = createMockDataSource({ - read: vi.fn().mockResolvedValue( - mockDatafile({ - projectId: 'test', - definitions: {}, - segments: {}, - environment: 'production', - }), - ), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - const result = await evaluate(CLIENT_ID, 'nonexistent-flag', 'default'); - - expect(result.value).toBe('default'); - expect(result.reason).toBe(ResolutionReason.ERROR); - expect(result.errorCode).toBe(ErrorCode.FLAG_NOT_FOUND); - expect(result.errorMessage).toBe( - 'Definition not found for flag "nonexistent-flag"', - ); - expect(result.metrics).toBeDefined(); - expect(result.metrics!.source).toBe('in-memory'); - }); - - it('should use defaultValue when flag is not found', async () => { - const dataSource = createMockDataSource({ - read: vi.fn().mockResolvedValue( - mockDatafile({ - projectId: 'test', - definitions: {}, - segments: {}, - environment: 'production', - }), - ), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - const result = await evaluate(CLIENT_ID, 'missing', { fallback: true }); - - expect(result.value).toEqual({ fallback: true }); - }); - - it('should evaluate flag when it exists', async () => { - // A flag with environments: { production: 0 } is "paused" (just returns variant 0) - const flagDefinition: Packed.FlagDefinition = { - environments: { production: 0 }, - variants: [true], - }; - const dataSource = 
createMockDataSource({ - read: vi.fn().mockResolvedValue( - mockDatafile({ - projectId: 'test', - definitions: { 'my-flag': flagDefinition }, - segments: {}, - environment: 'production', - }), - ), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - const result = await evaluate(CLIENT_ID, 'my-flag', false); - - expect(result.value).toBe(true); - expect(result.reason).toBe(ResolutionReason.PAUSED); - expect(result.metrics).toBeDefined(); - }); - - it('should call internalReportValue when projectId exists', async () => { - // A flag with environments: { production: 0 } is "paused" - const flagDefinition: Packed.FlagDefinition = { - environments: { production: 0 }, - variants: ['variant-a'], - }; - const dataSource = createMockDataSource({ - read: vi.fn().mockResolvedValue( - mockDatafile({ - projectId: 'my-project-id', - definitions: { 'my-flag': flagDefinition }, - segments: {}, - environment: 'production', - }), - ), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - await evaluate(CLIENT_ID, 'my-flag', 'default'); - - expect(internalReportValue).toHaveBeenCalledWith( - 'my-flag', - 'variant-a', - expect.objectContaining({ - originProjectId: 'my-project-id', - originProvider: 'vercel', - reason: ResolutionReason.PAUSED, - }), - ); - }); - - it('should not call internalReportValue when projectId is missing', async () => { - const flagDefinition: Packed.FlagDefinition = { - environments: { production: 0 }, - variants: [true], - }; - const dataSource = createMockDataSource({ - read: vi.fn().mockResolvedValue( - mockDatafile({ - projectId: undefined, - definitions: { 'my-flag': flagDefinition }, - segments: {}, - environment: 'production', - }), - ), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - await evaluate(CLIENT_ID, 'my-flag'); - - expect(internalReportValue).not.toHaveBeenCalled(); - }); - - it('should not 
include outcomeType in report when result is error', async () => { - const dataSource = createMockDataSource({ - read: vi.fn().mockResolvedValue( - mockDatafile({ - projectId: 'test', - definitions: {}, - segments: {}, - environment: 'production', - }), - ), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - await evaluate(CLIENT_ID, 'nonexistent'); - - // internalReportValue is not called for FLAG_NOT_FOUND errors - // because there's no projectId in the mock or the code path doesn't report errors - // Let's verify by checking the actual behavior - expect(internalReportValue).not.toHaveBeenCalled(); - }); - - it('should pass entities to evaluation', async () => { - const flagDefinition: Packed.FlagDefinition = { - environments: { - production: { - targets: [{}, { user: { id: ['user-123'] } }], - fallthrough: 0, - }, - }, - variants: ['default', 'targeted'], - }; - const dataSource = createMockDataSource({ - read: vi.fn().mockResolvedValue( - mockDatafile({ - projectId: 'test', - definitions: { 'targeted-flag': flagDefinition }, - segments: {}, - environment: 'production', - }), - ), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - const result = await evaluate(CLIENT_ID, 'targeted-flag', 'default', { - user: { id: 'user-123' }, - }); - - expect(result.value).toBe('targeted'); - expect(result.reason).toBe(ResolutionReason.TARGET_MATCH); - }); - - it('should use empty entities object when not provided', async () => { - const flagDefinition: Packed.FlagDefinition = { - environments: { - production: { - fallthrough: 0, - }, - }, - variants: ['value'], - }; - const dataSource = createMockDataSource({ - read: vi.fn().mockResolvedValue( - mockDatafile({ - projectId: 'test', - definitions: { 'my-flag': flagDefinition }, - segments: {}, - environment: 'production', - }), - ), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - 
}); - - // Call without entities - const result = await evaluate(CLIENT_ID, 'my-flag'); - - expect(result.value).toBe('value'); - }); - - it('should throw if client ID is not in map', async () => { - await expect(evaluate(999, 'any-flag')).rejects.toThrow(); - }); - - it('should work with different value types', async () => { - const dataSource = createMockDataSource({ - read: vi.fn().mockResolvedValue( - mockDatafile({ - projectId: 'test', - definitions: { - 'bool-flag': { - environments: { production: 0 }, - variants: [true], - }, - 'string-flag': { - environments: { production: 0 }, - variants: ['hello'], - }, - 'number-flag': { - environments: { production: 0 }, - variants: [42], - }, - 'object-flag': { - environments: { production: 0 }, - variants: [{ key: 'value' }], - }, - }, - segments: {}, - environment: 'production', - }), - ), - }); - clientMap.set(CLIENT_ID, { - dataSource, - initialized: false, - initPromise: null, - }); - - const boolResult = await evaluate(CLIENT_ID, 'bool-flag'); - expect(boolResult.value).toBe(true); - - const stringResult = await evaluate(CLIENT_ID, 'string-flag'); - expect(stringResult.value).toBe('hello'); - - const numberResult = await evaluate(CLIENT_ID, 'number-flag'); - expect(numberResult.value).toBe(42); - - const objectResult = await evaluate<{ key: string }>( - CLIENT_ID, - 'object-flag', - ); - expect(objectResult.value).toEqual({ key: 'value' }); - }); - }); -}); diff --git a/packages/vercel-flags-core/src/client-map.ts b/packages/vercel-flags-core/src/client-map.ts deleted file mode 100644 index 9e5b5524..00000000 --- a/packages/vercel-flags-core/src/client-map.ts +++ /dev/null @@ -1,9 +0,0 @@ -import type { DataSource } from './types'; - -export type ClientInstance = { - dataSource: DataSource; - initialized: boolean; - initPromise: Promise | null; -}; - -export const clientMap = new Map(); diff --git a/packages/vercel-flags-core/src/client-fns.ts b/packages/vercel-flags-core/src/controller-fns.ts similarity index 
56% rename from packages/vercel-flags-core/src/client-fns.ts rename to packages/vercel-flags-core/src/controller-fns.ts index 5c00562c..6a3cb8db 100644 --- a/packages/vercel-flags-core/src/client-fns.ts +++ b/packages/vercel-flags-core/src/controller-fns.ts @@ -1,23 +1,46 @@ -import { clientMap } from './client-map'; import { evaluate as evalFlag } from './evaluate'; import { internalReportValue } from './lib/report-value'; -import type { BundledDefinitions, EvaluationResult, Packed } from './types'; +import type { + BundledDefinitions, + ControllerInterface, + Datafile, + EvaluationResult, + Packed, +} from './types'; import { ErrorCode, ResolutionReason } from './types'; +export type ControllerInstance = { + controller: ControllerInterface; + initialized: boolean; + initPromise: Promise | null; +}; + +export const controllerInstanceMap = new Map(); + +function getInstance(id: number): ControllerInstance { + const instance = controllerInstanceMap.get(id); + if (!instance) { + throw new Error( + `@vercel/flags-core: Client instance ${id} not found. 
It may have been shut down.`, + ); + } + return instance; +} + export function initialize(id: number): Promise { - return clientMap.get(id)!.dataSource.initialize(); + return getInstance(id).controller.initialize(); } export function shutdown(id: number): void | Promise { - return clientMap.get(id)!.dataSource.shutdown(); + return getInstance(id).controller.shutdown(); } export function getDatafile(id: number) { - return clientMap.get(id)!.dataSource.getDatafile(); + return getInstance(id).controller.getDatafile(); } export function getFallbackDatafile(id: number): Promise { - const ds = clientMap.get(id)!.dataSource; + const ds = getInstance(id).controller; if (ds.getFallbackDatafile) return ds.getFallbackDatafile(); throw new Error('flags: This data source does not support fallbacks'); } @@ -28,22 +51,47 @@ export async function evaluate>( defaultValue?: T, entities?: E, ): Promise> { - const ds = clientMap.get(id)!.dataSource; - const datafile = await ds.read(); + const controller = getInstance(id).controller; + + let datafile: Datafile; + try { + datafile = await controller.read(); + } catch (error) { + // All data sources failed. Fall back to defaultValue if provided. + if (defaultValue !== undefined) { + return { + value: defaultValue, + reason: ResolutionReason.ERROR, + errorMessage: + error instanceof Error ? 
error.message : 'Failed to read datafile', + }; + } + throw error; + } + const flagDefinition = datafile.definitions[flagKey] as Packed.FlagDefinition; if (flagDefinition === undefined) { + if (datafile.projectId) { + internalReportValue(flagKey, defaultValue, { + originProjectId: datafile.projectId, + originProvider: 'vercel', + reason: ResolutionReason.ERROR, + }); + } + return { value: defaultValue, reason: ResolutionReason.ERROR, errorCode: ErrorCode.FLAG_NOT_FOUND, - errorMessage: `Definition not found for flag "${flagKey}"`, + errorMessage: `@vercel/flags-core: Definition not found for flag "${flagKey}"`, metrics: { evaluationMs: 0, readMs: datafile.metrics.readMs, source: datafile.metrics.source, cacheStatus: datafile.metrics.cacheStatus, connectionState: datafile.metrics.connectionState, + mode: datafile.metrics.mode, }, }; } @@ -77,6 +125,7 @@ export async function evaluate>( source: datafile.metrics.source, cacheStatus: datafile.metrics.cacheStatus, connectionState: datafile.metrics.connectionState, + mode: datafile.metrics.mode, }, }); } diff --git a/packages/vercel-flags-core/src/controller/bundled-source.ts b/packages/vercel-flags-core/src/controller/bundled-source.ts new file mode 100644 index 00000000..8a7a47de --- /dev/null +++ b/packages/vercel-flags-core/src/controller/bundled-source.ts @@ -0,0 +1,83 @@ +import { FallbackEntryNotFoundError, FallbackNotFoundError } from '../errors'; +import type { + BundledDefinitions, + BundledDefinitionsResult, + DatafileInput, +} from '../types'; +import type { readBundledDefinitions } from '../utils/read-bundled-definitions'; + +/** + * Manages loading of bundled flag definitions. + * Wraps readBundledDefinitions() with caching. 
+ */ +export class BundledSource { + private promise: Promise | undefined; + private options: { + sdkKey: string; + readBundledDefinitions: typeof readBundledDefinitions; + }; + + constructor(options: { + sdkKey: string; + readBundledDefinitions: typeof readBundledDefinitions; + }) { + this.options = options; + } + + /** + * Load bundled definitions. + * Throws if bundled definitions are not available. + */ + async load(): Promise { + const result = await this.getResult(); + + if (result.state === 'ok' && result.definitions) { + return result.definitions; + } + + throw new Error( + '@vercel/flags-core: No flag definitions available. ' + + 'Bundled definitions not found.', + ); + } + + /** + * Get the raw BundledDefinitions (for getFallbackDatafile). + * Throws typed errors if not available. + */ + async getRaw(): Promise { + const result = await this.getResult(); + + switch (result.state) { + case 'ok': + return result.definitions; + case 'missing-file': + throw new FallbackNotFoundError(); + case 'missing-entry': + throw new FallbackEntryNotFoundError(); + case 'unexpected-error': + throw new Error( + '@vercel/flags-core: Failed to read bundled definitions: ' + + String(result.error), + ); + } + } + + /** + * Check if bundled definitions loaded successfully (without throwing). 
+ */ + async tryLoad(): Promise { + const result = await this.getResult(); + if (result?.state === 'ok' && result.definitions) { + return result.definitions; + } + return undefined; + } + + private getResult(): Promise { + if (!this.promise) { + this.promise = this.options.readBundledDefinitions(this.options.sdkKey); + } + return this.promise; + } +} diff --git a/packages/vercel-flags-core/src/controller/fetch-datafile.ts b/packages/vercel-flags-core/src/controller/fetch-datafile.ts new file mode 100644 index 00000000..0ff0aa80 --- /dev/null +++ b/packages/vercel-flags-core/src/controller/fetch-datafile.ts @@ -0,0 +1,53 @@ +import { version } from '../../package.json'; +import type { BundledDefinitions } from '../types'; + +const DEFAULT_FETCH_TIMEOUT_MS = 10_000; + +/** + * Fetches the datafile from the flags service. + */ +export async function fetchDatafile(options: { + host: string; + sdkKey: string; + fetch: typeof globalThis.fetch; + signal?: AbortSignal; +}): Promise { + const controller = new AbortController(); + const timeoutId = setTimeout( + () => controller.abort(), + DEFAULT_FETCH_TIMEOUT_MS, + ); + + // Abort the internal controller when the external signal fires + const onExternalAbort = () => controller.abort(); + if (options.signal) { + if (options.signal.aborted) { + clearTimeout(timeoutId); + throw new Error('Fetch aborted'); + } + options.signal.addEventListener('abort', onExternalAbort, { once: true }); + } + + try { + const res = await options.fetch(`${options.host}/v1/datafile`, { + headers: { + Authorization: `Bearer ${options.sdkKey}`, + 'User-Agent': `VercelFlagsCore/${version}`, + }, + signal: controller.signal, + }); + + clearTimeout(timeoutId); + options.signal?.removeEventListener('abort', onExternalAbort); + + if (!res.ok) { + throw new Error(`Failed to fetch data: ${res.statusText}`); + } + + return res.json() as Promise; + } catch (error) { + clearTimeout(timeoutId); + options.signal?.removeEventListener('abort', onExternalAbort); + 
throw error instanceof Error ? error : new Error('Unknown fetch error'); + } +} diff --git a/packages/vercel-flags-core/src/controller/index.ts b/packages/vercel-flags-core/src/controller/index.ts new file mode 100644 index 00000000..ee8bc35c --- /dev/null +++ b/packages/vercel-flags-core/src/controller/index.ts @@ -0,0 +1,810 @@ +import type { + BundledDefinitions, + ControllerInterface, + Datafile, + DatafileInput, + Metrics, +} from '../types'; +import { readBundledDefinitions } from '../utils/read-bundled-definitions'; +import { type TrackReadOptions, UsageTracker } from '../utils/usage-tracker'; +import { BundledSource } from './bundled-source'; +import { fetchDatafile } from './fetch-datafile'; +import { + type ControllerOptions, + type NormalizedOptions, + normalizeOptions, +} from './normalized-options'; +import { PollingSource } from './polling-source'; +import { UnauthorizedError } from './stream-connection'; +import { StreamSource } from './stream-source'; +import { originToMetricsSource, type TaggedData, tagData } from './tagged-data'; + +export { BundledSource } from './bundled-source'; +export type { ControllerOptions } from './normalized-options'; +export { PollingSource } from './polling-source'; +export { StreamSource } from './stream-source'; + +// --------------------------------------------------------------------------- +// Internal helpers +// --------------------------------------------------------------------------- + +/** + * Parses a configUpdatedAt value (number or string) into a numeric timestamp. + * Returns undefined if the value is missing or cannot be parsed. + */ +function parseConfigUpdatedAt(value: unknown): number | undefined { + if (typeof value === 'number') return value; + if (typeof value === 'string') { + const parsed = Number(value); + return Number.isNaN(parsed) ? 
undefined : parsed; + } + return undefined; +} + +// --------------------------------------------------------------------------- +// Internal types +// --------------------------------------------------------------------------- + +/** + * Explicit states for the controller state machine. + */ +type State = + | 'idle' + | 'initializing:stream' + | 'initializing:polling' + | 'initializing:fallback' + | 'streaming' + | 'polling' + | 'degraded' + | 'build:loading' + | 'build:ready' + | 'shutdown'; + +// --------------------------------------------------------------------------- +// Controller +// --------------------------------------------------------------------------- + +/** + * Connects to flags.vercel.com and manages flag definitions. + * + * Implemented as a state machine controller that delegates all I/O to + * source modules (StreamSource, PollingSource, BundledSource). + * + * **Build step** (CI=1 or Next.js build, or buildStep: true): + * - Uses datafile (if provided), bundled definitions, or one-time fetch as fallback + * - No streaming or polling + * + * **Runtime — streaming mode** (stream enabled): + * - Uses streaming exclusively; polling is never started, even if configured + * - Init fallback (no data yet): constructor datafile → bundled → throw + * - Read fallback (post-init): in-memory value → constructor datafile → bundled → throw + * + * **Runtime — polling mode** (polling enabled, stream disabled): + * - Uses polling exclusively + * - Same fallback chains as streaming mode + * + * **Runtime — offline mode** (neither stream nor polling): + * - Init fallback: constructor datafile → bundled → one-time fetch → throw + * - Read fallback: in-memory value → constructor datafile → bundled → one-time fetch → throw + */ +export class Controller implements ControllerInterface { + private options: NormalizedOptions; + + // State machine + private state: State = 'idle'; + + // Data state — tagged with origin + private data: TaggedData | undefined; + + // 
Sources (I/O delegates) + private streamSource: StreamSource; + private pollingSource: PollingSource; + private bundledSource: BundledSource; + + // Usage tracking + private usageTracker: UsageTracker; + private isFirstGetData: boolean = true; + + // Build-step deduplication + private buildDataPromise: Promise | null = null; + private buildReadTracked = false; + + // Suppresses usage tracking when the SDK key is unauthorized + private unauthorized = false; + + constructor(options: ControllerOptions) { + if ( + !options.sdkKey || + typeof options.sdkKey !== 'string' || + !options.sdkKey.startsWith('vf_') + ) { + throw new Error( + '@vercel/flags-core: SDK key must be a string starting with "vf_"', + ); + } + + this.options = normalizeOptions(options); + + // Create source modules + this.streamSource = new StreamSource( + this.options, + () => this.data?.revision, + ); + + this.pollingSource = new PollingSource(this.options); + + this.bundledSource = new BundledSource({ + sdkKey: this.options.sdkKey, + readBundledDefinitions, + }); + + // Wire source events to state machine + this.wireSourceEvents(); + + // If datafile provided, use it immediately + if (this.options.datafile) { + this.data = tagData(this.options.datafile, 'provided'); + } + + this.usageTracker = new UsageTracker(this.options); + } + + // Source event handlers (stored for cleanup) + private onStreamData = (data: DatafileInput) => { + if (this.isNewerData(data)) { + this.data = tagData(data, 'stream'); + } + }; + private onStreamPrimed = () => { + // The server confirmed our revision is current — no new data needed. + // Transition to streaming like a normal connected event. 
+ if (this.state === 'degraded' || this.state === 'initializing:stream') { + this.transition('streaming'); + } + }; + private onStreamConnected = () => { + if (this.state === 'degraded' || this.state === 'initializing:stream') { + this.transition('streaming'); + } + }; + private onStreamDisconnected = () => { + if (this.state === 'streaming') { + this.transition('degraded'); + } + }; + private onPollData = (data: DatafileInput) => { + if (this.isNewerData(data)) { + this.data = tagData(data, 'poll'); + } + }; + private onPollError = (error: Error) => { + console.error('@vercel/flags-core: Poll failed:', error); + }; + + // --------------------------------------------------------------------------- + // Source event wiring + // --------------------------------------------------------------------------- + + private wireSourceEvents(): void { + this.streamSource.on('data', this.onStreamData); + this.streamSource.on('primed', this.onStreamPrimed); + this.streamSource.on('connected', this.onStreamConnected); + this.streamSource.on('disconnected', this.onStreamDisconnected); + this.pollingSource.on('data', this.onPollData); + this.pollingSource.on('error', this.onPollError); + } + + private unwireSourceEvents(): void { + this.streamSource.off('data', this.onStreamData); + this.streamSource.off('primed', this.onStreamPrimed); + this.streamSource.off('connected', this.onStreamConnected); + this.streamSource.off('disconnected', this.onStreamDisconnected); + this.pollingSource.off('data', this.onPollData); + this.pollingSource.off('error', this.onPollError); + } + + // --------------------------------------------------------------------------- + // State machine + // --------------------------------------------------------------------------- + + private transition(to: State): void { + this.state = to; + } + + private get isConnected(): boolean { + return this.state === 'streaming'; + } + + private get mode(): Metrics['mode'] { + if (this.options.buildStep) return 'build'; + 
switch (this.state) { + case 'streaming': + return 'streaming'; + case 'polling': + return 'polling'; + default: + return 'offline'; + } + } + + // --------------------------------------------------------------------------- + // Public API (DataSource interface) + // --------------------------------------------------------------------------- + + /** + * Initializes the data source. + * + * Build step: datafile → bundled → one-time fetch + * Streaming mode: stream → datafile → bundled + * Polling mode (no stream): poll → datafile → bundled + * Offline mode (neither): datafile → bundled → one-time fetch + */ + async initialize(): Promise { + if (this.options.buildStep) { + this.transition('build:loading'); + await this.initializeForBuildStep(); + this.transition('build:ready'); + return; + } + + // Hydrate from provided datafile if not already set (e.g., after shutdown) + if (!this.data && this.options.datafile) { + this.data = tagData(this.options.datafile, 'provided'); + } + + // If no data yet, try loading bundled definitions eagerly so we can + // send the revision to the stream and potentially get a lightweight + // "primed" response instead of a full datafile. + if (!this.data) { + try { + const bundled = await this.bundledSource.tryLoad(); + if (bundled) { + this.data = tagData(bundled, 'bundled'); + } + } catch { + // Bundled definitions not available — proceed without revision + } + } + + // If we already have data (from provided datafile or bundled definitions), + // start updates. Both streaming and polling wait for initial data before + // being considered initialized, so we know we have fresh data. + // For no-updates (offline), return immediately since we already have usable data. 
+ if (this.data) { + if (this.options.stream.enabled) { + this.transition('initializing:stream'); + await this.tryInitializeStream(); + } else if (this.options.polling.enabled) { + this.transition('initializing:polling'); + await this.tryInitializePolling(); + } else { + this.transition('degraded'); + } + return; + } + + // Try the configured primary source (stream or poll, never both) + if (this.options.stream.enabled) { + this.transition('initializing:stream'); + const streamSuccess = await this.tryInitializeStream(); + if (streamSuccess) { + this.transition('streaming'); + return; + } + } else if (this.options.polling.enabled) { + this.transition('initializing:polling'); + const pollingSuccess = await this.tryInitializePolling(); + if (pollingSuccess) { + this.transition('polling'); + return; + } + } + + // Fallback chain: datafile → bundled → one-time fetch (offline only) + await this.initializeFromFallbacks(); + } + + /** + * Reads the current datafile with metrics. + */ + async read(): Promise { + const startTime = Date.now(); + const cacheHadDefinitions = this.data !== undefined; + const isFirstRead = this.isFirstGetData; + this.isFirstGetData = false; + + const [result, cacheStatus] = await this.resolveData(); + + const readMs = Date.now() - startTime; + const { _origin, ...data } = result; + const source = originToMetricsSource(_origin); + this.trackRead(startTime, cacheHadDefinitions, isFirstRead, source); + + return { + ...data, + metrics: { + readMs, + source, + cacheStatus, + connectionState: this.isConnected + ? ('connected' as const) + : ('disconnected' as const), + mode: this.mode, + }, + } satisfies Datafile; + } + + /** + * Shuts down the data source and releases resources. + */ + async shutdown(): Promise { + this.unwireSourceEvents(); + this.streamSource.stop(); + this.pollingSource.stop(); + this.data = this.options.datafile + ? 
tagData(this.options.datafile, 'provided') + : undefined; + this.transition('shutdown'); + await this.usageTracker.flush(); + } + + /** + * Returns the datafile with metrics. + * Uses in-memory data if available, otherwise falls back to bundled, + * then to a one-time fetch if called without prior initialization. + */ + async getDatafile(): Promise { + const startTime = Date.now(); + this.isFirstGetData = false; + + let result: TaggedData; + let cacheStatus: Metrics['cacheStatus']; + + if (this.options.buildStep) { + [result, cacheStatus] = await this.resolveDataForBuildStep(); + } else if (this.data) { + cacheStatus = this.isConnected ? 'HIT' : 'STALE'; + result = this.data; + } else { + // No in-memory data — try bundled, then one-time fetch + const bundled = await this.bundledSource.tryLoad(); + if (bundled) { + this.data = tagData(bundled, 'bundled'); + result = this.data; + cacheStatus = 'MISS'; + } else { + // One-time fetch as last resort + try { + const fetched = await fetchDatafile({ + host: this.options.host, + sdkKey: this.options.sdkKey, + fetch: this.options.fetch, + }); + this.data = tagData(fetched, 'fetched'); + result = this.data; + cacheStatus = 'MISS'; + } catch { + throw new Error( + '@vercel/flags-core: No flag definitions available. ' + + 'Initialize the client or provide a datafile.', + ); + } + } + } + + const source = originToMetricsSource(result._origin); + + return { + ...result, + metrics: { + readMs: Date.now() - startTime, + source, + cacheStatus, + connectionState: this.isConnected + ? ('connected' as const) + : ('disconnected' as const), + mode: this.mode, + }, + } satisfies Datafile; + } + + /** + * Returns the bundled fallback datafile. 
+ */ + async getFallbackDatafile(): Promise { + return this.bundledSource.getRaw(); + } + + // --------------------------------------------------------------------------- + // Data resolution (shared by read() and getDatafile()) + // --------------------------------------------------------------------------- + + /** + * Resolves the current data, using the appropriate strategy for the + * current mode. Returns tagged data and cache status. + * + * Build step: cached → bundled → one-time fetch + * Runtime with cache: return cached data + * Runtime without cache: stream/poll → datafile → bundled → fetch → throw + */ + private async resolveData(): Promise<[TaggedData, Metrics['cacheStatus']]> { + if (this.options.buildStep) { + return this.resolveDataForBuildStep(); + } + + if (this.data) { + const cacheStatus = this.isConnected ? 'HIT' : 'STALE'; + return [this.data, cacheStatus]; + } + + return this.resolveDataWithFallbacks(); + } + + // --------------------------------------------------------------------------- + // Stream initialization + // --------------------------------------------------------------------------- + + /** + * Attempts to initialize via stream with timeout. + * Returns true if stream connected successfully within timeout. 
+ */ + private async tryInitializeStream(): Promise { + if (this.options.stream.initTimeoutMs <= 0) { + try { + await this.streamSource.start(); + return true; + } catch (error) { + if (error instanceof UnauthorizedError) { + this.unauthorized = true; + } + return false; + } + } + + // Race against timeout + let timeoutId: ReturnType; + const timeoutPromise = new Promise<'timeout'>((resolve) => { + timeoutId = setTimeout( + () => resolve('timeout'), + this.options.stream.initTimeoutMs, + ); + }); + + try { + const result = await Promise.race([ + this.streamSource.start(), + timeoutPromise, + ]); + clearTimeout(timeoutId!); + + if (result === 'timeout') { + console.warn( + '@vercel/flags-core: Stream initialization timeout, falling back', + ); + // Don't stop stream - let it continue trying in background. + // Swallow the rejection from the background stream promise to + // avoid unhandled promise rejections when it is eventually aborted. + void this.streamSource.start().catch(() => {}); + return false; + } + + return true; + } catch (error) { + clearTimeout(timeoutId!); + if (error instanceof Error && error.message.includes('401')) { + this.unauthorized = true; + } + return false; + } + } + + // --------------------------------------------------------------------------- + // Polling initialization + // --------------------------------------------------------------------------- + + /** + * Attempts to initialize via polling with timeout. + * Returns true if first poll succeeded within timeout. + * + * Only used when streaming is disabled and polling is the primary source. 
+ */ + private async tryInitializePolling(): Promise { + const pollPromise = this.pollingSource.poll(); + + if (this.options.polling.initTimeoutMs <= 0) { + try { + await pollPromise; + if (this.data) { + this.pollingSource.startInterval(); + return true; + } + return false; + } catch { + return false; + } + } + + // Race against timeout + let timeoutId: ReturnType; + const timeoutPromise = new Promise<'timeout'>((resolve) => { + timeoutId = setTimeout( + () => resolve('timeout'), + this.options.polling.initTimeoutMs, + ); + }); + + try { + const result = await Promise.race([pollPromise, timeoutPromise]); + clearTimeout(timeoutId!); + + if (result === 'timeout') { + console.warn( + '@vercel/flags-core: Polling initialization timeout, falling back', + ); + return false; + } + + if (this.data) { + this.pollingSource.startInterval(); + return true; + } + return false; + } catch { + clearTimeout(timeoutId!); + return false; + } + } + + // --------------------------------------------------------------------------- + // Build step helpers + // --------------------------------------------------------------------------- + + /** + * Initializes data for build step environments. + */ + private async initializeForBuildStep(): Promise { + if (this.data) return; + + if (!this.buildDataPromise) { + this.buildDataPromise = this.loadBuildData(); + } + this.data = await this.buildDataPromise; + } + + /** + * Retrieves data during build steps. + * Concurrent callers share a single load promise. The first caller to + * populate `this.data` gets cacheStatus MISS; subsequent callers get HIT. 
+ */ + private async resolveDataForBuildStep(): Promise< + [TaggedData, Metrics['cacheStatus']] + > { + if (this.data) { + return [this.data, 'HIT']; + } + + if (!this.buildDataPromise) { + this.buildDataPromise = this.loadBuildData(); + } + + const data = await this.buildDataPromise; + + if (!this.data) { + this.data = data; + return [data, 'MISS']; + } + return [this.data, 'HIT']; + } + + /** + * Loads data for a build step: bundled → one-time fetch. + */ + private async loadBuildData(): Promise { + const bundled = await this.bundledSource.tryLoad(); + if (bundled) return tagData(bundled, 'bundled'); + + // Fallback: one-time fetch + try { + const fetched = await fetchDatafile({ + host: this.options.host, + sdkKey: this.options.sdkKey, + fetch: this.options.fetch, + }); + return tagData(fetched, 'fetched'); + } catch { + // fetch failed — fall through to throw + } + + throw new Error( + '@vercel/flags-core: No flag definitions available during build. ' + + 'Provide a datafile or bundled definitions.', + ); + } + + // --------------------------------------------------------------------------- + // Fallback helpers + // --------------------------------------------------------------------------- + + /** + * Shared fallback chain used by both initialize() and resolveData(). 
+ */ + private async initializeFromFallbacks(): Promise { + this.transition('initializing:fallback'); + + if (this.data) { + this.transition('degraded'); + return; + } + + const bundled = await this.bundledSource.tryLoad(); + if (bundled) { + this.data = tagData(bundled, 'bundled'); + this.transition('degraded'); + return; + } + + // Last resort: one-time fetch (only when no stream/poll configured) + if (!this.options.stream.enabled && !this.options.polling.enabled) { + try { + const fetched = await fetchDatafile({ + host: this.options.host, + sdkKey: this.options.sdkKey, + fetch: this.options.fetch, + }); + this.data = tagData(fetched, 'fetched'); + this.transition('degraded'); + return; + } catch { + // fetch failed — fall through to throw + } + } + + throw new Error( + '@vercel/flags-core: No flag definitions available. ' + + 'Bundled definitions not found.', + ); + } + + /** + * Retrieves data using the fallback chain (called when no cached data exists). + * Streaming mode: stream → datafile → bundled. + * Polling mode: poll → datafile → bundled. + * Offline mode: datafile → bundled → one-time fetch. 
+ */ + private async resolveDataWithFallbacks(): Promise< + [TaggedData, Metrics['cacheStatus']] + > { + // Try the configured primary source + if (this.options.stream.enabled) { + this.transition('initializing:stream'); + const streamSuccess = await this.tryInitializeStream(); + if (streamSuccess && this.data) { + this.transition('streaming'); + return [this.data, 'MISS']; + } + } else if (this.options.polling.enabled) { + this.transition('initializing:polling'); + const pollingSuccess = await this.tryInitializePolling(); + if (pollingSuccess && this.data) { + this.transition('polling'); + return [this.data, 'MISS']; + } + } + + // Fallback chain: datafile → bundled → one-time fetch + this.transition('initializing:fallback'); + + if (this.options.datafile) { + this.data = tagData(this.options.datafile, 'provided'); + this.transition('degraded'); + return [this.data, 'STALE']; + } + + const bundled = await this.bundledSource.tryLoad(); + if (bundled) { + console.warn('@vercel/flags-core: Using bundled definitions as fallback'); + this.data = tagData(bundled, 'bundled'); + this.transition('degraded'); + return [this.data, 'STALE']; + } + + // Last resort: one-time fetch (only when no stream/poll configured) + if (!this.options.stream.enabled && !this.options.polling.enabled) { + try { + const fetched = await fetchDatafile({ + host: this.options.host, + sdkKey: this.options.sdkKey, + fetch: this.options.fetch, + }); + this.data = tagData(fetched, 'fetched'); + this.transition('degraded'); + return [this.data, 'MISS']; + } catch { + // fetch failed — fall through to throw + } + } + + throw new Error( + '@vercel/flags-core: No flag definitions available. 
' + + 'Provide a datafile or bundled definitions.', + ); + } + + // --------------------------------------------------------------------------- + // Data comparison + // --------------------------------------------------------------------------- + + /** + * Checks if the incoming data is newer than the current in-memory data. + * Returns true if the update should proceed, false if it should be skipped. + * + * Always accepts the update if: + * - There is no current data + * - The current data has no configUpdatedAt + * - The incoming data has no configUpdatedAt + * + * Skips the update only when both have configUpdatedAt and incoming is not newer. + */ + private isNewerData(incoming: DatafileInput): boolean { + if (!this.data) return true; + + const currentTs = parseConfigUpdatedAt(this.data.configUpdatedAt); + const incomingTs = parseConfigUpdatedAt(incoming.configUpdatedAt); + + if (currentTs === undefined || incomingTs === undefined) { + return true; + } + + return incomingTs > currentTs; + } + + // --------------------------------------------------------------------------- + // Usage tracking + // --------------------------------------------------------------------------- + + /** + * Tracks a read operation for usage analytics. + * During build steps, only the first read is tracked. + */ + private trackRead( + startTime: number, + cacheHadDefinitions: boolean, + isFirstRead: boolean, + source: Metrics['source'], + ): void { + if (this.unauthorized) return; + if (this.options.buildStep && this.buildReadTracked) return; + if (this.options.buildStep) this.buildReadTracked = true; + + const configOrigin: 'in-memory' | 'embedded' = + source === 'embedded' ? 'embedded' : 'in-memory'; + const cacheAction: 'FOLLOWING' | 'REFRESHING' | 'NONE' = + this.state === 'streaming' + ? 'FOLLOWING' + : this.state === 'polling' + ? 'REFRESHING' + : 'NONE'; + const mode = this.mode; + const trackOptions: TrackReadOptions = { + configOrigin, + cacheStatus: cacheHadDefinitions ? 
'HIT' : 'MISS', + cacheAction, + cacheIsBlocking: !cacheHadDefinitions, + duration: Date.now() - startTime, + mode: + mode === 'streaming' ? 'stream' : mode === 'polling' ? 'poll' : mode, + }; + const configUpdatedAt = this.data?.configUpdatedAt; + if (typeof configUpdatedAt === 'number') { + trackOptions.configUpdatedAt = configUpdatedAt; + } + const revision = this.data?.revision; + if (typeof revision === 'number') { + trackOptions.revision = revision; + } + if (isFirstRead) { + trackOptions.cacheIsFirstRead = true; + } + this.usageTracker.trackRead(trackOptions); + } +} diff --git a/packages/vercel-flags-core/src/controller/normalized-options.ts b/packages/vercel-flags-core/src/controller/normalized-options.ts new file mode 100644 index 00000000..5474e4a3 --- /dev/null +++ b/packages/vercel-flags-core/src/controller/normalized-options.ts @@ -0,0 +1,114 @@ +import type { DatafileInput, PollingOptions, StreamOptions } from '../types'; + +const DEFAULT_STREAM_INIT_TIMEOUT_MS = 3000; +const DEFAULT_POLLING_INTERVAL_MS = 30_000; +const MIN_POLLING_INTERVAL_MS = 30_000; +const DEFAULT_POLLING_INIT_TIMEOUT_MS = 3_000; + +/** + * Configuration options for Controller + */ +export type ControllerOptions = { + /** SDK key for authentication (must start with "vf_") */ + sdkKey: string; + + /** + * Initial datafile to use immediately + * - At runtime: used while waiting for stream/poll, then updated in background + * - At build step: used as primary source (skips network) + */ + datafile?: DatafileInput; + + /** + * Configure streaming connection (runtime only, ignored during build step) + * - `true`: Enable with default options (initTimeoutMs: 3000) + * - `false`: Disable streaming + * - `{ initTimeoutMs: number }`: Enable with custom timeout + * @default true + */ + stream?: boolean | StreamOptions; + + /** + * Configure polling fallback (runtime only, ignored during build step) + * - `true`: Enable with default options (intervalMs: 30000, initTimeoutMs: 3000) + * - 
`false`: Disable polling + * - `{ intervalMs: number, initTimeoutMs: number }`: Enable with custom options + * @default true + */ + polling?: boolean | PollingOptions; + + /** + * Override build step detection + * - `true`: Treat as build step (use datafile/bundled only, no network) + * - `false`: Treat as runtime (try stream/poll first) + * @default auto-detected via CI=1 or NEXT_PHASE=phase-production-build + */ + buildStep?: boolean; + + /** + * Custom fetch function for making HTTP requests. + * Useful for testing (e.g. resolving to a different IP). + * @default globalThis.fetch + */ + fetch?: typeof globalThis.fetch; +}; + +export type NormalizedOptions = { + sdkKey: string; + datafile: DatafileInput | undefined; + stream: { enabled: boolean; initTimeoutMs: number }; + polling: { enabled: boolean; intervalMs: number; initTimeoutMs: number }; + buildStep: boolean; + fetch: typeof globalThis.fetch; + host: string; +}; + +export function normalizeOptions( + options: ControllerOptions, +): NormalizedOptions { + const autoDetectedBuildStep = + process.env.CI === '1' || + process.env.NEXT_PHASE === 'phase-production-build'; + const buildStep = options.buildStep ?? 
autoDetectedBuildStep; + + let stream: NormalizedOptions['stream']; + if (options.stream === undefined || options.stream === true) { + stream = { enabled: true, initTimeoutMs: DEFAULT_STREAM_INIT_TIMEOUT_MS }; + } else if (options.stream === false) { + stream = { enabled: false, initTimeoutMs: 0 }; + } else { + stream = { enabled: true, initTimeoutMs: options.stream.initTimeoutMs }; + } + + let polling: NormalizedOptions['polling']; + if (options.polling === undefined || options.polling === true) { + polling = { + enabled: true, + intervalMs: DEFAULT_POLLING_INTERVAL_MS, + initTimeoutMs: DEFAULT_POLLING_INIT_TIMEOUT_MS, + }; + } else if (options.polling === false) { + polling = { enabled: false, intervalMs: 0, initTimeoutMs: 0 }; + } else { + if (options.polling.intervalMs < MIN_POLLING_INTERVAL_MS) { + throw new Error( + `@vercel/flags-core: Polling interval must be at least ${MIN_POLLING_INTERVAL_MS}ms, got ${options.polling.intervalMs}ms.`, + ); + } + polling = { + enabled: true, + intervalMs: options.polling.intervalMs, + initTimeoutMs: options.polling.initTimeoutMs, + }; + } + + return { + sdkKey: options.sdkKey, + datafile: options.datafile, + stream, + polling, + buildStep, + fetch: options.fetch ?? 
globalThis.fetch, + host: 'https://flags.vercel.com', + }; +} diff --git a/packages/vercel-flags-core/src/controller/polling-source.ts b/packages/vercel-flags-core/src/controller/polling-source.ts new file mode 100644 index 00000000..59c808cb --- /dev/null +++ b/packages/vercel-flags-core/src/controller/polling-source.ts @@ -0,0 +1,81 @@ +import type { DatafileInput } from '../types'; +import { fetchDatafile } from './fetch-datafile'; +import { TypedEmitter } from './typed-emitter'; + +export type PollingSourceConfig = { + host: string; + sdkKey: string; + polling: { + intervalMs: number; + }; + fetch: typeof globalThis.fetch; +}; + +export type PollingSourceEvents = { + data: (data: DatafileInput) => void; + error: (error: Error) => void; +}; + +/** + * Manages interval-based polling for flag data. + * Wraps fetchDatafile() and emits typed events. + */ +export class PollingSource extends TypedEmitter { + private config: PollingSourceConfig; + private intervalId: ReturnType | undefined; + private abortController: AbortController | undefined; + + constructor(config: PollingSourceConfig) { + super(); + this.config = config; + } + + /** + * Perform a single poll request. + * Emits 'data' on success, 'error' on failure. + */ + async poll(): Promise { + if (this.abortController?.signal.aborted) return; + + try { + const data = await fetchDatafile({ + ...this.config, + signal: this.abortController?.signal, + }); + this.emit('data', data); + } catch (error) { + const err = + error instanceof Error ? error : new Error('Unknown poll error'); + this.emit('error', err); + } + } + + /** + * Start interval-based polling. + * Polls at the configured interval. Does not perform an initial poll — + * callers should call poll() first if an immediate poll is needed. 
+ */ + startInterval(): void { + if (this.intervalId) return; + + this.abortController = new AbortController(); + + // Start interval + this.intervalId = setInterval( + () => void this.poll(), + this.config.polling.intervalMs, + ); + } + + /** + * Stop interval-based polling. + */ + stop(): void { + if (this.intervalId) { + clearInterval(this.intervalId); + this.intervalId = undefined; + } + this.abortController?.abort(); + this.abortController = undefined; + } +} diff --git a/packages/vercel-flags-core/src/controller/stream-connection.test.ts b/packages/vercel-flags-core/src/controller/stream-connection.test.ts new file mode 100644 index 00000000..5093984e --- /dev/null +++ b/packages/vercel-flags-core/src/controller/stream-connection.test.ts @@ -0,0 +1,938 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { connectStream } from './stream-connection'; + +const HOST = 'https://flags.vercel.com'; +const fetchMock = vi.fn(); + +beforeEach(() => { + vi.clearAllMocks(); + fetchMock.mockReset(); +}); + +function createNdjsonStream( + messages: object[], + options?: { delayMs?: number; keepOpen?: boolean }, +): ReadableStream { + const { delayMs = 0, keepOpen = false } = options ?? 
{}; + return new ReadableStream({ + async start(controller) { + for (const message of messages) { + if (delayMs > 0) await new Promise((r) => setTimeout(r, delayMs)); + controller.enqueue( + new TextEncoder().encode(`${JSON.stringify(message)}\n`), + ); + } + if (!keepOpen) { + controller.close(); + } + }, + }); +} + +function streamResponse( + body: ReadableStream | null, + status = 200, +): Promise { + return Promise.resolve( + new Response(body, { + status, + headers: { 'Content-Type': 'application/x-ndjson' }, + }), + ); +} + +function ndjsonResponse(messages: object[], options?: { keepOpen?: boolean }) { + return streamResponse(createNdjsonStream(messages, options)); +} + +const datafileMsg = (definitions = {}) => ({ + type: 'datafile' as const, + data: { projectId: 'test', definitions }, +}); + +describe('connectStream', () => { + describe('connection success', () => { + it('should resolve when first datafile message is received', async () => { + const definitions = { projectId: 'test', definitions: {} }; + + fetchMock.mockImplementation(() => + ndjsonResponse([{ type: 'datafile', data: definitions }]), + ); + + const abortController = new AbortController(); + const onDatafile = vi.fn(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile }, + ); + + expect(onDatafile).toHaveBeenCalledWith(definitions); + abortController.abort(); + }); + + it('should call onDatafile callback with parsed data', async () => { + const definitions = { + projectId: 'test', + definitions: { flag: { variants: [true] } }, + }; + + fetchMock.mockImplementation(() => + ndjsonResponse([{ type: 'datafile', data: definitions }]), + ); + + const abortController = new AbortController(); + const onDatafile = vi.fn(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile }, + ); + + expect(onDatafile).toHaveBeenCalledTimes(1); + 
expect(onDatafile).toHaveBeenCalledWith(definitions); + abortController.abort(); + }); + + it('should ignore ping messages', async () => { + const definitions = { projectId: 'test', definitions: {} }; + + fetchMock.mockImplementation(() => + ndjsonResponse([ + { type: 'ping' }, + { type: 'datafile', data: definitions }, + { type: 'ping' }, + ]), + ); + + const abortController = new AbortController(); + const onDatafile = vi.fn(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile }, + ); + + expect(onDatafile).toHaveBeenCalledTimes(1); + expect(onDatafile).toHaveBeenCalledWith(definitions); + abortController.abort(); + }); + + it('should handle NDJSON messages split across chunks', async () => { + const definitions = { projectId: 'test', definitions: { flag: true } }; + const fullMessage = JSON.stringify({ + type: 'datafile', + data: definitions, + }); + const part1 = fullMessage.slice(0, 20); + const part2 = `${fullMessage.slice(20)}\n`; + + fetchMock.mockImplementation(() => + streamResponse( + new ReadableStream({ + async start(controller) { + controller.enqueue(new TextEncoder().encode(part1)); + await new Promise((r) => setTimeout(r, 10)); + controller.enqueue(new TextEncoder().encode(part2)); + controller.close(); + }, + }), + ), + ); + + const abortController = new AbortController(); + const onDatafile = vi.fn(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile }, + ); + + expect(onDatafile).toHaveBeenCalledWith(definitions); + abortController.abort(); + }); + + it('should skip empty lines in stream', async () => { + const definitions = { projectId: 'test', definitions: {} }; + + fetchMock.mockImplementation(() => + streamResponse( + new ReadableStream({ + start(controller) { + controller.enqueue(new TextEncoder().encode('\n\n')); + controller.enqueue( + new TextEncoder().encode( + JSON.stringify({ type: 'datafile', data: definitions }) 
+ + '\n', + ), + ); + controller.enqueue(new TextEncoder().encode('\n')); + controller.close(); + }, + }), + ), + ); + + const abortController = new AbortController(); + const onDatafile = vi.fn(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile }, + ); + + expect(onDatafile).toHaveBeenCalledTimes(1); + abortController.abort(); + }); + }); + + describe('headers', () => { + beforeEach(() => { + fetchMock.mockImplementation(() => ndjsonResponse([datafileMsg()])); + }); + + it('should include Authorization header with Bearer token', async () => { + const abortController = new AbortController(); + await connectStream( + { host: HOST, sdkKey: 'vf_my_key', abortController, fetch: fetchMock }, + { onDatafile: vi.fn() }, + ); + + const headers = fetchMock.mock.calls[0]![1]!.headers as Record< + string, + string + >; + expect(headers.Authorization).toBe('Bearer vf_my_key'); + abortController.abort(); + }); + + it('should include User-Agent header with version', async () => { + const abortController = new AbortController(); + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn() }, + ); + + const headers = fetchMock.mock.calls[0]![1]!.headers as Record< + string, + string + >; + expect(headers['User-Agent']).toMatch(/^VercelFlagsCore\//); + abortController.abort(); + }); + + it('should include X-Retry-Attempt header starting at 0', async () => { + const abortController = new AbortController(); + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn() }, + ); + + const headers = fetchMock.mock.calls[0]![1]!.headers as Record< + string, + string + >; + expect(headers['X-Retry-Attempt']).toBe('0'); + abortController.abort(); + }); + }); + + describe('retry behavior', () => { + beforeEach(() => vi.useFakeTimers()); + afterEach(() => vi.useRealTimers()); + + it('should increment X-Retry-Attempt 
on reconnect after stream closes', async () => { + let requestCount = 0; + + fetchMock.mockImplementation(() => { + requestCount++; + return ndjsonResponse([datafileMsg()], { keepOpen: requestCount >= 2 }); + }); + + const abortController = new AbortController(); + const onDisconnect = vi.fn(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn(), onDisconnect }, + ); + + // Advance past the reconnection backoff delay + await vi.advanceTimersByTimeAsync(1000); + await vi.advanceTimersByTimeAsync(0); + + expect(requestCount).toBeGreaterThanOrEqual(2); + const h0 = fetchMock.mock.calls[0]![1]!.headers as Record; + const h1 = fetchMock.mock.calls[1]![1]!.headers as Record; + expect(h0['X-Retry-Attempt']).toBe('0'); + expect(h1['X-Retry-Attempt']).toBe('1'); + expect(onDisconnect).toHaveBeenCalled(); + + abortController.abort(); + }); + + it('should reset retryCount to 0 after receiving datafile', async () => { + let requestCount = 0; + + fetchMock.mockImplementation(() => { + requestCount++; + return ndjsonResponse([datafileMsg()], { keepOpen: requestCount >= 3 }); + }); + + const abortController = new AbortController(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn() }, + ); + + // Advance past first reconnection backoff + await vi.advanceTimersByTimeAsync(1000); + await vi.advanceTimersByTimeAsync(0); + + // Advance past second reconnection backoff + await vi.advanceTimersByTimeAsync(1000); + await vi.advanceTimersByTimeAsync(0); + + expect(requestCount).toBeGreaterThanOrEqual(3); + + // Each reconnect after successful datafile should reset to 0, then increment by 1 + const h0 = fetchMock.mock.calls[0]![1]!.headers as Record; + const h1 = fetchMock.mock.calls[1]![1]!.headers as Record; + const h2 = fetchMock.mock.calls[2]![1]!.headers as Record; + expect(h0['X-Retry-Attempt']).toBe('0'); + 
expect(h1['X-Retry-Attempt']).toBe('1'); + expect(h2['X-Retry-Attempt']).toBe('1'); + + abortController.abort(); + }); + + it('should enforce minimum delay between reconnection attempts when retryCount resets', async () => { + let requestCount = 0; + + fetchMock.mockImplementation(() => { + requestCount++; + return ndjsonResponse([datafileMsg()], { keepOpen: requestCount >= 4 }); + }); + + const abortController = new AbortController(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn() }, + ); + + // After the first stream closes, retryCount was reset to 0 then + // incremented to 1 — backoff(1) = 0 but minimum gap is 1s. + // Advance 999ms — not enough for the minimum gap + await vi.advanceTimersByTimeAsync(999); + expect(requestCount).toBe(1); + + // Advance past the 1s minimum gap + await vi.advanceTimersByTimeAsync(1); + await vi.advanceTimersByTimeAsync(0); + expect(requestCount).toBe(2); + + // Same pattern for the next reconnection + await vi.advanceTimersByTimeAsync(999); + expect(requestCount).toBe(2); + + await vi.advanceTimersByTimeAsync(1); + await vi.advanceTimersByTimeAsync(0); + expect(requestCount).toBe(3); + + abortController.abort(); + }); + + it('should call onDisconnect when stream ends normally', async () => { + let requestCount = 0; + + fetchMock.mockImplementation(() => { + requestCount++; + return ndjsonResponse([datafileMsg()], { keepOpen: requestCount >= 2 }); + }); + + const abortController = new AbortController(); + const onDisconnect = vi.fn(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn(), onDisconnect }, + ); + + // Advance past the reconnection backoff delay + await vi.advanceTimersByTimeAsync(1000); + await vi.advanceTimersByTimeAsync(0); + + expect(onDisconnect).toHaveBeenCalled(); + + abortController.abort(); + }); + }); + + describe('failure cases', () => { + // Note: 401 response 
behavior is tested through Controller + // integration tests. On a 401 before any data arrives, the stream + // rejects its init promise with UnauthorizedError and aborts — the + // Controller decides how to fall back. + + it('should retry on error before first datafile and reject when aborted', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }); + const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + let requestCount = 0; + + fetchMock.mockImplementation(() => { + requestCount++; + return Promise.resolve(new Response(null, { status: 500 })); + }); + + const abortController = new AbortController(); + + const promise = connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn() }, + ); + + // First request fires immediately, first retry has 0ms backoff + await vi.advanceTimersByTimeAsync(0); + // Advance past the second retry backoff (1s base + jitter) + await vi.advanceTimersByTimeAsync(2000); + await vi.advanceTimersByTimeAsync(0); + + expect(requestCount).toBeGreaterThanOrEqual(2); + + // Abort to stop retries + abortController.abort(); + + // The init promise should reject since no data was received + await expect(promise).rejects.toThrow( + 'stream: aborted before receiving data', + ); + + errorSpy.mockRestore(); + vi.useRealTimers(); + }); + + it('should retry if response has no body and reject when aborted', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }); + const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + let requestCount = 0; + + fetchMock.mockImplementation(() => { + requestCount++; + return Promise.resolve( + new Response(null, { + status: 200, + headers: { 'Content-Type': 'application/x-ndjson' }, + }), + ); + }); + + const abortController = new AbortController(); + + const promise = connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn() }, + ); + + // First request fires immediately, first 
retry has 0ms backoff + await vi.advanceTimersByTimeAsync(0); + // Advance past the second retry backoff (1s base + jitter) + await vi.advanceTimersByTimeAsync(2000); + await vi.advanceTimersByTimeAsync(0); + + expect(requestCount).toBeGreaterThanOrEqual(2); + + // Abort to stop retries + abortController.abort(); + + // The init promise should reject since no data was received + await expect(promise).rejects.toThrow( + 'stream: aborted before receiving data', + ); + + errorSpy.mockRestore(); + vi.useRealTimers(); + }); + + it('should call onDisconnect on error after initial data received', async () => { + vi.useFakeTimers({ shouldAdvanceTime: true }); + const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + let requestCount = 0; + + fetchMock.mockImplementation(() => { + requestCount++; + if (requestCount === 1) { + return ndjsonResponse([datafileMsg()]); + } + return Promise.resolve(new Response(null, { status: 500 })); + }); + + const abortController = new AbortController(); + const onDisconnect = vi.fn(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn(), onDisconnect }, + ); + + // Advance past the reconnection backoff delay + await vi.advanceTimersByTimeAsync(1000); + await vi.advanceTimersByTimeAsync(0); + + expect(onDisconnect).toHaveBeenCalled(); + + abortController.abort(); + errorSpy.mockRestore(); + vi.useRealTimers(); + }); + + // Note: Testing MAX_RETRY_COUNT exceeded is skipped because the backoff delays + // make the test too slow. The behavior is: + // - After MAX_RETRY_COUNT (15) retries without receiving data, the connection aborts + // - console.error('@vercel/flags-core: Max retry count exceeded') is logged + // This is tested indirectly through Controller integration tests. 
+ + it('should stop when abortController is aborted externally', async () => { + fetchMock.mockImplementation((_input, init) => + streamResponse( + new ReadableStream({ + start(controller) { + controller.enqueue( + new TextEncoder().encode( + `${JSON.stringify({ + type: 'datafile', + data: { projectId: 'test', definitions: {} }, + })}\n`, + ), + ); + // Keep stream open + init?.signal?.addEventListener('abort', () => { + controller.close(); + }); + }, + }), + ), + ); + + const abortController = new AbortController(); + const onDatafile = vi.fn(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile }, + ); + + expect(onDatafile).toHaveBeenCalledTimes(1); + + // Abort externally + abortController.abort(); + + // Should stop without errors + expect(abortController.signal.aborted).toBe(true); + }); + }); + + describe('ping timeout', () => { + beforeEach(() => vi.useFakeTimers()); + afterEach(() => vi.useRealTimers()); + + it('should abort connection when no messages received within ping timeout', async () => { + let requestCount = 0; + + fetchMock.mockImplementation((_input, init) => { + requestCount++; + return streamResponse( + new ReadableStream({ + start(controller) { + controller.enqueue( + new TextEncoder().encode(`${JSON.stringify(datafileMsg())}\n`), + ); + // Keep stream open — simulates a zombie connection + init?.signal?.addEventListener('abort', () => { + controller.close(); + }); + }, + }), + ); + }); + + const abortController = new AbortController(); + const onDisconnect = vi.fn(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn(), onDisconnect }, + ); + + expect(requestCount).toBe(1); + + // Advance past the 90s ping timeout + await vi.advanceTimersByTimeAsync(90_000); + // Allow microtasks from stream cancellation to settle + await vi.advanceTimersByTimeAsync(0); + // Advance past the reconnection backoff (min 1s gap) + 
await vi.advanceTimersByTimeAsync(1_000); + await vi.advanceTimersByTimeAsync(0); + + expect(onDisconnect).toHaveBeenCalled(); + + // Should have attempted reconnection + expect(requestCount).toBeGreaterThanOrEqual(2); + + abortController.abort(); + }); + + it('should reset timeout on each ping', async () => { + let streamController: ReadableStreamDefaultController; + + fetchMock.mockImplementation((_input, init) => + streamResponse( + new ReadableStream({ + start(c) { + streamController = c; + c.enqueue( + new TextEncoder().encode(`${JSON.stringify(datafileMsg())}\n`), + ); + init?.signal?.addEventListener('abort', () => { + c.close(); + }); + }, + }), + ), + ); + + const abortController = new AbortController(); + + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn() }, + ); + + // Send pings at 30s intervals (before the 90s timeout) + for (let i = 0; i < 5; i++) { + await vi.advanceTimersByTimeAsync(30_000); + streamController!.enqueue( + new TextEncoder().encode(`${JSON.stringify({ type: 'ping' })}\n`), + ); + await vi.advanceTimersByTimeAsync(0); + } + + // 150s total elapsed but no timeout because pings kept resetting it + // Verify no reconnection was attempted (still on the original connection) + expect(fetchMock).toHaveBeenCalledTimes(1); + + abortController.abort(); + }); + + it('should not start timeout before initial data received', async () => { + const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + + fetchMock.mockImplementation((_input, init) => + streamResponse( + new ReadableStream({ + start(controller) { + // Keep stream open without sending any data + init?.signal?.addEventListener('abort', () => { + controller.close(); + }); + }, + }), + ), + ); + + const abortController = new AbortController(); + + const promise = connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn() }, + ); + + // Advance past 90s — 
ping timeout should NOT fire since no initial data + await vi.advanceTimersByTimeAsync(90_000); + await vi.advanceTimersByTimeAsync(0); + + // No reconnection should have been triggered (timeout only starts after initial data) + expect(fetchMock).toHaveBeenCalledTimes(1); + + abortController.abort(); + await expect(promise).rejects.toThrow( + 'stream: aborted before receiving data', + ); + + errorSpy.mockRestore(); + }); + }); + + describe('multiple datafile messages', () => { + it('should call onDatafile for each datafile but only resolve once', async () => { + const data1 = { projectId: 'test', definitions: { v: 1 } }; + const data2 = { projectId: 'test', definitions: { v: 2 } }; + + fetchMock.mockImplementation(() => + ndjsonResponse([ + { type: 'datafile', data: data1 }, + { type: 'datafile', data: data2 }, + ]), + ); + + const abortController = new AbortController(); + const onDatafile = vi.fn(); + + const promise = connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile }, + ); + + // Should resolve (not hang waiting for more data) + await promise; + + // Wait for all messages to be processed + await vi.waitFor(() => { + expect(onDatafile).toHaveBeenCalledTimes(2); + }); + + expect(onDatafile).toHaveBeenNthCalledWith(1, data1); + expect(onDatafile).toHaveBeenNthCalledWith(2, data2); + + abortController.abort(); + }); + }); + + describe('X-Revision header', () => { + beforeEach(() => { + fetchMock.mockImplementation(() => ndjsonResponse([datafileMsg()])); + }); + + it('should include X-Revision header when revision is provided', async () => { + const abortController = new AbortController(); + await connectStream( + { + host: HOST, + sdkKey: 'vf_test', + abortController, + fetch: fetchMock, + revision: () => 42, + }, + { onDatafile: vi.fn() }, + ); + + const headers = fetchMock.mock.calls[0]![1]!.headers as Record< + string, + string + >; + expect(headers['X-Revision']).toBe('42'); + abortController.abort(); + }); + 
+ it('should not include X-Revision header when revision is undefined', async () => { + const abortController = new AbortController(); + await connectStream( + { host: HOST, sdkKey: 'vf_test', abortController, fetch: fetchMock }, + { onDatafile: vi.fn() }, + ); + + const headers = fetchMock.mock.calls[0]![1]!.headers as Record< + string, + string + >; + expect(headers['X-Revision']).toBeUndefined(); + abortController.abort(); + }); + + it('should call revision getter on each reconnect to get latest value', async () => { + vi.useFakeTimers(); + let requestCount = 0; + let currentRevision = 5; + + fetchMock.mockImplementation(() => { + requestCount++; + const nextRevision = currentRevision + 1; + return ndjsonResponse( + [ + { + type: 'datafile', + data: { + projectId: 'test', + definitions: {}, + revision: nextRevision, + }, + }, + ], + { keepOpen: requestCount >= 3 }, + ); + }); + + const abortController = new AbortController(); + + await connectStream( + { + host: HOST, + sdkKey: 'vf_test', + abortController, + fetch: fetchMock, + revision: () => currentRevision, + }, + { + onDatafile: (data) => { + // Simulate controller updating revision from received datafile + currentRevision = (data as Record) + .revision as number; + }, + }, + ); + + // First request should send revision 5 + const h0 = fetchMock.mock.calls[0]![1]!.headers as Record; + expect(h0['X-Revision']).toBe('5'); + + // Advance past reconnection backoff + await vi.advanceTimersByTimeAsync(1000); + await vi.advanceTimersByTimeAsync(0); + + // Second request should send the updated revision (6), not the initial (5) + const h1 = fetchMock.mock.calls[1]![1]!.headers as Record; + expect(h1['X-Revision']).toBe('6'); + + // Advance past reconnection backoff again + await vi.advanceTimersByTimeAsync(1000); + await vi.advanceTimersByTimeAsync(0); + + // Third request should send the updated revision (7) + const h2 = fetchMock.mock.calls[2]![1]!.headers as Record; + expect(h2['X-Revision']).toBe('7'); + + 
abortController.abort(); + vi.useRealTimers(); + }); + }); + + describe('primed message', () => { + it('should resolve init promise when primed message is received', async () => { + const primedMsg = { + type: 'primed' as const, + revision: 33, + projectId: 'prj_test', + environment: 'production', + }; + + fetchMock.mockImplementation(() => ndjsonResponse([primedMsg])); + + const abortController = new AbortController(); + const onDatafile = vi.fn(); + const onPrimed = vi.fn(); + + await connectStream( + { + host: HOST, + sdkKey: 'vf_test', + abortController, + fetch: fetchMock, + revision: () => 33, + }, + { onDatafile, onPrimed }, + ); + + expect(onDatafile).not.toHaveBeenCalled(); + expect(onPrimed).toHaveBeenCalledWith(primedMsg); + abortController.abort(); + }); + + it('should call onPrimed but not onDatafile for primed messages', async () => { + const primedMsg = { + type: 'primed' as const, + revision: 5, + projectId: 'prj_test', + environment: 'production', + }; + + fetchMock.mockImplementation(() => + ndjsonResponse([ + primedMsg, + { type: 'datafile', data: { projectId: 'test', definitions: {} } }, + ]), + ); + + const abortController = new AbortController(); + const onDatafile = vi.fn(); + const onPrimed = vi.fn(); + + await connectStream( + { + host: HOST, + sdkKey: 'vf_test', + abortController, + fetch: fetchMock, + revision: () => 5, + }, + { onDatafile, onPrimed }, + ); + + // Wait for all messages to be processed + await vi.waitFor(() => { + expect(onDatafile).toHaveBeenCalledTimes(1); + }); + + expect(onPrimed).toHaveBeenCalledTimes(1); + expect(onPrimed).toHaveBeenCalledWith(primedMsg); + abortController.abort(); + }); + + it('should reset ping timeout on primed message', async () => { + vi.useFakeTimers(); + let streamController: ReadableStreamDefaultController; + + fetchMock.mockImplementation((_input, init) => + streamResponse( + new ReadableStream({ + start(c) { + streamController = c; + c.enqueue( + new TextEncoder().encode( + 
`${JSON.stringify({ + type: 'primed', + revision: 1, + projectId: 'prj_test', + environment: 'production', + })}\n`, + ), + ); + init?.signal?.addEventListener('abort', () => { + c.close(); + }); + }, + }), + ), + ); + + const abortController = new AbortController(); + + await connectStream( + { + host: HOST, + sdkKey: 'vf_test', + abortController, + fetch: fetchMock, + revision: () => 1, + }, + { onDatafile: vi.fn(), onPrimed: vi.fn() }, + ); + + // Send pings at 30s intervals (before the 90s timeout) + for (let i = 0; i < 4; i++) { + await vi.advanceTimersByTimeAsync(30_000); + streamController!.enqueue( + new TextEncoder().encode(`${JSON.stringify({ type: 'ping' })}\n`), + ); + await vi.advanceTimersByTimeAsync(0); + } + + // 120s elapsed but no timeout because pings kept resetting it + // Verify no reconnection was attempted (still on the original connection) + expect(fetchMock).toHaveBeenCalledTimes(1); + + abortController.abort(); + vi.useRealTimers(); + }); + }); +}); diff --git a/packages/vercel-flags-core/src/controller/stream-connection.ts b/packages/vercel-flags-core/src/controller/stream-connection.ts new file mode 100644 index 00000000..2f533052 --- /dev/null +++ b/packages/vercel-flags-core/src/controller/stream-connection.ts @@ -0,0 +1,261 @@ +import { version } from '../../package.json'; +import type { BundledDefinitions } from '../types'; +import { sleep } from '../utils/sleep'; + +export type PrimedMessage = { + type: 'primed'; + revision: number; + projectId: string; + environment: string; +}; + +export type StreamMessage = + | { type: 'datafile'; data: BundledDefinitions } + | PrimedMessage + | { type: 'ping' }; + +const MAX_RETRY_COUNT = 15; +const BASE_RETRY_DELAY_MS = 1000; +const MAX_RETRY_DELAY_MS = 60_000; +const PING_TIMEOUT_MS = 90_000; + +function backoff(retryCount: number): number { + if (retryCount === 1) return 0; + const delay = Math.min( + BASE_RETRY_DELAY_MS * 2 ** (retryCount - 2), + MAX_RETRY_DELAY_MS, + ); + return delay + 
Math.random() * 1000; +} + +export class UnauthorizedError extends Error { + constructor() { + super('stream: unauthorized (401)'); + this.name = 'UnauthorizedError'; + } +} + +export type StreamCallbacks = { + onDatafile: (data: BundledDefinitions) => void; + onPrimed?: (message: PrimedMessage) => void; + onDisconnect?: () => void; +}; + +export type StreamConfig = { + host: string; + sdkKey: string; + abortController: AbortController; + fetch?: typeof globalThis.fetch; + /** Returns the current revision number to send as X-Revision header */ + revision?: () => number | undefined; +}; + +/** + * Connects to the flags stream endpoint and handles reconnection with backoff. + * Resolves when the first datafile message is received. + * Rejects if the connection fails before receiving any data. + */ +export async function connectStream( + config: StreamConfig, + callbacks: StreamCallbacks, +): Promise { + const { + host, + sdkKey, + abortController, + fetch: fetchFn = globalThis.fetch, + } = config; + const { onDatafile, onPrimed, onDisconnect } = callbacks; + let retryCount = 0; + let lastAttemptTime = 0; + + let resolveInit: () => void; + let rejectInit: (error: unknown) => void; + const initPromise = new Promise((resolve, reject) => { + resolveInit = resolve; + rejectInit = reject; + }); + + void (async () => { + let initialDataReceived = false; + + while (!abortController.signal.aborted) { + if (retryCount > MAX_RETRY_COUNT) { + console.error('@vercel/flags-core: Max retry count exceeded'); + if (!initialDataReceived) { + rejectInit!( + new Error('stream: max retry count exceeded before receiving data'), + ); + } + abortController.abort(); + break; + } + + // Per-connection abort controller — allows ping timeout to abort a single + // connection without stopping the entire retry loop. 
+ const connectionAbort = new AbortController(); + const onMainAbort = (): void => connectionAbort.abort(); + abortController.signal.addEventListener('abort', onMainAbort, { + once: true, + }); + + let pingTimeoutId: ReturnType | undefined; + // Reference to the response body so the ping timeout can cancel it + // to break out of the for-await loop. + let responseBody: ReadableStream | undefined; + const resetPingTimeout = (): void => { + if (pingTimeoutId !== undefined) clearTimeout(pingTimeoutId); + if (!initialDataReceived) return; + pingTimeoutId = setTimeout(() => { + responseBody?.cancel().catch(() => {}); + connectionAbort.abort(); + }, PING_TIMEOUT_MS); + }; + + try { + lastAttemptTime = Date.now(); + const headers: Record = { + Authorization: `Bearer ${sdkKey}`, + 'User-Agent': `VercelFlagsCore/${version}`, + 'X-Retry-Attempt': String(retryCount), + }; + const revision = config.revision?.(); + if (revision !== undefined) { + headers['X-Revision'] = String(revision); + } + const response = await fetchFn(`${host}/v1/stream`, { + headers, + signal: connectionAbort.signal, + }); + + if (!response.ok) { + if (response.status === 401) { + if (!initialDataReceived) { + rejectInit!(new UnauthorizedError()); + } + abortController.abort(); + break; + } + + throw new Error(`stream was not ok: ${response.status}`); + } + + if (!response.body) { + throw new Error('stream body was not present'); + } + + responseBody = response.body; + const reader = response.body.getReader(); + const decoder = new TextDecoder(); + const bufferChunks: string[] = []; + + // Allow the ping timeout (or main abort) to cancel the reader, + // which breaks the read loop immediately even on a zombie connection. 
+ const onConnectionAbort = (): void => { + reader.cancel().catch(() => {}); + }; + connectionAbort.signal.addEventListener('abort', onConnectionAbort, { + once: true, + }); + + try { + while (true) { + const { done, value: chunk } = await reader.read(); + if (done || abortController.signal.aborted) break; + + bufferChunks.push(decoder.decode(chunk, { stream: true })); + const combined = bufferChunks.join(''); + bufferChunks.length = 0; + const lines = combined.split('\n'); + bufferChunks.push(lines.pop()!); + + for (const line of lines) { + if (line === '') continue; + + let message: StreamMessage; + try { + message = JSON.parse(line) as StreamMessage; + } catch { + console.warn( + '@vercel/flags-core: Failed to parse stream message, skipping', + ); + continue; + } + + if (message.type === 'datafile') { + onDatafile(message.data); + retryCount = 0; + if (!initialDataReceived) { + initialDataReceived = true; + resolveInit!(); + } + resetPingTimeout(); + } + + // Primed means the server confirmed our revision is current, + // so no full datafile is needed. Treat it like initial data + // for init resolution purposes. 
+ if (message.type === 'primed') { + onPrimed?.(message); + retryCount = 0; + if (!initialDataReceived) { + initialDataReceived = true; + resolveInit!(); + } + resetPingTimeout(); + } + + // Pings prove the connection is alive — reset retry count + // once initial data has been received + if (message.type === 'ping' && initialDataReceived) { + retryCount = 0; + resetPingTimeout(); + } + } + } + } finally { + connectionAbort.signal.removeEventListener( + 'abort', + onConnectionAbort, + ); + } + + // Stream ended normally (server closed connection) - reconnect + clearTimeout(pingTimeoutId); + abortController.signal.removeEventListener('abort', onMainAbort); + if (!abortController.signal.aborted) { + onDisconnect?.(); + retryCount++; + const elapsed = Date.now() - lastAttemptTime; + const minGap = Math.max(0, BASE_RETRY_DELAY_MS - elapsed); + await sleep(Math.max(backoff(retryCount), minGap)); + continue; + } + } catch (error) { + clearTimeout(pingTimeoutId); + abortController.signal.removeEventListener('abort', onMainAbort); + if (abortController.signal.aborted) { + break; + } + // Ping timeout aborts only the per-connection controller; this is + // an expected reconnect, not a real error — skip the noisy log. + if (!connectionAbort.signal.aborted) { + console.error('@vercel/flags-core: Stream error', error); + } + onDisconnect?.(); + retryCount++; + const elapsed = Date.now() - lastAttemptTime; + const minGap = Math.max(0, BASE_RETRY_DELAY_MS - elapsed); + await sleep(Math.max(backoff(retryCount), minGap)); + } + } + + // Reject the init promise if the loop exited without receiving data + // (e.g. 
aborted externally before any data arrived) + if (!initialDataReceived) { + rejectInit!(new Error('stream: aborted before receiving data')); + } + })(); + + return initPromise; +} diff --git a/packages/vercel-flags-core/src/controller/stream-source.ts b/packages/vercel-flags-core/src/controller/stream-source.ts new file mode 100644 index 00000000..93fba85c --- /dev/null +++ b/packages/vercel-flags-core/src/controller/stream-source.ts @@ -0,0 +1,95 @@ +import type { DatafileInput } from '../types'; +import type { NormalizedOptions } from './normalized-options'; +import { connectStream, type PrimedMessage } from './stream-connection'; +import { TypedEmitter } from './typed-emitter'; + +export type StreamSourceEvents = { + data: (data: DatafileInput) => void; + primed: (message: PrimedMessage) => void; + connected: () => void; + disconnected: () => void; +}; + +/** + * Manages a streaming connection to the flags service. + * Wraps connectStream() and emits typed events. + */ +export class StreamSource extends TypedEmitter { + private options: NormalizedOptions; + private revision: () => number | undefined; + private abortController: AbortController | undefined; + private promise: Promise | undefined; + + constructor(options: NormalizedOptions, revision: () => number | undefined) { + super(); + this.options = options; + this.revision = revision; + } + + /** + * Start the stream connection. + * Returns a promise that resolves when the first datafile or primed message arrives. + * If already started, returns the existing promise. + */ + start(): Promise { + if (this.promise) return this.promise; + + const abortController = new AbortController(); + this.abortController = abortController; + + // Clear cached state when the stream terminates so that a subsequent + // start() call creates a fresh connection instead of returning a stale + // resolved promise. 
+ abortController.signal.addEventListener( + 'abort', + () => { + if (this.abortController === abortController) { + this.promise = undefined; + this.abortController = undefined; + } + }, + { once: true }, + ); + + try { + const promise = connectStream( + { + host: this.options.host, + sdkKey: this.options.sdkKey, + abortController, + fetch: this.options.fetch, + revision: this.revision, + }, + { + onDatafile: (newData) => { + this.emit('data', newData); + this.emit('connected'); + }, + onPrimed: (message) => { + this.emit('primed', message); + this.emit('connected'); + }, + onDisconnect: () => { + this.emit('disconnected'); + }, + }, + ); + + this.promise = promise; + return promise; + } catch (error) { + this.promise = undefined; + this.abortController = undefined; + throw error; + } + } + + /** + * Stop the stream connection. + */ + stop(): void { + this.abortController?.abort(); + this.abortController = undefined; + this.promise = undefined; + } +} diff --git a/packages/vercel-flags-core/src/controller/tagged-data.ts b/packages/vercel-flags-core/src/controller/tagged-data.ts new file mode 100644 index 00000000..d86d76ab --- /dev/null +++ b/packages/vercel-flags-core/src/controller/tagged-data.ts @@ -0,0 +1,38 @@ +import type { DatafileInput, Metrics } from '../types'; + +/** + * Internal origin tracking for how data was obtained. + * This flows with the data from point of origin through to metrics. + */ +export type DataOrigin = 'stream' | 'poll' | 'bundled' | 'provided' | 'fetched'; + +/** + * DatafileInput with origin metadata attached at the point of arrival. + * Internal only — stripped before returning to consumers. + */ +export type TaggedData = DatafileInput & { + _origin: DataOrigin; +}; + +/** + * Tags a DatafileInput with its origin. 
+ */ +export function tagData(data: DatafileInput, origin: DataOrigin): TaggedData { + return Object.assign(data, { _origin: origin }) as TaggedData; +} + +/** + * Maps internal DataOrigin to the public Metrics.source value. + */ +export function originToMetricsSource(origin: DataOrigin): Metrics['source'] { + switch (origin) { + case 'stream': + case 'poll': + case 'provided': + return 'in-memory'; + case 'fetched': + return 'remote'; + case 'bundled': + return 'embedded'; + } +} diff --git a/packages/vercel-flags-core/src/controller/typed-emitter.ts b/packages/vercel-flags-core/src/controller/typed-emitter.ts new file mode 100644 index 00000000..9c3a59d1 --- /dev/null +++ b/packages/vercel-flags-core/src/controller/typed-emitter.ts @@ -0,0 +1,34 @@ +/** + * Lightweight typed event emitter base class. + * Each source module extends this to emit typed events. + */ +export class TypedEmitter< + Events extends Record void>, +> { + private handlers = new Map>(); + + on(event: E, handler: Events[E]): void { + let set = this.handlers.get(event); + if (!set) { + set = new Set(); + this.handlers.set(event, set); + } + set.add(handler as Events[keyof Events]); + } + + off(event: E, handler: Events[E]): void { + this.handlers.get(event)?.delete(handler as Events[keyof Events]); + } + + protected emit( + event: E, + ...args: Parameters + ): void { + const set = this.handlers.get(event); + if (set) { + for (const handler of set) { + (handler as (...a: any[]) => void)(...args); + } + } + } +} diff --git a/packages/vercel-flags-core/src/create-raw-client.test.ts b/packages/vercel-flags-core/src/create-raw-client.test.ts deleted file mode 100644 index 3522cacd..00000000 --- a/packages/vercel-flags-core/src/create-raw-client.test.ts +++ /dev/null @@ -1,402 +0,0 @@ -import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; -import { clientMap } from './client-map'; -import { createCreateRawClient } from './create-raw-client'; -import type { BundledDefinitions, 
DataSource } from './types'; - -function createMockDataSource(overrides?: Partial): DataSource { - return { - read: vi.fn().mockResolvedValue({ - projectId: 'test-project', - definitions: {}, - segments: {}, - environment: 'production', - metrics: { - readMs: 0, - source: 'in-memory', - cacheStatus: 'HIT', - }, - }), - getDatafile: vi.fn().mockResolvedValue({ - projectId: 'test-project', - definitions: {}, - segments: {}, - environment: 'production', - metrics: { - readMs: 0, - source: 'in-memory', - cacheStatus: 'HIT', - }, - }), - initialize: vi.fn().mockResolvedValue(undefined), - shutdown: vi.fn().mockResolvedValue(undefined), - ...overrides, - }; -} - -function createMockFns() { - return { - initialize: vi.fn().mockResolvedValue(undefined), - shutdown: vi.fn().mockResolvedValue(undefined), - getFallbackDatafile: vi.fn().mockResolvedValue({ - projectId: 'test', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - } satisfies BundledDefinitions), - evaluate: vi.fn().mockResolvedValue({ value: true, reason: 'static' }), - getDatafile: vi.fn().mockResolvedValue({ - projectId: 'test', - definitions: {}, - segments: {}, - environment: 'production', - metrics: { - readMs: 0, - source: 'in-memory', - cacheStatus: 'HIT', - }, - }), - }; -} - -describe('createCreateRawClient', () => { - beforeEach(() => { - clientMap.clear(); - }); - - afterEach(() => { - clientMap.clear(); - }); - - describe('client creation', () => { - it('should add dataSource to clientMap on creation', () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - expect(clientMap.size).toBe(0); - - createRawClient({ dataSource }); - - expect(clientMap.size).toBe(1); - }); - - it('should store the correct dataSource in clientMap', () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - 
const initialSize = clientMap.size; - createRawClient({ dataSource }); - - // The dataSource should be stored in the map - expect(clientMap.size).toBe(initialSize + 1); - // Find the entry that was just added - const entries = Array.from(clientMap.entries()); - const lastEntry = entries[entries.length - 1]; - expect(lastEntry?.[1].dataSource).toBe(dataSource); - }); - - it('should assign incrementing IDs to each client', () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - - const ds1 = createMockDataSource(); - const ds2 = createMockDataSource(); - const ds3 = createMockDataSource(); - - const initialSize = clientMap.size; - - createRawClient({ dataSource: ds1 }); - createRawClient({ dataSource: ds2 }); - createRawClient({ dataSource: ds3 }); - - expect(clientMap.size).toBe(initialSize + 3); - // Each dataSource should be stored under a different key - const entries = Array.from(clientMap.entries()).slice(-3); - expect(entries?.[0]?.[1].dataSource).toBe(ds1); - expect(entries?.[1]?.[1].dataSource).toBe(ds2); - expect(entries?.[2]?.[1].dataSource).toBe(ds3); - // IDs should be incrementing - expect(entries?.[1]?.[0]).toBe(entries![0]![0] + 1); - expect(entries?.[2]?.[0]).toBe(entries![1]![0] + 1); - }); - }); - - describe('initialize', () => { - it('should call fns.initialize with the client ID', async () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - await client.initialize(); - - expect(fns.initialize).toHaveBeenCalledTimes(1); - // The ID passed should be consistent - expect(fns.initialize).toHaveBeenCalledWith(expect.any(Number)); - }); - - it('should re-add dataSource to clientMap if removed', async () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ 
dataSource }); - - // Simulate removal from map (e.g., after shutdown) - clientMap.clear(); - expect(clientMap.size).toBe(0); - - await client.initialize(); - - // Should be re-added - expect(clientMap.size).toBe(1); - }); - - it('should not duplicate if already in clientMap', async () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - - expect(clientMap.size).toBe(1); - - await client.initialize(); - - expect(clientMap.size).toBe(1); - }); - - it('should deduplicate concurrent initialize() calls', async () => { - const fns = createMockFns(); - // Make initialize take some time so concurrent calls overlap - fns.initialize.mockImplementation( - () => new Promise((resolve) => setTimeout(resolve, 50)), - ); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - - await Promise.all([ - client.initialize(), - client.initialize(), - client.initialize(), - ]); - - expect(fns.initialize).toHaveBeenCalledTimes(1); - }); - - it('should deduplicate concurrent evaluate() calls that trigger initialize()', async () => { - const fns = createMockFns(); - fns.initialize.mockImplementation( - () => new Promise((resolve) => setTimeout(resolve, 50)), - ); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - - await Promise.all([ - client.evaluate('flag-a'), - client.evaluate('flag-b'), - client.evaluate('flag-c'), - ]); - - expect(fns.initialize).toHaveBeenCalledTimes(1); - expect(fns.evaluate).toHaveBeenCalledTimes(3); - }); - - it('should allow re-initialization after failure', async () => { - const fns = createMockFns(); - fns.initialize - .mockRejectedValueOnce(new Error('init failed')) - .mockResolvedValueOnce(undefined); - const createRawClient = 
createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - - await expect(client.initialize()).rejects.toThrow('init failed'); - await client.initialize(); - - expect(fns.initialize).toHaveBeenCalledTimes(2); - }); - }); - - describe('shutdown', () => { - it('should call fns.shutdown with the client ID', async () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - await client.shutdown(); - - expect(fns.shutdown).toHaveBeenCalledTimes(1); - expect(fns.shutdown).toHaveBeenCalledWith(expect.any(Number)); - }); - - it('should remove dataSource from clientMap after shutdown', async () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - - expect(clientMap.size).toBe(1); - - await client.shutdown(); - - expect(clientMap.size).toBe(0); - }); - }); - - describe('getFallbackDatafile', () => { - it('should call fns.getFallbackDatafile with the client ID', async () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - await client.getFallbackDatafile(); - - expect(fns.getFallbackDatafile).toHaveBeenCalledTimes(1); - expect(fns.getFallbackDatafile).toHaveBeenCalledWith(expect.any(Number)); - }); - - it('should return the fallback definitions', async () => { - const fns = createMockFns(); - const mockFallback = { - projectId: 'test-project', - definitions: {}, - environment: 'production', - configUpdatedAt: 123, - digest: 'abc', - revision: 2, - } satisfies BundledDefinitions; - fns.getFallbackDatafile.mockResolvedValue(mockFallback); - const createRawClient = createCreateRawClient(fns); - const dataSource = 
createMockDataSource(); - - const client = createRawClient({ dataSource }); - const result = await client.getFallbackDatafile(); - - expect(result).toEqual(mockFallback); - }); - - it('should propagate errors from fns.getFallbackDatafile', async () => { - const fns = createMockFns(); - fns.getFallbackDatafile.mockRejectedValue( - new Error('Fallback not supported'), - ); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - - await expect(client.getFallbackDatafile()).rejects.toThrow( - 'Fallback not supported', - ); - }); - }); - - describe('evaluate', () => { - it('should call fns.evaluate with correct arguments', async () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - await client.evaluate('my-flag', false, { user: { id: '123' } }); - - expect(fns.evaluate).toHaveBeenCalledTimes(1); - expect(fns.evaluate).toHaveBeenCalledWith( - expect.any(Number), - 'my-flag', - false, - { user: { id: '123' } }, - ); - }); - - it('should return the evaluation result', async () => { - const fns = createMockFns(); - const expectedResult = { - value: 'variant-a', - reason: 'targeting', - outcomeType: 'value', - }; - fns.evaluate.mockResolvedValue(expectedResult); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - const result = await client.evaluate('my-flag'); - - expect(result).toEqual(expectedResult); - }); - - it('should work with generic types', async () => { - const fns = createMockFns(); - fns.evaluate.mockResolvedValue({ value: 42, reason: 'static' }); - const createRawClient = createCreateRawClient(fns); - const dataSource = createMockDataSource(); - - const client = createRawClient({ dataSource }); - const result = await 
client.evaluate('numeric-flag', 0); - - expect(result.value).toBe(42); - }); - }); - - describe('multiple clients', () => { - it('should maintain independent state for each client', async () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - - const ds1 = createMockDataSource(); - const ds2 = createMockDataSource(); - - const initialSize = clientMap.size; - - const client1 = createRawClient({ dataSource: ds1 }); - const client2 = createRawClient({ dataSource: ds2 }); - - expect(clientMap.size).toBe(initialSize + 2); - - // Shutdown client1 - await client1.shutdown(); - - // client2 should still be in the map - expect(clientMap.size).toBe(initialSize + 1); - // ds2 should still be in the map - const dataSources = Array.from(clientMap.values()).map( - (v) => v.dataSource, - ); - expect(dataSources).toContain(ds2); - await client2.shutdown(); - }); - - it('should use correct ID for each client method call', async () => { - const fns = createMockFns(); - const createRawClient = createCreateRawClient(fns); - - const ds1 = createMockDataSource(); - const ds2 = createMockDataSource(); - - const client1 = createRawClient({ dataSource: ds1 }); - const client2 = createRawClient({ dataSource: ds2 }); - - await client1.evaluate('flag1'); - await client2.evaluate('flag2'); - - expect(fns.evaluate).toHaveBeenCalledTimes(2); - // First call should use client1's ID (lower) - const call1Id = fns.evaluate.mock.calls?.[0]?.[0]; - const call2Id = fns.evaluate.mock.calls?.[1]?.[0]; - expect(call1Id).toBeLessThan(call2Id); - }); - }); -}); diff --git a/packages/vercel-flags-core/src/create-raw-client.ts b/packages/vercel-flags-core/src/create-raw-client.ts index 4ab80b80..105e64eb 100644 --- a/packages/vercel-flags-core/src/create-raw-client.ts +++ b/packages/vercel-flags-core/src/create-raw-client.ts @@ -4,11 +4,14 @@ import type { getFallbackDatafile, initialize, shutdown, -} from './client-fns'; -import { type ClientInstance, clientMap } from 
'./client-map'; +} from './controller-fns'; +import { + type ControllerInstance, + controllerInstanceMap, +} from './controller-fns'; import type { BundledDefinitions, - DataSource, + ControllerInterface, EvaluationResult, FlagsClient, Value, @@ -17,7 +20,7 @@ import type { let idCount = 0; async function performInitialize( - instance: ClientInstance, + instance: ControllerInstance, initFn: () => Promise, ): Promise { try { @@ -38,22 +41,26 @@ export function createCreateRawClient(fns: { getDatafile: typeof getDatafile; }) { return function createRawClient({ - dataSource, + controller, origin, }: { - dataSource: DataSource; + controller: ControllerInterface; origin?: { provider: string; sdkKey: string }; }): FlagsClient { const id = idCount++; - clientMap.set(id, { dataSource, initialized: false, initPromise: null }); + controllerInstanceMap.set(id, { + controller, + initialized: false, + initPromise: null, + }); const api = { origin, initialize: async () => { - let instance = clientMap.get(id); + let instance = controllerInstanceMap.get(id); if (!instance) { - instance = { dataSource, initialized: false, initPromise: null }; - clientMap.set(id, instance); + instance = { controller, initialized: false, initPromise: null }; + controllerInstanceMap.set(id, instance); } // skip if already initialized @@ -69,9 +76,19 @@ export function createCreateRawClient(fns: { }, shutdown: async () => { await fns.shutdown(id); - clientMap.delete(id); + controllerInstanceMap.delete(id); + }, + getDatafile: async () => { + const instance = controllerInstanceMap.get(id); + if (instance?.initPromise) { + try { + await instance.initPromise; + } catch { + // Initialization failed — let getDatafile handle its own fallbacks + } + } + return fns.getDatafile(id); }, - getDatafile: () => fns.getDatafile(id), getFallbackDatafile: (): Promise => { return fns.getFallbackDatafile(id); }, @@ -80,8 +97,15 @@ export function createCreateRawClient(fns: { defaultValue?: T, entities?: E, ): Promise> => 
{ - const instance = clientMap.get(id); - if (!instance?.initialized) await api.initialize(); + const instance = controllerInstanceMap.get(id); + if (!instance?.initialized) { + try { + await api.initialize(); + } catch { + // Initialization failed — let evaluate() handle the fallback + // chain (last known value → datafile → bundled → defaultValue → throw) + } + } return fns.evaluate(id, flagKey, defaultValue, entities); }, }; diff --git a/packages/vercel-flags-core/src/data-source/flag-network-data-source.test.ts b/packages/vercel-flags-core/src/data-source/flag-network-data-source.test.ts deleted file mode 100644 index 07e85ab7..00000000 --- a/packages/vercel-flags-core/src/data-source/flag-network-data-source.test.ts +++ /dev/null @@ -1,2069 +0,0 @@ -import { HttpResponse, http } from 'msw'; -import { setupServer } from 'msw/node'; -import { - afterAll, - afterEach, - beforeAll, - beforeEach, - describe, - expect, - it, - vi, -} from 'vitest'; -import type { BundledDefinitions, DatafileInput } from '../types'; -import { FlagNetworkDataSource } from './flag-network-data-source'; - -// Mock the bundled definitions module -vi.mock('../utils/read-bundled-definitions', () => ({ - readBundledDefinitions: vi.fn(() => - Promise.resolve({ definitions: null, state: 'missing-file' }), - ), -})); - -import { readBundledDefinitions } from '../utils/read-bundled-definitions'; - -let ingestRequests: { body: unknown; headers: Headers }[] = []; - -const server = setupServer( - http.post('https://flags.vercel.com/v1/ingest', async ({ request }) => { - ingestRequests.push({ - body: await request.json(), - headers: request.headers, - }); - return HttpResponse.json({ ok: true }); - }), -); - -const originalEnv = { ...process.env }; - -beforeAll(() => server.listen()); -beforeEach(() => { - ingestRequests = []; - vi.mocked(readBundledDefinitions).mockReset(); - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: null, - state: 'missing-file', - }); - // Reset env 
vars that affect build step detection - delete process.env.CI; - delete process.env.NEXT_PHASE; -}); -afterEach(() => { - server.resetHandlers(); - // Restore original env - process.env = { ...originalEnv }; -}); -afterAll(() => server.close()); - -function createNdjsonStream(messages: object[], delayMs = 0): ReadableStream { - return new ReadableStream({ - async start(controller) { - for (const message of messages) { - if (delayMs > 0) await new Promise((r) => setTimeout(r, delayMs)); - controller.enqueue( - new TextEncoder().encode(`${JSON.stringify(message)}\n`), - ); - } - controller.close(); - }, - }); -} - -async function assertIngestRequest( - sdkKey: string, - expectedEvents: Array<{ type: string; payload?: object }>, -) { - await vi.waitFor(() => { - expect(ingestRequests.length).toBeGreaterThan(0); - }); - - const request = ingestRequests[0]!; - expect(request.headers.get('Authorization')).toBe(`Bearer ${sdkKey}`); - expect(request.headers.get('Content-Type')).toBe('application/json'); - expect(request.headers.get('User-Agent')).toMatch(/^VercelFlagsCore\//); - - expect(request.body).toEqual( - expectedEvents.map((event) => - expect.objectContaining({ - type: event.type, - ts: expect.any(Number), - payload: event.payload ?? expect.any(Object), - }), - ), - ); -} - -describe('FlagNetworkDataSource', () => { - // Note: Low-level NDJSON parsing tests (parse datafile, ignore ping, handle split chunks) - // are in stream-connection.test.ts. These tests focus on FlagNetworkDataSource-specific behavior. 
- - it('should abort the stream connection when shutdown is called', async () => { - let abortSignalReceived: AbortSignal | undefined; - - server.use( - http.get('https://flags.vercel.com/v1/stream', async ({ request }) => { - abortSignalReceived = request.signal; - - const stream = new ReadableStream({ - start(controller) { - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ - type: 'datafile', - data: { projectId: 'test', definitions: {} }, - })}\n`, - ), - ); - - request.signal.addEventListener('abort', () => { - controller.close(); - }); - }, - }); - - return new HttpResponse(stream, { - headers: { 'Content-Type': 'application/x-ndjson' }, - }); - }), - ); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - await dataSource.read(); - - expect(abortSignalReceived).toBeDefined(); - expect(abortSignalReceived!.aborted).toBe(false); - - await dataSource.shutdown(); - - expect(abortSignalReceived!.aborted).toBe(true); - }); - - it('should handle messages split across chunks', async () => { - const definitions = { - projectId: 'test-project', - definitions: { flag: { variants: [1, 2, 3] } }, - }; - - const fullMessage = JSON.stringify({ type: 'datafile', data: definitions }); - const part1 = fullMessage.slice(0, 20); - const part2 = `${fullMessage.slice(20)}\n`; - - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - return new HttpResponse( - new ReadableStream({ - async start(controller) { - controller.enqueue(new TextEncoder().encode(part1)); - await new Promise((r) => setTimeout(r, 10)); - controller.enqueue(new TextEncoder().encode(part2)); - controller.close(); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - const result = await dataSource.read(); - - expect(result).toMatchObject(definitions); - expect(result.metrics.source).toBe('in-memory'); - 
expect(result.metrics.cacheStatus).toBe('MISS'); - expect(result.metrics.connectionState).toBe('connected'); - - await dataSource.shutdown(); - await assertIngestRequest('vf_test_key', [{ type: 'FLAGS_CONFIG_READ' }]); - }); - - it('should update definitions when new datafile messages arrive', async () => { - const definitions1 = { projectId: 'test', definitions: { v: 1 } }; - const definitions2 = { projectId: 'test', definitions: { v: 2 } }; - - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - return new HttpResponse( - createNdjsonStream([ - { type: 'datafile', data: definitions1 }, - { type: 'datafile', data: definitions2 }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - - // First call gets initial data - await dataSource.read(); - - // Wait for stream to process second message, then verify via read - await vi.waitFor(async () => { - const result = await dataSource.read(); - expect(result).toMatchObject(definitions2); - }); - - await dataSource.shutdown(); - }); - - it('should fall back to bundledDefinitions when stream times out', async () => { - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled-project', - definitions: {}, - environment: 'production', - configUpdatedAt: 1000, - digest: 'aa', - revision: 1, - }; - - // Mock bundled definitions to return valid data - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - // Create a stream that never sends data (simulating timeout) - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - return new HttpResponse( - new ReadableStream({ - start() { - // Never enqueue anything, never close - simulates hanging connection - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - 
polling: false, // Disable polling to test stream timeout in isolation - }); - - // read should return bundledDefinitions after timeout (3s default) - const startTime = Date.now(); - const result = await dataSource.read(); - const elapsed = Date.now() - startTime; - - // Should have returned bundled definitions with STALE status - expect(result).toMatchObject({ - projectId: 'bundled-project', - definitions: {}, - environment: 'production', - }); - expect(result.metrics.source).toBe('embedded'); - expect(result.metrics.cacheStatus).toBe('STALE'); - expect(result.metrics.connectionState).toBe('disconnected'); - - // Should have taken roughly 3 seconds (the timeout) - expect(elapsed).toBeGreaterThanOrEqual(2900); - expect(elapsed).toBeLessThan(4000); - - // Don't await shutdown - the stream never closes in this test - dataSource.shutdown(); - }, 10000); - - it('should fall back to bundledDefinitions when stream errors (4xx)', async () => { - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled-project', - definitions: {}, - environment: 'production', - configUpdatedAt: 1000, - digest: 'aa', - revision: 1, - }; - - // Mock bundled definitions to return valid data - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - // Return a 401 error - this will cause the stream to fail permanently - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - return new HttpResponse(null, { status: 401 }); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - polling: false, // Disable polling to test stream error fallback in isolation - }); - - // Suppress expected error logs for this test - const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - - const result = await dataSource.read(); - - expect(result).toMatchObject({ - projectId: 'bundled-project', - definitions: {}, - environment: 'production', - }); - 
expect(result.metrics.source).toBe('embedded'); - expect(result.metrics.cacheStatus).toBe('STALE'); - expect(result.metrics.connectionState).toBe('disconnected'); - - await dataSource.shutdown(); - - errorSpy.mockRestore(); - }); - - it('should include X-Retry-Attempt header in stream requests', async () => { - let capturedHeaders: Headers | null = null; - - server.use( - http.get('https://flags.vercel.com/v1/stream', ({ request }) => { - capturedHeaders = request.headers; - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'test', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - await dataSource.read(); - - expect(capturedHeaders).not.toBeNull(); - expect(capturedHeaders!.get('X-Retry-Attempt')).toBe('0'); - - await dataSource.shutdown(); - }); - - it('should warn when returning in-memory data while stream is disconnected', async () => { - const definitions = { - projectId: 'test-project', - definitions: { flag: true }, - }; - - const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); - const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - - // First, successfully connect and get data - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - return new HttpResponse( - createNdjsonStream([{ type: 'datafile', data: definitions }]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - await dataSource.read(); - - // Verify no warning on first successful read (stream is connected) - expect(warnSpy).not.toHaveBeenCalled(); - - // Now simulate stream disconnection by changing handler to error - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - return new HttpResponse(null, { status: 500 }); - }), - ); - - // Wait for the 
stream to close and try to reconnect (and fail) - await vi.waitFor( - () => { - expect(errorSpy).toHaveBeenCalled(); - }, - { timeout: 3000 }, - ); - - // Next read should warn about potentially stale data - await dataSource.read(); - - expect(warnSpy).toHaveBeenCalledWith( - expect.stringContaining('Returning in-memory flag definitions'), - ); - - // Should only warn once - warnSpy.mockClear(); - await dataSource.read(); - expect(warnSpy).not.toHaveBeenCalled(); - - await dataSource.shutdown(); - - warnSpy.mockRestore(); - errorSpy.mockRestore(); - }, 10000); - - describe('constructor validation', () => { - it('should throw for missing SDK key', () => { - expect(() => new FlagNetworkDataSource({ sdkKey: '' })).toThrow( - '@vercel/flags-core: SDK key must be a string starting with "vf_"', - ); - }); - - it('should throw for SDK key not starting with vf_', () => { - expect( - () => new FlagNetworkDataSource({ sdkKey: 'invalid_key' }), - ).toThrow( - '@vercel/flags-core: SDK key must be a string starting with "vf_"', - ); - }); - - it('should throw for non-string SDK key', () => { - expect( - () => new FlagNetworkDataSource({ sdkKey: 123 as unknown as string }), - ).toThrow( - '@vercel/flags-core: SDK key must be a string starting with "vf_"', - ); - }); - - it('should accept valid SDK key', () => { - expect( - () => new FlagNetworkDataSource({ sdkKey: 'vf_valid_key' }), - ).not.toThrow(); - }); - }); - - describe('build step detection', () => { - it('should detect build step when CI=1', async () => { - process.env.CI = '1'; - - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled', - definitions: { - flag: { variants: [true], environments: {} }, - }, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - const result = await 
dataSource.read(); - - // Should use bundled definitions without making stream request - expect(result).toMatchObject(bundledDefinitions); - expect(result.metrics.source).toBe('embedded'); - expect(result.metrics.cacheStatus).toBe('MISS'); - expect(result.metrics.connectionState).toBe('disconnected'); - - await dataSource.shutdown(); - }); - - it('should detect build step when NEXT_PHASE=phase-production-build', async () => { - process.env.NEXT_PHASE = 'phase-production-build'; - - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - const result = await dataSource.read(); - - expect(result).toMatchObject(bundledDefinitions); - expect(result.metrics.source).toBe('embedded'); - - await dataSource.shutdown(); - }); - - it('should NOT detect build step when neither CI nor NEXT_PHASE is set', async () => { - // Neither env var is set (cleared in beforeEach) - let streamRequested = false; - - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - streamRequested = true; - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'stream', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - await dataSource.read(); - - expect(streamRequested).toBe(true); - - await dataSource.shutdown(); - }); - }); - - describe('build step behavior', () => { - it('should fall back to HTTP fetch when bundled definitions missing during build', async () => { - process.env.CI = '1'; - - const fetchedDefinitions = { - projectId: 'fetched', - definitions: { flag: true }, - environment: 'production', - 
}; - - // Bundled definitions not available - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: null, - state: 'missing-file', - }); - - server.use( - http.get('https://flags.vercel.com/v1/datafile', () => { - return HttpResponse.json(fetchedDefinitions); - }), - ); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - const result = await dataSource.read(); - - expect(result).toMatchObject(fetchedDefinitions); - expect(result.metrics.source).toBe('remote'); - expect(result.metrics.cacheStatus).toBe('MISS'); - expect(result.metrics.connectionState).toBe('disconnected'); - - await dataSource.shutdown(); - }); - - it('should cache data after first build step read', async () => { - process.env.CI = '1'; - - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - - // First read - const firstResult = await dataSource.read(); - expect(firstResult.metrics.cacheStatus).toBe('MISS'); - - // Second read should use cached data - const result = await dataSource.read(); - expect(result).toMatchObject(bundledDefinitions); - expect(result.metrics.cacheStatus).toBe('HIT'); - - // readBundledDefinitions should have been called only during construction - expect(readBundledDefinitions).toHaveBeenCalledTimes(1); - - await dataSource.shutdown(); - }); - }); - - describe('getFallbackDatafile', () => { - it('should return bundled definitions when available', async () => { - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: 
bundledDefinitions, - state: 'ok', - }); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - }); - - const result = await dataSource.getFallbackDatafile(); - expect(result).toEqual(bundledDefinitions); - - await dataSource.shutdown(); - }); - - it('should throw FallbackNotFoundError for missing-file state', async () => { - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: null, - state: 'missing-file', - }); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - }); - - await expect(dataSource.getFallbackDatafile()).rejects.toThrow( - 'Bundled definitions file not found', - ); - - try { - await dataSource.getFallbackDatafile(); - } catch (error) { - expect((error as Error).name).toBe('FallbackNotFoundError'); - } - - await dataSource.shutdown(); - }); - - it('should throw FallbackEntryNotFoundError for missing-entry state', async () => { - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: null, - state: 'missing-entry', - }); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - }); - - await expect(dataSource.getFallbackDatafile()).rejects.toThrow( - 'No bundled definitions found for SDK key', - ); - - try { - await dataSource.getFallbackDatafile(); - } catch (error) { - expect((error as Error).name).toBe('FallbackEntryNotFoundError'); - } - - await dataSource.shutdown(); - }); - - it('should throw for unexpected-error state', async () => { - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: null, - state: 'unexpected-error', - error: new Error('Some error'), - }); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - }); - - await expect(dataSource.getFallbackDatafile()).rejects.toThrow( - 'Failed to read bundled definitions', - ); - - await dataSource.shutdown(); - }); - }); - - describe('custom stream options', () => { - it('should use custom initTimeoutMs value', async () => { - const bundledDefinitions: 
BundledDefinitions = { - projectId: 'bundled', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - // Stream that never responds - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - return new HttpResponse(new ReadableStream({ start() {} }), { - headers: { 'Content-Type': 'application/x-ndjson' }, - }); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - stream: { initTimeoutMs: 500 }, // Much shorter timeout - polling: false, // Disable polling to test stream timeout directly - }); - - const startTime = Date.now(); - const result = await dataSource.read(); - const elapsed = Date.now() - startTime; - - expect(result).toMatchObject({ - projectId: 'bundled', - definitions: {}, - environment: 'production', - }); - expect(result.metrics.source).toBe('embedded'); - expect(result.metrics.cacheStatus).toBe('STALE'); - expect(result.metrics.connectionState).toBe('disconnected'); - expect(elapsed).toBeGreaterThanOrEqual(450); - expect(elapsed).toBeLessThan(1500); - - dataSource.shutdown(); - }, 5000); - - it('should disable stream when stream: false', async () => { - let streamRequested = false; - - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - streamRequested = true; - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'stream', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - http.get('https://flags.vercel.com/v1/datafile', () => { - return HttpResponse.json({ - projectId: 'polled', - definitions: {}, - environment: 'production', - }); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - stream: false, - polling: true, - }); - - await dataSource.read(); - - expect(streamRequested).toBe(false); - 
- await dataSource.shutdown(); - }); - }); - - describe('polling options', () => { - it('should use polling when enabled', async () => { - let pollCount = 0; - - server.use( - http.get('https://flags.vercel.com/v1/datafile', () => { - pollCount++; - return HttpResponse.json({ - projectId: 'polled', - definitions: { count: pollCount }, - environment: 'production', - }); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - stream: false, - polling: { intervalMs: 100, initTimeoutMs: 5000 }, - }); - - const result = await dataSource.read(); - - expect(result.projectId).toBe('polled'); - expect(pollCount).toBeGreaterThanOrEqual(1); - - // Wait for a few poll intervals - await new Promise((r) => setTimeout(r, 350)); - - expect(pollCount).toBeGreaterThanOrEqual(3); - - await dataSource.shutdown(); - }); - - it('should disable polling when polling: false', async () => { - let pollCount = 0; - - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'stream', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - http.get('https://flags.vercel.com/v1/datafile', () => { - pollCount++; - return HttpResponse.json({ - projectId: 'polled', - definitions: {}, - environment: 'production', - }); - }), - ); - - const providedDatafile: DatafileInput = { - projectId: 'static-data', - definitions: {}, - environment: 'production', - }; - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - datafile: providedDatafile, - stream: false, - polling: false, - }); - - await dataSource.read(); - - expect(pollCount).toBe(0); - - await dataSource.shutdown(); - }); - }); - - describe('datafile option', () => { - it('should use provided datafile immediately', async () => { - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - return new HttpResponse( - 
createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'stream', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const providedDatafile: DatafileInput = { - projectId: 'provided', - definitions: {}, - environment: 'production', - }; - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - datafile: providedDatafile, - }); - - // Should immediately return provided datafile - const result = await dataSource.read(); - - expect(result.projectId).toBe('provided'); - expect(result.metrics.source).toBe('in-memory'); - - await dataSource.shutdown(); - }); - - it('should work with datafile only (stream and polling disabled)', async () => { - let streamRequested = false; - let pollRequested = false; - - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - streamRequested = true; - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'stream', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - http.get('https://flags.vercel.com/v1/datafile', () => { - pollRequested = true; - return HttpResponse.json({ - projectId: 'polled', - definitions: {}, - environment: 'production', - }); - }), - ); - - const providedDatafile: DatafileInput = { - projectId: 'static-data', - definitions: { myFlag: { variants: [true, false], environments: {} } }, - environment: 'production', - }; - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - datafile: providedDatafile, - stream: false, - polling: false, - }); - - // Initialize and read - await dataSource.initialize(); - const result = await dataSource.read(); - - // Should use provided datafile - expect(result.projectId).toBe('static-data'); - expect(result.definitions).toEqual({ - myFlag: { variants: [true, false], environments: {} }, - }); - - // No network requests should have been made - 
expect(streamRequested).toBe(false); - expect(pollRequested).toBe(false); - - // Wait to ensure no delayed requests happen - await new Promise((r) => setTimeout(r, 100)); - expect(streamRequested).toBe(false); - expect(pollRequested).toBe(false); - - await dataSource.shutdown(); - }); - }); - - describe('stream/polling coordination', () => { - it('should stop polling when stream connects', async () => { - let pollCount = 0; - let streamDataSent = false; - - server.use( - http.get('https://flags.vercel.com/v1/stream', async ({ request }) => { - // Wait a bit to let polling start first - await new Promise((r) => setTimeout(r, 200)); - return new HttpResponse( - new ReadableStream({ - start(controller) { - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ - type: 'datafile', - data: { projectId: 'stream', definitions: {} }, - })}\n`, - ), - ); - streamDataSent = true; - // Keep stream open - request.signal.addEventListener('abort', () => { - controller.close(); - }); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - http.get('https://flags.vercel.com/v1/datafile', () => { - pollCount++; - return HttpResponse.json({ - projectId: 'polled', - definitions: { count: pollCount }, - environment: 'production', - }); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - stream: { initTimeoutMs: 100 }, // Short timeout to trigger polling fallback - polling: { intervalMs: 50, initTimeoutMs: 5000 }, - }); - - // This should initially get data from polling (stream times out) - await dataSource.read(); - - // Wait for stream data to be sent - await vi.waitFor( - () => { - expect(streamDataSent).toBe(true); - }, - { timeout: 2000 }, - ); - - // Record poll count at this point - const pollCountAfterStreamConnect = pollCount; - - // Wait for what would be several poll intervals - await new Promise((r) => setTimeout(r, 200)); - - // Polling should have stopped - count should not have increased much 
- // (there might be 1-2 more polls in flight when stream connected) - expect(pollCount).toBeGreaterThan(0); - expect(pollCount).toBeLessThanOrEqual(pollCountAfterStreamConnect + 2); - - await dataSource.shutdown(); - }); - - it('should fall back to polling when stream fails', async () => { - let pollCount = 0; - - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - return new HttpResponse(null, { status: 500 }); - }), - http.get('https://flags.vercel.com/v1/datafile', () => { - pollCount++; - return HttpResponse.json({ - projectId: 'polled', - definitions: { count: pollCount }, - environment: 'production', - }); - }), - ); - - // Suppress expected error logs - const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - stream: { initTimeoutMs: 100 }, - polling: { intervalMs: 100, initTimeoutMs: 5000 }, - }); - - const result = await dataSource.read(); - - // Should have gotten data from polling - expect(result.projectId).toBe('polled'); - expect(pollCount).toBeGreaterThanOrEqual(1); - - await dataSource.shutdown(); - - errorSpy.mockRestore(); - warnSpy.mockRestore(); - }); - - it('should never stream and poll simultaneously when stream is connected', async () => { - let streamRequestCount = 0; - let pollRequestCount = 0; - - server.use( - http.get('https://flags.vercel.com/v1/stream', ({ request }) => { - streamRequestCount++; - // Create a stream that stays open (simulating connected stream) - return new HttpResponse( - new ReadableStream({ - start(controller) { - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ - type: 'datafile', - data: { projectId: 'stream', definitions: {} }, - })}\n`, - ), - ); - // Keep stream open by not closing controller - // Will be closed when test calls shutdown() - request.signal.addEventListener('abort', () => { - 
controller.close(); - }); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - http.get('https://flags.vercel.com/v1/datafile', () => { - pollRequestCount++; - return HttpResponse.json({ - projectId: 'polled', - definitions: {}, - environment: 'production', - }); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - stream: true, - polling: false, // Disable polling to test stream-only mode - }); - - await dataSource.read(); - - // Stream should be used, polling should not be triggered - expect(streamRequestCount).toBe(1); - expect(pollRequestCount).toBe(0); - - // Wait to see if any polls happen - await new Promise((r) => setTimeout(r, 200)); - - // Still no polls should have happened - expect(pollRequestCount).toBe(0); - - await dataSource.shutdown(); - }); - - it('should use datafile immediately while starting background stream', async () => { - let streamConnected = false; - let dataUpdated = false; - - server.use( - http.get('https://flags.vercel.com/v1/stream', async ({ request }) => { - // Simulate slow stream connection - await new Promise((r) => setTimeout(r, 200)); - streamConnected = true; - return new HttpResponse( - new ReadableStream({ - start(controller) { - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ - type: 'datafile', - data: { - projectId: 'stream', - definitions: { updated: true }, - }, - })}\n`, - ), - ); - dataUpdated = true; - // Keep stream open - request.signal.addEventListener('abort', () => { - controller.close(); - }); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const providedDatafile: DatafileInput = { - projectId: 'provided', - definitions: {}, - environment: 'production', - }; - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - datafile: providedDatafile, - stream: true, - polling: false, - }); - - // Call initialize to start background updates - await 
dataSource.initialize(); - - // First read should be immediate (from provided datafile) - const startTime = Date.now(); - const result = await dataSource.read(); - const elapsed = Date.now() - startTime; - - expect(result.projectId).toBe('provided'); - expect(elapsed).toBeLessThan(100); // Should be very fast - expect(streamConnected).toBe(false); // Stream hasn't connected yet - - // Wait for stream to connect and update data - await vi.waitFor( - () => { - expect(dataUpdated).toBe(true); - }, - { timeout: 2000 }, - ); - - // Now read should return stream data - const updatedResult = await dataSource.read(); - expect(updatedResult.definitions).toEqual({ updated: true }); - expect(updatedResult.projectId).toBe('stream'); - - await dataSource.shutdown(); - }); - - it('should not start polling from stream disconnect during initialization', async () => { - let pollCount = 0; - - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - // Stream fails immediately, triggering onDisconnect - return new HttpResponse(null, { status: 500 }); - }), - http.get('https://flags.vercel.com/v1/datafile', () => { - pollCount++; - return HttpResponse.json({ - projectId: 'polled', - definitions: {}, - environment: 'production', - }); - }), - ); - - const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - stream: { initTimeoutMs: 5000 }, - polling: { intervalMs: 100, initTimeoutMs: 5000 }, - }); - - await dataSource.initialize(); - - // Only 1 poll request should have been made (from tryInitializePolling), - // not 2 (onDisconnect should not have started a separate poll) - expect(pollCount).toBe(1); - - await dataSource.shutdown(); - errorSpy.mockRestore(); - warnSpy.mockRestore(); - }); - }); - - describe('getDatafile', () => { - it('should fetch from network when called without initialize', async () => 
{ - const remoteDefinitions = { - projectId: 'remote', - definitions: { flag: true }, - environment: 'production', - }; - - server.use( - http.get('https://flags.vercel.com/v1/datafile', () => { - return HttpResponse.json(remoteDefinitions); - }), - ); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - const result = await dataSource.getDatafile(); - - expect(result).toMatchObject(remoteDefinitions); - expect(result.metrics.source).toBe('remote'); - expect(result.metrics.cacheStatus).toBe('MISS'); - expect(result.metrics.connectionState).toBe('disconnected'); - - await dataSource.shutdown(); - }); - - it('should fetch from network even when bundled definitions exist (not in build step)', async () => { - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - const remoteDefinitions = { - projectId: 'remote', - definitions: { flag: true }, - environment: 'production', - }; - - server.use( - http.get('https://flags.vercel.com/v1/datafile', () => { - return HttpResponse.json(remoteDefinitions); - }), - ); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - const result = await dataSource.getDatafile(); - - // Should fetch from network, NOT use bundled definitions - expect(result.projectId).toBe('remote'); - expect(result.metrics.source).toBe('remote'); - expect(result.metrics.cacheStatus).toBe('MISS'); - - await dataSource.shutdown(); - }); - - it('should return cached data when stream is connected', async () => { - const streamDefinitions = { - projectId: 'stream', - definitions: { flag: true }, - }; - - server.use( - http.get('https://flags.vercel.com/v1/stream', ({ request }) => { - return new HttpResponse( - new ReadableStream({ - start(controller) { - controller.enqueue( - 
new TextEncoder().encode( - `${JSON.stringify({ - type: 'datafile', - data: streamDefinitions, - })}\n`, - ), - ); - request.signal.addEventListener('abort', () => { - controller.close(); - }); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - - // First read via initialize/read to establish stream connection - await dataSource.read(); - - // getDatafile should return cached stream data - const result = await dataSource.getDatafile(); - - expect(result.projectId).toBe('stream'); - expect(result.metrics.source).toBe('in-memory'); - expect(result.metrics.cacheStatus).toBe('HIT'); - expect(result.metrics.connectionState).toBe('connected'); - - await dataSource.shutdown(); - }); - - it('should use getDataForBuildStep when in build step', async () => { - process.env.CI = '1'; - - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - const dataSource = new FlagNetworkDataSource({ sdkKey: 'vf_test_key' }); - const result = await dataSource.getDatafile(); - - expect(result.projectId).toBe('bundled'); - expect(result.metrics.source).toBe('embedded'); - expect(result.metrics.cacheStatus).toBe('MISS'); - expect(result.metrics.connectionState).toBe('disconnected'); - - await dataSource.shutdown(); - }); - - it('should fetch fresh data on each call when stream is not connected', async () => { - let fetchCount = 0; - - server.use( - http.get('https://flags.vercel.com/v1/datafile', () => { - fetchCount++; - return HttpResponse.json({ - projectId: 'remote', - definitions: { version: fetchCount }, - environment: 'production', - }); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - stream: 
false, - polling: false, - }); - - const result1 = await dataSource.getDatafile(); - expect(result1.definitions).toEqual({ version: 1 }); - - // The second call hits the cache since this.data was set by the first call - // and the stream is not connected, so isStreamConnected is false - // which means the else branch fires again, fetching fresh data - const result2 = await dataSource.getDatafile(); - expect(result2.definitions).toEqual({ version: 2 }); - expect(fetchCount).toBe(2); - - await dataSource.shutdown(); - }); - }); - - describe('buildStep option', () => { - it('should always load bundled definitions regardless of buildStep', async () => { - // bundled definitions are always loaded as ultimate fallback - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - buildStep: false, - stream: false, - polling: false, - }); - - expect(readBundledDefinitions).toHaveBeenCalledWith('vf_test_key'); - - await dataSource.shutdown(); - }); - - it('should skip network when buildStep: true', async () => { - let streamRequested = false; - let pollRequested = false; - - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - streamRequested = true; - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'stream', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - http.get('https://flags.vercel.com/v1/datafile', () => { - pollRequested = true; - return HttpResponse.json({ - projectId: 'polled', - definitions: {}, - environment: 'production', - }); - }), - ); - - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - buildStep: true, // Force 
build step behavior - stream: true, // Would normally enable streaming - polling: true, // Would normally enable polling - }); - - const result = await dataSource.read(); - - // Should use bundled definitions, not network - expect(result.projectId).toBe('bundled'); - expect(streamRequested).toBe(false); - expect(pollRequested).toBe(false); - - await dataSource.shutdown(); - }); - - it('should use datafile over bundled in build step', async () => { - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - const providedDatafile: DatafileInput = { - projectId: 'provided', - definitions: {}, - environment: 'production', - }; - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - buildStep: true, - datafile: providedDatafile, - }); - - const result = await dataSource.read(); - - // Should prefer provided datafile over bundled - expect(result.projectId).toBe('provided'); - - await dataSource.shutdown(); - }); - - it('should auto-detect build step when CI=1', async () => { - process.env.CI = '1'; - - let streamRequested = false; - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - streamRequested = true; - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'stream', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - // buildStep not 
specified - should auto-detect from CI=1 - }); - - const result = await dataSource.read(); - - // Should use bundled (build step detected), not stream - expect(result.projectId).toBe('bundled'); - expect(streamRequested).toBe(false); - - await dataSource.shutdown(); - }); - - it('should auto-detect build step when NEXT_PHASE=phase-production-build', async () => { - process.env.NEXT_PHASE = 'phase-production-build'; - - let streamRequested = false; - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - streamRequested = true; - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'stream', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const bundledDefinitions: BundledDefinitions = { - projectId: 'bundled', - definitions: {}, - environment: 'production', - configUpdatedAt: 1, - digest: 'a', - revision: 1, - }; - - vi.mocked(readBundledDefinitions).mockResolvedValue({ - definitions: bundledDefinitions, - state: 'ok', - }); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - // buildStep not specified - should auto-detect from NEXT_PHASE - }); - - const result = await dataSource.read(); - - // Should use bundled (build step detected), not stream - expect(result.projectId).toBe('bundled'); - expect(streamRequested).toBe(false); - - await dataSource.shutdown(); - }); - - it('should override auto-detection with buildStep: false', async () => { - process.env.CI = '1'; // Would normally trigger build step - - server.use( - http.get('https://flags.vercel.com/v1/stream', () => { - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'stream', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - buildStep: false, // Explicitly override CI detection - }); - - const 
result = await dataSource.read(); - - // Should use stream (buildStep: false overrides CI detection) - expect(result.projectId).toBe('stream'); - - await dataSource.shutdown(); - }); - }); - - describe('configUpdatedAt guard (never overwrite newer data with older)', () => { - it('should not overwrite newer in-memory data with older stream message', async () => { - const newerDefinitions = { - projectId: 'test', - definitions: { version: 'newer' }, - environment: 'production', - configUpdatedAt: 2000, - }; - - const olderDefinitions = { - projectId: 'test', - definitions: { version: 'older' }, - environment: 'production', - configUpdatedAt: 1000, - }; - - server.use( - http.get('https://flags.vercel.com/v1/stream', ({ request }) => { - return new HttpResponse( - new ReadableStream({ - async start(controller) { - // Send newer data first, then older data - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ type: 'datafile', data: newerDefinitions })}\n`, - ), - ); - await new Promise((r) => setTimeout(r, 50)); - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ type: 'datafile', data: olderDefinitions })}\n`, - ), - ); - request.signal.addEventListener('abort', () => { - controller.close(); - }); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - polling: false, - }); - - // First read gets the newer data - const result1 = await dataSource.read(); - expect(result1.definitions).toEqual({ version: 'newer' }); - - // Wait for the older message to arrive - await new Promise((r) => setTimeout(r, 100)); - - // Should still have newer data (older message was rejected) - const result2 = await dataSource.read(); - expect(result2.definitions).toEqual({ version: 'newer' }); - - await dataSource.shutdown(); - }); - - it('should not overwrite newer in-memory data with older poll response', async () => { - let pollCount = 0; - - 
const newerDefinitions = { - projectId: 'test', - definitions: { version: 'newer' }, - environment: 'production', - configUpdatedAt: 2000, - }; - - const olderDefinitions = { - projectId: 'test', - definitions: { version: 'older' }, - environment: 'production', - configUpdatedAt: 1000, - }; - - // Stream delivers newer data - server.use( - http.get('https://flags.vercel.com/v1/stream', ({ request }) => { - return new HttpResponse( - new ReadableStream({ - start(controller) { - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ type: 'datafile', data: newerDefinitions })}\n`, - ), - ); - // Stream closes, triggering polling fallback - controller.close(); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - // Polling returns older data - http.get('https://flags.vercel.com/v1/datafile', () => { - pollCount++; - return HttpResponse.json(olderDefinitions); - }), - ); - - const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - stream: true, - polling: { intervalMs: 50, initTimeoutMs: 5000 }, - }); - - // First read gets newer data from stream - const result1 = await dataSource.read(); - expect(result1.definitions).toEqual({ version: 'newer' }); - - // Wait for stream to disconnect and polling to kick in - await vi.waitFor( - () => { - expect(pollCount).toBeGreaterThanOrEqual(1); - }, - { timeout: 3000 }, - ); - - // Should still have newer data (older poll response was rejected) - const result2 = await dataSource.read(); - expect(result2.definitions).toEqual({ version: 'newer' }); - - await dataSource.shutdown(); - - errorSpy.mockRestore(); - warnSpy.mockRestore(); - }, 10000); - - it('should accept stream data with equal configUpdatedAt', async () => { - const data1 = { - projectId: 'test', - definitions: { version: 'first' }, - environment: 
'production', - configUpdatedAt: 1000, - }; - - const data2 = { - projectId: 'test', - definitions: { version: 'second' }, - environment: 'production', - configUpdatedAt: 1000, // Same configUpdatedAt - }; - - server.use( - http.get('https://flags.vercel.com/v1/stream', ({ request }) => { - return new HttpResponse( - new ReadableStream({ - async start(controller) { - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ type: 'datafile', data: data1 })}\n`, - ), - ); - await new Promise((r) => setTimeout(r, 50)); - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ type: 'datafile', data: data2 })}\n`, - ), - ); - request.signal.addEventListener('abort', () => { - controller.close(); - }); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - polling: false, - }); - - await dataSource.read(); - - // Wait for second message - await new Promise((r) => setTimeout(r, 100)); - - // Should accept data with equal configUpdatedAt - const result = await dataSource.read(); - expect(result.definitions).toEqual({ version: 'second' }); - - await dataSource.shutdown(); - }); - - it('should accept updates when current data has no configUpdatedAt', async () => { - const providedDatafile: DatafileInput = { - projectId: 'provided', - definitions: { - testFlag: { - environments: { production: 0 }, - variants: [false, true], - }, - }, - environment: 'production', - // No configUpdatedAt - this is a plain DatafileInput - }; - - const streamData: DatafileInput = { - projectId: 'test', - definitions: { - testFlag: { - environments: { production: 1 }, - variants: [false, true], - }, - }, - environment: 'production', - configUpdatedAt: 1000, - }; - - server.use( - http.get('https://flags.vercel.com/v1/stream', ({ request }) => { - return new HttpResponse( - new ReadableStream({ - async start(controller) { - await new Promise((r) => setTimeout(r, 
50)); - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ type: 'datafile', data: streamData })}\n`, - ), - ); - request.signal.addEventListener('abort', () => { - controller.close(); - }); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - datafile: providedDatafile, - polling: false, - }); - - // Initialize to start background stream updates - await dataSource.initialize(); - - // Initial read returns provided datafile - const result1 = await dataSource.read(); - expect(result1.definitions).toEqual(providedDatafile.definitions); - - // Wait for stream to deliver data - await vi.waitFor( - async () => { - const result = await dataSource.read(); - expect(result.definitions).toEqual(streamData.definitions); - }, - { timeout: 2000 }, - ); - - await dataSource.shutdown(); - }); - - it('should handle configUpdatedAt as string', async () => { - const newerDefinitions = { - projectId: 'test', - definitions: { version: 'newer' }, - environment: 'production', - configUpdatedAt: '2000', - }; - - const olderDefinitions = { - projectId: 'test', - definitions: { version: 'older' }, - environment: 'production', - configUpdatedAt: '1000', - }; - - server.use( - http.get('https://flags.vercel.com/v1/stream', ({ request }) => { - return new HttpResponse( - new ReadableStream({ - async start(controller) { - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ type: 'datafile', data: newerDefinitions })}\n`, - ), - ); - await new Promise((r) => setTimeout(r, 50)); - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ type: 'datafile', data: olderDefinitions })}\n`, - ), - ); - request.signal.addEventListener('abort', () => { - controller.close(); - }); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - 
polling: false, - }); - - const result1 = await dataSource.read(); - expect(result1.definitions).toEqual({ version: 'newer' }); - - // Wait for the older message to arrive - await new Promise((r) => setTimeout(r, 100)); - - // Should still have newer data (older message was rejected) - const result2 = await dataSource.read(); - expect(result2.definitions).toEqual({ version: 'newer' }); - - await dataSource.shutdown(); - }); - - it('should accept updates when configUpdatedAt is a non-numeric string', async () => { - const currentData = { - projectId: 'test', - definitions: { version: 'first' }, - environment: 'production', - configUpdatedAt: 'not-a-number', - }; - - const newData = { - projectId: 'test', - definitions: { version: 'second' }, - environment: 'production', - configUpdatedAt: 1000, - }; - - server.use( - http.get('https://flags.vercel.com/v1/stream', ({ request }) => { - return new HttpResponse( - new ReadableStream({ - async start(controller) { - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ type: 'datafile', data: currentData })}\n`, - ), - ); - await new Promise((r) => setTimeout(r, 50)); - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ type: 'datafile', data: newData })}\n`, - ), - ); - request.signal.addEventListener('abort', () => { - controller.close(); - }); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - polling: false, - }); - - await dataSource.read(); - - // Wait for second message - await new Promise((r) => setTimeout(r, 100)); - - // Should accept update since current configUpdatedAt is unparseable - const result = await dataSource.read(); - expect(result.definitions).toEqual({ version: 'second' }); - - await dataSource.shutdown(); - }); - - it('should not overwrite newer in-memory data via getDatafile', async () => { - const newerDefinitions = { - projectId: 'test', - 
definitions: { version: 'newer' }, - environment: 'production', - configUpdatedAt: 2000, - }; - - const olderDefinitions = { - projectId: 'test', - definitions: { version: 'older' }, - environment: 'production', - configUpdatedAt: 1000, - }; - - // Stream delivers newer data first - server.use( - http.get('https://flags.vercel.com/v1/stream', ({ request }) => { - return new HttpResponse( - new ReadableStream({ - start(controller) { - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ type: 'datafile', data: newerDefinitions })}\n`, - ), - ); - request.signal.addEventListener('abort', () => { - controller.close(); - }); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const dataSource = new FlagNetworkDataSource({ - sdkKey: 'vf_test_key', - polling: false, - }); - - // Establish stream connection and get newer data - await dataSource.read(); - - // Now change the datafile endpoint to return older data - server.use( - http.get('https://flags.vercel.com/v1/datafile', () => { - return HttpResponse.json(olderDefinitions); - }), - ); - - // getDatafile when stream is connected returns cache, so we need to - // verify via read() that the data wasn't overwritten - const result = await dataSource.read(); - expect(result.definitions).toEqual({ version: 'newer' }); - - await dataSource.shutdown(); - }); - }); -}); diff --git a/packages/vercel-flags-core/src/data-source/flag-network-data-source.ts b/packages/vercel-flags-core/src/data-source/flag-network-data-source.ts deleted file mode 100644 index 8666acb8..00000000 --- a/packages/vercel-flags-core/src/data-source/flag-network-data-source.ts +++ /dev/null @@ -1,914 +0,0 @@ -import { version } from '../../package.json'; -import { FallbackEntryNotFoundError, FallbackNotFoundError } from '../errors'; -import type { - BundledDefinitions, - BundledDefinitionsResult, - Datafile, - DatafileInput, - DataSource, - Metrics, - PollingOptions, - StreamOptions, -} from 
'../types'; -import { readBundledDefinitions } from '../utils/read-bundled-definitions'; -import { sleep } from '../utils/sleep'; -import { type TrackReadOptions, UsageTracker } from '../utils/usage-tracker'; -import { connectStream } from './stream-connection'; - -const FLAGS_HOST = 'https://flags.vercel.com'; -const DEFAULT_STREAM_INIT_TIMEOUT_MS = 3000; -const DEFAULT_POLLING_INTERVAL_MS = 30_000; -const DEFAULT_POLLING_INIT_TIMEOUT_MS = 3_000; -const DEFAULT_FETCH_TIMEOUT_MS = 10_000; -const MAX_FETCH_RETRIES = 3; -const FETCH_RETRY_BASE_DELAY_MS = 500; - -/** - * Configuration options for FlagNetworkDataSource - */ -export type FlagNetworkDataSourceOptions = { - /** SDK key for authentication (must start with "vf_") */ - sdkKey: string; - - /** - * Initial datafile to use immediately - * - At runtime: used while waiting for stream/poll, then updated in background - * - At build step: used as primary source (skips network) - */ - datafile?: DatafileInput; - - /** - * Configure streaming connection (runtime only, ignored during build step) - * - `true`: Enable with default options (initTimeoutMs: 3000) - * - `false`: Disable streaming - * - `{ initTimeoutMs: number }`: Enable with custom timeout - * @default true - */ - stream?: boolean | StreamOptions; - - /** - * Configure polling fallback (runtime only, ignored during build step) - * - `true`: Enable with default options (intervalMs: 30000, initTimeoutMs: 3000) - * - `false`: Disable polling - * - `{ intervalMs: number, initTimeoutMs: number }`: Enable with custom options - * @default true - */ - polling?: boolean | PollingOptions; - - /** - * Override build step detection - * - `true`: Treat as build step (use datafile/bundled only, no network) - * - `false`: Treat as runtime (try stream/poll first) - * @default auto-detected via CI=1 or NEXT_PHASE=phase-production-build - */ - buildStep?: boolean; - - /** - * Custom fetch function for making HTTP requests. - * Useful for testing (e.g. 
resolving to a different IP). - * @default globalThis.fetch - */ - fetch?: typeof globalThis.fetch; -}; - -/** - * Normalized internal options - */ -type NormalizedOptions = { - sdkKey: string; - datafile: DatafileInput | undefined; - stream: { enabled: boolean; initTimeoutMs: number }; - polling: { enabled: boolean; intervalMs: number; initTimeoutMs: number }; - buildStep: boolean; - fetch: typeof globalThis.fetch; -}; - -/** - * Normalizes user-provided options to internal format with defaults - */ -function normalizeOptions( - options: FlagNetworkDataSourceOptions, -): NormalizedOptions { - const autoDetectedBuildStep = - process.env.CI === '1' || - process.env.NEXT_PHASE === 'phase-production-build'; - const buildStep = options.buildStep ?? autoDetectedBuildStep; - - let stream: NormalizedOptions['stream']; - if (options.stream === undefined || options.stream === true) { - stream = { enabled: true, initTimeoutMs: DEFAULT_STREAM_INIT_TIMEOUT_MS }; - } else if (options.stream === false) { - stream = { enabled: false, initTimeoutMs: 0 }; - } else { - stream = { enabled: true, initTimeoutMs: options.stream.initTimeoutMs }; - } - - let polling: NormalizedOptions['polling']; - if (options.polling === undefined || options.polling === true) { - polling = { - enabled: true, - intervalMs: DEFAULT_POLLING_INTERVAL_MS, - initTimeoutMs: DEFAULT_POLLING_INIT_TIMEOUT_MS, - }; - } else if (options.polling === false) { - polling = { enabled: false, intervalMs: 0, initTimeoutMs: 0 }; - } else { - polling = { - enabled: true, - intervalMs: options.polling.intervalMs, - initTimeoutMs: options.polling.initTimeoutMs, - }; - } - - return { - sdkKey: options.sdkKey, - datafile: options.datafile, - stream, - polling, - buildStep, - fetch: options.fetch ?? globalThis.fetch, - }; -} - -/** - * Fetches the datafile from the flags service with retry logic. - * - * Implements exponential backoff with jitter for transient failures. 
- * Does not retry 4xx errors (except 429) as they indicate client errors. - */ -async function fetchDatafile( - host: string, - sdkKey: string, - fetchFn: typeof globalThis.fetch, -): Promise { - let lastError: Error | undefined; - - for (let attempt = 0; attempt < MAX_FETCH_RETRIES; attempt++) { - const controller = new AbortController(); - const timeoutId = setTimeout( - () => controller.abort(), - DEFAULT_FETCH_TIMEOUT_MS, - ); - - let shouldRetry = true; - try { - const res = await fetchFn(`${host}/v1/datafile`, { - headers: { - Authorization: `Bearer ${sdkKey}`, - 'User-Agent': `VercelFlagsCore/${version}`, - }, - signal: controller.signal, - }); - - clearTimeout(timeoutId); - - if (!res.ok) { - // Don't retry 4xx errors (except 429) - if (res.status >= 400 && res.status < 500 && res.status !== 429) { - shouldRetry = false; - } - throw new Error(`Failed to fetch data: ${res.statusText}`); - } - - return res.json() as Promise; - } catch (error) { - clearTimeout(timeoutId); - lastError = - error instanceof Error ? error : new Error('Unknown fetch error'); - - if (!shouldRetry) throw lastError; - - if (attempt < MAX_FETCH_RETRIES - 1) { - const delay = - FETCH_RETRY_BASE_DELAY_MS * 2 ** attempt + Math.random() * 500; - await sleep(delay); - } - } - } - - throw lastError ?? new Error('Failed to fetch data after retries'); -} - -/** - * A DataSource implementation that connects to flags.vercel.com. 
- * - * Behavior differs based on environment: - * - * **Build step** (CI=1 or Next.js build, or buildStep: true): - * - Uses datafile (if provided) or bundled definitions - * - No streaming or polling (avoids network during build) - * - * **Runtime** (default): - * - Tries stream first, then poll, then datafile, then bundled - * - Stream and polling never run simultaneously - * - If stream reconnects while polling → stop polling - * - If stream disconnects → start polling (if enabled) - */ -export class FlagNetworkDataSource implements DataSource { - private options: NormalizedOptions; - private host = FLAGS_HOST; - - // Data state - private data: DatafileInput | undefined; - private bundledDefinitionsPromise: - | Promise - | undefined; - - // Stream state - private streamAbortController: AbortController | undefined; - private streamPromise: Promise | undefined; - private isStreamConnected: boolean = false; - private hasWarnedAboutStaleData: boolean = false; - - // Polling state - private pollingIntervalId: ReturnType | undefined; - private pollingAbortController: AbortController | undefined; - - // Initialization state — suppresses onDisconnect from starting polling - // while initialize() is still running its own fallback chain - private isInitializing: boolean = false; - - // Usage tracking - private usageTracker: UsageTracker; - private isFirstGetData: boolean = true; - - /** - * Creates a new FlagNetworkDataSource instance. 
- */ - constructor(options: FlagNetworkDataSourceOptions) { - if ( - !options.sdkKey || - typeof options.sdkKey !== 'string' || - !options.sdkKey.startsWith('vf_') - ) { - throw new Error( - '@vercel/flags-core: SDK key must be a string starting with "vf_"', - ); - } - - this.options = normalizeOptions(options); - - // Always load bundled definitions as ultimate fallback - this.bundledDefinitionsPromise = readBundledDefinitions( - this.options.sdkKey, - ); - - // If datafile provided, use it immediately - if (this.options.datafile) { - this.data = this.options.datafile; - } - - this.usageTracker = new UsageTracker({ - sdkKey: this.options.sdkKey, - host: this.host, - }); - } - - // --------------------------------------------------------------------------- - // Public API (DataSource interface) - // --------------------------------------------------------------------------- - - /** - * Initializes the data source. - * - * Build step: datafile → bundled → fetch - * Runtime: stream → poll → datafile → bundled - */ - async initialize(): Promise { - if (this.options.buildStep) { - await this.initializeForBuildStep(); - return; - } - - // Hydrate from provided datafile if not already set (e.g., after shutdown) - // Usually the constructor sets this, but if the client was shutdown and - // then init'd again we need to set it again. This also means that any - // previous data we've seen before shutdown is lost. We'll "start fresh". 
- if (!this.data && this.options.datafile) { - this.data = this.options.datafile; - } - - // If we already have data (from provided datafile), start background updates - // but don't block on them - if (this.data) { - this.startBackgroundUpdates(); - return; - } - - this.isInitializing = true; - try { - // Try stream first - if (this.options.stream.enabled) { - const streamSuccess = await this.tryInitializeStream(); - if (streamSuccess) return; - } - - // Fall back to polling - if (this.options.polling.enabled) { - const pollingSuccess = await this.tryInitializePolling(); - if (pollingSuccess) return; - } - - // Fall back to provided datafile (already set in constructor if provided) - if (this.data) return; - - // Fall back to bundled definitions - await this.initializeFromBundled(); - } finally { - this.isInitializing = false; - } - } - - /** - * Reads the current datafile with metrics. - */ - async read(): Promise { - const startTime = Date.now(); - const cachedData = this.data; - const cacheHadDefinitions = cachedData !== undefined; - const isFirstRead = this.isFirstGetData; - this.isFirstGetData = false; - - let result: DatafileInput; - let source: Metrics['source']; - let cacheStatus: Metrics['cacheStatus']; - - if (this.options.buildStep) { - [result, source, cacheStatus] = await this.getDataForBuildStep(); - } else if (cachedData) { - [result, source, cacheStatus] = this.getDataFromCache(cachedData); - } else { - [result, source, cacheStatus] = await this.getDataWithFallbacks(); - } - - const readMs = Date.now() - startTime; - this.trackRead(startTime, cacheHadDefinitions, isFirstRead, source); - - return Object.assign(result, { - metrics: { - readMs, - source, - cacheStatus, - connectionState: this.isStreamConnected - ? ('connected' as const) - : ('disconnected' as const), - }, - }) satisfies Datafile; - } - - /** - * Shuts down the data source and releases resources. 
- */ - async shutdown(): Promise { - this.stopStream(); - this.stopPolling(); - this.data = this.options.datafile; - this.isInitializing = false; - this.isStreamConnected = false; - this.hasWarnedAboutStaleData = false; - await this.usageTracker.flush(); - } - - /** - * Returns the datafile with metrics. - * - * During builds this will read from the bundled file if available. - * - * This method never opens a streaming connection, but will read from - * the stream if it is already open. Otherwise it fetches over the network. - */ - async getDatafile(): Promise { - const startTime = Date.now(); - - let result: DatafileInput; - let source: Metrics['source']; - let cacheStatus: Metrics['cacheStatus']; - - if (this.options.buildStep) { - [result, source, cacheStatus] = await this.getDataForBuildStep(); - } else if (this.isStreamConnected && this.data) { - [result, source, cacheStatus] = this.getDataFromCache(); - } else { - const fetched = await fetchDatafile( - this.host, - this.options.sdkKey, - this.options.fetch, - ); - if (this.isNewerData(fetched)) { - this.data = fetched; - } - [result, source, cacheStatus] = [this.data ?? fetched, 'remote', 'MISS']; - } - - return Object.assign(result, { - metrics: { - readMs: Date.now() - startTime, - source, - cacheStatus, - connectionState: this.isStreamConnected - ? ('connected' as const) - : ('disconnected' as const), - }, - }) satisfies Datafile; - } - - /** - * Returns the bundled fallback datafile. 
- */ - async getFallbackDatafile(): Promise { - if (!this.bundledDefinitionsPromise) { - throw new FallbackNotFoundError(); - } - - const bundledResult = await this.bundledDefinitionsPromise; - - if (!bundledResult) { - throw new FallbackNotFoundError(); - } - - switch (bundledResult.state) { - case 'ok': - return bundledResult.definitions; - case 'missing-file': - throw new FallbackNotFoundError(); - case 'missing-entry': - throw new FallbackEntryNotFoundError(); - case 'unexpected-error': - throw new Error( - '@vercel/flags-core: Failed to read bundled definitions: ' + - String(bundledResult.error), - ); - } - } - - // --------------------------------------------------------------------------- - // Stream management - // --------------------------------------------------------------------------- - - /** - * Attempts to initialize via stream with timeout. - * Returns true if stream connected successfully within timeout. - */ - private async tryInitializeStream(): Promise { - let streamPromise: Promise; - - if (this.options.stream.initTimeoutMs <= 0) { - // No timeout - wait indefinitely - try { - streamPromise = this.startStream(); - await streamPromise; - return true; - } catch { - return false; - } - } - - // Race against timeout - let timeoutId: ReturnType; - const timeoutPromise = new Promise<'timeout'>((resolve) => { - timeoutId = setTimeout( - () => resolve('timeout'), - this.options.stream.initTimeoutMs, - ); - }); - - try { - streamPromise = this.startStream(); - const result = await Promise.race([streamPromise, timeoutPromise]); - clearTimeout(timeoutId!); - - if (result === 'timeout') { - console.warn( - '@vercel/flags-core: Stream initialization timeout, falling back', - ); - // Don't abort stream - let it continue trying in background - return false; - } - - return true; - } catch { - clearTimeout(timeoutId!); - return false; - } - } - - /** - * Starts the stream connection with callbacks for data and disconnect. 
- */ - private startStream(): Promise { - if (this.streamPromise) return this.streamPromise; - - this.streamAbortController = new AbortController(); - this.isStreamConnected = false; - this.hasWarnedAboutStaleData = false; - - try { - const streamPromise = connectStream( - { - host: this.host, - sdkKey: this.options.sdkKey, - abortController: this.streamAbortController, - fetch: this.options.fetch, - }, - { - onMessage: (newData) => { - if (this.isNewerData(newData)) { - this.data = newData; - } - this.isStreamConnected = true; - this.hasWarnedAboutStaleData = false; - - // Stream is working - stop polling if it's running - if (this.pollingIntervalId) { - this.stopPolling(); - } - }, - onDisconnect: () => { - this.isStreamConnected = false; - - // Fall back to polling if enabled and not already polling. - // Skip during initialization — initialize() manages its own - // fallback chain and will start polling itself if needed. - if ( - this.options.polling.enabled && - !this.pollingIntervalId && - !this.isInitializing - ) { - this.startPolling(); - } - }, - }, - ); - - this.streamPromise = streamPromise; - return streamPromise; - } catch (error) { - this.streamPromise = undefined; - this.streamAbortController = undefined; - throw error; - } - } - - /** - * Stops the stream connection. - */ - private stopStream(): void { - this.streamAbortController?.abort(); - this.streamAbortController = undefined; - this.streamPromise = undefined; - } - - // --------------------------------------------------------------------------- - // Polling management - // --------------------------------------------------------------------------- - - /** - * Attempts to initialize via polling with timeout. - * Returns true if first poll succeeded within timeout. 
- */ - private async tryInitializePolling(): Promise { - this.pollingAbortController = new AbortController(); - - // Perform initial poll - const pollPromise = this.performPoll(); - - if (this.options.polling.initTimeoutMs <= 0) { - // No timeout - wait indefinitely - try { - await pollPromise; - if (this.data) { - this.startPollingInterval(); - return true; - } - return false; - } catch { - return false; - } - } - - // Race against timeout - let timeoutId: ReturnType; - const timeoutPromise = new Promise<'timeout'>((resolve) => { - timeoutId = setTimeout( - () => resolve('timeout'), - this.options.polling.initTimeoutMs, - ); - }); - - try { - const result = await Promise.race([pollPromise, timeoutPromise]); - clearTimeout(timeoutId!); - - if (result === 'timeout') { - console.warn( - '@vercel/flags-core: Polling initialization timeout, falling back', - ); - return false; - } - - if (this.data) { - this.startPollingInterval(); - return true; - } - return false; - } catch { - clearTimeout(timeoutId!); - return false; - } - } - - /** - * Starts polling (initial poll + interval). - */ - private startPolling(): void { - if (this.pollingIntervalId) return; - - this.pollingAbortController = new AbortController(); - - // Perform initial poll - void this.performPoll(); - - // Start interval - this.startPollingInterval(); - } - - /** - * Starts the polling interval (without initial poll). - */ - private startPollingInterval(): void { - if (this.pollingIntervalId) return; - - this.pollingIntervalId = setInterval( - () => void this.performPoll(), - this.options.polling.intervalMs, - ); - } - - /** - * Stops polling. - */ - private stopPolling(): void { - if (this.pollingIntervalId) { - clearInterval(this.pollingIntervalId); - this.pollingIntervalId = undefined; - } - this.pollingAbortController?.abort(); - this.pollingAbortController = undefined; - } - - /** - * Performs a single poll request. 
- */ - private async performPoll(): Promise { - if (this.pollingAbortController?.signal.aborted) return; - - try { - const data = await fetchDatafile( - this.host, - this.options.sdkKey, - this.options.fetch, - ); - if (this.isNewerData(data)) { - this.data = data; - } - } catch (error) { - console.error('@vercel/flags-core: Poll failed:', error); - } - } - - // --------------------------------------------------------------------------- - // Background updates - // --------------------------------------------------------------------------- - - /** - * Starts background updates (stream or polling) without blocking. - * Used when we already have data from provided datafile. - */ - private startBackgroundUpdates(): void { - if (this.options.stream.enabled) { - void this.startStream(); - } else if (this.options.polling.enabled) { - this.startPolling(); - } - } - - // --------------------------------------------------------------------------- - // Build step helpers - // --------------------------------------------------------------------------- - - /** - * Initializes data for build step environments. - */ - private async initializeForBuildStep(): Promise { - if (this.data) return; - - if (this.bundledDefinitionsPromise) { - const bundledResult = await this.bundledDefinitionsPromise; - if (bundledResult?.state === 'ok' && bundledResult.definitions) { - this.data = bundledResult.definitions; - return; - } - } - - this.data = await fetchDatafile( - this.host, - this.options.sdkKey, - this.options.fetch, - ); - } - - /** - * Retrieves data during build steps. 
- */ - private async getDataForBuildStep(): Promise< - [DatafileInput, Metrics['source'], Metrics['cacheStatus']] - > { - if (this.data) { - return [this.data, 'in-memory', 'HIT']; - } - - if (this.bundledDefinitionsPromise) { - const bundledResult = await this.bundledDefinitionsPromise; - if (bundledResult?.state === 'ok' && bundledResult.definitions) { - this.data = bundledResult.definitions; - return [this.data, 'embedded', 'MISS']; - } - } - - this.data = await fetchDatafile( - this.host, - this.options.sdkKey, - this.options.fetch, - ); - return [this.data, 'remote', 'MISS']; - } - - // --------------------------------------------------------------------------- - // Runtime helpers - // --------------------------------------------------------------------------- - - /** - * Returns data from the in-memory cache. - */ - private getDataFromCache( - cachedData?: DatafileInput, - ): [DatafileInput, Metrics['source'], Metrics['cacheStatus']] { - const data = cachedData ?? this.data!; - this.warnIfDisconnected(); - const cacheStatus = this.isStreamConnected ? 'HIT' : 'STALE'; - return [data, 'in-memory', cacheStatus]; - } - - /** - * Retrieves data using the fallback chain. 
- */ - private async getDataWithFallbacks(): Promise< - [DatafileInput, Metrics['source'], Metrics['cacheStatus']] - > { - // Try stream with timeout - if (this.options.stream.enabled) { - const streamSuccess = await this.tryInitializeStream(); - if (streamSuccess && this.data) { - return [this.data, 'in-memory', 'MISS']; - } - } - - // Try polling with timeout - if (this.options.polling.enabled) { - const pollingSuccess = await this.tryInitializePolling(); - if (pollingSuccess && this.data) { - return [this.data, 'remote', 'MISS']; - } - } - - // Use provided datafile - if (this.options.datafile) { - this.data = this.options.datafile; - return [this.data, 'in-memory', 'STALE']; - } - - // Use bundled definitions - if (this.bundledDefinitionsPromise) { - const bundledResult = await this.bundledDefinitionsPromise; - if (bundledResult?.state === 'ok' && bundledResult.definitions) { - console.warn( - '@vercel/flags-core: Using bundled definitions as fallback', - ); - this.data = bundledResult.definitions; - return [this.data, 'embedded', 'STALE']; - } - } - - throw new Error( - '@vercel/flags-core: No flag definitions available. ' + - 'Ensure streaming/polling is enabled or provide a datafile.', - ); - } - - /** - * Initializes from bundled definitions. - */ - private async initializeFromBundled(): Promise { - if (!this.bundledDefinitionsPromise) { - throw new Error( - '@vercel/flags-core: No flag definitions available. ' + - 'Ensure streaming/polling is enabled or provide a datafile.', - ); - } - - const bundledResult = await this.bundledDefinitionsPromise; - if (bundledResult?.state === 'ok' && bundledResult.definitions) { - this.data = bundledResult.definitions; - return; - } - - throw new Error( - '@vercel/flags-core: No flag definitions available. ' + - 'Bundled definitions not found.', - ); - } - - /** - * Parses a configUpdatedAt value (number or string) into a numeric timestamp. - * Returns undefined if the value is missing or cannot be parsed. 
- */ - private static parseConfigUpdatedAt(value: unknown): number | undefined { - if (typeof value === 'number') return value; - if (typeof value === 'string') { - const parsed = Number(value); - return Number.isNaN(parsed) ? undefined : parsed; - } - return undefined; - } - - /** - * Checks if the incoming data is newer than the current in-memory data. - * Returns true if the update should proceed, false if it should be skipped. - * - * Always accepts the update if: - * - There is no current data - * - The current data has no configUpdatedAt - * - The incoming data has no configUpdatedAt - * - * Skips the update only when both have configUpdatedAt and incoming is older. - */ - private isNewerData(incoming: DatafileInput): boolean { - if (!this.data) return true; - - const currentTs = FlagNetworkDataSource.parseConfigUpdatedAt( - this.data.configUpdatedAt, - ); - const incomingTs = FlagNetworkDataSource.parseConfigUpdatedAt( - incoming.configUpdatedAt, - ); - - if (currentTs === undefined || incomingTs === undefined) { - return true; - } - - return incomingTs >= currentTs; - } - - /** - * Logs a warning if returning cached data while stream is disconnected. - */ - private warnIfDisconnected(): void { - if (!this.isStreamConnected && !this.hasWarnedAboutStaleData) { - this.hasWarnedAboutStaleData = true; - console.warn( - '@vercel/flags-core: Returning in-memory flag definitions while stream is disconnected. Data may be stale.', - ); - } - } - - // --------------------------------------------------------------------------- - // Usage tracking - // --------------------------------------------------------------------------- - - /** - * Tracks a read operation for usage analytics. - */ - private trackRead( - startTime: number, - cacheHadDefinitions: boolean, - isFirstRead: boolean, - source: Metrics['source'], - ): void { - const configOrigin: 'in-memory' | 'embedded' = - source === 'embedded' ? 
'embedded' : 'in-memory'; - const trackOptions: TrackReadOptions = { - configOrigin, - cacheStatus: cacheHadDefinitions ? 'HIT' : 'MISS', - cacheIsBlocking: !cacheHadDefinitions, - duration: Date.now() - startTime, - }; - const configUpdatedAt = this.data?.configUpdatedAt; - if (typeof configUpdatedAt === 'number') { - trackOptions.configUpdatedAt = configUpdatedAt; - } - if (isFirstRead) { - trackOptions.cacheIsFirstRead = true; - } - this.usageTracker.trackRead(trackOptions); - } -} diff --git a/packages/vercel-flags-core/src/data-source/in-memory-data-source.ts b/packages/vercel-flags-core/src/data-source/in-memory-data-source.ts deleted file mode 100644 index 05807bb5..00000000 --- a/packages/vercel-flags-core/src/data-source/in-memory-data-source.ts +++ /dev/null @@ -1,48 +0,0 @@ -import type { Datafile, DatafileInput, DataSource, Packed } from '../types'; - -const RESOLVED_VOID = Promise.resolve(); - -export class InMemoryDataSource implements DataSource { - private data: DatafileInput; - private cachedDatafile: Datafile | undefined; - - constructor({ - data, - projectId, - environment, - }: { data: Packed.Data; projectId: string; environment: string }) { - this.data = { - ...data, - projectId, - environment, - }; - } - - getDatafile(): Promise { - return Promise.resolve(this.getDatafileSync()); - } - - initialize(): Promise { - return RESOLVED_VOID; - } - - shutdown(): void {} - - read(): Promise { - return Promise.resolve(this.getDatafileSync()); - } - - private getDatafileSync(): Datafile { - if (!this.cachedDatafile) { - this.cachedDatafile = Object.assign(this.data, { - metrics: { - readMs: 0, - source: 'in-memory' as const, - cacheStatus: 'HIT' as const, - connectionState: 'connected' as const, - }, - }) satisfies Datafile; - } - return this.cachedDatafile; - } -} diff --git a/packages/vercel-flags-core/src/data-source/stream-connection.test.ts b/packages/vercel-flags-core/src/data-source/stream-connection.test.ts deleted file mode 100644 index 
8c7f6fc2..00000000 --- a/packages/vercel-flags-core/src/data-source/stream-connection.test.ts +++ /dev/null @@ -1,611 +0,0 @@ -import { HttpResponse, http } from 'msw'; -import { setupServer } from 'msw/node'; -import { - afterAll, - afterEach, - beforeAll, - beforeEach, - describe, - expect, - it, - vi, -} from 'vitest'; -import { connectStream } from './stream-connection'; - -const HOST = 'https://flags.vercel.com'; - -const server = setupServer(); - -beforeAll(() => server.listen()); -beforeEach(() => { - vi.clearAllMocks(); -}); -afterEach(() => server.resetHandlers()); -afterAll(() => server.close()); - -function createNdjsonStream( - messages: object[], - options?: { delayMs?: number; keepOpen?: boolean }, -): ReadableStream { - const { delayMs = 0, keepOpen = false } = options ?? {}; - return new ReadableStream({ - async start(controller) { - for (const message of messages) { - if (delayMs > 0) await new Promise((r) => setTimeout(r, delayMs)); - controller.enqueue( - new TextEncoder().encode(`${JSON.stringify(message)}\n`), - ); - } - if (!keepOpen) { - controller.close(); - } - }, - }); -} - -describe('connectStream', () => { - describe('connection success', () => { - it('should resolve when first datafile message is received', async () => { - const definitions = { projectId: 'test', definitions: {} }; - - server.use( - http.get(`${HOST}/v1/stream`, () => { - return new HttpResponse( - createNdjsonStream([{ type: 'datafile', data: definitions }]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - const onMessage = vi.fn(); - - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage }, - ); - - expect(onMessage).toHaveBeenCalledWith(definitions); - abortController.abort(); - }); - - it('should call onMessage callback with parsed data', async () => { - const definitions = { - projectId: 'test', - definitions: { flag: { variants: [true] } }, - }; - - 
server.use( - http.get(`${HOST}/v1/stream`, () => { - return new HttpResponse( - createNdjsonStream([{ type: 'datafile', data: definitions }]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - const onMessage = vi.fn(); - - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage }, - ); - - expect(onMessage).toHaveBeenCalledTimes(1); - expect(onMessage).toHaveBeenCalledWith(definitions); - abortController.abort(); - }); - - it('should ignore ping messages', async () => { - const definitions = { projectId: 'test', definitions: {} }; - - server.use( - http.get(`${HOST}/v1/stream`, () => { - return new HttpResponse( - createNdjsonStream([ - { type: 'ping' }, - { type: 'datafile', data: definitions }, - { type: 'ping' }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - const onMessage = vi.fn(); - - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage }, - ); - - expect(onMessage).toHaveBeenCalledTimes(1); - expect(onMessage).toHaveBeenCalledWith(definitions); - abortController.abort(); - }); - - it('should handle NDJSON messages split across chunks', async () => { - const definitions = { projectId: 'test', definitions: { flag: true } }; - const fullMessage = JSON.stringify({ - type: 'datafile', - data: definitions, - }); - const part1 = fullMessage.slice(0, 20); - const part2 = `${fullMessage.slice(20)}\n`; - - server.use( - http.get(`${HOST}/v1/stream`, () => { - return new HttpResponse( - new ReadableStream({ - async start(controller) { - controller.enqueue(new TextEncoder().encode(part1)); - await new Promise((r) => setTimeout(r, 10)); - controller.enqueue(new TextEncoder().encode(part2)); - controller.close(); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new 
AbortController(); - const onMessage = vi.fn(); - - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage }, - ); - - expect(onMessage).toHaveBeenCalledWith(definitions); - abortController.abort(); - }); - - it('should skip empty lines in stream', async () => { - const definitions = { projectId: 'test', definitions: {} }; - - server.use( - http.get(`${HOST}/v1/stream`, () => { - return new HttpResponse( - new ReadableStream({ - start(controller) { - controller.enqueue(new TextEncoder().encode('\n\n')); - controller.enqueue( - new TextEncoder().encode( - JSON.stringify({ type: 'datafile', data: definitions }) + - '\n', - ), - ); - controller.enqueue(new TextEncoder().encode('\n')); - controller.close(); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - const onMessage = vi.fn(); - - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage }, - ); - - expect(onMessage).toHaveBeenCalledTimes(1); - abortController.abort(); - }); - }); - - describe('headers', () => { - it('should include Authorization header with Bearer token', async () => { - let capturedHeaders: Headers | null = null; - - server.use( - http.get(`${HOST}/v1/stream`, ({ request }) => { - capturedHeaders = request.headers; - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'test', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - await connectStream( - { host: HOST, sdkKey: 'vf_my_key', abortController }, - { onMessage: vi.fn() }, - ); - - expect(capturedHeaders!.get('Authorization')).toBe('Bearer vf_my_key'); - abortController.abort(); - }); - - it('should include User-Agent header with version', async () => { - let capturedHeaders: Headers | null = null; - - server.use( - 
http.get(`${HOST}/v1/stream`, ({ request }) => { - capturedHeaders = request.headers; - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'test', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage: vi.fn() }, - ); - - expect(capturedHeaders!.get('User-Agent')).toMatch(/^VercelFlagsCore\//); - abortController.abort(); - }); - - it('should include X-Retry-Attempt header starting at 0', async () => { - let capturedHeaders: Headers | null = null; - - server.use( - http.get(`${HOST}/v1/stream`, ({ request }) => { - capturedHeaders = request.headers; - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'test', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage: vi.fn() }, - ); - - expect(capturedHeaders!.get('X-Retry-Attempt')).toBe('0'); - abortController.abort(); - }); - }); - - describe('retry behavior', () => { - it('should increment X-Retry-Attempt on reconnect after stream closes', async () => { - const retryAttempts: string[] = []; - let requestCount = 0; - - server.use( - http.get(`${HOST}/v1/stream`, ({ request }) => { - retryAttempts.push(request.headers.get('X-Retry-Attempt') ?? 
''); - requestCount++; - - // First request: send data then close - // Second request: send data and keep open - return new HttpResponse( - createNdjsonStream( - [ - { - type: 'datafile', - data: { projectId: 'test', definitions: {} }, - }, - ], - { keepOpen: requestCount >= 2 }, - ), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - const onDisconnect = vi.fn(); - - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage: vi.fn(), onDisconnect }, - ); - - // Wait for reconnection attempt - await vi.waitFor( - () => { - expect(requestCount).toBeGreaterThanOrEqual(2); - }, - { timeout: 3000 }, - ); - - expect(retryAttempts[0]).toBe('0'); - expect(retryAttempts[1]).toBe('1'); - expect(onDisconnect).toHaveBeenCalled(); - - abortController.abort(); - }); - - it('should reset retryCount to 0 after receiving datafile', async () => { - const retryAttempts: string[] = []; - let requestCount = 0; - - server.use( - http.get(`${HOST}/v1/stream`, ({ request }) => { - retryAttempts.push(request.headers.get('X-Retry-Attempt') ?? 
''); - requestCount++; - - // Close stream after each datafile to trigger reconnect - return new HttpResponse( - createNdjsonStream( - [ - { - type: 'datafile', - data: { projectId: 'test', definitions: {} }, - }, - ], - { keepOpen: requestCount >= 3 }, - ), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage: vi.fn() }, - ); - - // Wait for multiple reconnections - await vi.waitFor( - () => { - expect(requestCount).toBeGreaterThanOrEqual(3); - }, - { timeout: 5000 }, - ); - - // Each reconnect after successful datafile should reset to 0, then increment by 1 - // Request 1: retry=0, gets datafile, resets to 0, stream closes, increments to 1 - // Request 2: retry=1, gets datafile, resets to 0, stream closes, increments to 1 - // Request 3: retry=1, gets datafile, resets to 0 - expect(retryAttempts[0]).toBe('0'); - expect(retryAttempts[1]).toBe('1'); - expect(retryAttempts[2]).toBe('1'); - - abortController.abort(); - }); - - it('should call onDisconnect when stream ends normally', async () => { - let requestCount = 0; - - server.use( - http.get(`${HOST}/v1/stream`, () => { - requestCount++; - return new HttpResponse( - createNdjsonStream( - [ - { - type: 'datafile', - data: { projectId: 'test', definitions: {} }, - }, - ], - { keepOpen: requestCount >= 2 }, - ), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - const onDisconnect = vi.fn(); - - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage: vi.fn(), onDisconnect }, - ); - - await vi.waitFor(() => { - expect(onDisconnect).toHaveBeenCalled(); - }); - - abortController.abort(); - }); - }); - - describe('failure cases', () => { - // Note: 401 response behavior is tested through FlagNetworkDataSource - // which handles the timeout 
fallback. The stream-connection aborts on 401 - // but the promise resolution is handled by the timeout mechanism in - // FlagNetworkDataSource.getDataWithStreamTimeout(). - - it('should reject initPromise if error occurs before first datafile', async () => { - const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - - server.use( - http.get(`${HOST}/v1/stream`, () => { - return new HttpResponse(null, { status: 500 }); - }), - ); - - const abortController = new AbortController(); - - await expect( - connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage: vi.fn() }, - ), - ).rejects.toThrow('stream was not ok: 500'); - - errorSpy.mockRestore(); - }); - - it('should reject if response has no body', async () => { - const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - - server.use( - http.get(`${HOST}/v1/stream`, () => { - // Return a response without a body - return new HttpResponse(null, { - status: 200, - headers: { 'Content-Type': 'application/x-ndjson' }, - }); - }), - ); - - const abortController = new AbortController(); - - await expect( - connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage: vi.fn() }, - ), - ).rejects.toThrow('stream body was not present'); - - errorSpy.mockRestore(); - }); - - it('should call onDisconnect on error after initial data received', async () => { - const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); - let requestCount = 0; - - server.use( - http.get(`${HOST}/v1/stream`, () => { - requestCount++; - if (requestCount === 1) { - // First request succeeds - return new HttpResponse( - createNdjsonStream([ - { - type: 'datafile', - data: { projectId: 'test', definitions: {} }, - }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - } - // Subsequent requests fail - return new HttpResponse(null, { status: 500 }); - }), - ); - - const abortController = new AbortController(); - const onDisconnect = 
vi.fn(); - - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage: vi.fn(), onDisconnect }, - ); - - // Wait for disconnect to be called (from first stream close and error) - await vi.waitFor( - () => { - expect(onDisconnect).toHaveBeenCalled(); - }, - { timeout: 3000 }, - ); - - abortController.abort(); - errorSpy.mockRestore(); - }); - - // Note: Testing MAX_RETRY_COUNT exceeded is skipped because the backoff delays - // make the test too slow. The behavior is: - // - After 10 retries without receiving data, the connection aborts - // - console.error('@vercel/flags-core: Max retry count exceeded') is logged - // This is tested indirectly through FlagNetworkDataSource integration tests. - - it('should stop when abortController is aborted externally', async () => { - server.use( - http.get(`${HOST}/v1/stream`, ({ request }) => { - return new HttpResponse( - new ReadableStream({ - start(controller) { - controller.enqueue( - new TextEncoder().encode( - `${JSON.stringify({ - type: 'datafile', - data: { projectId: 'test', definitions: {} }, - })}\n`, - ), - ); - // Keep stream open - request.signal.addEventListener('abort', () => { - controller.close(); - }); - }, - }), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - const onMessage = vi.fn(); - - await connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage }, - ); - - expect(onMessage).toHaveBeenCalledTimes(1); - - // Abort externally - abortController.abort(); - - // Should stop without errors - expect(abortController.signal.aborted).toBe(true); - }); - }); - - describe('multiple datafile messages', () => { - it('should call onMessage for each datafile but only resolve once', async () => { - const data1 = { projectId: 'test', definitions: { v: 1 } }; - const data2 = { projectId: 'test', definitions: { v: 2 } }; - - server.use( - http.get(`${HOST}/v1/stream`, () => { - return 
new HttpResponse( - createNdjsonStream([ - { type: 'datafile', data: data1 }, - { type: 'datafile', data: data2 }, - ]), - { headers: { 'Content-Type': 'application/x-ndjson' } }, - ); - }), - ); - - const abortController = new AbortController(); - const onMessage = vi.fn(); - - const promise = connectStream( - { host: HOST, sdkKey: 'vf_test', abortController }, - { onMessage }, - ); - - // Should resolve (not hang waiting for more data) - await promise; - - // Wait for all messages to be processed - await vi.waitFor(() => { - expect(onMessage).toHaveBeenCalledTimes(2); - }); - - expect(onMessage).toHaveBeenNthCalledWith(1, data1); - expect(onMessage).toHaveBeenNthCalledWith(2, data2); - - abortController.abort(); - }); - }); -}); diff --git a/packages/vercel-flags-core/src/data-source/stream-connection.ts b/packages/vercel-flags-core/src/data-source/stream-connection.ts deleted file mode 100644 index f1bf8ee8..00000000 --- a/packages/vercel-flags-core/src/data-source/stream-connection.ts +++ /dev/null @@ -1,146 +0,0 @@ -import { version } from '../../package.json'; -import type { BundledDefinitions } from '../types'; -import { sleep } from '../utils/sleep'; - -export type StreamMessage = - | { type: 'datafile'; data: BundledDefinitions } - | { type: 'ping' }; - -const MAX_RETRY_COUNT = 15; -const BASE_DELAY_MS = 1000; -const MAX_DELAY_MS = 60_000; - -function backoff(retryCount: number): number { - if (retryCount === 1) return 0; - const delay = Math.min(BASE_DELAY_MS * 2 ** (retryCount - 2), MAX_DELAY_MS); - return delay + Math.random() * 1000; -} - -export type StreamCallbacks = { - onMessage: (data: BundledDefinitions) => void; - onDisconnect?: () => void; -}; - -export type StreamConfig = { - host: string; - sdkKey: string; - abortController: AbortController; - fetch?: typeof globalThis.fetch; -}; - -/** - * Connects to the flags stream endpoint and handles reconnection with backoff. - * Resolves when the first datafile message is received. 
- * Rejects if the connection fails before receiving any data. - */ -export async function connectStream( - config: StreamConfig, - callbacks: StreamCallbacks, -): Promise { - const { - host, - sdkKey, - abortController, - fetch: fetchFn = globalThis.fetch, - } = config; - const { onMessage, onDisconnect } = callbacks; - let retryCount = 0; - - let resolveInit: () => void; - let rejectInit: (error: unknown) => void; - const initPromise = new Promise((resolve, reject) => { - resolveInit = resolve; - rejectInit = reject; - }); - - void (async () => { - let initialDataReceived = false; - - while (!abortController.signal.aborted) { - if (retryCount > MAX_RETRY_COUNT) { - console.error('@vercel/flags-core: Max retry count exceeded'); - abortController.abort(); - break; - } - - try { - const response = await fetchFn(`${host}/v1/stream`, { - headers: { - Authorization: `Bearer ${sdkKey}`, - 'User-Agent': `VercelFlagsCore/${version}`, - 'X-Retry-Attempt': String(retryCount), - }, - signal: abortController.signal, - }); - - if (!response.ok) { - if (response.status === 401) { - abortController.abort(); - } - - throw new Error(`stream was not ok: ${response.status}`); - } - - if (!response.body) { - throw new Error('stream body was not present'); - } - - const decoder = new TextDecoder(); - let buffer = ''; - - for await (const chunk of response.body) { - if (abortController.signal.aborted) break; - - buffer += decoder.decode(chunk, { stream: true }); - const lines = buffer.split('\n'); - buffer = lines.pop()!; - - for (const line of lines) { - if (line === '') continue; - - let message: StreamMessage; - try { - message = JSON.parse(line) as StreamMessage; - } catch { - console.warn( - '@vercel/flags-core: Failed to parse stream message, skipping', - ); - continue; - } - - if (message.type === 'datafile') { - onMessage(message.data); - retryCount = 0; - if (!initialDataReceived) { - initialDataReceived = true; - resolveInit!(); - } - } - } - } - - // Stream ended normally 
(server closed connection) - reconnect - if (!abortController.signal.aborted) { - onDisconnect?.(); - retryCount++; - await sleep(backoff(retryCount)); - continue; - } - } catch (error) { - if (abortController.signal.aborted) { - break; - } - console.error('@vercel/flags-core: Stream error', error); - onDisconnect?.(); - if (!initialDataReceived) { - rejectInit!(error); - break; - } - retryCount++; - await sleep(backoff(retryCount)); - } - } - })(); - - return initPromise; -} diff --git a/packages/vercel-flags-core/src/evaluate.test.ts b/packages/vercel-flags-core/src/evaluate.test.ts index 31cfd632..d30677ff 100644 --- a/packages/vercel-flags-core/src/evaluate.test.ts +++ b/packages/vercel-flags-core/src/evaluate.test.ts @@ -1340,6 +1340,113 @@ describe('evaluate', () => { }); }); + describe('regex input length limit', () => { + it('should return false for REGEX when input exceeds MAX_REGEX_INPUT_LENGTH', () => { + const longString = 'a'.repeat(10_001); + expect( + evaluate({ + definition: { + seed: undefined, + environments: { + production: { + rules: [ + { + conditions: [ + [ + ['user', 'id'], + Comparator.REGEX, + { type: 'regex', pattern: 'a+', flags: '' }, + ], + ], + outcome: 1, + }, + ], + fallthrough: 0, + }, + }, + variants: [false, true], + } satisfies Packed.FlagDefinition, + environment: 'production', + entities: { user: { id: longString } }, + }), + ).toEqual({ + value: false, + reason: ResolutionReason.FALLTHROUGH, + outcomeType: OutcomeType.VALUE, + }); + }); + + it('should return false for NOT_REGEX when input exceeds MAX_REGEX_INPUT_LENGTH', () => { + const longString = 'a'.repeat(10_001); + expect( + evaluate({ + definition: { + seed: undefined, + environments: { + production: { + rules: [ + { + conditions: [ + [ + ['user', 'id'], + Comparator.NOT_REGEX, + { type: 'regex', pattern: 'b+', flags: '' }, + ], + ], + outcome: 1, + }, + ], + fallthrough: 0, + }, + }, + variants: [false, true], + } satisfies Packed.FlagDefinition, + environment: 
'production', + entities: { user: { id: longString } }, + }), + ).toEqual({ + value: false, + reason: ResolutionReason.FALLTHROUGH, + outcomeType: OutcomeType.VALUE, + }); + }); + + it('should still match REGEX when input is within limit', () => { + const okString = 'a'.repeat(10_000); + expect( + evaluate({ + definition: { + seed: undefined, + environments: { + production: { + rules: [ + { + conditions: [ + [ + ['user', 'id'], + Comparator.REGEX, + { type: 'regex', pattern: 'a+', flags: '' }, + ], + ], + outcome: 1, + }, + ], + fallthrough: 0, + }, + }, + variants: [false, true], + } satisfies Packed.FlagDefinition, + environment: 'production', + entities: { user: { id: okString } }, + }), + ).toEqual({ + value: true, + reason: ResolutionReason.RULE_MATCH, + outcomeType: OutcomeType.VALUE, + }); + }); + }); + describe('splits', () => { it.each<{ name: string; diff --git a/packages/vercel-flags-core/src/evaluate.ts b/packages/vercel-flags-core/src/evaluate.ts index 6afa951f..91799f70 100644 --- a/packages/vercel-flags-core/src/evaluate.ts +++ b/packages/vercel-flags-core/src/evaluate.ts @@ -7,10 +7,15 @@ import { Packed, ResolutionReason, } from './types'; -import { exhaustivenessCheck } from './utils'; type PathArray = (string | number)[]; +const MAX_REGEX_INPUT_LENGTH = 10_000; + +function exhaustivenessCheck(_: never): never { + throw new Error('Exhaustiveness check failed'); +} + function getProperty(obj: any, pathArray: PathArray): any { return pathArray.reduce((acc: any, key: string | number) => { if (acc && key in acc) { @@ -54,10 +59,12 @@ function matchTargetList( targets: Packed.TargetList, params: EvaluationParams, ): boolean { - for (const [kind, attributes] of Object.entries(targets)) { - for (const [attribute, values] of Object.entries(attributes)) { + for (const kind in targets) { + const attributes = targets[kind]!; + for (const attribute in attributes) { const entity = access([kind, attribute], params); - if (isString(entity) && 
values.includes(entity)) return true; + if (isString(entity) && attributes[attribute]!.includes(entity)) + return true; } } return false; @@ -211,6 +218,7 @@ function matchConditions( case Comparator.REGEX: if ( isString(lhs) && + lhs.length <= MAX_REGEX_INPUT_LENGTH && typeof rhs === 'object' && !Array.isArray(rhs) && rhs?.type === 'regex' @@ -222,6 +230,7 @@ function matchConditions( case Comparator.NOT_REGEX: if ( isString(lhs) && + lhs.length <= MAX_REGEX_INPUT_LENGTH && typeof rhs === 'object' && !Array.isArray(rhs) && rhs?.type === 'regex' @@ -366,6 +375,8 @@ export function evaluate( * The params used for the evaluation */ params: EvaluationParams, + /** Tracks visited environments to detect circular reuse. */ + _visited?: Set, ): EvaluationResult { const envConfig = params.definition.environments[params.environment]; @@ -395,7 +406,17 @@ export function evaluate( ); } - return evaluate({ ...params, environment: envConfig.reuse }); + const visited = _visited ?? new Set(); + if (visited.has(envConfig.reuse)) { + return { + reason: ResolutionReason.ERROR, + errorMessage: `Circular environment reuse detected: "${envConfig.reuse}"`, + value: params.defaultValue, + }; + } + visited.add(params.environment); + + return evaluate({ ...params, environment: envConfig.reuse }, visited); } if (envConfig.targets) { diff --git a/packages/vercel-flags-core/src/index.common.ts b/packages/vercel-flags-core/src/index.common.ts index ed10c6ba..ffcebe51 100644 --- a/packages/vercel-flags-core/src/index.common.ts +++ b/packages/vercel-flags-core/src/index.common.ts @@ -1,7 +1,11 @@ export { - FlagNetworkDataSource, - type FlagNetworkDataSourceOptions, -} from './data-source/flag-network-data-source'; + Controller, + /** @deprecated Use `Controller` instead */ + Controller as FlagNetworkDataSource, + type ControllerOptions, + /** @deprecated Use `ControllerOptions` instead */ + type ControllerOptions as FlagNetworkDataSourceOptions, +} from './controller'; export { 
FallbackEntryNotFoundError, FallbackNotFoundError, diff --git a/packages/vercel-flags-core/src/index.default.ts b/packages/vercel-flags-core/src/index.default.ts index fe7f0bac..00e5796f 100644 --- a/packages/vercel-flags-core/src/index.default.ts +++ b/packages/vercel-flags-core/src/index.default.ts @@ -10,7 +10,7 @@ * We do not need to repeat the JSDoc on the next-js export. */ -import * as fns from './client-fns'; +import * as fns from './controller-fns'; import { createCreateRawClient } from './create-raw-client'; import { make } from './index.make'; diff --git a/packages/vercel-flags-core/src/index.make.test.ts b/packages/vercel-flags-core/src/index.make.test.ts index 135f612c..abe294cd 100644 --- a/packages/vercel-flags-core/src/index.make.test.ts +++ b/packages/vercel-flags-core/src/index.make.test.ts @@ -2,9 +2,9 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; import type { createCreateRawClient } from './create-raw-client'; import { make } from './index.make'; -// Mock the FlagNetworkDataSource to avoid real network calls -vi.mock('./data-source/flag-network-data-source', () => ({ - FlagNetworkDataSource: vi.fn().mockImplementation(({ sdkKey }) => ({ +// Mock the Controller to avoid real network calls +vi.mock('./controller', () => ({ + Controller: vi.fn().mockImplementation(({ sdkKey }) => ({ sdkKey, read: vi.fn().mockResolvedValue({ projectId: 'test', @@ -17,7 +17,7 @@ vi.mock('./data-source/flag-network-data-source', () => ({ })), })); -import { FlagNetworkDataSource } from './data-source/flag-network-data-source'; +import { Controller } from './controller'; function createMockCreateRawClient(): ReturnType { return vi.fn().mockImplementation(({ dataSource }) => ({ @@ -62,7 +62,7 @@ describe('make', () => { const client = createClient('vf_test_key'); - expect(FlagNetworkDataSource).toHaveBeenCalledWith({ + expect(Controller).toHaveBeenCalledWith({ sdkKey: 'vf_test_key', }); expect(createRawClient).toHaveBeenCalled(); @@ -77,7 
+77,7 @@ describe('make', () => { 'flags:edgeConfigId=ecfg_123&edgeConfigToken=token&sdkKey=vf_conn_key'; const client = createClient(connectionString); - expect(FlagNetworkDataSource).toHaveBeenCalledWith({ + expect(Controller).toHaveBeenCalledWith({ sdkKey: 'vf_conn_key', }); expect(client).toBeDefined(); @@ -87,7 +87,9 @@ describe('make', () => { const createRawClient = createMockCreateRawClient(); const { createClient } = make(createRawClient); - expect(() => createClient('')).toThrow('flags: Missing sdkKey'); + expect(() => createClient('')).toThrow( + '@vercel/flags-core: Missing sdkKey', + ); }); it('should throw for invalid connection string', () => { @@ -95,7 +97,7 @@ describe('make', () => { const { createClient } = make(createRawClient); expect(() => createClient('invalid_string')).toThrow( - 'flags: Missing sdkKey', + '@vercel/flags-core: Missing sdkKey', ); }); @@ -105,7 +107,7 @@ describe('make', () => { expect(() => createClient('flags:edgeConfigId=ecfg_123&edgeConfigToken=token'), - ).toThrow('flags: Missing sdkKey'); + ).toThrow('@vercel/flags-core: Missing sdkKey'); }); }); @@ -142,7 +144,9 @@ describe('make', () => { const { flagsClient } = make(createRawClient); - expect(() => flagsClient.evaluate).toThrow('flags: Missing sdkKey'); + expect(() => flagsClient.evaluate).toThrow( + '@vercel/flags-core: Missing sdkKey', + ); }); it('should cache the client after first access', () => { @@ -167,7 +171,7 @@ describe('make', () => { const { flagsClient } = make(createRawClient); const _ = flagsClient.evaluate; - expect(FlagNetworkDataSource).toHaveBeenCalledWith({ + expect(Controller).toHaveBeenCalledWith({ sdkKey: 'vf_env_key', }); }); @@ -180,7 +184,7 @@ describe('make', () => { const { flagsClient } = make(createRawClient); const _ = flagsClient.evaluate; - expect(FlagNetworkDataSource).toHaveBeenCalledWith({ + expect(Controller).toHaveBeenCalledWith({ sdkKey: 'vf_flags_key', }); }); @@ -213,7 +217,7 @@ describe('make', () => { // Access with first 
key const _ = flagsClient.evaluate; - expect(FlagNetworkDataSource).toHaveBeenCalledWith({ + expect(Controller).toHaveBeenCalledWith({ sdkKey: 'vf_first_key', }); @@ -223,7 +227,7 @@ describe('make', () => { // Access again with new key const __ = flagsClient.initialize; - expect(FlagNetworkDataSource).toHaveBeenCalledWith({ + expect(Controller).toHaveBeenCalledWith({ sdkKey: 'vf_second_key', }); }); diff --git a/packages/vercel-flags-core/src/index.make.ts b/packages/vercel-flags-core/src/index.make.ts index 422b984e..e1af5d93 100644 --- a/packages/vercel-flags-core/src/index.make.ts +++ b/packages/vercel-flags-core/src/index.make.ts @@ -2,18 +2,15 @@ * Factory functions for exports of index.default.ts and index.next-js.ts */ +import { Controller, type ControllerOptions } from './controller'; import type { createCreateRawClient } from './create-raw-client'; -import { - FlagNetworkDataSource, - type FlagNetworkDataSourceOptions, -} from './data-source/flag-network-data-source'; import type { FlagsClient } from './types'; import { parseSdkKeyFromFlagsConnectionString } from './utils/sdk-keys'; /** * Options for createClient */ -export type CreateClientOptions = Omit; +export type CreateClientOptions = Omit; export function make( createRawClient: ReturnType, @@ -34,20 +31,28 @@ export function make( sdkKeyOrConnectionString: string, options?: CreateClientOptions, ): FlagsClient { - if (!sdkKeyOrConnectionString) throw new Error('flags: Missing sdkKey'); + if (!sdkKeyOrConnectionString) + throw new Error('@vercel/flags-core: Missing sdkKey'); + + if (typeof sdkKeyOrConnectionString !== 'string') + throw new Error( + `@vercel/flags-core: Invalid sdkKey. 
Expected string, got ${typeof sdkKeyOrConnectionString}`, + ); // Parse connection string if needed (e.g., "flags:edgeConfigId=...&sdkKey=vf_xxx") const sdkKey = parseSdkKeyFromFlagsConnectionString( sdkKeyOrConnectionString, ); if (!sdkKey) { - throw new Error('flags: Missing sdkKey in connection string'); + throw new Error( + '@vercel/flags-core: Missing sdkKey in connection string', + ); } // sdk key contains the environment - const dataSource = new FlagNetworkDataSource({ sdkKey, ...options }); + const controller = new Controller({ sdkKey, ...options }); return createRawClient({ - dataSource, + controller, origin: { provider: 'vercel', sdkKey }, }); } @@ -65,7 +70,7 @@ export function make( const sdkKey = parseSdkKeyFromFlagsConnectionString(process.env.FLAGS); if (!sdkKey) { - throw new Error('flags: Missing sdkKey'); + throw new Error('@vercel/flags-core: Missing sdkKey'); } _defaultFlagsClient = createClient(sdkKey); } diff --git a/packages/vercel-flags-core/src/index.next-js.ts b/packages/vercel-flags-core/src/index.next-js.ts index 1e72da38..7cbb9127 100644 --- a/packages/vercel-flags-core/src/index.next-js.ts +++ b/packages/vercel-flags-core/src/index.next-js.ts @@ -11,7 +11,7 @@ */ import { cacheLife } from 'next/cache'; -import * as fns from './client-fns'; +import * as fns from './controller-fns'; import { createCreateRawClient } from './create-raw-client'; import { make } from './index.make'; @@ -31,7 +31,7 @@ function setCacheLife(): void { } } -const cachedFns: typeof fns = { +const cachedFns: Parameters<typeof createCreateRawClient>[0] = { initialize: async (...args) => { 'use cache'; setCacheLife(); diff --git a/packages/vercel-flags-core/src/integration.test.ts b/packages/vercel-flags-core/src/integration.test.ts index ae63e941..5ac8e88b 100644 --- a/packages/vercel-flags-core/src/integration.test.ts +++ b/packages/vercel-flags-core/src/integration.test.ts @@ -48,7 +48,7 @@ describe('integration evaluate', () => { expect(result.reason).toBe(ResolutionReason.ERROR); 
expect(result.errorCode).toBe('FLAG_NOT_FOUND'); expect(result.errorMessage).toBe( - 'Definition not found for flag "does-not-exist"', + '@vercel/flags-core: Definition not found for flag "does-not-exist"', ); expect(result.metrics).toBeDefined(); }); diff --git a/packages/vercel-flags-core/src/lib/report-value.ts b/packages/vercel-flags-core/src/lib/report-value.ts index 94f5990a..43994038 100644 --- a/packages/vercel-flags-core/src/lib/report-value.ts +++ b/packages/vercel-flags-core/src/lib/report-value.ts @@ -2,7 +2,7 @@ import { version } from '../../package.json'; import type { OutcomeType, ResolutionReason } from '../types'; /** - * Only used interally for now. + * Only used internally for now. */ export function internalReportValue( key: string, diff --git a/packages/vercel-flags-core/src/openfeature.test.ts b/packages/vercel-flags-core/src/openfeature.test.ts index 2f5a7f10..20fd020b 100644 --- a/packages/vercel-flags-core/src/openfeature.test.ts +++ b/packages/vercel-flags-core/src/openfeature.test.ts @@ -1,22 +1,46 @@ import { StandardResolutionReasons } from '@openfeature/server-sdk'; import { describe, expect, it } from 'vitest'; -import * as fns from './client-fns'; +import * as fns from './controller-fns'; import { createCreateRawClient } from './create-raw-client'; -import { InMemoryDataSource } from './data-source/in-memory-data-source'; import { VercelProvider } from './openfeature.default'; -import type { Packed } from './types'; +import type { ControllerInterface, Datafile, Packed } from './types'; + +function createStaticController(opts: { + data: Packed.Data; + projectId: string; + environment: string; +}): ControllerInterface { + const datafile: Datafile = { + ...opts.data, + projectId: opts.projectId, + environment: opts.environment, + metrics: { + readMs: 0, + source: 'in-memory', + cacheStatus: 'HIT', + connectionState: 'connected', + mode: 'streaming', + }, + }; + return { + initialize: () => Promise.resolve(), + read: () => 
Promise.resolve(datafile), + getDatafile: () => Promise.resolve(datafile), + shutdown: () => {}, + }; +} const createRawClient = createCreateRawClient(fns); describe('VercelProvider', () => { describe('constructor', () => { it('should accept a FlagsClient', () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: {}, segments: {} }, projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); expect(provider.metadata.name).toBe('vercel-nodejs-provider'); @@ -35,7 +59,7 @@ describe('VercelProvider', () => { describe('resolveBooleanEvaluation', () => { it('should resolve a boolean flag', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: { 'boolean-flag': { @@ -48,7 +72,7 @@ describe('VercelProvider', () => { projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); const result = await provider.resolveBooleanEvaluation( @@ -62,12 +86,12 @@ describe('VercelProvider', () => { }); it('should return default value when flag is not found', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: {}, segments: {} }, projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); const result = await provider.resolveBooleanEvaluation( @@ -82,7 +106,7 @@ describe('VercelProvider', () => { }); it('should use fallthrough outcome for active flags', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: { 
'active-flag': { @@ -99,7 +123,7 @@ describe('VercelProvider', () => { projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); const result = await provider.resolveBooleanEvaluation( @@ -115,7 +139,7 @@ describe('VercelProvider', () => { describe('resolveStringEvaluation', () => { it('should resolve a string flag', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: { 'string-flag': { @@ -128,7 +152,7 @@ describe('VercelProvider', () => { projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); const result = await provider.resolveStringEvaluation( @@ -142,12 +166,12 @@ describe('VercelProvider', () => { }); it('should return default value when flag is not found', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: {}, segments: {} }, projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); const result = await provider.resolveStringEvaluation( @@ -164,7 +188,7 @@ describe('VercelProvider', () => { describe('resolveNumberEvaluation', () => { it('should resolve a number flag', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: { 'number-flag': { @@ -177,7 +201,7 @@ describe('VercelProvider', () => { projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); const result = await 
provider.resolveNumberEvaluation( @@ -191,12 +215,12 @@ describe('VercelProvider', () => { }); it('should return default value when flag is not found', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: {}, segments: {} }, projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); const result = await provider.resolveNumberEvaluation( @@ -213,7 +237,7 @@ describe('VercelProvider', () => { describe('resolveObjectEvaluation', () => { it('should resolve an object flag', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: { 'object-flag': { @@ -226,7 +250,7 @@ describe('VercelProvider', () => { projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); const result = await provider.resolveObjectEvaluation( @@ -240,12 +264,12 @@ describe('VercelProvider', () => { }); it('should return default value when flag is not found', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: {}, segments: {} }, projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); const result = await provider.resolveObjectEvaluation( @@ -262,12 +286,12 @@ describe('VercelProvider', () => { describe('initialize', () => { it('should initialize without errors', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: {}, segments: {} }, projectId: 'test', environment: 'production', }); - const client = 
createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); await expect(provider.initialize()).resolves.toBeUndefined(); @@ -276,12 +300,12 @@ describe('VercelProvider', () => { describe('onClose', () => { it('should close without errors', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: {}, segments: {} }, projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); await expect(provider.onClose()).resolves.toBeUndefined(); @@ -290,7 +314,7 @@ describe('VercelProvider', () => { describe('context passing', () => { it('should pass evaluation context to the client', async () => { - const dataSource = new InMemoryDataSource({ + const controller = createStaticController({ data: { definitions: { 'context-flag': { @@ -308,7 +332,7 @@ describe('VercelProvider', () => { projectId: 'test', environment: 'production', }); - const client = createRawClient({ dataSource }); + const client = createRawClient({ controller }); const provider = new VercelProvider(client); const result = await provider.resolveStringEvaluation( diff --git a/packages/vercel-flags-core/src/test-utils.ts b/packages/vercel-flags-core/src/test-utils.ts new file mode 100644 index 00000000..59dd8d30 --- /dev/null +++ b/packages/vercel-flags-core/src/test-utils.ts @@ -0,0 +1,15 @@ +const SYMBOL_FOR_REQ_CONTEXT = Symbol.for('@vercel/request-context'); + +/** + * Installs a fake Vercel request context on `globalThis`. + * Returns a cleanup function that removes it. 
+ */ +export function setRequestContext(headers: Record<string, string>): () => void { + const mockContext = { headers }; + (globalThis as any)[SYMBOL_FOR_REQ_CONTEXT] = { + get: () => mockContext, + }; + return () => { + delete (globalThis as any)[SYMBOL_FOR_REQ_CONTEXT]; + }; +} diff --git a/packages/vercel-flags-core/src/types.ts b/packages/vercel-flags-core/src/types.ts index 902d201e..c3fc9220 100644 --- a/packages/vercel-flags-core/src/types.ts +++ b/packages/vercel-flags-core/src/types.ts @@ -1,3 +1,5 @@ +import type { ControllerInstance } from './controller-fns'; + /** * Options for stream connection behavior */ @@ -30,6 +32,8 @@ export type DatafileInput = Packed.Data & { * Some older responses might return a string instead of a number. Both will be timestamps. */ configUpdatedAt?: number | string; + /** Version number of the data */ + revision?: number; }; /** Datafile with metrics attached (returned by the client) */ @@ -44,7 +48,7 @@ export type BundledDefinitions = DatafileInput & { configUpdatedAt: number; /** hash of the data */ digest: string; - /** version number of the dat */ + /** version number of the data */ revision: number; }; @@ -65,6 +69,8 @@ export type Metrics = { cacheStatus: 'HIT' | 'MISS' | 'STALE'; /** Whether the stream is currently connected */ connectionState: 'connected' | 'disconnected'; + /** The current operating mode of the client */ + mode: 'streaming' | 'polling' | 'build' | 'offline'; /** Time in ms for the pure flag evaluation logic (only present on EvaluationResult) */ evaluationMs?: number; }; @@ -72,7 +78,7 @@ export type Metrics = { /** * DataSource interface for the Vercel Flags client */ -export interface DataSource { +export interface ControllerInterface { /** * Initialize the data source by fetching the initial file or setting up polling or * subscriptions. 
@@ -273,7 +279,7 @@ export enum OutcomeType { * - ends with (endsWith) * - does not end with (!endsWith) * - exists (ex) - * - deos not exist (!ex) + * - does not exist (!ex) * - is greater than (gt) * - is greater than or equal to (gte) * - is lower than (lt) diff --git a/packages/vercel-flags-core/src/utils.ts b/packages/vercel-flags-core/src/utils.ts deleted file mode 100644 index 7db62cec..00000000 --- a/packages/vercel-flags-core/src/utils.ts +++ /dev/null @@ -1,24 +0,0 @@ -/** - * This function is used to check for exhaustiveness in switch statements. - * - * @param _ - The value to check. - * - * @example - * Given `type Union = 'a' | 'b' | 'c'`, the following code will not compile: - * ```ts - * switch (union) { - * case 'a': - * return 'a'; - * case 'b': - * return 'b'; - * default: - * exhaustivenessCheck(union); // This will throw an error - * } - * ``` - * This is because `value` has been narrowed to `'c'` by the `default` arm, - * which is not assignable to `never`. If we covered the `'c'` case, the type - * would narrow to `never`, which is assignable to `never` and would not cause an error. - */ -export function exhaustivenessCheck(_: never): never { - throw new Error('Exhaustiveness check failed'); -} diff --git a/packages/vercel-flags-core/src/utils/read-bundled-definitions.test.ts b/packages/vercel-flags-core/src/utils/read-bundled-definitions.test.ts index cc7cf0f3..aa58c050 100644 --- a/packages/vercel-flags-core/src/utils/read-bundled-definitions.test.ts +++ b/packages/vercel-flags-core/src/utils/read-bundled-definitions.test.ts @@ -1,7 +1,7 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; // The readBundledDefinitions function uses dynamic import which is hard to mock. -// Instead, we test the behavior indirectly through the FlagNetworkDataSource +// Instead, we test the behavior indirectly through the Controller // which already mocks readBundledDefinitions. 
// Here we just test the function interface and basic behavior. @@ -49,7 +49,7 @@ describe('readBundledDefinitions', () => { }); // The detailed behavior of readBundledDefinitions is tested indirectly - // through FlagNetworkDataSource tests which mock readBundledDefinitions. + // through Controller tests which mock readBundledDefinitions. // Those tests cover: // - 'ok' state with bundled definitions // - 'missing-file' state diff --git a/packages/vercel-flags-core/src/utils/usage-tracker.test.ts b/packages/vercel-flags-core/src/utils/usage-tracker.test.ts index f9fa032d..524ffc0b 100644 --- a/packages/vercel-flags-core/src/utils/usage-tracker.test.ts +++ b/packages/vercel-flags-core/src/utils/usage-tracker.test.ts @@ -1,14 +1,5 @@ -import { HttpResponse, http } from 'msw'; -import { setupServer } from 'msw/node'; -import { - afterAll, - afterEach, - beforeAll, - describe, - expect, - it, - vi, -} from 'vitest'; +import { afterEach, describe, expect, it, vi } from 'vitest'; +import { setRequestContext } from '../test-utils'; import { type FlagsConfigReadEvent, UsageTracker } from './usage-tracker'; // Mock @vercel/functions @@ -16,56 +7,69 @@ vi.mock('@vercel/functions', () => ({ waitUntil: vi.fn(), })); -const server = setupServer(); +const fetchMock = vi.fn(); + +function jsonResponse( + body: unknown, + init?: { status?: number; headers?: Record<string, string> }, +): Promise<Response> { + return Promise.resolve( + new Response(JSON.stringify(body), { + status: init?.status ?? 
200, + headers: { + 'Content-Type': 'application/json', + ...init?.headers, + }, + }), + ); +} -beforeAll(() => server.listen()); afterEach(() => { - server.resetHandlers(); + fetchMock.mockReset(); vi.restoreAllMocks(); // Clean up environment variables delete process.env.VERCEL_DEPLOYMENT_ID; delete process.env.VERCEL_REGION; delete process.env.DEBUG; }); -afterAll(() => server.close()); + +function createTracker(sdkKey = 'test-key') { + return new UsageTracker({ + sdkKey, + host: 'https://example.com', + fetch: fetchMock, + }); +} + +function getBody(callIndex = 0): unknown { + const [, init] = fetchMock.mock.calls[callIndex]!; + return JSON.parse(init!.body as string); +} + +function getHeaders(callIndex = 0): Record<string, string> { + const [, init] = fetchMock.mock.calls[callIndex]!; + return init!.headers as Record<string, string>; } describe('UsageTracker', () => { describe('constructor', () => { it('should create an instance with sdkKey and host', () => { - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); - + const tracker = createTracker(); expect(tracker).toBeInstanceOf(UsageTracker); }); }); describe('trackRead', () => { it('should batch events and send them after flush', async () => { - const receivedEvents: unknown[] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = await request.json(); - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead(); - tracker.flush(); - - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); + await tracker.flush(); - const events = receivedEvents[0] as FlagsConfigReadEvent[]; + expect(fetchMock).toHaveBeenCalledTimes(1); + const events = getBody() as FlagsConfigReadEvent[]; 
expect(events).toHaveLength(1); const event = events[0] as FlagsConfigReadEvent; expect(event.type).toBe('FLAGS_CONFIG_READ'); @@ -76,154 +80,80 @@ describe('UsageTracker', () => { process.env.VERCEL_DEPLOYMENT_ID = 'dpl_123'; process.env.VERCEL_REGION = 'iad1'; - const receivedEvents: unknown[] = []; + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = await request.json(); - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); - - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead(); - tracker.flush(); - - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); + await tracker.flush(); - const events = receivedEvents[0] as FlagsConfigReadEvent[]; + const events = getBody() as FlagsConfigReadEvent[]; const event = events[0] as FlagsConfigReadEvent; expect(event.payload.deploymentId).toBe('dpl_123'); expect(event.payload.region).toBe('iad1'); }); it('should batch multiple events', async () => { - const receivedEvents: unknown[] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = await request.json(); - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); // Track multiple reads (without request context, so they won't be deduplicated) tracker.trackRead(); tracker.trackRead(); tracker.trackRead(); - tracker.flush(); - - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); + await tracker.flush(); - const events = receivedEvents[0] as Array<{ type: string }>; + const events = getBody() as Array<{ type: string }>; 
expect(events).toHaveLength(3); }); it('should send correct authorization header', async () => { - let authHeader: string | null = null; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - authHeader = request.headers.get('Authorization'); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); const tracker = new UsageTracker({ sdkKey: 'my-secret-key', host: 'https://example.com', + fetch: fetchMock, }); tracker.trackRead(); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(authHeader).toBe('Bearer my-secret-key'); - }); + expect(getHeaders().Authorization).toBe('Bearer my-secret-key'); }); it('should send correct content-type header', async () => { - let contentType: string | null = null; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - contentType = request.headers.get('Content-Type'); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead(); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(contentType).toBe('application/json'); - }); + expect(getHeaders()['Content-Type']).toBe('application/json'); }); it('should send user-agent header', async () => { - let userAgent: string | null = null; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - userAgent = request.headers.get('User-Agent'); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead(); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { 
- expect(userAgent).toMatch(/^VercelFlagsCore\//); - }); + expect(getHeaders()['User-Agent']).toMatch(/^VercelFlagsCore\//); }); it('should not send empty batches', async () => { - let requestCount = 0; - - server.use( - http.post('https://example.com/v1/ingest', async () => { - requestCount++; - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); // Flush without tracking anything - tracker.flush(); + await tracker.flush(); - // Wait a bit to ensure no request is made - await new Promise((r) => setTimeout(r, 100)); - expect(requestCount).toBe(0); + expect(fetchMock).not.toHaveBeenCalled(); }); it('should handle fetch errors gracefully', async () => { @@ -231,22 +161,12 @@ describe('UsageTracker', () => { .spyOn(console, 'error') .mockImplementation(() => {}); - server.use( - http.post('https://example.com/v1/ingest', () => { - return HttpResponse.error(); - }), - ); + fetchMock.mockRejectedValue(new TypeError('Failed to fetch')); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead(); - tracker.flush(); - - // Wait for the flush to complete - await new Promise((r) => setTimeout(r, 100)); + await tracker.flush(); // Should not throw and should not log error (only logs in debug mode) expect(consoleSpy).not.toHaveBeenCalled(); @@ -255,22 +175,12 @@ describe('UsageTracker', () => { it('should handle non-ok responses gracefully', async () => { const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - server.use( - http.post('https://example.com/v1/ingest', () => { - return new HttpResponse(null, { status: 500 }); - }), - ); + fetchMock.mockResolvedValue(new Response(null, { status: 500 })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 
'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead(); - tracker.flush(); - - // Wait for the flush to complete - await new Promise((r) => setTimeout(r, 100)); + await tracker.flush(); // Should not log in non-debug mode expect(consoleSpy).not.toHaveBeenCalled(); @@ -284,26 +194,21 @@ describe('UsageTracker', () => { ); const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - server.use( - http.post('https://example.com/v1/ingest', () => { - return new HttpResponse(null, { status: 500 }); - }), - ); + fetchMock.mockResolvedValue(new Response(null, { status: 500 })); const tracker = new FreshUsageTracker({ sdkKey: 'test-key', host: 'https://example.com', + fetch: fetchMock, }); tracker.trackRead(); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(consoleSpy).toHaveBeenCalledWith( - '@vercel/flags-core: Failed to send events:', - expect.any(String), - ); - }); + expect(consoleSpy).toHaveBeenCalledWith( + '@vercel/flags-core: Failed to send events:', + expect.any(String), + ); }); it('should send x-vercel-debug-ingest header in debug mode', async () => { @@ -312,49 +217,41 @@ describe('UsageTracker', () => { const { UsageTracker: FreshUsageTracker } = await import( './usage-tracker' ); - let debugHeader: string | null = null; + const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - debugHeader = request.headers.get('x-vercel-debug-ingest'); - return HttpResponse.json({ ok: true }); - }), + fetchMock.mockImplementation(() => + jsonResponse( + { ok: true }, + { headers: { 'x-vercel-id': 'iad1::abcdef-1234' } }, + ), ); const tracker = new FreshUsageTracker({ sdkKey: 'test-key', host: 'https://example.com', + fetch: fetchMock, }); tracker.trackRead(); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(debugHeader).toBe('1'); - }); + 
expect(getHeaders()['x-vercel-debug-ingest']).toBe('1'); + expect(consoleSpy).toHaveBeenCalledWith( + '@vercel/flags-core: Ingest response 200 for 1 events on iad1::abcdef-1234', + ); + + consoleSpy.mockRestore(); }); it('should not send x-vercel-debug-ingest header when not in debug mode', async () => { - let debugHeader: string | null = 'initial'; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - debugHeader = request.headers.get('x-vercel-debug-ingest'); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead(); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(debugHeader).toBeNull(); - }); + expect(getHeaders()['x-vercel-debug-ingest']).toBeUndefined(); }); it('should log ingest response in debug mode', async () => { @@ -365,209 +262,222 @@ describe('UsageTracker', () => { ); const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - server.use( - http.post('https://example.com/v1/ingest', () => { - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); const tracker = new FreshUsageTracker({ sdkKey: 'test-key', host: 'https://example.com', + fetch: fetchMock, }); tracker.trackRead(); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining( - '@vercel/flags-core: Ingest response 200 for 1 events', - ), - ); - }); + expect(consoleSpy).toHaveBeenCalledWith( + expect.stringContaining( + '@vercel/flags-core: Ingest response 200 for 1 events', + ), + ); }); }); describe('flush', () => { it('should trigger immediate flush of pending events', async () => { - const receivedEvents: unknown[] = []; - - server.use( - 
http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = await request.json(); - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead(); // Flush immediately instead of waiting for timeout - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); + expect(fetchMock).toHaveBeenCalledTimes(1); }); it('should be safe to call flush multiple times', async () => { - let requestCount = 0; - - server.use( - http.post('https://example.com/v1/ingest', async () => { - requestCount++; - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead(); tracker.flush(); tracker.flush(); - tracker.flush(); - - await vi.waitFor(() => { - expect(requestCount).toBe(1); - }); + await tracker.flush(); - // Wait a bit more to ensure no additional requests - await new Promise((r) => setTimeout(r, 100)); - expect(requestCount).toBe(1); + expect(fetchMock).toHaveBeenCalledTimes(1); }); }); describe('request context deduplication', () => { it('should deduplicate events with the same request context', async () => { - const receivedEvents: unknown[] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = await request.json(); - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); - - // Set up a mock request context - const SYMBOL_FOR_REQ_CONTEXT = Symbol.for('@vercel/request-context'); - const mockContext = { - headers: { - 'x-vercel-id': 'test-request-id', - host: 'example.com', - }, - }; - - 
(globalThis as any)[SYMBOL_FOR_REQ_CONTEXT] = { - get: () => mockContext, - }; + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', + const cleanupContext = setRequestContext({ + 'x-vercel-id': 'test-request-id', + host: 'example.com', }); + const tracker = createTracker(); + // Track multiple times with same context tracker.trackRead(); tracker.trackRead(); tracker.trackRead(); - tracker.flush(); - - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); + await tracker.flush(); // Only one event should be recorded due to deduplication - const events = receivedEvents[0] as Array<{ type: string }>; + const events = getBody() as Array<{ type: string }>; expect(events).toHaveLength(1); - // Clean up - delete (globalThis as any)[SYMBOL_FOR_REQ_CONTEXT]; + cleanupContext(); }); it('should include headers from request context', async () => { - const receivedEvents: FlagsConfigReadEvent[][] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = (await request.json()) as FlagsConfigReadEvent[]; - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - // Set up a mock request context - const SYMBOL_FOR_REQ_CONTEXT = Symbol.for('@vercel/request-context'); - const mockContext = { - headers: { - 'x-vercel-id': 'req_123', - host: 'myapp.vercel.app', - }, - }; + const cleanupContext = setRequestContext({ + 'x-vercel-id': 'req_123', + host: 'myapp.vercel.app', + }); - (globalThis as any)[SYMBOL_FOR_REQ_CONTEXT] = { - get: () => mockContext, - }; + const tracker = createTracker(); - const tracker = new UsageTracker({ - sdkKey: 'test-key', + tracker.trackRead(); + await tracker.flush(); + + const events = getBody() as FlagsConfigReadEvent[]; + const event = events[0] as FlagsConfigReadEvent; + 
expect(event.payload.vercelRequestId).toBe('req_123'); + expect(event.payload.invocationHost).toBe('myapp.vercel.app'); + + cleanupContext(); + }); + }); + + describe('cross-instance deduplication', () => { + it('should not deduplicate across separate UsageTracker instances', async () => { + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); + + const cleanupContext = setRequestContext({ + 'x-vercel-id': 'shared-request-id', + host: 'example.com', + }); + + const tracker1 = new UsageTracker({ + sdkKey: 'key-1', host: 'https://example.com', + fetch: fetchMock, + }); + + const tracker2 = new UsageTracker({ + sdkKey: 'key-2', + host: 'https://example.com', + fetch: fetchMock, + }); + + // Both trackers track with the same request context + tracker1.trackRead(); + tracker2.trackRead(); + await tracker1.flush(); + await tracker2.flush(); + + // Each tracker should have sent its own event + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(getBody(0)).toHaveLength(1); + expect(getBody(1)).toHaveLength(1); + + cleanupContext(); + }); + }); + + describe('flush failure retry', () => { + it('should re-queue events on failed flush and send them on next flush', async () => { + let requestCount = 0; + + fetchMock.mockImplementation(async (_input, init) => { + requestCount++; + if (requestCount === 1) { + return new Response(null, { status: 500 }); + } + return new Response(JSON.stringify({ ok: true }), { + status: 200, + headers: { 'Content-Type': 'application/json' }, + }); }); + const tracker = createTracker(); + tracker.trackRead(); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); + expect(requestCount).toBe(1); + + // Events should have been re-queued — a new trackRead triggers + // a new schedule cycle which will include the re-queued events + tracker.trackRead(); + await tracker.flush(); + + expect(requestCount).toBe(2); + // Should contain both the re-queued event and the new one + 
expect(getBody(1)).toHaveLength(2); + }); + + it('should re-queue events on fetch error and send them on next flush', async () => { + let requestCount = 0; + + fetchMock.mockImplementation(async () => { + requestCount++; + if (requestCount === 1) { + throw new TypeError('Failed to fetch'); + } + return new Response(JSON.stringify({ ok: true }), { + status: 200, + headers: { 'Content-Type': 'application/json' }, + }); }); - const events = receivedEvents[0] as FlagsConfigReadEvent[]; - const event = events[0] as FlagsConfigReadEvent; - expect(event.payload.vercelRequestId).toBe('req_123'); - expect(event.payload.invocationHost).toBe('myapp.vercel.app'); + const consoleSpy = vi + .spyOn(console, 'error') + .mockImplementation(() => {}); - // Clean up - delete (globalThis as any)[SYMBOL_FOR_REQ_CONTEXT]; + const tracker = createTracker(); + + tracker.trackRead(); + await tracker.flush(); + + expect(requestCount).toBe(1); + + // Events should have been re-queued — a new trackRead triggers + // a new schedule cycle which will include the re-queued events + tracker.trackRead(); + await tracker.flush(); + + expect(requestCount).toBe(2); + // Should contain both the re-queued event and the new one + expect(getBody(1)).toHaveLength(2); + + consoleSpy.mockRestore(); }); }); describe('batch size limit', () => { it('should trigger flush when batch size reaches 50', async () => { - const receivedEvents: unknown[] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = await request.json(); - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); // Track 50 events (without request context to avoid deduplication) for (let i = 0; i < 50; i++) { tracker.trackRead(); } - // Should auto-flush at 50 events + // 
Should auto-flush at 50 events — wait for the scheduled flush await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); + expect(fetchMock).toHaveBeenCalledTimes(1); }); - const events = receivedEvents[0] as Array<{ type: string }>; + const events = getBody() as Array<{ type: string }>; expect(events).toHaveLength(50); }); }); @@ -582,10 +492,7 @@ describe('UsageTracker', () => { }, }; - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); // Should not throw expect(() => tracker.trackRead()).not.toThrow(); @@ -597,192 +504,91 @@ describe('UsageTracker', () => { describe('trackRead options', () => { it('should include configOrigin in the event payload', async () => { - const receivedEvents: FlagsConfigReadEvent[][] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = (await request.json()) as FlagsConfigReadEvent[]; - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead({ configOrigin: 'in-memory' }); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); - - const events = receivedEvents[0] as FlagsConfigReadEvent[]; + const events = getBody() as FlagsConfigReadEvent[]; const event = events[0] as FlagsConfigReadEvent; expect(event.payload.configOrigin).toBe('in-memory'); }); it('should include cacheStatus in the event payload', async () => { - const receivedEvents: FlagsConfigReadEvent[][] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = (await request.json()) as FlagsConfigReadEvent[]; - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + 
fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead({ configOrigin: 'in-memory', cacheStatus: 'HIT' }); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); - - const events = receivedEvents[0] as FlagsConfigReadEvent[]; + const events = getBody() as FlagsConfigReadEvent[]; const event = events[0] as FlagsConfigReadEvent; expect(event.payload.cacheStatus).toBe('HIT'); }); it('should include cacheIsFirstRead in the event payload', async () => { - const receivedEvents: FlagsConfigReadEvent[][] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = (await request.json()) as FlagsConfigReadEvent[]; - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead({ configOrigin: 'in-memory', cacheIsFirstRead: true }); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); - - const events = receivedEvents[0] as FlagsConfigReadEvent[]; + const events = getBody() as FlagsConfigReadEvent[]; const event = events[0] as FlagsConfigReadEvent; expect(event.payload.cacheIsFirstRead).toBe(true); }); it('should include cacheIsBlocking in the event payload', async () => { - const receivedEvents: FlagsConfigReadEvent[][] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = (await request.json()) as FlagsConfigReadEvent[]; - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); 
- const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead({ configOrigin: 'in-memory', cacheIsBlocking: true }); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); - - const events = receivedEvents[0] as FlagsConfigReadEvent[]; + const events = getBody() as FlagsConfigReadEvent[]; const event = events[0] as FlagsConfigReadEvent; expect(event.payload.cacheIsBlocking).toBe(true); }); it('should include duration in the event payload', async () => { - const receivedEvents: FlagsConfigReadEvent[][] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = (await request.json()) as FlagsConfigReadEvent[]; - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); tracker.trackRead({ configOrigin: 'in-memory', duration: 150 }); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); - - const events = receivedEvents[0] as FlagsConfigReadEvent[]; + const events = getBody() as FlagsConfigReadEvent[]; const event = events[0] as FlagsConfigReadEvent; expect(event.payload.duration).toBe(150); }); it('should include configUpdatedAt in the event payload', async () => { - const receivedEvents: FlagsConfigReadEvent[][] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = (await request.json()) as FlagsConfigReadEvent[]; - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 
'https://example.com', - }); + const tracker = createTracker(); const timestamp = Date.now(); tracker.trackRead({ configOrigin: 'in-memory', configUpdatedAt: timestamp, }); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); - - const events = receivedEvents[0] as FlagsConfigReadEvent[]; + const events = getBody() as FlagsConfigReadEvent[]; const event = events[0] as FlagsConfigReadEvent; expect(event.payload.configUpdatedAt).toBe(timestamp); }); it('should include all options in the event payload', async () => { - const receivedEvents: FlagsConfigReadEvent[][] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = (await request.json()) as FlagsConfigReadEvent[]; - receivedEvents.push(body); - return HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); const timestamp = Date.now(); tracker.trackRead({ @@ -793,13 +599,9 @@ describe('UsageTracker', () => { duration: 200, configUpdatedAt: timestamp, }); - tracker.flush(); + await tracker.flush(); - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); - - const events = receivedEvents[0] as FlagsConfigReadEvent[]; + const events = getBody() as FlagsConfigReadEvent[]; const event = events[0] as FlagsConfigReadEvent; expect(event.payload.configOrigin).toBe('in-memory'); expect(event.payload.cacheStatus).toBe('MISS'); @@ -810,30 +612,15 @@ describe('UsageTracker', () => { }); it('should omit undefined options from the event payload', async () => { - const receivedEvents: FlagsConfigReadEvent[][] = []; - - server.use( - http.post('https://example.com/v1/ingest', async ({ request }) => { - const body = (await request.json()) as FlagsConfigReadEvent[]; - receivedEvents.push(body); - return 
HttpResponse.json({ ok: true }); - }), - ); + fetchMock.mockImplementation(() => jsonResponse({ ok: true })); - const tracker = new UsageTracker({ - sdkKey: 'test-key', - host: 'https://example.com', - }); + const tracker = createTracker(); // Only pass configOrigin, omit others tracker.trackRead({ configOrigin: 'embedded' }); - tracker.flush(); - - await vi.waitFor(() => { - expect(receivedEvents.length).toBe(1); - }); + await tracker.flush(); - const events = receivedEvents[0] as FlagsConfigReadEvent[]; + const events = getBody() as FlagsConfigReadEvent[]; const event = events[0] as FlagsConfigReadEvent; expect(event.payload.configOrigin).toBe('embedded'); expect(event.payload.cacheStatus).toBeUndefined(); diff --git a/packages/vercel-flags-core/src/utils/usage-tracker.ts b/packages/vercel-flags-core/src/utils/usage-tracker.ts index bcbff405..63436ec6 100644 --- a/packages/vercel-flags-core/src/utils/usage-tracker.ts +++ b/packages/vercel-flags-core/src/utils/usage-tracker.ts @@ -18,12 +18,16 @@ export interface FlagsConfigReadEvent { region?: string; invocationHost?: string; vercelRequestId?: string; - cacheStatus?: 'HIT' | 'MISS'; + cacheStatus?: 'HIT' | 'MISS' | 'BYPASS' | 'STALE'; + cacheAction?: 'REFRESHING' | 'FOLLOWING' | 'NONE'; cacheIsBlocking?: boolean; cacheIsFirstRead?: boolean; duration?: number; configUpdatedAt?: number; - configOrigin?: 'in-memory' | 'embedded'; + configOrigin?: 'in-memory' | 'embedded' | 'poll' | 'stream' | 'constructor'; + mode?: 'poll' | 'stream' | 'build' | 'offline'; + revision?: string; + environment?: string; }; } @@ -37,10 +41,7 @@ interface EventBatcher { const MAX_BATCH_SIZE = 50; const MAX_BATCH_WAIT_MS = 5000; - -// WeakSet to track request contexts that have already been recorded -// Using WeakSet allows the context objects to be garbage collected -const trackedRequests = new WeakSet(); +const MAX_QUEUE_SIZE = 500; interface RequestContext { ctx: object | undefined; @@ -75,13 +76,16 @@ function getRequestContext(): 
RequestContext { export interface UsageTrackerOptions { sdkKey: string; host: string; + fetch: typeof fetch; } export interface TrackReadOptions { /** Whether the config was read from in-memory cache or embedded bundle */ configOrigin: 'in-memory' | 'embedded'; - /** HIT when definitions exist in memory, MISS when not. Omitted for embedded reads. */ - cacheStatus?: 'HIT' | 'MISS'; + /** HIT when definitions exist in memory, MISS when not, BYPASS when using fallback as primary source */ + cacheStatus?: 'HIT' | 'MISS' | 'BYPASS'; + /** FOLLOWING when streaming, REFRESHING when polling, NONE otherwise */ + cacheAction?: 'REFRESHING' | 'FOLLOWING' | 'NONE'; /** True for the very first getData call */ cacheIsFirstRead?: boolean; /** Whether the cache read was blocking */ @@ -90,14 +94,18 @@ export interface TrackReadOptions { duration?: number; /** Timestamp when the config was last updated */ configUpdatedAt?: number; + /** The mode the SDK is operating in */ + mode?: 'poll' | 'stream' | 'build' | 'offline'; + /** Revision of the config */ + revision?: number; } /** * Tracks usage events and batches them for submission to the ingest endpoint. */ export class UsageTracker { - private sdkKey: string; - private host: string; + private options: UsageTrackerOptions; + private trackedRequests = new WeakSet(); private batcher: EventBatcher = { events: [], resolveWait: null, @@ -105,8 +113,7 @@ export class UsageTracker { }; constructor(options: UsageTrackerOptions) { - this.sdkKey = options.sdkKey; - this.host = options.host; + this.options = options; } /** @@ -114,8 +121,17 @@ export class UsageTracker { * Returns a promise that resolves when the flush completes. */ flush(): Promise { - this.batcher.resolveWait?.(); - return this.batcher.pending ?? 
RESOLVED_VOID; + if (this.batcher.pending) { + this.batcher.resolveWait?.(); + return this.batcher.pending; + } + + // No scheduled flush yet — flush directly if there are queued events + if (this.batcher.events.length > 0) { + return this.flushEvents(); + } + + return RESOLVED_VOID; } /** @@ -127,8 +143,8 @@ export class UsageTracker { // Skip if we've already tracked this request if (ctx) { - if (trackedRequests.has(ctx)) return; - trackedRequests.add(ctx); + if (this.trackedRequests.has(ctx)) return; + this.trackedRequests.add(ctx); } const event: FlagsConfigReadEvent = { @@ -150,6 +166,9 @@ export class UsageTracker { if (options.cacheStatus !== undefined) { event.payload.cacheStatus = options.cacheStatus; } + if (options.cacheAction !== undefined) { + event.payload.cacheAction = options.cacheAction; + } if (options.cacheIsFirstRead !== undefined) { event.payload.cacheIsFirstRead = options.cacheIsFirstRead; } @@ -162,6 +181,18 @@ export class UsageTracker { if (options.configUpdatedAt !== undefined) { event.payload.configUpdatedAt = options.configUpdatedAt; } + if (options.mode !== undefined) { + event.payload.mode = options.mode; + } + if (options.revision !== undefined) { + event.payload.revision = String(options.revision); + } + } + + const environment = + process.env.VERCEL_ENV || process.env.NODE_ENV || undefined; + if (environment) { + event.payload.environment = environment; } this.batcher.events.push(event); @@ -192,7 +223,11 @@ export class UsageTracker { // Use waitUntil to keep the function alive until flush completes // If `waitUntil` is not available this will be a no-op and leave // a floating promise that will be completed in the background - waitUntil(pending); + try { + waitUntil(pending); + } catch { + // waitUntil is best-effort; falling through leaves a floating promise + } this.batcher.pending = pending; } @@ -203,6 +238,23 @@ export class UsageTracker { } } + /** + * Re-queues failed events, dropping oldest when the queue would exceed 
MAX_QUEUE_SIZE. + */ + private requeue(events: FlagsConfigReadEvent[]): void { + const combined = [...events, ...this.batcher.events]; + // Drop oldest events (from the front) when over capacity + if (combined.length > MAX_QUEUE_SIZE) { + const dropped = combined.length - MAX_QUEUE_SIZE; + console.warn( + `@vercel/flags-core: Dropping ${dropped} usage event(s) (queue full)`, + ); + this.batcher.events = combined.slice(dropped); + } else { + this.batcher.events = combined; + } + } + private async flushEvents(): Promise { if (this.batcher.events.length === 0) return; @@ -211,16 +263,19 @@ export class UsageTracker { this.batcher.events = []; try { - const response = await fetch(`${this.host}/v1/ingest`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${this.sdkKey}`, - 'User-Agent': `VercelFlagsCore/${version}`, - ...(isDebugMode ? { 'x-vercel-debug-ingest': '1' } : null), + const response = await this.options.fetch( + `${this.options.host}/v1/ingest`, + { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${this.options.sdkKey}`, + 'User-Agent': `VercelFlagsCore/${version}`, + ...(isDebugMode ? 
{ 'x-vercel-debug-ingest': '1' } : null), + }, + body: JSON.stringify(eventsToSend), }, - body: JSON.stringify(eventsToSend), - }); + ); debugLog( `@vercel/flags-core: Ingest response ${response.status} for ${eventsToSend.length} events on ${response.headers.get('x-vercel-id')}`, @@ -230,10 +285,13 @@ export class UsageTracker { debugLog( '@vercel/flags-core: Failed to send events:', response.statusText, + await response.text(), ); + this.requeue(eventsToSend); } } catch (error) { debugLog('@vercel/flags-core: Error sending events:', error); + this.requeue(eventsToSend); } } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 384b812f..c412a970 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -291,6 +291,94 @@ importers: specifier: ^5.7.3 version: 5.8.2 + examples/shirt-shop-vercel: + dependencies: + '@biomejs/biome': + specifier: ^2.3.13 + version: 2.3.13 + '@flags-sdk/vercel': + specifier: workspace:* + version: link:../../packages/adapter-vercel + '@headlessui/react': + specifier: ^2.2.0 + version: 2.2.9(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@heroicons/react': + specifier: 2.2.0 + version: 2.2.0(react@19.2.4) + '@tailwindcss/aspect-ratio': + specifier: 0.4.2 + version: 0.4.2(tailwindcss@4.1.18) + '@tailwindcss/forms': + specifier: 0.5.10 + version: 0.5.10(tailwindcss@4.1.18) + '@tailwindcss/postcss': + specifier: ^4.0.9 + version: 4.1.18 + '@tailwindcss/typography': + specifier: 0.5.16 + version: 0.5.16(tailwindcss@4.1.18) + '@vercel/analytics': + specifier: 1.5.0 + version: 1.5.0(@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react@19.2.4)(svelte@5.41.3) + '@vercel/edge': + specifier: 1.2.2 + version: 
1.2.2 + '@vercel/edge-config': + specifier: 1.4.3 + version: 1.4.3(@opentelemetry/api@1.9.0)(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)) + '@vercel/flags-core': + specifier: workspace:* + version: link:../../packages/vercel-flags-core + '@vercel/toolbar': + specifier: 0.1.36 + version: 0.1.36(5571e7b359b94065007de485c6157db6) + clsx: + specifier: 2.1.1 + version: 2.1.1 + flags: + specifier: 4.0.1 + version: 4.0.1(@opentelemetry/api@1.9.0)(@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + js-xxhash: + specifier: 4.0.0 + version: 4.0.0 + motion: + specifier: 12.12.1 + version: 12.12.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + nanoid: + specifier: 5.1.2 + version: 5.1.2 + next: + specifier: 16.1.6 + version: 16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: + specifier: ^19.2.0 + version: 19.2.4 + react-dom: + specifier: ^19.2.0 + version: 19.2.4(react@19.2.4) + sonner: + specifier: 2.0.1 + version: 2.0.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + devDependencies: + '@types/node': + specifier: ^22.13.5 + version: 22.14.0 + '@types/react': + specifier: ^19.0.10 + version: 19.2.14 + '@types/react-dom': + specifier: ^19.0.4 + version: 19.2.3(@types/react@19.2.14) + postcss: + specifier: ^8.5.3 + version: 8.5.6 + tailwindcss: + specifier: ^4.0.9 + version: 4.1.18 + typescript: + specifier: ^5.7.3 + version: 5.9.3 + examples/snippets: dependencies: '@radix-ui/react-dialog': @@ -812,7 +900,7 @@ importers: version: 5.2.1 react-dom: specifier: '*' - 
version: 19.2.0(react@19.3.0-canary-6066c782-20260212) + version: 19.2.0(react@19.3.0-canary-03ca38e6-20260213) devDependencies: '@arethetypeswrong/cli': specifier: 0.18.2 @@ -831,10 +919,10 @@ importers: version: 2.6.4(@types/node@20.11.17)(typescript@5.6.3) next: specifier: 16.1.5 - version: 16.1.5(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.0(react@19.3.0-canary-6066c782-20260212))(react@19.3.0-canary-6066c782-20260212) + version: 16.1.5(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.0(react@19.3.0-canary-03ca38e6-20260213))(react@19.3.0-canary-03ca38e6-20260213) react: specifier: canary - version: 19.3.0-canary-6066c782-20260212 + version: 19.3.0-canary-03ca38e6-20260213 tsup: specifier: 8.5.1 version: 8.5.1(jiti@2.6.1)(postcss@8.5.6)(typescript@5.6.3)(yaml@2.8.1) @@ -872,9 +960,6 @@ importers: flags: specifier: workspace:* version: link:../flags - msw: - specifier: 2.6.4 - version: 2.6.4(@types/node@20.11.17)(typescript@5.6.3) next: specifier: 16.1.6 version: 16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.3.0-canary-da641178-20260129))(react@19.3.0-canary-da641178-20260129) @@ -4325,6 +4410,9 @@ packages: resolution: {integrity: sha512-1++yncEyIAi68D3UEOlytYb1IUcIulMWdoSzX2h9LuSeeyR7JtaIgR8DcTQ6+DmYOQn+5MCh6LY+UmK6QBByNA==} deprecated: This package is deprecated. You should to use `@vercel/functions` instead. 
+ '@vercel/edge@1.2.2': + resolution: {integrity: sha512-1+y+f6rk0Yc9ss9bRDgz/gdpLimwoRteKHhrcgHvEpjbP1nyT3ByqEMWm2BTcpIO5UtDmIFXc8zdq4LR190PDA==} + '@vercel/functions@1.6.0': resolution: {integrity: sha512-R6FKQrYT5MZs5IE1SqeCJWxMuBdHawFcCZboKKw8p7s+6/mcd55Gx6tWmyKnQTyrSEA04NH73Tc9CbqpEle8RA==} engines: {node: '>= 16'} @@ -5461,6 +5549,26 @@ packages: fix-dts-default-cjs-exports@1.0.1: resolution: {integrity: sha512-pVIECanWFC61Hzl2+oOCtoJ3F17kglZC/6N94eRWycFgBH35hHx0Li604ZIzhseh97mf2p0cv7vVrOZGoqhlEg==} + flags@4.0.1: + resolution: {integrity: sha512-nJNY97LoI+BDNCSnGIEvBAxYkRYeRuMZ3KtdjCj60quGH3cnyjnSQfw9vB/kvb3+wAtdn2sm5t+jO6dy5tpi1w==} + peerDependencies: + '@opentelemetry/api': ^1.7.0 + '@sveltejs/kit': '*' + next: '*' + react: '*' + react-dom: '*' + peerDependenciesMeta: + '@opentelemetry/api': + optional: true + '@sveltejs/kit': + optional: true + next: + optional: true + react: + optional: true + react-dom: + optional: true + flags@4.0.3: resolution: {integrity: sha512-rLkO+Hn6dSEsDZm6lHuXr3GjfHf8N67lhXCFUeSRBjDdb/43ez5Je8DC/K0HzMtl3LcWc7zgF79V/3WzJXVm/w==} peerDependencies: @@ -7243,8 +7351,8 @@ packages: resolution: {integrity: sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==} engines: {node: '>=0.10.0'} - react@19.3.0-canary-6066c782-20260212: - resolution: {integrity: sha512-VRF1aVFk2iLHFObfNA5VGgbfJw8/kRsjvxbaPK33F/e1GU+K6RpV8gZvfes9Ih4ZAQgJuMMvXqCcz+hN8EjBhA==} + react@19.3.0-canary-03ca38e6-20260213: + resolution: {integrity: sha512-NNEFSftu7AEeOV6jq5Cu6PZI2kWf1C1AF6DihaPT8WICkmYh45+SphK96o3n9Y3ulHgtSsY4rZhwuVKC36r6Zw==} engines: {node: '>=0.10.0'} react@19.3.0-canary-da641178-20260129: @@ -9020,6 +9128,14 @@ snapshots: react-dom: 19.2.0(react@19.2.0) tabbable: 6.3.0 + '@floating-ui/react@0.26.28(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@floating-ui/react-dom': 2.1.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@floating-ui/utils': 0.2.10 + react: 19.2.4 + react-dom: 
19.2.4(react@19.2.4) + tabbable: 6.3.0 + '@floating-ui/utils@0.2.10': {} '@formatjs/intl-localematcher@0.6.2': @@ -9040,10 +9156,24 @@ snapshots: react-dom: 19.2.0(react@19.2.0) use-sync-external-store: 1.6.0(react@19.2.0) + '@headlessui/react@2.2.9(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@floating-ui/react': 0.26.28(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@react-aria/focus': 3.21.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@react-aria/interactions': 3.25.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@tanstack/react-virtual': 3.13.12(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + use-sync-external-store: 1.6.0(react@19.2.4) + '@heroicons/react@2.2.0(react@19.2.0)': dependencies: react: 19.2.0 + '@heroicons/react@2.2.0(react@19.2.4)': + dependencies: + react: 19.2.4 + '@humanfs/core@0.19.1': {} '@humanfs/node@0.16.7': @@ -10655,6 +10785,16 @@ snapshots: react: 19.2.0 react-dom: 19.2.0(react@19.2.0) + '@react-aria/focus@3.21.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@react-aria/interactions': 3.25.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@react-aria/utils': 3.31.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@react-types/shared': 3.32.1(react@19.2.4) + '@swc/helpers': 0.5.17 + clsx: 2.1.1 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + '@react-aria/interactions@3.25.6(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: '@react-aria/ssr': 3.9.10(react@19.2.0) @@ -10665,11 +10805,26 @@ snapshots: react: 19.2.0 react-dom: 19.2.0(react@19.2.0) + '@react-aria/interactions@3.25.6(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@react-aria/ssr': 3.9.10(react@19.2.4) + '@react-aria/utils': 3.31.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@react-stately/flags': 3.1.2 + '@react-types/shared': 3.32.1(react@19.2.4) + '@swc/helpers': 0.5.17 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + 
'@react-aria/ssr@3.9.10(react@19.2.0)': dependencies: '@swc/helpers': 0.5.17 react: 19.2.0 + '@react-aria/ssr@3.9.10(react@19.2.4)': + dependencies: + '@swc/helpers': 0.5.17 + react: 19.2.4 + '@react-aria/utils@3.31.0(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: '@react-aria/ssr': 3.9.10(react@19.2.0) @@ -10681,6 +10836,17 @@ snapshots: react: 19.2.0 react-dom: 19.2.0(react@19.2.0) + '@react-aria/utils@3.31.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@react-aria/ssr': 3.9.10(react@19.2.4) + '@react-stately/flags': 3.1.2 + '@react-stately/utils': 3.10.8(react@19.2.4) + '@react-types/shared': 3.32.1(react@19.2.4) + '@swc/helpers': 0.5.17 + clsx: 2.1.1 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + '@react-stately/flags@3.1.2': dependencies: '@swc/helpers': 0.5.17 @@ -10690,10 +10856,19 @@ snapshots: '@swc/helpers': 0.5.17 react: 19.2.0 + '@react-stately/utils@3.10.8(react@19.2.4)': + dependencies: + '@swc/helpers': 0.5.17 + react: 19.2.4 + '@react-types/shared@3.32.1(react@19.2.0)': dependencies: react: 19.2.0 + '@react-types/shared@3.32.1(react@19.2.4)': + dependencies: + react: 19.2.4 + '@reflag/flag-evaluation@1.0.0': dependencies: js-sha256: 0.11.0 @@ -10976,6 +11151,29 @@ snapshots: typescript: 5.8.2 optional: true + '@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1))': + dependencies: + '@standard-schema/spec': 1.0.0 + '@sveltejs/acorn-typescript': 1.0.6(acorn@8.15.0) + '@sveltejs/vite-plugin-svelte': 4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)) + '@types/cookie': 0.6.0 + acorn: 8.15.0 + cookie: 0.6.0 + devalue: 5.6.2 + esm-env: 1.2.2 + kleur: 4.1.5 + magic-string: 0.30.21 + mrmime: 2.0.1 + sade: 1.8.1 + set-cookie-parser: 3.0.1 + 
sirv: 3.0.2 + svelte: 5.41.3 + vite: 6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1) + optionalDependencies: + '@opentelemetry/api': 1.9.0 + typescript: 5.9.3 + optional: true + '@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1))': dependencies: '@standard-schema/spec': 1.0.0 @@ -11169,11 +11367,20 @@ snapshots: dependencies: tailwindcss: 4.0.15 + '@tailwindcss/aspect-ratio@0.4.2(tailwindcss@4.1.18)': + dependencies: + tailwindcss: 4.1.18 + '@tailwindcss/forms@0.5.10(tailwindcss@4.0.15)': dependencies: mini-svg-data-uri: 1.4.4 tailwindcss: 4.0.15 + '@tailwindcss/forms@0.5.10(tailwindcss@4.1.18)': + dependencies: + mini-svg-data-uri: 1.4.4 + tailwindcss: 4.1.18 + '@tailwindcss/node@4.0.15': dependencies: enhanced-resolve: 5.18.3 @@ -11381,6 +11588,14 @@ snapshots: postcss-selector-parser: 6.0.10 tailwindcss: 4.0.15 + '@tailwindcss/typography@0.5.16(tailwindcss@4.1.18)': + dependencies: + lodash.castarray: 4.4.0 + lodash.isplainobject: 4.0.6 + lodash.merge: 4.6.2 + postcss-selector-parser: 6.0.10 + tailwindcss: 4.1.18 + '@tailwindcss/vite@4.0.15(vite@5.4.21(@types/node@24.10.13)(lightningcss@1.30.2))': dependencies: '@tailwindcss/node': 4.0.15 @@ -11395,6 +11610,12 @@ snapshots: react: 19.2.0 react-dom: 19.2.0(react@19.2.0) + '@tanstack/react-virtual@3.13.12(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@tanstack/virtual-core': 3.13.12 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + '@tanstack/virtual-core@3.13.12': {} '@tinyhttp/accepts@1.3.0': @@ -11796,6 +12017,13 @@ snapshots: react: 19.2.0 svelte: 5.41.3 + 
'@vercel/analytics@1.5.0(@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react@19.2.4)(svelte@5.41.3)': + optionalDependencies: + '@sveltejs/kit': 2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)) + next: 16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + svelte: 5.41.3 + '@vercel/analytics@1.6.1(@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@5.4.21(@types/node@24.10.13)(lightningcss@1.30.2)))(svelte@5.41.3)(typescript@5.8.2)(vite@5.4.21(@types/node@24.10.13)(lightningcss@1.30.2)))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react@19.2.4)(svelte@5.41.3)': optionalDependencies: '@sveltejs/kit': 2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@5.4.21(@types/node@24.10.13)(lightningcss@1.30.2)))(svelte@5.41.3)(typescript@5.8.2)(vite@5.4.21(@types/node@24.10.13)(lightningcss@1.30.2)) @@ -11835,6 +12063,13 @@ snapshots: '@opentelemetry/api': 1.9.0 next: 16.1.5(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@vercel/edge-config@1.4.3(@opentelemetry/api@1.9.0)(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))': + dependencies: + '@vercel/edge-config-fs': 0.1.0 + 
optionalDependencies: + '@opentelemetry/api': 1.9.0 + next: 16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@vercel/edge-config@1.4.3(@opentelemetry/api@1.9.0)(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.3.0-canary-da641178-20260129))(react@19.3.0-canary-da641178-20260129))': dependencies: '@vercel/edge-config-fs': 0.1.0 @@ -11844,6 +12079,8 @@ snapshots: '@vercel/edge@1.2.1': {} + '@vercel/edge@1.2.2': {} + '@vercel/functions@1.6.0': {} '@vercel/functions@3.3.6': @@ -11892,6 +12129,27 @@ snapshots: transitivePeerDependencies: - debug + '@vercel/microfrontends@1.1.0(5571e7b359b94065007de485c6157db6)': + dependencies: + ajv: 8.17.1 + commander: 12.1.0 + cookie: 0.4.0 + fast-glob: 3.3.3 + http-proxy: 1.18.1 + jsonc-parser: 3.3.1 + nanoid: 3.3.11 + path-to-regexp: 6.2.1 + optionalDependencies: + '@sveltejs/kit': 2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)) + '@vercel/analytics': 1.5.0(@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react@19.2.4)(svelte@5.41.3) + '@vercel/speed-insights': 
1.3.1(@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react@19.2.4)(svelte@5.41.3) + next: 16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + vite: 6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1) + transitivePeerDependencies: + - debug + '@vercel/microfrontends@1.1.0(d173fbb08c37b3b6bbf7e6a01a37a15f)': dependencies: ajv: 8.17.1 @@ -11958,6 +12216,14 @@ snapshots: svelte: 5.41.3 optional: true + '@vercel/speed-insights@1.3.1(@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react@19.2.4)(svelte@5.41.3)': + optionalDependencies: + '@sveltejs/kit': 2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)) + next: 16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + svelte: 5.41.3 + optional: true + 
'@vercel/speed-insights@1.3.1(@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(next@16.1.6(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react@19.2.4)(svelte@5.41.3)': optionalDependencies: '@sveltejs/kit': 2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)) @@ -12009,6 +12275,28 @@ snapshots: - debug - react-dom + '@vercel/toolbar@0.1.36(5571e7b359b94065007de485c6157db6)': + dependencies: + '@tinyhttp/app': 1.3.0 + '@vercel/microfrontends': 1.1.0(5571e7b359b94065007de485c6157db6) + chokidar: 3.6.0 + execa: 5.1.1 + fast-glob: 3.3.3 + find-up: 5.0.0 + get-port: 5.1.1 + jsonc-parser: 3.3.1 + strip-ansi: 6.0.1 + optionalDependencies: + next: 16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + vite: 6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1) + transitivePeerDependencies: + - '@sveltejs/kit' + - '@vercel/analytics' + - '@vercel/speed-insights' + - debug + - react-dom + '@vercel/toolbar@0.1.36(d173fbb08c37b3b6bbf7e6a01a37a15f)': dependencies: '@tinyhttp/app': 1.3.0 @@ -13202,6 +13490,17 @@ snapshots: mlly: 1.8.0 rollup: 4.52.5 + 
flags@4.0.1(@opentelemetry/api@1.9.0)(@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + '@edge-runtime/cookies': 5.0.2 + jose: 5.2.1 + optionalDependencies: + '@opentelemetry/api': 1.9.0 + '@sveltejs/kit': 2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@22.14.0)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)) + next: 16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + flags@4.0.3(@opentelemetry/api@1.9.0)(@sveltejs/kit@2.50.2(@opentelemetry/api@1.9.0)(@sveltejs/vite-plugin-svelte@4.0.4(svelte@5.41.3)(vite@6.4.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(svelte@5.41.3)(typescript@5.9.3)(vite@6.4.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.1)))(next@16.1.6(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: '@edge-runtime/cookies': 5.0.2 @@ -14619,6 +14918,14 @@ snapshots: react: 19.2.0 react-dom: 19.2.0(react@19.2.0) + motion@12.12.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + framer-motion: 12.34.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + tslib: 2.8.1 + optionalDependencies: + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + motion@12.34.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4): 
dependencies: framer-motion: 12.34.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -14746,16 +15053,16 @@ snapshots: - '@babel/core' - babel-plugin-macros - next@16.1.5(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.0(react@19.3.0-canary-6066c782-20260212))(react@19.3.0-canary-6066c782-20260212): + next@16.1.5(@babel/core@7.28.5)(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.0(react@19.3.0-canary-03ca38e6-20260213))(react@19.3.0-canary-03ca38e6-20260213): dependencies: '@next/env': 16.1.5 '@swc/helpers': 0.5.15 baseline-browser-mapping: 2.9.19 caniuse-lite: 1.0.30001751 postcss: 8.4.31 - react: 19.3.0-canary-6066c782-20260212 - react-dom: 19.2.0(react@19.3.0-canary-6066c782-20260212) - styled-jsx: 5.1.6(@babel/core@7.28.5)(react@19.3.0-canary-6066c782-20260212) + react: 19.3.0-canary-03ca38e6-20260213 + react-dom: 19.2.0(react@19.3.0-canary-03ca38e6-20260213) + styled-jsx: 5.1.6(@babel/core@7.28.5)(react@19.3.0-canary-03ca38e6-20260213) optionalDependencies: '@next/swc-darwin-arm64': 16.1.5 '@next/swc-darwin-x64': 16.1.5 @@ -14901,7 +15208,6 @@ snapshots: transitivePeerDependencies: - '@babel/core' - babel-plugin-macros - optional: true next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.1)(react-dom@19.2.4(react@19.3.0-canary-da641178-20260129))(react@19.3.0-canary-da641178-20260129): dependencies: @@ -15346,9 +15652,9 @@ snapshots: react: 19.2.0 scheduler: 0.27.0 - react-dom@19.2.0(react@19.3.0-canary-6066c782-20260212): + react-dom@19.2.0(react@19.3.0-canary-03ca38e6-20260213): dependencies: - react: 19.3.0-canary-6066c782-20260212 + react: 19.3.0-canary-03ca38e6-20260213 scheduler: 0.27.0 react-dom@19.2.4(react@19.2.4): @@ -15466,7 +15772,7 @@ snapshots: react@19.2.4: {} - react@19.3.0-canary-6066c782-20260212: {} + react@19.3.0-canary-03ca38e6-20260213: {} react@19.3.0-canary-da641178-20260129: {} @@ -15842,6 +16148,11 @@ snapshots: react: 19.2.0 react-dom: 19.2.0(react@19.2.0) + 
sonner@2.0.1(react-dom@19.2.4(react@19.2.4))(react@19.2.4): + dependencies: + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + sonner@2.0.7(react-dom@19.2.4(react@19.2.4))(react@19.2.4): dependencies: react: 19.2.4 @@ -15974,10 +16285,10 @@ snapshots: optionalDependencies: '@babel/core': 7.28.5 - styled-jsx@5.1.6(@babel/core@7.28.5)(react@19.3.0-canary-6066c782-20260212): + styled-jsx@5.1.6(@babel/core@7.28.5)(react@19.3.0-canary-03ca38e6-20260213): dependencies: client-only: 0.0.1 - react: 19.3.0-canary-6066c782-20260212 + react: 19.3.0-canary-03ca38e6-20260213 optionalDependencies: '@babel/core': 7.28.5 @@ -16000,7 +16311,6 @@ snapshots: dependencies: client-only: 0.0.1 react: 19.2.4 - optional: true styled-jsx@5.1.6(react@19.3.0-canary-da641178-20260129): dependencies: