From ff81a748da0e672f3963ed0604869de3cff9069e Mon Sep 17 00:00:00 2001 From: Jacek Date: Tue, 17 Mar 2026 11:42:18 -0500 Subject: [PATCH 1/6] fix(clerk-js): clear orphaned refresh timers on token cache overwrite MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When set() was called multiple times for the same cache key, old refresh and expiration timers were never cancelled. This caused orphaned timers to accumulate — each independently firing onRefresh — making the poller appear erratic with accelerating token refresh requests. The root cause: both _updateClient (via Session constructor → #hydrateCache) and #refreshTokenInBackground call set() for the same key during a single refresh cycle. Each set() created new timers without clearing the old ones. After N cycles, N orphaned timers all fire at different offsets. Fix: clear existing timers at the start of setInternal before creating the new cache entry. --- .changeset/fix-token-cache-timer-leak.md | 5 + .../refresh-timer-cleanup.test.ts | 86 +++++++ .../src/core/__tests__/tokenCache.test.ts | 241 ++++++++++++++++++ packages/clerk-js/src/core/tokenCache.ts | 13 + 4 files changed, 345 insertions(+) create mode 100644 .changeset/fix-token-cache-timer-leak.md create mode 100644 integration/tests/session-token-cache/refresh-timer-cleanup.test.ts diff --git a/.changeset/fix-token-cache-timer-leak.md b/.changeset/fix-token-cache-timer-leak.md new file mode 100644 index 00000000000..0b95b41d0f4 --- /dev/null +++ b/.changeset/fix-token-cache-timer-leak.md @@ -0,0 +1,5 @@ +--- +'@clerk/clerk-js': patch +--- + +Fix token cache refresh timer leak that caused accelerating token refresh requests. When `set()` was called multiple times for the same cache key (e.g., from `_updateClient` hydration and `#refreshTokenInBackground`), old refresh timers were never cancelled, causing orphaned timers to accumulate. 
Each leaked timer independently fired `onRefresh`, making the poller appear erratic — especially after `session.touch()` or organization switching. diff --git a/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts b/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts new file mode 100644 index 00000000000..35006850b5a --- /dev/null +++ b/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts @@ -0,0 +1,86 @@ +import { expect, test } from '@playwright/test'; + +import { appConfigs } from '../../presets'; +import type { FakeUser } from '../../testUtils'; +import { createTestUtils, testAgainstRunningApps } from '../../testUtils'; + +/** + * Tests that the token cache's proactive refresh timer does not accumulate + * orphaned timers across set() calls. + * + * Background: Every API response that piggybacks client data triggers _updateClient, + * which reconstructs Session objects and calls #hydrateCache → SessionTokenCache.set(). + * Additionally, the proactive refresh timer's onRefresh callback also calls set() after + * fetching a new token. Without proper timer cleanup, each set() call would leave the + * previous refresh timer running, causing the effective polling rate to accelerate over + * time as orphaned timers accumulate. + * + * These tests verify that token refresh requests maintain a stable, predictable rate + * even after operations (like touch or org switching) that trigger multiple set() calls. 
+ */ +testAgainstRunningApps({ withEnv: [appConfigs.envs.withEmailCodes] })( + 'Token refresh timer cleanup @generic', + ({ app }) => { + test.describe.configure({ mode: 'serial' }); + + let fakeUser: FakeUser; + + test.beforeAll(async () => { + const u = createTestUtils({ app }); + fakeUser = u.services.users.createFakeUser(); + await u.services.users.createBapiUser(fakeUser); + }); + + test.afterAll(async () => { + await fakeUser.deleteIfExists(); + await app.teardown(); + }); + + test('token refresh rate remains stable after session.touch() triggers _updateClient', async ({ page, context }) => { + const u = createTestUtils({ app, page, context }); + + await u.po.signIn.goTo(); + await u.po.signIn.signInWithEmailAndInstantPassword({ email: fakeUser.email, password: fakeUser.password }); + await u.po.expect.toBeSignedIn(); + + // Track token fetch requests with timestamps + const tokenRequests: number[] = []; + await page.route('**/v1/client/sessions/*/tokens**', async route => { + tokenRequests.push(Date.now()); + await route.continue(); + }); + + // Trigger multiple touch() calls to simulate the scenario where _updateClient + // causes Session reconstruction → #hydrateCache → set(), potentially leaking timers + for (let i = 0; i < 3; i++) { + await page.evaluate(async () => { + await (window as any).Clerk?.session?.touch(); + }); + } + + // Wait long enough for multiple refresh cycles (tokens have ~60s TTL, + // refresh fires at ~43s). We wait 90s to see at least 2 refresh cycles. 
+ // eslint-disable-next-line playwright/no-wait-for-timeout + await page.waitForTimeout(90_000); + + await page.unrouteAll(); + + // With the fix: ~2-3 refresh requests over 90s (one every ~43s) + // Without the fix: 6+ requests as orphaned timers from each touch() accumulate + // (3 touch calls × 2 set() calls each = 6 timer chains all firing independently) + // + // We allow up to 5 to account for the initial token fetch + normal refresh cycles + expect(tokenRequests.length).toBeLessThanOrEqual(5); + + // Verify requests are reasonably spaced (not clustering together) + if (tokenRequests.length >= 2) { + for (let i = 1; i < tokenRequests.length; i++) { + const gap = tokenRequests[i] - tokenRequests[i - 1]; + // Each gap should be at least 10 seconds — if timers were accumulating, + // we'd see requests firing within seconds of each other + expect(gap).toBeGreaterThan(10_000); + } + } + }); + }, +); diff --git a/packages/clerk-js/src/core/__tests__/tokenCache.test.ts b/packages/clerk-js/src/core/__tests__/tokenCache.test.ts index 14174c218af..1149a08f49e 100644 --- a/packages/clerk-js/src/core/__tests__/tokenCache.test.ts +++ b/packages/clerk-js/src/core/__tests__/tokenCache.test.ts @@ -959,6 +959,247 @@ describe('SessionTokenCache', () => { }); }); + describe('timer cleanup on overwrite', () => { + /** + * This describe block tests the fix for a bug where calling set() multiple times + * for the same cache key would leak orphaned refresh timers. Each set() call creates + * a new value object with its own refresh timer, but previously the old value's timers + * were never cleared. This caused refresh callbacks to fire N times after N set() calls + * instead of just once, making the poller appear erratic. + * + * The root cause was that both `#hydrateCache` (via Session constructor from _updateClient) + * and `#refreshTokenInBackground` call set() for the same key during a single refresh cycle, + * doubling the number of active timers each cycle. 
+ */ + + it('cancels old refresh timer when set() is called again for the same key', async () => { + const nowSeconds = Math.floor(Date.now() / 1000); + const jwt = createJwtWithTtl(nowSeconds, 60); + const token = new Token({ id: 'overwrite-token', jwt, object: 'token' }); + + const onRefresh1 = vi.fn(); + const onRefresh2 = vi.fn(); + const key = { tokenId: 'overwrite-token' }; + + // First set() — schedules refresh timer at ~43s + SessionTokenCache.set({ ...key, tokenResolver: Promise.resolve(token), onRefresh: onRefresh1 }); + await Promise.resolve(); + + // Second set() for SAME key — should cancel first timer and schedule new one + SessionTokenCache.set({ ...key, tokenResolver: Promise.resolve(token), onRefresh: onRefresh2 }); + await Promise.resolve(); + + // Advance past refresh fire time (43s) + vi.advanceTimersByTime(44 * 1000); + + // Only the SECOND callback should fire; the first was cancelled + expect(onRefresh1).not.toHaveBeenCalled(); + expect(onRefresh2).toHaveBeenCalledTimes(1); + }); + + it('cancels old expiration timer when set() is called again for the same key', async () => { + const nowSeconds = Math.floor(Date.now() / 1000); + const jwt1 = createJwtWithTtl(nowSeconds, 30); + const jwt2 = createJwtWithTtl(nowSeconds, 120); + const token1 = new Token({ id: 'exp-overwrite', jwt: jwt1, object: 'token' }); + const token2 = new Token({ id: 'exp-overwrite', jwt: jwt2, object: 'token' }); + + const key = { tokenId: 'exp-overwrite' }; + + // First set() with 30s TTL + SessionTokenCache.set({ ...key, tokenResolver: Promise.resolve(token1) }); + await Promise.resolve(); + + // Second set() with 120s TTL — old 30s expiration timer should be cancelled + SessionTokenCache.set({ ...key, tokenResolver: Promise.resolve(token2) }); + await Promise.resolve(); + + // After 30s the old timer would have deleted the entry, but it should still exist + vi.advanceTimersByTime(31 * 1000); + const result = SessionTokenCache.get(key); + expect(result).toBeDefined(); + 
expect(result?.entry.tokenId).toBe('exp-overwrite'); + }); + + it('does not accumulate refresh timers across multiple set() calls', async () => { + const nowSeconds = Math.floor(Date.now() / 1000); + const jwt = createJwtWithTtl(nowSeconds, 60); + const token = new Token({ id: 'accumulate-test', jwt, object: 'token' }); + + const onRefresh = vi.fn(); + const key = { tokenId: 'accumulate-test' }; + + // Simulate 10 rapid set() calls for the same key (as would happen with + // multiple _updateClient / hydration cycles) + for (let i = 0; i < 10; i++) { + SessionTokenCache.set({ ...key, tokenResolver: Promise.resolve(token), onRefresh }); + await Promise.resolve(); + } + + // Advance past refresh fire time + vi.advanceTimersByTime(44 * 1000); + + // Should fire exactly ONCE, not 10 times + expect(onRefresh).toHaveBeenCalledTimes(1); + }); + + it('simulates the hydration + background refresh double-set scenario', async () => { + const nowSeconds = Math.floor(Date.now() / 1000); + const jwt = createJwtWithTtl(nowSeconds, 60); + const token = new Token({ id: 'double-set-token', jwt, object: 'token' }); + + const hydrateRefresh = vi.fn(); + const backgroundRefresh = vi.fn(); + const key = { tokenId: 'double-set-token' }; + + // 1. Simulate #hydrateCache from Session constructor (called by _updateClient) + SessionTokenCache.set({ + ...key, + tokenResolver: Promise.resolve(token), + onRefresh: hydrateRefresh, + }); + await Promise.resolve(); + + // 2. 
Simulate #refreshTokenInBackground's .then() calling set() with resolved token + // This is what happens during a background refresh cycle — both _updateClient + // and the .then() callback call set() for the same cache key + SessionTokenCache.set({ + ...key, + tokenResolver: Promise.resolve(token), + onRefresh: backgroundRefresh, + }); + await Promise.resolve(); + + // Advance past refresh fire time + vi.advanceTimersByTime(44 * 1000); + + // Only the second (background refresh) callback should fire + expect(hydrateRefresh).not.toHaveBeenCalled(); + expect(backgroundRefresh).toHaveBeenCalledTimes(1); + }); + + it('simulates multiple refresh cycles without timer accumulation', async () => { + const key = { tokenId: 'multi-cycle-token' }; + const refreshCounts: number[] = []; + + // Simulate 5 consecutive refresh cycles + // refreshFireTime = 60 - 15 - 2 = 43s + for (let cycle = 0; cycle < 5; cycle++) { + const nowSeconds = Math.floor(Date.now() / 1000); + const jwt = createJwtWithTtl(nowSeconds, 60); + const token = new Token({ id: 'multi-cycle-token', jwt, object: 'token' }); + + const onRefresh = vi.fn(); + + // Each cycle does TWO set() calls (hydration + background refresh) + SessionTokenCache.set({ + ...key, + tokenResolver: Promise.resolve(token), + onRefresh, + }); + await Promise.resolve(); + + SessionTokenCache.set({ + ...key, + tokenResolver: Promise.resolve(token), + onRefresh, + }); + await Promise.resolve(); + + // Advance to 42s — just BEFORE the 43s refresh timer fires + vi.advanceTimersByTime(42 * 1000); + refreshCounts.push(onRefresh.mock.calls.length); + + // Advance 2 more seconds past the 43s fire time + vi.advanceTimersByTime(2 * 1000); + refreshCounts.push(onRefresh.mock.calls.length); + } + + // Each cycle should show: 0 calls before timer, 1 call after timer + // If timers were accumulating, later cycles would show more than 1 call + for (let i = 0; i < refreshCounts.length; i += 2) { + expect(refreshCounts[i]).toBe(0); // before timer (42s) 
+ expect(refreshCounts[i + 1]).toBe(1); // after timer (44s) + } + }); + + it('set() with different key does not affect existing timers', async () => { + const nowSeconds = Math.floor(Date.now() / 1000); + const jwt = createJwtWithTtl(nowSeconds, 60); + const token1 = new Token({ id: 'key-a', jwt, object: 'token' }); + const token2 = new Token({ id: 'key-b', jwt, object: 'token' }); + + const onRefreshA = vi.fn(); + const onRefreshB = vi.fn(); + + SessionTokenCache.set({ + tokenId: 'key-a', + tokenResolver: Promise.resolve(token1), + onRefresh: onRefreshA, + }); + await Promise.resolve(); + + SessionTokenCache.set({ + tokenId: 'key-b', + tokenResolver: Promise.resolve(token2), + onRefresh: onRefreshB, + }); + await Promise.resolve(); + + vi.advanceTimersByTime(44 * 1000); + + // Both should fire independently — setting key-b should NOT cancel key-a's timer + expect(onRefreshA).toHaveBeenCalledTimes(1); + expect(onRefreshB).toHaveBeenCalledTimes(1); + }); + + it('only the latest set() callback fires after interleaved set/clear/set', async () => { + const nowSeconds = Math.floor(Date.now() / 1000); + const jwt = createJwtWithTtl(nowSeconds, 60); + const token = new Token({ id: 'interleaved-token', jwt, object: 'token' }); + + const onRefresh1 = vi.fn(); + const onRefresh2 = vi.fn(); + const key = { tokenId: 'interleaved-token' }; + + // set, clear, set again + SessionTokenCache.set({ ...key, tokenResolver: Promise.resolve(token), onRefresh: onRefresh1 }); + await Promise.resolve(); + + SessionTokenCache.clear(); + + SessionTokenCache.set({ ...key, tokenResolver: Promise.resolve(token), onRefresh: onRefresh2 }); + await Promise.resolve(); + + vi.advanceTimersByTime(44 * 1000); + + expect(onRefresh1).not.toHaveBeenCalled(); + expect(onRefresh2).toHaveBeenCalledTimes(1); + }); + + it('overwriting with a token that has no onRefresh cancels the old refresh timer', async () => { + const nowSeconds = Math.floor(Date.now() / 1000); + const jwt = createJwtWithTtl(nowSeconds, 
60); + const token = new Token({ id: 'cancel-refresh', jwt, object: 'token' }); + + const onRefresh = vi.fn(); + const key = { tokenId: 'cancel-refresh' }; + + // First set with onRefresh + SessionTokenCache.set({ ...key, tokenResolver: Promise.resolve(token), onRefresh }); + await Promise.resolve(); + + // Second set WITHOUT onRefresh (like a broadcast-received token) + SessionTokenCache.set({ ...key, tokenResolver: Promise.resolve(token) }); + await Promise.resolve(); + + vi.advanceTimersByTime(44 * 1000); + + // The old onRefresh should have been cancelled, and no new one was scheduled + expect(onRefresh).not.toHaveBeenCalled(); + }); + }); + describe('multi-session isolation', () => { it('stores tokens from different session IDs separately without interference', async () => { const nowSeconds = Math.floor(Date.now() / 1000); diff --git a/packages/clerk-js/src/core/tokenCache.ts b/packages/clerk-js/src/core/tokenCache.ts index 98bfaa25fae..8395a495ce5 100644 --- a/packages/clerk-js/src/core/tokenCache.ts +++ b/packages/clerk-js/src/core/tokenCache.ts @@ -350,6 +350,19 @@ const MemoryTokenCache = (prefix = KEY_PREFIX): TokenCache => { const key = cacheKey.toKey(); + // Clear timers from any existing entry for this key to prevent orphaned + // refresh timers from accumulating across set() calls (e.g., from + // #hydrateCache during _updateClient AND #refreshTokenInBackground). + const existing = cache.get(key); + if (existing) { + if (existing.timeoutId !== undefined) { + clearTimeout(existing.timeoutId); + } + if (existing.refreshTimeoutId !== undefined) { + clearTimeout(existing.refreshTimeoutId); + } + } + const nowSeconds = Math.floor(Date.now() / 1000); const createdAt = entry.createdAt ?? 
nowSeconds; const value: TokenCacheValue = { createdAt, entry, expiresIn: undefined }; From fd6d1430cd49249c7622bff6cde55eddd8c175fc Mon Sep 17 00:00:00 2001 From: Jacek Date: Tue, 17 Mar 2026 11:42:35 -0500 Subject: [PATCH 2/6] chore: shorten changeset --- .changeset/fix-token-cache-timer-leak.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.changeset/fix-token-cache-timer-leak.md b/.changeset/fix-token-cache-timer-leak.md index 0b95b41d0f4..6b3440a8fa6 100644 --- a/.changeset/fix-token-cache-timer-leak.md +++ b/.changeset/fix-token-cache-timer-leak.md @@ -2,4 +2,4 @@ '@clerk/clerk-js': patch --- -Fix token cache refresh timer leak that caused accelerating token refresh requests. When `set()` was called multiple times for the same cache key (e.g., from `_updateClient` hydration and `#refreshTokenInBackground`), old refresh timers were never cancelled, causing orphaned timers to accumulate. Each leaked timer independently fired `onRefresh`, making the poller appear erratic — especially after `session.touch()` or organization switching. +Fix token cache refresh timer leak that caused accelerating token refresh requests after `session.touch()` or organization switching. 
From 2dbdc83235d25f2b018e9465ad69a717e35b37cc Mon Sep 17 00:00:00 2001 From: Jacek Date: Tue, 17 Mar 2026 11:49:32 -0500 Subject: [PATCH 3/6] fix: prettier formatting --- .../tests/session-token-cache/refresh-timer-cleanup.test.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts b/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts index 35006850b5a..9a2be19c6d5 100644 --- a/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts +++ b/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts @@ -36,7 +36,10 @@ testAgainstRunningApps({ withEnv: [appConfigs.envs.withEmailCodes] })( await app.teardown(); }); - test('token refresh rate remains stable after session.touch() triggers _updateClient', async ({ page, context }) => { + test('token refresh rate remains stable after session.touch() triggers _updateClient', async ({ + page, + context, + }) => { const u = createTestUtils({ app, page, context }); await u.po.signIn.goTo(); From a0d8003b3dcc934004ed4a334c95b3557a34c99a Mon Sep 17 00:00:00 2001 From: Jacek Date: Tue, 17 Mar 2026 11:53:20 -0500 Subject: [PATCH 4/6] fix(clerk-js): add staleness guard for pending tokenResolver promises --- .../src/core/__tests__/tokenCache.test.ts | 37 +++++++++++++++++++ packages/clerk-js/src/core/tokenCache.ts | 6 +++ 2 files changed, 43 insertions(+) diff --git a/packages/clerk-js/src/core/__tests__/tokenCache.test.ts b/packages/clerk-js/src/core/__tests__/tokenCache.test.ts index 1149a08f49e..f0ba14fae38 100644 --- a/packages/clerk-js/src/core/__tests__/tokenCache.test.ts +++ b/packages/clerk-js/src/core/__tests__/tokenCache.test.ts @@ -1177,6 +1177,43 @@ describe('SessionTokenCache', () => { expect(onRefresh2).toHaveBeenCalledTimes(1); }); + it('does not install timers when a pending tokenResolver resolves after being overwritten', async () => { + const nowSeconds = Math.floor(Date.now() / 1000); + const 
jwt = createJwtWithTtl(nowSeconds, 60); + const token = new Token({ id: 'stale-pending', jwt, object: 'token' }); + + const onRefreshStale = vi.fn(); + const onRefreshFresh = vi.fn(); + const key = { tokenId: 'stale-pending' }; + + // First set() with a slow-resolving promise + let resolveSlowPromise: (t: TokenResource) => void; + const slowPromise = new Promise<TokenResource>(resolve => { + resolveSlowPromise = resolve; + }); + + SessionTokenCache.set({ ...key, tokenResolver: slowPromise, onRefresh: onRefreshStale }); + + // Second set() overwrites the key before the slow promise resolves + SessionTokenCache.set({ + ...key, + tokenResolver: Promise.resolve(token), + onRefresh: onRefreshFresh, + }); + await Promise.resolve(); + + // Now the slow promise resolves — but its entry is stale + resolveSlowPromise!(token); + await Promise.resolve(); + + // Advance past refresh fire time + vi.advanceTimersByTime(44 * 1000); + + // Only the fresh callback should fire; the stale one should be ignored + expect(onRefreshStale).not.toHaveBeenCalled(); + expect(onRefreshFresh).toHaveBeenCalledTimes(1); + }); + it('overwriting with a token that has no onRefresh cancels the old refresh timer', async () => { const nowSeconds = Math.floor(Date.now() / 1000); const jwt = createJwtWithTtl(nowSeconds, 60); const token = new Token({ id: 'cancel-refresh', jwt, object: 'token' }); diff --git a/packages/clerk-js/src/core/tokenCache.ts b/packages/clerk-js/src/core/tokenCache.ts index 8395a495ce5..7cc9938bde1 100644 --- a/packages/clerk-js/src/core/tokenCache.ts +++ b/packages/clerk-js/src/core/tokenCache.ts @@ -382,6 +382,12 @@ const MemoryTokenCache = (prefix = KEY_PREFIX): TokenCache => { entry.tokenResolver .then(newToken => { + // If this entry was overwritten by a newer set() call while our promise + // was pending, bail out to avoid installing orphaned timers.
+ if (cache.get(key) !== value) { + return; + } + // Store resolved token for synchronous reads entry.resolvedToken = newToken; From 317a35eb022e85849bd399d2bb8726698e27a6bf Mon Sep 17 00:00:00 2001 From: Jacek Date: Tue, 17 Mar 2026 12:14:40 -0500 Subject: [PATCH 5/6] fix(e2e): reduce integration test timeout for CI --- .../refresh-timer-cleanup.test.ts | 39 +++++++------------ 1 file changed, 13 insertions(+), 26 deletions(-) diff --git a/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts b/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts index 9a2be19c6d5..bee26346589 100644 --- a/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts +++ b/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts @@ -10,13 +10,8 @@ import { createTestUtils, testAgainstRunningApps } from '../../testUtils'; * * Background: Every API response that piggybacks client data triggers _updateClient, * which reconstructs Session objects and calls #hydrateCache → SessionTokenCache.set(). - * Additionally, the proactive refresh timer's onRefresh callback also calls set() after - * fetching a new token. Without proper timer cleanup, each set() call would leave the - * previous refresh timer running, causing the effective polling rate to accelerate over - * time as orphaned timers accumulate. - * - * These tests verify that token refresh requests maintain a stable, predictable rate - * even after operations (like touch or org switching) that trigger multiple set() calls. + * Without proper timer cleanup, each set() call would leave the previous refresh timer + * running, causing the effective polling rate to accelerate over time. 
*/ testAgainstRunningApps({ withEnv: [appConfigs.envs.withEmailCodes] })( 'Token refresh timer cleanup @generic', @@ -36,10 +31,7 @@ testAgainstRunningApps({ withEnv: [appConfigs.envs.withEmailCodes] })( await app.teardown(); }); - test('token refresh rate remains stable after session.touch() triggers _updateClient', async ({ - page, - context, - }) => { + test('touch does not cause clustered token refresh requests', async ({ page, context }) => { const u = createTestUtils({ app, page, context }); await u.po.signIn.goTo(); @@ -53,34 +45,29 @@ testAgainstRunningApps({ withEnv: [appConfigs.envs.withEmailCodes] })( await route.continue(); }); - // Trigger multiple touch() calls to simulate the scenario where _updateClient - // causes Session reconstruction → #hydrateCache → set(), potentially leaking timers - for (let i = 0; i < 3; i++) { + // Trigger multiple touch() calls — each causes _updateClient → Session constructor + // → #hydrateCache → set(), which previously leaked orphaned refresh timers + for (let i = 0; i < 5; i++) { await page.evaluate(async () => { await (window as any).Clerk?.session?.touch(); }); } - // Wait long enough for multiple refresh cycles (tokens have ~60s TTL, - // refresh fires at ~43s). We wait 90s to see at least 2 refresh cycles. 
+ // Wait 50s — enough for one refresh cycle (~43s) but not two // eslint-disable-next-line playwright/no-wait-for-timeout - await page.waitForTimeout(90_000); + await page.waitForTimeout(50_000); await page.unrouteAll(); - // With the fix: ~2-3 refresh requests over 90s (one every ~43s) - // Without the fix: 6+ requests as orphaned timers from each touch() accumulate - // (3 touch calls × 2 set() calls each = 6 timer chains all firing independently) - // - // We allow up to 5 to account for the initial token fetch + normal refresh cycles - expect(tokenRequests.length).toBeLessThanOrEqual(5); + // With the fix: at most 1-2 refresh requests (one cycle at ~43s) + // Without the fix: 5+ requests from orphaned timers all firing at different offsets + expect(tokenRequests.length).toBeLessThanOrEqual(3); - // Verify requests are reasonably spaced (not clustering together) + // If multiple requests occurred, verify they aren't clustered together + // (clustering = orphaned timers firing near-simultaneously) if (tokenRequests.length >= 2) { for (let i = 1; i < tokenRequests.length; i++) { const gap = tokenRequests[i] - tokenRequests[i - 1]; - // Each gap should be at least 10 seconds — if timers were accumulating, - // we'd see requests firing within seconds of each other expect(gap).toBeGreaterThan(10_000); } } From 1c01b01f69583a5b2511b5074eaac15173b9b638 Mon Sep 17 00:00:00 2001 From: Jacek Date: Tue, 17 Mar 2026 12:55:30 -0500 Subject: [PATCH 6/6] fix(e2e): increase test timeout to 120s for refresh timer test --- .../tests/session-token-cache/refresh-timer-cleanup.test.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts b/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts index bee26346589..485ac39e383 100644 --- a/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts +++ b/integration/tests/session-token-cache/refresh-timer-cleanup.test.ts @@ -32,6 +32,7 @@ 
testAgainstRunningApps({ withEnv: [appConfigs.envs.withEmailCodes] })( }); test('touch does not cause clustered token refresh requests', async ({ page, context }) => { + test.setTimeout(120_000); const u = createTestUtils({ app, page, context }); await u.po.signIn.goTo();