From b7ba8fed2289edbae6c8f7ef4f7c54a7e86deef5 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 14 May 2026 18:44:18 -0300 Subject: [PATCH 1/2] fix(sequencer): use targetSlot in tryVoteWhenEscapeHatchOpen under pipelining MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit tryVoteWhenEscapeHatchOpen constructed CheckpointVoter with the wall-clock slot and called publisher.sendRequestsAt(slot). Under proposer pipelining we are the elected proposer for slot + 1 (targetSlot), and the multicall is expected to mine in that slot. Signing for the wall-clock slot makes the L1 contract's EIP-712 digest mismatch and the require msg.sender == getCurrentProposer() check fail, because the wall-clock slot's proposer is someone else. The whole multicall reverts silently inside Multicall3 and every governance/slashing entry is dropped. Thread targetSlot through and use it for both CheckpointVoter (which binds the EIP-712 signature) and publisher.sendRequestsAt (which delays submission so the tx mines in targetSlot). Mirrors tryVoteWhenSyncFails and CheckpointProposalJob.execute. When pipelining is disabled targetSlot equals slot, so sendRequestsAt resolves with no extra sleep and the legacy behaviour is preserved. Unblocks e2e_sequencer/escape_hatch_vote_only and the "should vote even when unable to build blocks" case in e2e_sequencer/gov_proposal.parallel under pipelining (these were skipped in #23275 pending this fix). See PIPELINING_TEST_STATUS.md §6 B5. 
--- .../src/sequencer/sequencer.ts | 34 +++++++++++++------ 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 83f9cb174b97..6dda210dad1a 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -312,7 +312,7 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter ({ [Attributes.SLOT_NUMBER]: slot })) protected async tryVoteWhenEscapeHatchOpen(args: { slot: SlotNumber; + targetSlot: SlotNumber; proposer: EthAddress | undefined; }): Promise { - const { slot, proposer } = args; + const { slot, targetSlot, proposer } = args; // Prevent duplicate attempts in the same slot if (this.lastSlotForFallbackVote === slot) { @@ -898,10 +899,19 @@ export class Sequencer extends (EventEmitter as new () => TypedEventEmitter TypedEventEmitter { + this.log.error(`Failed to publish escape-hatch votes for slot ${slot}`, err, { slot, targetSlot }); + }); } /** From 00e851bcb40f12fc255b5b66f5f4e2e50dfc2fba Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 14 May 2026 18:46:24 -0300 Subject: [PATCH 2/2] test(e2e): re-enable escape_hatch_vote_only under pipelining MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Exercises the §6 B5 source-level fix (`tryVoteWhenEscapeHatchOpen` now signs the CheckpointVoter for `targetSlot` and submits via `sendRequestsAt(targetSlot)`). Without the fix, governance signals cast during the escape-hatch window would fail signature verification inside Multicall3 and the test's `finalStats.votes >= slotsPassed` assertion would fail. Test-side adjustments needed for the pipelined timing model: - Move event listener attachment to after the warp into the escape-hatch epoch. 
Checkpoint proposals in flight at warp time fail their L1 propose (their target slot is now in the past after the L1 timestamp jump) and we don't want to count those setup-warp artifacts as escape-hatch failures. Also filter remaining events to those at or after `initialStats.slot` for the same reason. - Snapshot `slotAtMeasurement` for the vote-count lower bound, then wait for the L1 slot to advance two more so the trailing vote (signed in build slot N for target slot N+1) has time to mine before we count. Without this drain, `finalStats.votes` consistently lags `slotsPassed` by 1-2 under pipelining. Inlines `enableProposerPipelining: true` and `inboxLag: 2` directly in the test rather than importing a shared `PIPELINING_SETUP_OPTS` preset, since this is the only pipelined test on this branch. --- .../escape_hatch_vote_only.test.ts | 78 ++++++++++++------- 1 file changed, 51 insertions(+), 27 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_sequencer/escape_hatch_vote_only.test.ts b/yarn-project/end-to-end/src/e2e_sequencer/escape_hatch_vote_only.test.ts index 401c4159176b..d235815b15ff 100644 --- a/yarn-project/end-to-end/src/e2e_sequencer/escape_hatch_vote_only.test.ts +++ b/yarn-project/end-to-end/src/e2e_sequencer/escape_hatch_vote_only.test.ts @@ -75,6 +75,10 @@ describe('e2e_escape_hatch_vote_only', () => { minTxsPerBlock: 0, enforceTimeTable: true, automineL1Setup: true, + // Pipelining opts — exercise the §6 B5 fix (tryVoteWhenEscapeHatchOpen signing/submitting for targetSlot). + // inboxLag: 2 so the sequencer sources L1->L2 messages from a sealed checkpoint when building for slot+1. + enableProposerPipelining: true, + inboxLag: 2, }); ({ @@ -142,19 +146,38 @@ describe('e2e_escape_hatch_vote_only', () => { afterEach(() => teardown()); it('casts governance signals and advances checkpoints while escape hatch is closed', async () => { + const sequencer = sequencerClient!.getSequencer(); + // Enable voting from the sequencer. 
await aztecNodeAdmin!.setConfig({ governanceProposerPayload: newGovernanceProposerPayloadAddress, minTxsPerBlock: 0, }); - // Set up event listeners to track sequencer behavior + // We need to set it for hatch 1, and then make a time jump. We do this such that we don't pollute the epoch cache. + // The warp must happen before we attach failure-event listeners, because any checkpoint proposal in flight at warp + // time will fail (its propose tx becomes invalid after the L1 timestamp jump) — that is a test-setup artifact, not + // a behavior we are asserting on. + if (OPEN_THE_HATCH) { + await ethCheatCodes.store( + await rollup.getEscapeHatchAddress(), + ethCheatCodes.keccak256(BigInt(EscapeHatchStorage.find(s => s.label === '$designatedProposer')!.slot), 1n), + escapeHatchProposerAddress.toField().toBigInt(), + ); + expect(await rollup.isEscapeHatchOpen(EpochNumber(Number(ESCAPE_HATCH_FREQUENCY)))).toBeTruthy(); + + logger.info(`Advancing to epoch ${ESCAPE_HATCH_FREQUENCY}`); + + await cheatCodes.rollup.advanceToEpoch(EpochNumber(Number(ESCAPE_HATCH_FREQUENCY)), { + offset: -ETHEREUM_SLOT_DURATION, + }); + } + + // Set up event listeners to track sequencer behavior during the vote-only window const failEvents: Array<{ type: keyof SequencerEvents; args: any }> = []; const blockProposedEvents: Array<{ blockNumber: any; slot: any }> = []; const checkpointPublishedEvents: Array<{ checkpoint: any; slot: any }> = []; - const sequencer = sequencerClient!.getSequencer(); - // Track failure events that indicate problems const failEventTypes: (keyof SequencerEvents)[] = [ 'block-build-failed', @@ -192,22 +215,6 @@ describe('e2e_escape_hatch_vote_only', () => { logger.warn(`Sequencer published checkpoint when escape hatch should be open`, args); }); - // We need to set it for hatch 1, and then make a time jump. 
We do this such that we don't pollute the epoch cache - if (OPEN_THE_HATCH) { - await ethCheatCodes.store( - await rollup.getEscapeHatchAddress(), - ethCheatCodes.keccak256(BigInt(EscapeHatchStorage.find(s => s.label === '$designatedProposer')!.slot), 1n), - escapeHatchProposerAddress.toField().toBigInt(), - ); - expect(await rollup.isEscapeHatchOpen(EpochNumber(Number(ESCAPE_HATCH_FREQUENCY)))).toBeTruthy(); - - logger.info(`Advancing to epoch ${ESCAPE_HATCH_FREQUENCY}`); - - await cheatCodes.rollup.advanceToEpoch(EpochNumber(Number(ESCAPE_HATCH_FREQUENCY)), { - offset: -ETHEREUM_SLOT_DURATION, - }); - } - const getStats = async () => ({ slot: await rollup.getSlotNumber(), epoch: await rollup.getEpochNumberForSlotNumber(await rollup.getSlotNumber()), @@ -229,20 +236,37 @@ describe('e2e_escape_hatch_vote_only', () => { 1, ); - const finalStats = await getStats(); - - // Due to the the stats not being pulled at the same time, a vote could land after the slot is fetched, but before the votes are. - // Therefore, we use the slots passed as the lower bound. - const slotsPassed = finalStats.slot - initialStats.slot; + // Snapshot the slot we will assert against now; under proposer pipelining the sequencer signs a vote in build + // slot N for target slot N+1 and submits it at the start of N+1, so the votes corresponding to slots up through + // `slotAtMeasurement` lag the current slot by one. Wait for the L1 slot to advance two more so the last + // in-flight vote (signed for `slotAtMeasurement`) has time to mine before we count votes. 
+ const slotAtMeasurement = await rollup.getSlotNumber(); + const slotsPassed = slotAtMeasurement - initialStats.slot; expect(slotsPassed).toBeGreaterThan(0); + const drainTarget = slotAtMeasurement + 2; + await retryUntil( + () => rollup.getSlotNumber().then(s => s >= drainTarget), + 'pipelined vote drain', + AZTEC_SLOT_DURATION * 4, + 1, + ); + + const finalStats = await getStats(); expect(finalStats.votes - initialStats.votes).toBeGreaterThanOrEqual(slotsPassed); if (OPEN_THE_HATCH) { expect(finalStats.pending - initialStats.pending).toBe(0); // When escape hatch is open, sequencer should only vote, not build blocks nor checkpoints, but there should also be no failures. - expect(blockProposedEvents).toEqual([]); - expect(failEvents).toEqual([]); - expect(checkpointPublishedEvents).toEqual([]); + // Filter out events corresponding to pre-warp slots — they are checkpoint proposals that were in flight when + // the test warped past their target slot and whose L1 propose tx then fails. That's a setup artifact of the + // warp, not behavior we are asserting on in the vote-only window. + const inVoteOnlyWindow = (e: T) => { + const slotValue = (e as any).slot ?? (e as any).args?.slot; + return slotValue === undefined || Number(slotValue) >= Number(initialStats.slot); + }; + expect(blockProposedEvents.filter(inVoteOnlyWindow)).toEqual([]); + expect(failEvents.filter(inVoteOnlyWindow)).toEqual([]); + expect(checkpointPublishedEvents.filter(inVoteOnlyWindow)).toEqual([]); } else { expect(finalStats.pending - initialStats.pending).toBeGreaterThanOrEqual(slotsPassed); }