diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts
index 594fb8fb062..84c9f43dc7e 100644
--- a/yarn-project/archiver/src/archiver/archiver.ts
+++ b/yarn-project/archiver/src/archiver/archiver.ts
@@ -145,10 +145,21 @@ export class Archiver implements ArchiveSource {
       await this.sync(blockUntilSynced);
     }
 
-    this.runningPromise = new RunningPromise(() => this.sync(false), this.pollingIntervalMs);
+    this.runningPromise = new RunningPromise(() => this.safeSync(), this.pollingIntervalMs);
     this.runningPromise.start();
   }
 
+  /**
+   * Syncs and catches exceptions.
+   */
+  private async safeSync() {
+    try {
+      await this.sync(false);
+    } catch (error) {
+      this.log.error('Error syncing archiver', error);
+    }
+  }
+
   /**
    * Fetches logs from L1 contracts and processes them.
    * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
@@ -166,10 +177,14 @@ export class Archiver implements ArchiveSource {
      *
      * This code does not handle reorgs.
      */
-    const { blocksSynchedTo, messagesSynchedTo } = await this.store.getSynchPoint();
+    const { blockBodiesSynchedTo, blocksSynchedTo, messagesSynchedTo } = await this.store.getSynchPoint();
     const currentL1BlockNumber = await this.publicClient.getBlockNumber();
 
-    if (currentL1BlockNumber <= blocksSynchedTo && currentL1BlockNumber <= messagesSynchedTo) {
+    if (
+      currentL1BlockNumber <= blocksSynchedTo &&
+      currentL1BlockNumber <= messagesSynchedTo &&
+      currentL1BlockNumber <= blockBodiesSynchedTo
+    ) {
       // chain hasn't moved forward
       // or it's been rolled back
       this.log.debug(`Nothing to sync`, { currentL1BlockNumber, blocksSynchedTo, messagesSynchedTo });
@@ -220,16 +235,19 @@ export class Archiver implements ArchiveSource {
     // Read all data from chain and then write to our stores at the end
     const nextExpectedL2BlockNum = BigInt((await this.store.getSynchedL2BlockNumber()) + 1);
 
+    this.log.debug(`Retrieving block bodies from ${blockBodiesSynchedTo + 1n} to ${currentL1BlockNumber}`);
     const retrievedBlockBodies = await retrieveBlockBodiesFromAvailabilityOracle(
       this.publicClient,
       this.availabilityOracleAddress,
       blockUntilSynced,
-      blocksSynchedTo + 1n,
+      blockBodiesSynchedTo + 1n,
       currentL1BlockNumber,
     );
 
-    const blockBodies = retrievedBlockBodies.retrievedData.map(([blockBody]) => blockBody);
-    await this.store.addBlockBodies(blockBodies);
+    this.log.debug(
+      `Retrieved ${retrievedBlockBodies.retrievedData.length} block bodies up to L1 block ${retrievedBlockBodies.lastProcessedL1BlockNumber}`,
+    );
+    await this.store.addBlockBodies(retrievedBlockBodies);
 
     // Now that we have block bodies we will retrieve block metadata and build L2 blocks from the bodies and
     // the metadata
@@ -237,6 +255,7 @@ export class Archiver implements ArchiveSource {
     {
       // @todo @LHerskind Investigate how necessary that nextExpectedL2BlockNum really is.
       // Also, I would expect it to break horribly if we have a reorg.
+      this.log.debug(`Retrieving block metadata from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`);
       const retrievedBlockMetadata = await retrieveBlockMetadataFromRollup(
         this.publicClient,
         this.rollupAddress,
@@ -278,17 +297,18 @@ export class Archiver implements ArchiveSource {
         } and ${currentL1BlockNumber}.`,
       );
 
-      // Set the `lastProcessedL1BlockNumber` to the smallest of the header and body retrieval
-      const min = (a: bigint, b: bigint) => (a < b ? a : b);
       retrievedBlocks = {
-        lastProcessedL1BlockNumber: min(
-          retrievedBlockMetadata.lastProcessedL1BlockNumber,
-          retrievedBlockBodies.lastProcessedL1BlockNumber,
-        ),
+        lastProcessedL1BlockNumber: retrievedBlockMetadata.lastProcessedL1BlockNumber,
         retrievedData: blocks,
       };
     }
 
+    this.log.debug(
+      `Processing retrieved blocks ${retrievedBlocks.retrievedData
+        .map(b => b.number)
+        .join(',')} with last processed L1 block ${retrievedBlocks.lastProcessedL1BlockNumber}`,
+    );
+
     await Promise.all(
       retrievedBlocks.retrievedData.map(block => {
         const noteEncryptedLogs = block.body.noteEncryptedLogs;
diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts
index d2dcd7ae9b1..a9de126ae78 100644
--- a/yarn-project/archiver/src/archiver/archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/archiver_store.ts
@@ -30,8 +30,10 @@ import { type DataRetrieval } from './data_retrieval.js';
  * Represents the latest L1 block processed by the archiver for various objects in L2.
  */
 export type ArchiverL1SynchPoint = {
-  /** Number of the last L1 block that added a new L2 block. */
+  /** Number of the last L1 block that added new L2 block metadata. */
   blocksSynchedTo: bigint;
+  /** Number of the last L1 block that added a new L2 block body. */
+  blockBodiesSynchedTo: bigint;
   /** Number of the last L1 block that added L1 -> L2 messages from the Inbox. */
   messagesSynchedTo: bigint;
 };
@@ -53,7 +55,7 @@ export interface ArchiverDataStore {
    * @param blockBodies - The L2 block bodies to be added to the store.
    * @returns True if the operation is successful.
    */
-  addBlockBodies(blockBodies: Body[]): Promise<boolean>;
+  addBlockBodies(blockBodies: DataRetrieval<Body>): Promise<boolean>;
 
   /**
    * Gets block bodies that have the same txsEffectsHashes as we supply.
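
The store interface change above is the heart of the patch: `addBlockBodies` now takes the whole `DataRetrieval<Body>` envelope instead of a bare `Body[]`, so every store implementation can persist the L1 watermark for bodies together with the bodies themselves, and `getSynchPoint` can report block metadata, block bodies and inbox messages as three independent sync points. A minimal sketch of that contract, assuming a hypothetical in-memory store (`SketchBodyStore` and `FakeBody` are illustrative names, not part of the codebase):

// Sketch only: `DataRetrieval<T>` mirrors the shape used in data_retrieval.ts.
type DataRetrieval<T> = {
  /** Last L1 block processed when fetching this batch. */
  lastProcessedL1BlockNumber: bigint;
  /** The objects retrieved from that L1 range. */
  retrievedData: T[];
};

type FakeBody = { txsEffectsHash: string };

class SketchBodyStore {
  private bodies = new Map<string, FakeBody>();
  private blockBodiesSynchedTo = 0n;

  // Persist the bodies and advance the watermark in the same call, so the
  // archiver can later resume body retrieval from blockBodiesSynchedTo + 1n.
  addBlockBodies(retrieval: DataRetrieval<FakeBody>): Promise<boolean> {
    for (const body of retrieval.retrievedData) {
      this.bodies.set(body.txsEffectsHash, body);
    }
    this.blockBodiesSynchedTo = retrieval.lastProcessedL1BlockNumber;
    return Promise.resolve(true);
  }

  getSynchPoint() {
    return { blockBodiesSynchedTo: this.blockBodiesSynchedTo };
  }
}

// Usage: the archiver hands the whole retrieval result straight to the store.
const store = new SketchBodyStore();
void store.addBlockBodies({ lastProcessedL1BlockNumber: 5n, retrievedData: [{ txsEffectsHash: '0xabc' }] });
// store.getSynchPoint() -> { blockBodiesSynchedTo: 5n }

This is also why the early-exit check in archiver.ts now compares `currentL1BlockNumber` against all three watermarks before concluding there is nothing to sync.
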
diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
index 3c306e30327..f523cd6a0a7 100644
--- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
+++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
@@ -1,4 +1,4 @@
-import { InboxLeaf, L2Block, LogId, LogType, TxHash } from '@aztec/circuit-types';
+import { type Body, InboxLeaf, L2Block, LogId, LogType, TxHash } from '@aztec/circuit-types';
 import '@aztec/circuit-types/jest';
 import { AztecAddress, Fr, INITIAL_L2_BLOCK_NUM, L1_TO_L2_MSG_SUBTREE_HEIGHT } from '@aztec/circuits.js';
 import {
@@ -14,7 +14,7 @@ import {
   SerializableContractInstance,
 } from '@aztec/types/contracts';
 
-import { type ArchiverDataStore } from './archiver_store.js';
+import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js';
 import { type DataRetrieval } from './data_retrieval.js';
 
 /**
@@ -25,6 +25,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
   describe(testName, () => {
     let store: ArchiverDataStore;
     let blocks: DataRetrieval<L2Block>;
+    let blockBodies: DataRetrieval<Body>;
     const blockTests: [number, number, () => L2Block[]][] = [
       [1, 1, () => blocks.retrievedData.slice(0, 1)],
       [10, 1, () => blocks.retrievedData.slice(9, 10)],
@@ -39,11 +40,15 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
         lastProcessedL1BlockNumber: 5n,
         retrievedData: Array.from({ length: 10 }).map((_, i) => L2Block.random(i + 1)),
       };
+      blockBodies = {
+        retrievedData: blocks.retrievedData.map(block => block.body),
+        lastProcessedL1BlockNumber: 4n,
+      };
     });
 
     describe('addBlocks', () => {
       it('returns success when adding block bodies', async () => {
-        await expect(store.addBlockBodies(blocks.retrievedData.map(block => block.body))).resolves.toBe(true);
+        await expect(store.addBlockBodies(blockBodies)).resolves.toBe(true);
       });
 
       it('returns success when adding blocks', async () => {
@@ -59,7 +64,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
     describe('getBlocks', () => {
       beforeEach(async () => {
         await store.addBlocks(blocks);
-        await store.addBlockBodies(blocks.retrievedData.map(block => block.body));
+        await store.addBlockBodies(blockBodies);
       });
 
       it.each(blockTests)('retrieves previously stored blocks', async (start, limit, getExpectedBlocks) => {
@@ -95,7 +100,8 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
         await expect(store.getSynchPoint()).resolves.toEqual({
           blocksSynchedTo: 0n,
           messagesSynchedTo: 0n,
-        });
+          blockBodiesSynchedTo: 0n,
+        } satisfies ArchiverL1SynchPoint);
       });
 
       it('returns the L1 block number in which the most recent L2 block was published', async () => {
@@ -103,7 +109,17 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
         await expect(store.getSynchPoint()).resolves.toEqual({
           blocksSynchedTo: blocks.lastProcessedL1BlockNumber,
           messagesSynchedTo: 0n,
-        });
+          blockBodiesSynchedTo: 0n,
+        } satisfies ArchiverL1SynchPoint);
+      });
+
+      it('returns the L1 block number in which the most recent L2 block body was published', async () => {
+        await store.addBlockBodies(blockBodies);
+        await expect(store.getSynchPoint()).resolves.toEqual({
+          blocksSynchedTo: 0n,
+          messagesSynchedTo: 0n,
+          blockBodiesSynchedTo: blockBodies.lastProcessedL1BlockNumber,
+        } satisfies ArchiverL1SynchPoint);
       });
 
       it('returns the L1 block number that most recently added messages from inbox', async () => {
@@ -114,7 +130,8 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
         await expect(store.getSynchPoint()).resolves.toEqual({
           blocksSynchedTo: 0n,
           messagesSynchedTo: 1n,
-        });
+          blockBodiesSynchedTo: 0n,
+        } satisfies ArchiverL1SynchPoint);
       });
     });
 
@@ -179,7 +196,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
           ),
         );
         await store.addBlocks(blocks);
-        await store.addBlockBodies(blocks.retrievedData.map(block => block.body));
+        await store.addBlockBodies(blockBodies);
       });
 
       it.each([
@@ -335,7 +352,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
       };
 
       await store.addBlocks(blocks);
-      await store.addBlockBodies(blocks.retrievedData.map(block => block.body));
+      await store.addBlockBodies(blockBodies);
 
       await Promise.all(
         blocks.retrievedData.map(block =>
diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts
index e39e9daff80..dce7a224dfb 100644
--- a/yarn-project/archiver/src/archiver/data_retrieval.ts
+++ b/yarn-project/archiver/src/archiver/data_retrieval.ts
@@ -1,6 +1,7 @@
 import { type Body, type InboxLeaf } from '@aztec/circuit-types';
 import { type AppendOnlyTreeSnapshot, Fr, type Header } from '@aztec/circuits.js';
 import { type EthAddress } from '@aztec/foundation/eth-address';
+import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log';
 import { RollupAbi } from '@aztec/l1-artifacts';
 import { type PublicClient, getAbiItem } from 'viem';
 
@@ -45,6 +46,7 @@ export async function retrieveBlockMetadataFromRollup(
   searchStartBlock: bigint,
   searchEndBlock: bigint,
   expectedNextL2BlockNum: bigint,
+  logger: DebugLogger = createDebugLogger('aztec:archiver'),
 ): Promise<DataRetrieval<[Header, AppendOnlyTreeSnapshot]>> {
   const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot][] = [];
   do {
@@ -61,13 +63,18 @@ export async function retrieveBlockMetadataFromRollup(
       break;
     }
 
+    const lastLog = l2BlockProcessedLogs[l2BlockProcessedLogs.length - 1];
+    logger.debug(
+      `Got L2 block processed logs for ${l2BlockProcessedLogs[0].blockNumber}-${lastLog.blockNumber} between ${searchStartBlock}-${searchEndBlock} L1 blocks`,
+    );
+
     const newBlockMetadata = await processL2BlockProcessedLogs(
       publicClient,
       expectedNextL2BlockNum,
       l2BlockProcessedLogs,
     );
     retrievedBlockMetadata.push(...newBlockMetadata);
-    searchStartBlock = l2BlockProcessedLogs[l2BlockProcessedLogs.length - 1].blockNumber! + 1n;
+    searchStartBlock = lastLog.blockNumber! + 1n;
     expectedNextL2BlockNum += BigInt(newBlockMetadata.length);
   } while (blockUntilSynced && searchStartBlock <= searchEndBlock);
   return { lastProcessedL1BlockNumber: searchStartBlock - 1n, retrievedData: retrievedBlockMetadata };
@@ -80,7 +87,7 @@ export async function retrieveBlockMetadataFromRollup(
  * @param blockUntilSynced - If true, blocks until the archiver has fully synced.
  * @param searchStartBlock - The block number to use for starting the search.
  * @param searchEndBlock - The highest block number that we should search up to.
- * @returns A array of tuples of L2 block bodies and their associated hash as well as the next eth block to search from
+ * @returns An array of L2 block bodies as well as the next eth block to search from
  */
 export async function retrieveBlockBodiesFromAvailabilityOracle(
   publicClient: PublicClient,
@@ -88,8 +95,8 @@ export async function retrieveBlockBodiesFromAvailabilityOracle(
   blockUntilSynced: boolean,
   searchStartBlock: bigint,
   searchEndBlock: bigint,
-): Promise<DataRetrieval<[Body, Buffer]>> {
-  const retrievedBlockBodies: [Body, Buffer][] = [];
+): Promise<DataRetrieval<Body>> {
+  const retrievedBlockBodies: Body[] = [];
 
   do {
     if (searchStartBlock > searchEndBlock) {
@@ -106,9 +113,10 @@ export async function retrieveBlockBodiesFromAvailabilityOracle(
     }
 
     const newBlockBodies = await processTxsPublishedLogs(publicClient, l2TxsPublishedLogs);
-    retrievedBlockBodies.push(...newBlockBodies);
-    searchStartBlock = l2TxsPublishedLogs[l2TxsPublishedLogs.length - 1].blockNumber! + 1n;
+    retrievedBlockBodies.push(...newBlockBodies.map(([body]) => body));
+    searchStartBlock = l2TxsPublishedLogs[l2TxsPublishedLogs.length - 1].blockNumber + 1n;
   } while (blockUntilSynced && searchStartBlock <= searchEndBlock);
+
   return { lastProcessedL1BlockNumber: searchStartBlock - 1n, retrievedData: retrievedBlockBodies };
 }
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.test.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.test.ts
index f267eaef5f6..f8cd220e109 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.test.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.test.ts
@@ -13,12 +13,14 @@ describe('Block Body Store', () => {
   it('Should add and return block bodies', async () => {
     const body = Body.random(1);
 
-    await archiverStore.addBlockBodies([body]);
+    await archiverStore.addBlockBodies({ retrievedData: [body], lastProcessedL1BlockNumber: 5n });
 
     const txsEffectsHash = body.getTxsEffectsHash();
 
     const [returnedBody] = await archiverStore.getBlockBodies([txsEffectsHash]);
-    expect(body).toStrictEqual(returnedBody);
+
+    const { blockBodiesSynchedTo } = await archiverStore.getSynchPoint();
+    expect(blockBodiesSynchedTo).toEqual(5n);
   });
 });
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
index a34317045cc..006e389b267 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
@@ -1,13 +1,19 @@
 import { Body } from '@aztec/circuit-types';
 import { createDebugLogger } from '@aztec/foundation/log';
-import { type AztecKVStore, type AztecMap } from '@aztec/kv-store';
+import { type AztecKVStore, type AztecMap, type AztecSingleton } from '@aztec/kv-store';
+
+import { type DataRetrieval } from '../data_retrieval.js';
 
 export class BlockBodyStore {
   /** Map block body hash to block body */
   #blockBodies: AztecMap<string, Buffer>;
 
+  /** Stores L1 block number in which the last processed L2 block body was included */
+  #lastSynchedL1Block: AztecSingleton<bigint>;
+
   constructor(private db: AztecKVStore, private log = createDebugLogger('aztec:archiver:block_body_store')) {
     this.#blockBodies = db.openMap('archiver_block_bodies');
+    this.#lastSynchedL1Block = db.openSingleton('archiver_block_bodies_last_synched_l1_block');
   }
 
   /**
@@ -15,12 +21,12 @@ export class BlockBodyStore {
    * @param blockBodies - The L2 block bodies to be added to the store.
    * @returns True if the operation is successful.
    */
-  addBlockBodies(blockBodies: Body[]): Promise<boolean> {
+  addBlockBodies(blockBodies: DataRetrieval<Body>): Promise<boolean> {
     return this.db.transaction(() => {
-      for (const body of blockBodies) {
+      for (const body of blockBodies.retrievedData) {
         void this.#blockBodies.set(body.getTxsEffectsHash().toString('hex'), body.toBuffer());
       }
-
+      void this.#lastSynchedL1Block.set(blockBodies.lastProcessedL1BlockNumber);
       return true;
     });
   }
@@ -57,4 +63,12 @@ export class BlockBodyStore {
 
     return blockBody && Body.fromBuffer(blockBody);
   }
+
+  /**
+   * Gets the last L1 block number in which an L2 block body was included
+   * @returns The L1 block number
+   */
+  getSynchedL1BlockNumber(): bigint {
+    return this.#lastSynchedL1Block.get() ?? 0n;
+  }
 }
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
index b5bee9f274e..2779d54579a 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
@@ -101,7 +101,7 @@ export class KVArchiverDataStore implements ArchiverDataStore {
    * @param blockBodies - The L2 block bodies to be added to the store.
    * @returns True if the operation is successful.
    */
-  addBlockBodies(blockBodies: Body[]): Promise<boolean> {
+  addBlockBodies(blockBodies: DataRetrieval<Body>): Promise<boolean> {
     return this.#blockBodyStore.addBlockBodies(blockBodies);
   }
 
@@ -260,6 +260,7 @@ export class KVArchiverDataStore implements ArchiverDataStore {
   getSynchPoint(): Promise<ArchiverL1SynchPoint> {
     return Promise.resolve({
       blocksSynchedTo: this.#blockStore.getSynchedL1BlockNumber(),
+      blockBodiesSynchedTo: this.#blockBodyStore.getSynchedL1BlockNumber(),
       messagesSynchedTo: this.#messageStore.getSynchedL1BlockNumber(),
     });
   }
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
index cd6bc3278fb..d9a2d5afc0b 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
@@ -83,6 +83,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
   private contractInstances: Map<string, ContractInstanceWithAddress> = new Map();
 
   private lastL1BlockNewBlocks: bigint = 0n;
+  private lastL1BlockNewBlockBodies: bigint = 0n;
   private lastL1BlockNewMessages: bigint = 0n;
 
   private lastProvenL2BlockNumber: number = 0;
@@ -163,11 +164,11 @@ export class MemoryArchiverStore implements ArchiverDataStore {
    * @param blockBodies - The L2 block bodies to be added to the store.
    * @returns True if the operation is successful.
    */
-  addBlockBodies(blockBodies: Body[]): Promise<boolean> {
-    for (const body of blockBodies) {
+  addBlockBodies(blockBodies: DataRetrieval<Body>): Promise<boolean> {
+    for (const body of blockBodies.retrievedData) {
       void this.l2BlockBodies.set(body.getTxsEffectsHash().toString('hex'), body);
     }
-
+    this.lastL1BlockNewBlockBodies = blockBodies.lastProcessedL1BlockNumber;
     return Promise.resolve(true);
   }
 
@@ -443,6 +444,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
     return Promise.resolve({
       blocksSynchedTo: this.lastL1BlockNewBlocks,
       messagesSynchedTo: this.lastL1BlockNewMessages,
+      blockBodiesSynchedTo: this.lastL1BlockNewBlockBodies,
     });
   }
 
diff --git a/yarn-project/circuit-types/src/l2_block.ts b/yarn-project/circuit-types/src/l2_block.ts
index f8e04a0a06f..efb61cd3e49 100644
--- a/yarn-project/circuit-types/src/l2_block.ts
+++ b/yarn-project/circuit-types/src/l2_block.ts
@@ -233,6 +233,7 @@ export class L2Block {
     return {
       txCount: this.body.txEffects.length,
       blockNumber: this.number,
+      blockTimestamp: this.header.globalVariables.timestamp.toNumber(),
       ...logsStats,
     };
   }
diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts
index 74696ddeca7..b4c705a48cb 100644
--- a/yarn-project/end-to-end/src/e2e_block_building.test.ts
+++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts
@@ -11,6 +11,7 @@ import {
   type PXE,
   type Wallet,
   deriveKeys,
+  retryUntil,
 } from '@aztec/aztec.js';
 import { times } from '@aztec/foundation/collection';
 import { poseidon2HashWithSeparator } from '@aztec/foundation/crypto';
@@ -316,6 +317,17 @@ describe('e2e_block_building', () => {
     }
   });
 
+  // Regression for https://github.com/AztecProtocol/aztec-packages/issues/7918
+  it('publishes two blocks with only padding txs', async () => {
+    ({ teardown, pxe, logger, aztecNode } = await setup(0, {
+      minTxsPerBlock: 0,
+      sequencerSkipSubmitProofs: true,
+      skipProtocolContracts: true,
+    }));
+
+    await retryUntil(async () => (await aztecNode.getBlockNumber()) >= 3, 'wait-block', 10, 1);
+  });
+
   // Regression for https://github.com/AztecProtocol/aztec-packages/issues/7537
   it('sends a tx on the first block', async () => {
     ({ teardown, pxe, logger, aztecNode } = await setup(0, {
diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts
index ffa1d5d8fb5..582ff82edd6 100644
--- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts
+++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts
@@ -201,11 +201,14 @@ export class Sequencer {
       const lastBlockTime = historicalHeader?.globalVariables.timestamp.toNumber() || 0;
       const currentTime = Math.floor(Date.now() / 1000);
       const elapsedSinceLastBlock = currentTime - lastBlockTime;
+      this.log.debug(
+        `Last block mined at ${lastBlockTime}, current time is ${currentTime} (elapsed ${elapsedSinceLastBlock}s)`,
+      );
 
       // Do not go forward with new block if not enough time has passed since last block
       if (this.minSecondsBetweenBlocks > 0 && elapsedSinceLastBlock < this.minSecondsBetweenBlocks) {
         this.log.debug(
-          `Not creating block because not enough time has passed since last block (last block at ${lastBlockTime} current time ${currentTime})`,
+          `Not creating block because less than ${this.minSecondsBetweenBlocks}s has passed since the last block`,
         );
         return;
       }
@@ -334,7 +337,7 @@ export class Sequencer {
       // less txs than the minimum. But that'd cause the entire block to be aborted and retried. Instead, we should
      // go back to the p2p pool and load more txs until we hit our minTxsPerBLock target. Only if there are no txs
      // we should bail.
-      if (processedTxs.length === 0 && !this.skipMinTxsPerBlockCheck(elapsedSinceLastBlock)) {
+      if (processedTxs.length === 0 && !this.skipMinTxsPerBlockCheck(elapsedSinceLastBlock) && this.minTxsPerBLock > 0) {
        this.log.verbose('No txs processed correctly to build block. Exiting');
        prover.cancelBlock();
        return;
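
Taken together, the sequencer changes tighten a small gating rule: do not build a block until `minSecondsBetweenBlocks` has elapsed, and only abort on an empty block when a minimum transaction count is actually configured, which is what allows the new padding-only e2e test to mine blocks with zero transactions. A rough sketch of that decision under those assumptions (`shouldBuildBlock`, `BlockGatingConfig` and `maxSecondsBetweenBlocks` are hypothetical names; the real Sequencer keeps this logic inline, behind `skipMinTxsPerBlockCheck`):

// Sketch only: not the Sequencer's actual API.
interface BlockGatingConfig {
  minSecondsBetweenBlocks: number;
  maxSecondsBetweenBlocks: number; // assumed to drive skipMinTxsPerBlockCheck
  minTxsPerBlock: number;
}

function shouldBuildBlock(cfg: BlockGatingConfig, elapsedSinceLastBlock: number, processedTxCount: number): boolean {
  // Too soon after the previous block: wait for the next polling iteration.
  if (cfg.minSecondsBetweenBlocks > 0 && elapsedSinceLastBlock < cfg.minSecondsBetweenBlocks) {
    return false;
  }
  // Once enough time has passed, an under-filled block is allowed anyway.
  const skipMinTxsCheck = cfg.maxSecondsBetweenBlocks > 0 && elapsedSinceLastBlock >= cfg.maxSecondsBetweenBlocks;
  // An empty block only aborts the iteration when a minimum tx count is configured.
  if (processedTxCount === 0 && !skipMinTxsCheck && cfg.minTxsPerBlock > 0) {
    return false;
  }
  return true;
}

// A sequencer configured for padding-only blocks (minTxsPerBlock: 0) still builds with no txs:
console.log(shouldBuildBlock({ minSecondsBetweenBlocks: 0, maxSecondsBetweenBlocks: 0, minTxsPerBlock: 0 }, 12, 0)); // true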