fix: store blockhash alongside blocks #3950

Merged · 5 commits · Jan 12, 2024
57 changes: 30 additions & 27 deletions yarn-project/archiver/src/archiver/lmdb_archiver_store.ts
@@ -46,6 +46,7 @@ type BlockIndexValue = [blockNumber: number, index: number];

type BlockContext = {
block?: Uint8Array;
blockHash?: Uint8Array;
l1BlockNumber?: Uint8Array;
encryptedLogs?: Uint8Array;
unencryptedLogs?: Uint8Array;
@@ -125,6 +126,7 @@ export class LMDBArchiverStore implements ArchiverDataStore {
const blockCtx = this.#tables.blocks.get(block.number) ?? {};
blockCtx.block = block.toBuffer();
blockCtx.l1BlockNumber = toBufferBE(block.getL1BlockNumber(), 32);
blockCtx.blockHash = block.getBlockHash();

// no need to await, all writes are enqueued in the transaction
// awaiting would interrupt the execution flow of this callback and "leak" the transaction to some other part
@@ -163,15 +165,10 @@ export class LMDBArchiverStore implements ArchiverDataStore {
.getRange(this.#computeBlockRange(start, limit))
.filter(({ value }) => value.block)
.map(({ value }) => {
const block = L2Block.fromBuffer(asBuffer(value.block!));
if (value.encryptedLogs) {
block.attachLogs(L2BlockL2Logs.fromBuffer(asBuffer(value.encryptedLogs)), LogType.ENCRYPTED);
}

if (value.unencryptedLogs) {
block.attachLogs(L2BlockL2Logs.fromBuffer(asBuffer(value.unencryptedLogs)), LogType.UNENCRYPTED);
}

const block = L2Block.fromBuffer(
asBuffer(value.block!),
value.blockHash ? asBuffer(value.blockHash) : undefined,
);
Comment on lines -166 to +171

Collaborator:
I'm worried that this change could mean that some consumers of L2Blocks will no longer be receiving their logs. WDYT about having L2Block and L2BlockWithLogs as two separate interfaces, so it's absolutely clear when we are moving around an object with logs and when we aren't?

Collaborator:
Not having an interface for L2Block is strange to me since it seems a major point of interaction for clients. Having two separate interfaces for blocks with/without logs feels like it would get painful as well. Maybe just:

```ts
interface L2Block {
    // ...
    newEncryptedLogs?: L2BlockL2Logs;
    // ...
}

type L2BlockWithLogs = L2Block & { newEncryptedLogs: L2BlockL2Logs };
type L2BlockWithoutLogs = L2Block & { newEncryptedLogs: undefined };
```

Then, in the class implementation of L2Block:

```ts
hasLogs(): this is L2BlockWithLogs {
    return this.newEncryptedLogs !== undefined;
}
```
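
As a rough illustration of how this guard would narrow the type for a caller (a hypothetical consumer, not code from this PR; the import path is an assumption):

```ts
import { L2Block } from '@aztec/circuit-types'; // assumed import path

// Hypothetical consumer of the proposed hasLogs() guard.
function countEncryptedTxLogs(block: L2Block): number {
  if (block.hasLogs()) {
    // The `this is L2BlockWithLogs` predicate narrows `block` here,
    // so `newEncryptedLogs` is known to be defined.
    return block.newEncryptedLogs.txLogs.length;
  }
  // Logs were not attached; the caller would have to fetch them separately.
  return 0;
}
```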

Contributor Author:
I agree with both of you, but changes are coming to this section of the code to bring it in line with the Yellow Paper, and I'd prefer to wait for them to land before touching it more than needed, if that's alright :)

return block;
}).asArray;

@@ -193,7 +190,7 @@ export class LMDBArchiverStore implements ArchiverDataStore {
return Promise.resolve(undefined);
}

const block = this.#getBlock(blockNumber, true);
const block = this.#getBlock(blockNumber);
return Promise.resolve(block?.getTx(txIndex));
}

@@ -415,18 +412,18 @@ export class LMDBArchiverStore implements ArchiverDataStore {
getUnencryptedLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
try {
if (filter.afterLog) {
return Promise.resolve(this.#filterLogsBetweenBlocks(filter));
return Promise.resolve(this.#filterUnencryptedLogsBetweenBlocks(filter));
} else if (filter.txHash) {
return Promise.resolve(this.#filterLogsOfTx(filter));
return Promise.resolve(this.#filterUnencryptedLogsOfTx(filter));
} else {
return Promise.resolve(this.#filterLogsBetweenBlocks(filter));
return Promise.resolve(this.#filterUnencryptedLogsBetweenBlocks(filter));
}
} catch (err) {
return Promise.reject(err);
}
}

#filterLogsOfTx(filter: LogFilter): GetUnencryptedLogsResponse {
#filterUnencryptedLogsOfTx(filter: LogFilter): GetUnencryptedLogsResponse {
if (!filter.txHash) {
throw new Error('Missing txHash');
}
@@ -436,19 +433,16 @@ export class LMDBArchiverStore implements ArchiverDataStore {
return { logs: [], maxLogsHit: false };
}

const block = this.#getBlock(blockNumber, true);
if (!block || !block.newUnencryptedLogs) {
return { logs: [], maxLogsHit: false };
}
const unencryptedLogsInBlock = this.#getBlockLogs(blockNumber, LogType.UNENCRYPTED);
const txLogs = unencryptedLogsInBlock.txLogs[txIndex].unrollLogs().map(log => UnencryptedL2Log.fromBuffer(log));

const txLogs = block.newUnencryptedLogs.txLogs[txIndex].unrollLogs().map(log => UnencryptedL2Log.fromBuffer(log));
const logs: ExtendedUnencryptedL2Log[] = [];
const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);

return { logs, maxLogsHit };
}

#filterLogsBetweenBlocks(filter: LogFilter): GetUnencryptedLogsResponse {
#filterUnencryptedLogsBetweenBlocks(filter: LogFilter): GetUnencryptedLogsResponse {
const start =
filter.afterLog?.blockNumber ?? Math.max(filter.fromBlock ?? INITIAL_L2_BLOCK_NUM, INITIAL_L2_BLOCK_NUM);
const end = filter.toBlock;
@@ -466,12 +460,7 @@ export class LMDBArchiverStore implements ArchiverDataStore {
let maxLogsHit = false;

loopOverBlocks: for (const blockNumber of blockNumbers) {
const block = this.#getBlock(blockNumber, true);
if (!block || !block.newUnencryptedLogs) {
continue;
}

const unencryptedLogsInBlock = block.newUnencryptedLogs;
const unencryptedLogsInBlock = this.#getBlockLogs(blockNumber, LogType.UNENCRYPTED);
for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < unencryptedLogsInBlock.txLogs.length; txIndex++) {
const txLogs = unencryptedLogsInBlock.txLogs[txIndex].unrollLogs().map(log => UnencryptedL2Log.fromBuffer(log));
maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter);
@@ -632,7 +621,10 @@ export class LMDBArchiverStore implements ArchiverDataStore {
return undefined;
}

const block = L2Block.fromBuffer(asBuffer(blockCtx.block));
const block = L2Block.fromBuffer(
asBuffer(blockCtx.block),
blockCtx.blockHash ? asBuffer(blockCtx.blockHash) : undefined,
);

if (withLogs) {
if (blockCtx.encryptedLogs) {
@@ -647,6 +639,17 @@ export class LMDBArchiverStore implements ArchiverDataStore {
return block;
}

#getBlockLogs(blockNumber: number, logType: LogType): L2BlockL2Logs {
const blockCtx = this.#tables.blocks.get(blockNumber);
const logs = blockCtx?.[logType === LogType.ENCRYPTED ? 'encryptedLogs' : 'unencryptedLogs'];

if (!logs) {
return new L2BlockL2Logs([]);
}

return L2BlockL2Logs.fromBuffer(asBuffer(logs));
}

#computeBlockRange(start: number, limit: number): Required<Pick<RangeOptions, 'start' | 'end'>> {
if (limit < 1) {
throw new Error(`Invalid limit: ${limit}`);
28 changes: 16 additions & 12 deletions yarn-project/circuit-types/src/l2_block.ts
@@ -321,9 +321,10 @@ export class L2Block {
/**
* Deserializes L2 block without logs from a buffer.
* @param buf - A serialized L2 block.
* @param blockHash - The hash of the block.
* @returns Deserialized L2 block.
*/
static fromBuffer(buf: Buffer | BufferReader) {
static fromBuffer(buf: Buffer | BufferReader, blockHash?: Buffer) {
const reader = BufferReader.asReader(buf);
const globalVariables = reader.readObject(GlobalVariables);
// TODO(#3938): update the encoding here
@@ -358,17 +359,20 @@
// TODO(#3938): populate bodyHash
const header = new Header(startArchiveSnapshot, [Fr.ZERO, Fr.ZERO], state, globalVariables);

return L2Block.fromFields({
archive: endArchiveSnapshot,
header,
newCommitments,
newNullifiers,
newPublicDataWrites,
newL2ToL1Msgs,
newContracts,
newContractData,
newL1ToL2Messages,
});
return L2Block.fromFields(
{
archive: endArchiveSnapshot,
header,
newCommitments,
newNullifiers,
newPublicDataWrites,
newL2ToL1Msgs,
newContracts,
newContractData,
newL1ToL2Messages,
},
blockHash,
);
}

/**
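
Taken together, the two files let the archiver persist the block hash next to the serialized block and hand it back to deserialization instead of dropping it. A minimal sketch of that round-trip, using only methods that appear in this diff (the import path and the standalone helper are assumptions, not code from the repository):

```ts
import { L2Block } from '@aztec/circuit-types'; // assumed import path

// Sketch of the store/read round-trip this PR enables.
function roundTrip(block: L2Block): L2Block {
  // What the archiver's write path now stores in its BlockContext entry:
  const serialized = block.toBuffer();
  const blockHash = block.getBlockHash();

  // What the read path now does: the stored hash is passed through to
  // fromBuffer alongside the block buffer instead of being discarded.
  return L2Block.fromBuffer(serialized, blockHash);
}
```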