refactor: no calls to pedersen from TS (#2724)
Fixes #435 

This PR doesn't fully address the original issue, for two reasons:
1. Most of the direct pedersen calls are used by the merkle trees. Given
that the merkle trees will be fully replaced by an application in Rust
or C++, it doesn't make sense to invest time in that code.
2. The remaining calls are performed in tests where we check whether
some hash corresponds to the one computed in Noir. It doesn't really
make sense to create specific bindings just for tests, though I don't
have a strong opinion on this.

I marked the pedersen functions as deprecated, with a message not to use
them in production code. I think that's the best way to go about this at
this point.
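
For reference, the deprecation pattern applied to the TS pedersen wrappers looks roughly like this. It is a minimal sketch: the function name mirrors the real wrapper, but the `wasm` parameter is dropped and the body is stubbed, so only the `@deprecated` JSDoc tag is meant literally.

```typescript
/**
 * Combines two 32-byte hashes into a new 32-byte hash.
 * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for
 * specific purposes.
 */
export function pedersenCompress(lhs: Uint8Array, rhs: Uint8Array): Buffer {
  // Stub for illustration only; the real wrapper takes an IWasmModule and calls into barretenberg.
  return Buffer.concat([Buffer.from(lhs), Buffer.from(rhs)]).subarray(0, 32);
}

// Editors and linters surface the @deprecated tag at every call site, nudging callers toward
// purpose-named helpers such as computePublicDataTreeIndex instead.
```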

# Checklist:
Remove the checklist to signal you've completed it. Enable auto-merge if
the PR is ready to merge.
- [ ] If the pull request requires a cryptography review (e.g.
cryptographic algorithm implementations) I have added the 'crypto' tag.
- [ ] I have reviewed my diff in GitHub, line by line, and removed
unexpected formatting changes, testing logs, or commented-out code.
- [ ] Every change is related to the PR description.
- [ ] I have
[linked](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue)
this pull request to relevant issues (if any exist).
benesjan authored Oct 11, 2023
1 parent 8efa374 commit 78e44c3
Showing 14 changed files with 68 additions and 60 deletions.
4 changes: 2 additions & 2 deletions circuits/cpp/src/aztec3/circuits/hash.hpp
@@ -381,10 +381,10 @@ template <typename NCT> typename NCT::fr compute_public_data_tree_value(typename
* @param storage_slot The storage slot to which the inserted element belongs
* @return The index for insertion into the public data tree
*/
template <typename NCT> typename NCT::fr compute_public_data_tree_index(typename NCT::fr const& contract_address,
template <typename NCT> typename NCT::fr compute_public_data_tree_index(typename NCT::address const& contract_address,
typename NCT::fr const& storage_slot)
{
return NCT::compress({ contract_address, storage_slot }, GeneratorIndex::PUBLIC_LEAF_INDEX);
return NCT::compress({ contract_address.to_field(), storage_slot }, GeneratorIndex::PUBLIC_LEAF_INDEX);
}

template <typename NCT> typename NCT::fr compute_l2_to_l1_hash(typename NCT::address const& contract_address,
1 change: 0 additions & 1 deletion yarn-project/acir-simulator/src/index.ts
@@ -1,4 +1,3 @@
export * from './client/index.js';
export * from './acvm/index.js';
export * from './public/index.js';
export { computeSlotForMapping } from './utils.js';
4 changes: 2 additions & 2 deletions yarn-project/acir-simulator/src/public/execution.ts
@@ -122,7 +122,7 @@ function contractStorageReadToPublicDataRead(
contractAddress: AztecAddress,
): PublicDataRead {
return new PublicDataRead(
computePublicDataTreeIndex(wasm, contractAddress.toField(), read.storageSlot),
computePublicDataTreeIndex(wasm, contractAddress, read.storageSlot),
computePublicDataTreeValue(wasm, read.currentValue),
read.sideEffectCounter!,
);
@@ -141,7 +141,7 @@ function contractStorageUpdateRequestToPublicDataUpdateRequest(
contractAddress: AztecAddress,
): PublicDataUpdateRequest {
return new PublicDataUpdateRequest(
computePublicDataTreeIndex(wasm, contractAddress.toField(), update.storageSlot),
computePublicDataTreeIndex(wasm, contractAddress, update.storageSlot),
computePublicDataTreeValue(wasm, update.oldValue),
computePublicDataTreeValue(wasm, update.newValue),
update.sideEffectCounter!,
6 changes: 3 additions & 3 deletions yarn-project/aztec-node/src/aztec-node/server.ts
@@ -8,6 +8,7 @@ import {
L1_TO_L2_MSG_TREE_HEIGHT,
PRIVATE_DATA_TREE_HEIGHT,
} from '@aztec/circuits.js';
import { computePublicDataTreeIndex } from '@aztec/circuits.js/abis';
import { L1ContractAddresses } from '@aztec/ethereum';
import { AztecAddress } from '@aztec/foundation/aztec-address';
import { createDebugLogger } from '@aztec/foundation/log';
@@ -41,7 +42,6 @@ import {
ServerWorldStateSynchronizer,
WorldStateConfig,
WorldStateSynchronizer,
computePublicDataTreeLeafIndex,
getConfigEnvVars as getWorldStateConfig,
} from '@aztec/world-state';

@@ -321,8 +321,8 @@ export class AztecNodeService implements AztecNode {
*/
public async getPublicStorageAt(contract: AztecAddress, slot: bigint): Promise<Buffer | undefined> {
const committedDb = await this.#getWorldState();
const leafIndex = computePublicDataTreeLeafIndex(contract, new Fr(slot), await CircuitsWasm.get());
return committedDb.getLeafValue(MerkleTreeId.PUBLIC_DATA_TREE, leafIndex);
const leafIndex = computePublicDataTreeIndex(await CircuitsWasm.get(), contract, new Fr(slot));
return committedDb.getLeafValue(MerkleTreeId.PUBLIC_DATA_TREE, leafIndex.value);
}

/**
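The net effect on callers: the world-state helper `computePublicDataTreeLeafIndex(contract, slot, wasm)` is replaced by the circuits.js binding `computePublicDataTreeIndex(wasm, contract, slot)`, which takes the `AztecAddress` directly and returns an `Fr`, so call sites read `.value` to obtain the bigint leaf index. A hedged sketch of the resulting read path; the `LeafDb` shape stands in for the world-state `MerkleTreeOperations` and is an assumption:

```typescript
import { AztecAddress, CircuitsWasm, Fr } from '@aztec/circuits.js';
import { computePublicDataTreeIndex } from '@aztec/circuits.js/abis';
import { MerkleTreeId } from '@aztec/types';

// Simplified stand-in for the world-state tree interface (signature assumed).
type LeafDb = { getLeafValue(treeId: MerkleTreeId, index: bigint): Promise<Buffer | undefined> };

async function readPublicStorage(db: LeafDb, contract: AztecAddress, slot: bigint): Promise<Buffer | undefined> {
  const wasm = await CircuitsWasm.get();
  const leafIndex = computePublicDataTreeIndex(wasm, contract, new Fr(slot)); // returns an Fr
  return db.getLeafValue(MerkleTreeId.PUBLIC_DATA_TREE, leafIndex.value); // .value yields the bigint index
}
```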
6 changes: 3 additions & 3 deletions yarn-project/circuits.js/src/abis/abis.ts
@@ -252,7 +252,7 @@ export function siloCommitment(wasm: IWasmModule, contract: AztecAddress, innerC
}

/**
* Computes a unique commitment. It includes a nonce which contains data that guarantees the commiment will be unique.
* Computes a unique commitment. It includes a nonce which contains data that guarantees the commitment will be unique.
* @param wasm - A module providing low-level wasm access.
* @param nonce - The contract address.
* @param siloedCommitment - An siloed commitment.
@@ -367,11 +367,11 @@ export function computePublicDataTreeValue(wasm: IWasmModule, value: Fr): Fr {
* Computes a public data tree index from contract address and storage slot.
* @param wasm - A module providing low-level wasm access.
* @param contractAddress - Contract where insertion is occurring.
* @param storageSlot - Storage slot where insertion is occuring.
* @param storageSlot - Storage slot where insertion is occurring.
* @returns Public data tree index computed from contract address and storage slot.
*/
export function computePublicDataTreeIndex(wasm: IWasmModule, contractAddress: Fr, storageSlot: Fr): Fr {
export function computePublicDataTreeIndex(wasm: IWasmModule, contractAddress: AztecAddress, storageSlot: Fr): Fr {
wasm.call('pedersen__init');
return abisComputePublicDataTreeIndex(wasm, contractAddress, storageSlot);
}
@@ -10,6 +10,8 @@ import { deserializeArrayFromVector, deserializeField, serializeBufferArrayToVec
* @param lhs - The first hash.
* @param rhs - The second hash.
* @returns The new 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenCompress(wasm: IWasmModule, lhs: Uint8Array, rhs: Uint8Array): Buffer {
// If not done already, precompute constants.
@@ -29,6 +31,8 @@ export function pedersenCompress(wasm: IWasmModule, lhs: Uint8Array, rhs: Uint8A
* @param lhs - The first hash.
* @param rhs - The second hash.
* @returns The new 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenHashInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer {
// If not done already, precompute constants.
@@ -44,6 +48,8 @@ export function pedersenHashInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer
* @param wasm - The barretenberg module.
* @param inputs - The array of buffers to compress.
* @returns The resulting 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenCompressInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer {
// If not done already, precompute constants.
@@ -59,6 +65,8 @@ export function pedersenCompressInputs(wasm: IWasmModule, inputs: Buffer[]): Buf
* @param wasm - The barretenberg module.
* @param inputs - The array of buffers to compress.
* @returns The resulting 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenPlookupCommitInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer {
// If not done already, precompute constants.
@@ -75,6 +83,8 @@ export function pedersenPlookupCommitInputs(wasm: IWasmModule, inputs: Buffer[])
* @param inputs - The array of buffers to compress.
* @param hashIndex - Hash index of the generator to use (See GeneratorIndex enum).
* @returns The resulting 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenPlookupCommitWithHashIndex(wasm: IWasmModule, inputs: Buffer[], hashIndex: number): Buffer {
// If not done already, precompute constants.
@@ -91,6 +101,8 @@ export function pedersenPlookupCommitWithHashIndex(wasm: IWasmModule, inputs: Bu
* @param inputs - The array of buffers to compress.
* @param hashIndex - Hash index of the generator to use (See GeneratorIndex enum).
* @returns The resulting 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenCompressWithHashIndex(wasm: IWasmModule, inputs: Buffer[], hashIndex: number): Buffer {
// If not done already, precompute constants.
@@ -107,6 +119,8 @@ export function pedersenCompressWithHashIndex(wasm: IWasmModule, inputs: Buffer[
* @param inputs - The array of buffers to compress.
* @param hashIndex - Hash index of the generator to use (See GeneratorIndex enum).
* @returns The resulting 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenPlookupCompressWithHashIndex(wasm: IWasmModule, inputs: Buffer[], hashIndex: number): Buffer {
// If not done already, precompute constants.
@@ -122,6 +136,8 @@ export function pedersenPlookupCompressWithHashIndex(wasm: IWasmModule, inputs:
* @param wasm - The barretenberg module.
* @param data - The data buffer.
* @returns The hash buffer.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenGetHash(wasm: IWasmModule, data: Buffer): Buffer {
// If not done already, precompute constants.
@@ -144,6 +160,8 @@ export function pedersenGetHash(wasm: IWasmModule, data: Buffer): Buffer {
* @param wasm - The barretenberg module.
* @param values - The 32 byte pedersen leaves.
* @returns A tree represented by an array.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export function pedersenGetHashTree(wasm: IWasmModule, values: Buffer[]) {
// If not done already, precompute constants.
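Per point 2 of the description, tests may keep calling the raw wrappers to cross-check hashes computed in Noir. A hypothetical sketch of that kind of test-only usage; the relative import path and the Noir fixture are assumptions, not taken from this commit:

```typescript
import { CircuitsWasm } from '@aztec/circuits.js';
import { pedersenHashInputs } from './pedersen.js'; // assumed path to the deprecated wrappers

declare const expectedNoirHash: Buffer; // fixture produced by the Noir side; value omitted here

// Uses jest globals (`it`, `expect`).
it('matches the hash computed in Noir', async () => {
  const wasm = await CircuitsWasm.get();
  const hash = pedersenHashInputs(wasm, [Buffer.alloc(32, 1), Buffer.alloc(32, 2)]);
  expect(hash).toEqual(expectedNoirHash); // calling the deprecated wrapper is acceptable in tests
});
```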
2 changes: 1 addition & 1 deletion yarn-project/circuits.js/src/cbind/circuits.gen.ts
@@ -3208,7 +3208,7 @@ export function abisComputeGlobalsHash(wasm: IWasmModule, arg0: GlobalVariables)
export function abisComputePublicDataTreeValue(wasm: IWasmModule, arg0: Fr): Fr {
return Fr.fromBuffer(callCbind(wasm, 'abis__compute_public_data_tree_value', [toBuffer(arg0)]));
}
export function abisComputePublicDataTreeIndex(wasm: IWasmModule, arg0: Fr, arg1: Fr): Fr {
export function abisComputePublicDataTreeIndex(wasm: IWasmModule, arg0: Address, arg1: Fr): Fr {
return Fr.fromBuffer(callCbind(wasm, 'abis__compute_public_data_tree_index', [toBuffer(arg0), toBuffer(arg1)]));
}
export function privateKernelDummyPreviousKernel(wasm: IWasmModule): PreviousKernelData {
18 changes: 18 additions & 0 deletions yarn-project/merkle-tree/src/pedersen.ts
@@ -9,22 +9,40 @@ import { Hasher } from '@aztec/types';

/**
* A helper class encapsulating Pedersen hash functionality.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export class Pedersen implements Hasher {
constructor(private wasm: IWasmModule) {}

/*
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
public compress(lhs: Uint8Array, rhs: Uint8Array): Buffer {
return pedersenCompress(this.wasm, lhs, rhs);
}

/*
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
public compressInputs(inputs: Buffer[]): Buffer {
return pedersenHashInputs(this.wasm, inputs);
}

/*
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
public hashToField(data: Uint8Array): Buffer {
return pedersenGetHash(this.wasm, Buffer.from(data));
}

/*
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
public hashToTree(leaves: Buffer[]): Promise<Buffer[]> {
return Promise.resolve(pedersenGetHashTree(this.wasm, leaves));
}
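The merkle-tree package keeps this Pedersen class as its Hasher implementation despite the deprecation, since (per the description) the TS trees are slated to be replaced by a Rust/C++ application anyway. A minimal usage sketch, assuming the package index re-exports `Pedersen`:

```typescript
import { CircuitsWasm } from '@aztec/circuits.js';
import { Pedersen } from '@aztec/merkle-tree'; // assumed re-export of src/pedersen.ts

async function hashSiblings(left: Buffer, right: Buffer): Promise<Buffer> {
  const wasm = await CircuitsWasm.get();
  const hasher = new Pedersen(wasm); // wraps the deprecated pedersen wrappers
  return hasher.compress(left, right); // editors flag this call as deprecated
}
```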
@@ -2,6 +2,8 @@ import { Pedersen } from '../../index.js';

/**
* A test utility allowing us to count the number of times the compress function has been called.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
export class PedersenWithCounter extends Pedersen {
/**
@@ -14,6 +16,8 @@ export class PedersenWithCounter extends Pedersen {
* @param lhs - The first hash.
* @param rhs - The second hash.
* @returns The new 32-byte hash.
* @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific
* purposes.
*/
public compress(lhs: Uint8Array, rhs: Uint8Array): Buffer {
this.compressCounter++;
@@ -36,15 +36,20 @@ import {
VerificationKey,
makeTuple,
} from '@aztec/circuits.js';
import { computeBlockHash, computeBlockHashWithGlobals, computeContractLeaf } from '@aztec/circuits.js/abis';
import {
computeBlockHash,
computeBlockHashWithGlobals,
computeContractLeaf,
computeGlobalsHash,
} from '@aztec/circuits.js/abis';
import { toFriendlyJSON } from '@aztec/circuits.js/utils';
import { toBigIntBE } from '@aztec/foundation/bigint-buffer';
import { padArrayEnd } from '@aztec/foundation/collection';
import { Fr } from '@aztec/foundation/fields';
import { createDebugLogger } from '@aztec/foundation/log';
import { Tuple, assertLength } from '@aztec/foundation/serialize';
import { ContractData, L2Block, L2BlockL2Logs, MerkleTreeId, PublicDataWrite, TxL2Logs } from '@aztec/types';
import { MerkleTreeOperations, computeGlobalVariablesHash } from '@aztec/world-state';
import { MerkleTreeOperations } from '@aztec/world-state';

import chunk from 'lodash.chunk';
import flatMap from 'lodash.flatmap';
@@ -308,7 +313,7 @@ export class SoloBlockBuilder implements BlockBuilder {
// Update the root trees with the latest data and contract tree roots,
// and validate them against the output of the root circuit simulation
this.debug(`Updating and validating root trees`);
const globalVariablesHash = await computeGlobalVariablesHash(left[0].constants.globalVariables);
const globalVariablesHash = computeGlobalsHash(await CircuitsWasm.get(), left[0].constants.globalVariables);
await this.db.updateLatestGlobalVariablesHash(globalVariablesHash);
await this.db.updateHistoricBlocksTree(globalVariablesHash);

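The block builder now hashes global variables through the circuits.js abi binding instead of the removed world-state helper; the same substitution recurs in merkle_trees.ts further down. A small sketch of the replacement call, assuming `GlobalVariables` is exported from `@aztec/circuits.js`:

```typescript
import { CircuitsWasm, GlobalVariables } from '@aztec/circuits.js';
import { computeGlobalsHash } from '@aztec/circuits.js/abis';

// Callers now load the wasm module themselves; the hash itself is computed synchronously.
async function hashGlobals(globals: GlobalVariables) {
  const wasm = await CircuitsWasm.get();
  return computeGlobalsHash(wasm, globals); // replaces `await computeGlobalVariablesHash(globals)`
}
```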
@@ -6,8 +6,9 @@ import {
PublicStateDB,
} from '@aztec/acir-simulator';
import { AztecAddress, CircuitsWasm, EthAddress, Fr, FunctionSelector, HistoricBlockData } from '@aztec/circuits.js';
import { computePublicDataTreeIndex } from '@aztec/circuits.js/abis';
import { ContractDataSource, ExtendedContractData, L1ToL2MessageSource, MerkleTreeId, Tx } from '@aztec/types';
import { MerkleTreeOperations, computePublicDataTreeLeafIndex } from '@aztec/world-state';
import { MerkleTreeOperations } from '@aztec/world-state';

/**
* Returns a new PublicExecutor simulator backed by the supplied merkle tree db and contract data source.
@@ -31,7 +32,7 @@ export function getPublicExecutor(

/**
* Implements the PublicContractsDB using a ContractDataSource.
* Progresively records contracts in transaction as they are processed in a block.
* Progressively records contracts in transaction as they are processed in a block.
*/
export class ContractsDataSourcePublicDB implements PublicContractsDB {
cache = new Map<string, ExtendedContractData>();
@@ -106,7 +107,7 @@ class WorldStatePublicDB implements PublicStateDB {
* @returns The current value in the storage slot.
*/
public async storageRead(contract: AztecAddress, slot: Fr): Promise<Fr> {
const index = computePublicDataTreeLeafIndex(contract, slot, await CircuitsWasm.get());
const index = computePublicDataTreeIndex(await CircuitsWasm.get(), contract, slot).value;
const cached = this.writeCache.get(index);
if (cached !== undefined) return cached;
const value = await this.db.getLeafValue(MerkleTreeId.PUBLIC_DATA_TREE, index);
@@ -120,7 +121,7 @@
* @param newValue - The new value to store.
*/
public async storageWrite(contract: AztecAddress, slot: Fr, newValue: Fr): Promise<void> {
const index = computePublicDataTreeLeafIndex(contract, slot, await CircuitsWasm.get());
const index = computePublicDataTreeIndex(await CircuitsWasm.get(), contract, slot).value;
this.writeCache.set(index, newValue);
}
}
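For context on the public-state DB above: writes go into an in-memory cache keyed by the computed leaf index, and reads consult that cache before falling back to the committed tree, giving read-your-writes semantics during a block's execution. A condensed sketch with simplified types; the `LeafDb` shape, the `Map<bigint, Fr>` cache, and the zero fallback for missing leaves are assumptions:

```typescript
import { AztecAddress, CircuitsWasm, Fr } from '@aztec/circuits.js';
import { computePublicDataTreeIndex } from '@aztec/circuits.js/abis';
import { MerkleTreeId } from '@aztec/types';

type LeafDb = { getLeafValue(treeId: MerkleTreeId, index: bigint): Promise<Buffer | undefined> };

class CachedPublicState {
  private writeCache = new Map<bigint, Fr>();

  constructor(private db: LeafDb) {}

  async storageRead(contract: AztecAddress, slot: Fr): Promise<Fr> {
    const index = computePublicDataTreeIndex(await CircuitsWasm.get(), contract, slot).value;
    const cached = this.writeCache.get(index);
    if (cached !== undefined) return cached; // read-your-writes within the block
    const value = await this.db.getLeafValue(MerkleTreeId.PUBLIC_DATA_TREE, index);
    return value ? Fr.fromBuffer(value) : Fr.ZERO; // missing leaf reads as zero (assumption)
  }

  async storageWrite(contract: AztecAddress, slot: Fr, newValue: Fr): Promise<void> {
    const index = computePublicDataTreeIndex(await CircuitsWasm.get(), contract, slot).value;
    this.writeCache.set(index, newValue);
  }
}
```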
1 change: 0 additions & 1 deletion yarn-project/world-state/src/index.ts
@@ -1,4 +1,3 @@
export * from './synchronizer/index.js';
export * from './world-state-db/index.js';
export * from './utils.js';
export * from './synchronizer/config.js';
35 changes: 0 additions & 35 deletions yarn-project/world-state/src/utils.ts

This file was deleted.

9 changes: 4 additions & 5 deletions yarn-project/world-state/src/world-state-db/merkle_trees.ts
@@ -10,7 +10,7 @@ import {
PRIVATE_DATA_TREE_HEIGHT,
PUBLIC_DATA_TREE_HEIGHT,
} from '@aztec/circuits.js';
import { computeBlockHash } from '@aztec/circuits.js/abis';
import { computeBlockHash, computeGlobalsHash } from '@aztec/circuits.js/abis';
import { Committable } from '@aztec/foundation/committable';
import { SerialQueue } from '@aztec/foundation/fifo';
import { createDebugLogger } from '@aztec/foundation/log';
@@ -33,7 +33,6 @@ import { L2Block, MerkleTreeId, SiblingPath } from '@aztec/types';
import { default as levelup } from 'levelup';

import { MerkleTreeOperationsFacade } from '../merkle-tree/merkle_tree_operations_facade.js';
import { computeGlobalVariablesHash } from '../utils.js';
import {
CurrentTreeRoots,
HandleL2BlockResult,
@@ -127,12 +126,12 @@ export class MerkleTrees implements MerkleTreeDb {

// The first leaf in the blocks tree contains the empty roots of the other trees and empty global variables.
if (!fromDb) {
const initialGlobalVariablesHash = await computeGlobalVariablesHash(GlobalVariables.empty());
const initialGlobalVariablesHash = computeGlobalsHash(wasm, GlobalVariables.empty());
await this._updateLatestGlobalVariablesHash(initialGlobalVariablesHash);
await this._updateHistoricBlocksTree(initialGlobalVariablesHash, true);
await this._commit();
} else {
await this._updateLatestGlobalVariablesHash(await computeGlobalVariablesHash(fromDbOptions.globalVariables));
await this._updateLatestGlobalVariablesHash(computeGlobalsHash(wasm, fromDbOptions.globalVariables));
}
}

@@ -575,7 +574,7 @@ export class MerkleTrees implements MerkleTreeDb {
}

// Sync and add the block to the historic blocks tree
const globalVariablesHash = await computeGlobalVariablesHash(l2Block.globalVariables);
const globalVariablesHash = computeGlobalsHash(await CircuitsWasm.get(), l2Block.globalVariables);
await this._updateLatestGlobalVariablesHash(globalVariablesHash);
this.log(`Synced global variables with hash ${globalVariablesHash}`);

