From 40306b6d5ea01bf191288b0a3bca6fdbeae9912f Mon Sep 17 00:00:00 2001 From: Cody Gunton Date: Wed, 8 May 2024 13:31:40 -0400 Subject: [PATCH 01/43] refactor: Make MSM builder more explicit (#6110) After trying to understand the MSM builder part of the ECCVM builder, I did a refactor for clarity. This is almost entirely naming (e.g we had sometimes 4+ indices `i, j, k, m, idx` in deeply nested loops that I gave more explicit names) and comments. I also made the function that computes the trace rows return a table rather than to mutate one since there was no real reason to take the latter pattern. --- .../eccvm/eccvm_builder_types.hpp | 15 +- .../eccvm/eccvm_circuit_builder.hpp | 51 +-- .../src/barretenberg/eccvm/eccvm_flavor.hpp | 200 ++++---- .../src/barretenberg/eccvm/msm_builder.hpp | 430 +++++++++--------- .../eccvm/precomputed_tables_builder.hpp | 34 +- .../barretenberg/eccvm/transcript_builder.hpp | 14 +- .../op_queue/ecc_op_queue.hpp | 36 +- 7 files changed, 392 insertions(+), 388 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp index 95abffe5120..2db0d13abf2 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp @@ -4,13 +4,12 @@ #include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" namespace bb::eccvm { - -static constexpr size_t NUM_SCALAR_BITS = 128; -static constexpr size_t WNAF_SLICE_BITS = 4; -static constexpr size_t NUM_WNAF_SLICES = (NUM_SCALAR_BITS + WNAF_SLICE_BITS - 1) / WNAF_SLICE_BITS; -static constexpr uint64_t WNAF_MASK = static_cast((1ULL << WNAF_SLICE_BITS) - 1ULL); -static constexpr size_t POINT_TABLE_SIZE = 1ULL << (WNAF_SLICE_BITS); -static constexpr size_t WNAF_SLICES_PER_ROW = 4; +static constexpr size_t NUM_SCALAR_BITS = 128; // The length of scalars handled by the ECCVM +static constexpr size_t NUM_WNAF_DIGIT_BITS = 4; // Scalars are 
decomposed into base 16 in wNAF form +static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = NUM_SCALAR_BITS / NUM_WNAF_DIGIT_BITS; // 32 +static constexpr uint64_t WNAF_MASK = static_cast((1ULL << NUM_WNAF_DIGIT_BITS) - 1ULL); +static constexpr size_t POINT_TABLE_SIZE = 1ULL << (NUM_WNAF_DIGIT_BITS); +static constexpr size_t WNAF_DIGITS_PER_ROW = 4; static constexpr size_t ADDITIONS_PER_ROW = 4; template struct VMOperation { @@ -39,7 +38,7 @@ template struct ScalarMul { uint32_t pc; uint256_t scalar; typename CycleGroup::affine_element base_point; - std::array wnaf_slices; + std::array wnaf_digits; bool wnaf_skew; // size bumped by 1 to record base_point.dbl() std::array precomputed_table; diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp index b295133b12a..7f49af86030 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_circuit_builder.hpp @@ -24,11 +24,11 @@ class ECCVMCircuitBuilder { using AffineElement = typename CycleGroup::affine_element; static constexpr size_t NUM_SCALAR_BITS = bb::eccvm::NUM_SCALAR_BITS; - static constexpr size_t WNAF_SLICE_BITS = bb::eccvm::WNAF_SLICE_BITS; - static constexpr size_t NUM_WNAF_SLICES = bb::eccvm::NUM_WNAF_SLICES; + static constexpr size_t NUM_WNAF_DIGIT_BITS = bb::eccvm::NUM_WNAF_DIGIT_BITS; + static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = bb::eccvm::NUM_WNAF_DIGITS_PER_SCALAR; static constexpr uint64_t WNAF_MASK = bb::eccvm::WNAF_MASK; static constexpr size_t POINT_TABLE_SIZE = bb::eccvm::POINT_TABLE_SIZE; - static constexpr size_t WNAF_SLICES_PER_ROW = bb::eccvm::WNAF_SLICES_PER_ROW; + static constexpr size_t WNAF_DIGITS_PER_ROW = bb::eccvm::WNAF_DIGITS_PER_ROW; static constexpr size_t ADDITIONS_PER_ROW = bb::eccvm::ADDITIONS_PER_ROW; using MSM = bb::eccvm::MSM; @@ -50,7 +50,8 @@ class ECCVMCircuitBuilder { /** * For input point [P], return { 
-15[P], -13[P], ..., -[P], [P], ..., 13[P], 15[P] } */ - const auto compute_precomputed_table = [](const AffineElement& base_point) { + const auto compute_precomputed_table = + [](const AffineElement& base_point) -> std::array { const auto d2 = Element(base_point).dbl(); std::array table; table[POINT_TABLE_SIZE] = d2; // need this for later @@ -69,10 +70,10 @@ class ECCVMCircuitBuilder { } return result; }; - const auto compute_wnaf_slices = [](uint256_t scalar) { - std::array output; + const auto compute_wnaf_digits = [](uint256_t scalar) -> std::array { + std::array output; int previous_slice = 0; - for (size_t i = 0; i < NUM_WNAF_SLICES; ++i) { + for (size_t i = 0; i < NUM_WNAF_DIGITS_PER_SCALAR; ++i) { // slice the scalar into 4-bit chunks, starting with the least significant bits uint64_t raw_slice = static_cast(scalar) & WNAF_MASK; @@ -86,19 +87,19 @@ class ECCVMCircuitBuilder { } else if (is_even) { // for other slices, if it's even, we add 1 to the slice value // and subtract 16 from the previous slice to preserve the total scalar sum - static constexpr int borrow_constant = static_cast(1ULL << WNAF_SLICE_BITS); + static constexpr int borrow_constant = static_cast(1ULL << NUM_WNAF_DIGIT_BITS); previous_slice -= borrow_constant; wnaf_slice += 1; } if (i > 0) { const size_t idx = i - 1; - output[NUM_WNAF_SLICES - idx - 1] = previous_slice; + output[NUM_WNAF_DIGITS_PER_SCALAR - idx - 1] = previous_slice; } previous_slice = wnaf_slice; // downshift raw_slice by 4 bits - scalar = scalar >> WNAF_SLICE_BITS; + scalar = scalar >> NUM_WNAF_DIGIT_BITS; } ASSERT(scalar == 0); @@ -108,8 +109,6 @@ class ECCVMCircuitBuilder { return output; }; - // a vector of MSMs = a vector of a vector of scalar muls - // each mul size_t msm_count = 0; size_t active_mul_count = 0; std::vector msm_opqueue_index; @@ -118,6 +117,7 @@ class ECCVMCircuitBuilder { const auto& raw_ops = op_queue->get_raw_ops(); size_t op_idx = 0; + // populate opqueue and mul indices for (const auto& op : 
raw_ops) { if (op.mul) { if (op.z1 != 0 || op.z2 != 0) { @@ -142,39 +142,38 @@ class ECCVMCircuitBuilder { msm_sizes.push_back(active_mul_count); msm_count++; } - std::vector msms_test(msm_count); + std::vector result(msm_count); for (size_t i = 0; i < msm_count; ++i) { - auto& msm = msms_test[i]; + auto& msm = result[i]; msm.resize(msm_sizes[i]); } run_loop_in_parallel(msm_opqueue_index.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { - const size_t opqueue_index = msm_opqueue_index[i]; - const auto& op = raw_ops[opqueue_index]; + const auto& op = raw_ops[msm_opqueue_index[i]]; auto [msm_index, mul_index] = msm_mul_index[i]; if (op.z1 != 0) { - ASSERT(msms_test.size() > msm_index); - ASSERT(msms_test[msm_index].size() > mul_index); - msms_test[msm_index][mul_index] = (ScalarMul{ + ASSERT(result.size() > msm_index); + ASSERT(result[msm_index].size() > mul_index); + result[msm_index][mul_index] = (ScalarMul{ .pc = 0, .scalar = op.z1, .base_point = op.base_point, - .wnaf_slices = compute_wnaf_slices(op.z1), + .wnaf_digits = compute_wnaf_digits(op.z1), .wnaf_skew = (op.z1 & 1) == 0, .precomputed_table = compute_precomputed_table(op.base_point), }); mul_index++; } if (op.z2 != 0) { - ASSERT(msms_test.size() > msm_index); - ASSERT(msms_test[msm_index].size() > mul_index); + ASSERT(result.size() > msm_index); + ASSERT(result[msm_index].size() > mul_index); auto endo_point = AffineElement{ op.base_point.x * FF::cube_root_of_unity(), -op.base_point.y }; - msms_test[msm_index][mul_index] = (ScalarMul{ + result[msm_index][mul_index] = (ScalarMul{ .pc = 0, .scalar = op.z2, .base_point = endo_point, - .wnaf_slices = compute_wnaf_slices(op.z2), + .wnaf_digits = compute_wnaf_digits(op.z2), .wnaf_skew = (op.z2 & 1) == 0, .precomputed_table = compute_precomputed_table(endo_point), }); @@ -191,7 +190,7 @@ class ECCVMCircuitBuilder { // sumcheck relations that involve pc (if we did the other way around, starting at 1 and ending at num_muls, // we create 
a discontinuity in pc values between the last transcript row and the following empty row) uint32_t pc = num_muls; - for (auto& msm : msms_test) { + for (auto& msm : result) { for (auto& mul : msm) { mul.pc = pc; pc--; @@ -199,7 +198,7 @@ class ECCVMCircuitBuilder { } ASSERT(pc == 0); - return msms_test; + return result; } static std::vector get_flattened_scalar_muls(const std::vector& msms) diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp index e1828ca8fe4..759353edb0a 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp @@ -34,6 +34,7 @@ class ECCVMFlavor { using CommitmentKey = bb::CommitmentKey; using VerifierCommitmentKey = bb::VerifierCommitmentKey; using RelationSeparator = FF; + using MSM = bb::eccvm::MSM; static constexpr size_t NUM_WIRES = 74; @@ -358,6 +359,7 @@ class ECCVMFlavor { ProverPolynomials& operator=(ProverPolynomials&& o) noexcept = default; ~ProverPolynomials() = default; [[nodiscard]] size_t get_polynomial_size() const { return this->lagrange_first.size(); } + /** * @brief Returns the evaluations of all prover polynomials at one point on the boolean hypercube, which * represents one row in the execution trace. 
@@ -460,33 +462,28 @@ class ECCVMFlavor { */ ProverPolynomials(const CircuitBuilder& builder) { - const auto msms = builder.get_msms(); - const auto flattened_muls = builder.get_flattened_scalar_muls(msms); - - std::array, 2> point_table_read_counts; - const auto transcript_state = ECCVMTranscriptBuilder::compute_transcript_state( - builder.op_queue->get_raw_ops(), builder.get_number_of_muls()); - const auto precompute_table_state = ECCVMPrecomputedTablesBuilder::compute_precompute_state(flattened_muls); - const auto msm_state = ECCVMMSMMBuilder::compute_msm_state( - msms, point_table_read_counts, builder.get_number_of_muls(), builder.op_queue->get_num_msm_rows()); - - const size_t msm_size = msm_state.size(); - const size_t transcript_size = transcript_state.size(); - const size_t precompute_table_size = precompute_table_state.size(); - - const size_t num_rows = std::max(precompute_table_size, std::max(msm_size, transcript_size)); - - const auto num_rows_log2 = static_cast(numeric::get_msb64(num_rows)); - size_t num_rows_pow2 = 1UL << (num_rows_log2 + (1UL << num_rows_log2 == num_rows ? 0 : 1)); + // compute rows for the three different sections of the ECCVM execution trace + const auto transcript_rows = + ECCVMTranscriptBuilder::compute_rows(builder.op_queue->get_raw_ops(), builder.get_number_of_muls()); + const std::vector msms = builder.get_msms(); + const auto point_table_rows = + ECCVMPointTablePrecomputationBuilder::compute_rows(CircuitBuilder::get_flattened_scalar_muls(msms)); + const auto [msm_rows, point_table_read_counts] = ECCVMMSMMBuilder::compute_rows( + msms, builder.get_number_of_muls(), builder.op_queue->get_num_msm_rows()); + + const size_t num_rows = std::max({ point_table_rows.size(), msm_rows.size(), transcript_rows.size() }); + const auto log_num_rows = static_cast(numeric::get_msb64(num_rows)); + const size_t dyadic_num_rows = 1UL << (log_num_rows + (1UL << log_num_rows == num_rows ? 
0 : 1)); + + // allocate polynomials; define lagrange and lookup read count polynomials for (auto& poly : get_all()) { - poly = Polynomial(num_rows_pow2); + poly = Polynomial(dyadic_num_rows); } lagrange_first[0] = 1; lagrange_second[1] = 1; lagrange_last[lagrange_last.size() - 1] = 1; - for (size_t i = 0; i < point_table_read_counts[0].size(); ++i) { - // Explanation of off-by-one offset + // Explanation of off-by-one offset: // When computing the WNAF slice for a point at point counter value `pc` and a round index `round`, the // row number that computes the slice can be derived. This row number is then mapped to the index of // `lookup_read_counts`. We do this mapping in `ecc_msm_relation`. We are off-by-one because we add an @@ -495,106 +492,109 @@ class ECCVMFlavor { lookup_read_counts_0[i + 1] = point_table_read_counts[0][i]; lookup_read_counts_1[i + 1] = point_table_read_counts[1][i]; } - run_loop_in_parallel(transcript_state.size(), [&](size_t start, size_t end) { + + // compute polynomials for transcript columns + run_loop_in_parallel(transcript_rows.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { - transcript_accumulator_empty[i] = transcript_state[i].accumulator_empty; - transcript_add[i] = transcript_state[i].q_add; - transcript_mul[i] = transcript_state[i].q_mul; - transcript_eq[i] = transcript_state[i].q_eq; - transcript_reset_accumulator[i] = transcript_state[i].q_reset_accumulator; - transcript_msm_transition[i] = transcript_state[i].msm_transition; - transcript_pc[i] = transcript_state[i].pc; - transcript_msm_count[i] = transcript_state[i].msm_count; - transcript_Px[i] = transcript_state[i].base_x; - transcript_Py[i] = transcript_state[i].base_y; - transcript_z1[i] = transcript_state[i].z1; - transcript_z2[i] = transcript_state[i].z2; - transcript_z1zero[i] = transcript_state[i].z1_zero; - transcript_z2zero[i] = transcript_state[i].z2_zero; - transcript_op[i] = transcript_state[i].opcode; - transcript_accumulator_x[i] 
= transcript_state[i].accumulator_x; - transcript_accumulator_y[i] = transcript_state[i].accumulator_y; - transcript_msm_x[i] = transcript_state[i].msm_output_x; - transcript_msm_y[i] = transcript_state[i].msm_output_y; - transcript_collision_check[i] = transcript_state[i].collision_check; + transcript_accumulator_empty[i] = transcript_rows[i].accumulator_empty; + transcript_add[i] = transcript_rows[i].q_add; + transcript_mul[i] = transcript_rows[i].q_mul; + transcript_eq[i] = transcript_rows[i].q_eq; + transcript_reset_accumulator[i] = transcript_rows[i].q_reset_accumulator; + transcript_msm_transition[i] = transcript_rows[i].msm_transition; + transcript_pc[i] = transcript_rows[i].pc; + transcript_msm_count[i] = transcript_rows[i].msm_count; + transcript_Px[i] = transcript_rows[i].base_x; + transcript_Py[i] = transcript_rows[i].base_y; + transcript_z1[i] = transcript_rows[i].z1; + transcript_z2[i] = transcript_rows[i].z2; + transcript_z1zero[i] = transcript_rows[i].z1_zero; + transcript_z2zero[i] = transcript_rows[i].z2_zero; + transcript_op[i] = transcript_rows[i].opcode; + transcript_accumulator_x[i] = transcript_rows[i].accumulator_x; + transcript_accumulator_y[i] = transcript_rows[i].accumulator_y; + transcript_msm_x[i] = transcript_rows[i].msm_output_x; + transcript_msm_y[i] = transcript_rows[i].msm_output_y; + transcript_collision_check[i] = transcript_rows[i].collision_check; } }); // TODO(@zac-williamson) if final opcode resets accumulator, all subsequent "is_accumulator_empty" row // values must be 1. 
Ideally we find a way to tweak this so that empty rows that do nothing have column // values that are all zero (issue #2217) - if (transcript_state[transcript_state.size() - 1].accumulator_empty == 1) { - for (size_t i = transcript_state.size(); i < num_rows_pow2; ++i) { + if (transcript_rows[transcript_rows.size() - 1].accumulator_empty) { + for (size_t i = transcript_rows.size(); i < dyadic_num_rows; ++i) { transcript_accumulator_empty[i] = 1; } } - run_loop_in_parallel(precompute_table_state.size(), [&](size_t start, size_t end) { + + // compute polynomials for point table columns + run_loop_in_parallel(point_table_rows.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { // first row is always an empty row (to accommodate shifted polynomials which must have 0 as 1st - // coefficient). All other rows in the precompute_table_state represent active wnaf gates (i.e. + // coefficient). All other rows in the point_table_rows represent active wnaf gates (i.e. // precompute_select = 1) precompute_select[i] = (i != 0) ? 
1 : 0; - precompute_pc[i] = precompute_table_state[i].pc; - precompute_point_transition[i] = static_cast(precompute_table_state[i].point_transition); - precompute_round[i] = precompute_table_state[i].round; - precompute_scalar_sum[i] = precompute_table_state[i].scalar_sum; - - precompute_s1hi[i] = precompute_table_state[i].s1; - precompute_s1lo[i] = precompute_table_state[i].s2; - precompute_s2hi[i] = precompute_table_state[i].s3; - precompute_s2lo[i] = precompute_table_state[i].s4; - precompute_s3hi[i] = precompute_table_state[i].s5; - precompute_s3lo[i] = precompute_table_state[i].s6; - precompute_s4hi[i] = precompute_table_state[i].s7; - precompute_s4lo[i] = precompute_table_state[i].s8; + precompute_pc[i] = point_table_rows[i].pc; + precompute_point_transition[i] = static_cast(point_table_rows[i].point_transition); + precompute_round[i] = point_table_rows[i].round; + precompute_scalar_sum[i] = point_table_rows[i].scalar_sum; + precompute_s1hi[i] = point_table_rows[i].s1; + precompute_s1lo[i] = point_table_rows[i].s2; + precompute_s2hi[i] = point_table_rows[i].s3; + precompute_s2lo[i] = point_table_rows[i].s4; + precompute_s3hi[i] = point_table_rows[i].s5; + precompute_s3lo[i] = point_table_rows[i].s6; + precompute_s4hi[i] = point_table_rows[i].s7; + precompute_s4lo[i] = point_table_rows[i].s8; // If skew is active (i.e. we need to subtract a base point from the msm result), // write `7` into rows.precompute_skew. `7`, in binary representation, equals `-1` when converted // into WNAF form - precompute_skew[i] = precompute_table_state[i].skew ? 7 : 0; - - precompute_dx[i] = precompute_table_state[i].precompute_double.x; - precompute_dy[i] = precompute_table_state[i].precompute_double.y; - precompute_tx[i] = precompute_table_state[i].precompute_accumulator.x; - precompute_ty[i] = precompute_table_state[i].precompute_accumulator.y; + precompute_skew[i] = point_table_rows[i].skew ? 
7 : 0; + precompute_dx[i] = point_table_rows[i].precompute_double.x; + precompute_dy[i] = point_table_rows[i].precompute_double.y; + precompute_tx[i] = point_table_rows[i].precompute_accumulator.x; + precompute_ty[i] = point_table_rows[i].precompute_accumulator.y; } }); - run_loop_in_parallel(msm_state.size(), [&](size_t start, size_t end) { + // compute polynomials for the msm columns + run_loop_in_parallel(msm_rows.size(), [&](size_t start, size_t end) { for (size_t i = start; i < end; i++) { - msm_transition[i] = static_cast(msm_state[i].msm_transition); - msm_add[i] = static_cast(msm_state[i].q_add); - msm_double[i] = static_cast(msm_state[i].q_double); - msm_skew[i] = static_cast(msm_state[i].q_skew); - msm_accumulator_x[i] = msm_state[i].accumulator_x; - msm_accumulator_y[i] = msm_state[i].accumulator_y; - msm_pc[i] = msm_state[i].pc; - msm_size_of_msm[i] = msm_state[i].msm_size; - msm_count[i] = msm_state[i].msm_count; - msm_round[i] = msm_state[i].msm_round; - msm_add1[i] = static_cast(msm_state[i].add_state[0].add); - msm_add2[i] = static_cast(msm_state[i].add_state[1].add); - msm_add3[i] = static_cast(msm_state[i].add_state[2].add); - msm_add4[i] = static_cast(msm_state[i].add_state[3].add); - msm_x1[i] = msm_state[i].add_state[0].point.x; - msm_y1[i] = msm_state[i].add_state[0].point.y; - msm_x2[i] = msm_state[i].add_state[1].point.x; - msm_y2[i] = msm_state[i].add_state[1].point.y; - msm_x3[i] = msm_state[i].add_state[2].point.x; - msm_y3[i] = msm_state[i].add_state[2].point.y; - msm_x4[i] = msm_state[i].add_state[3].point.x; - msm_y4[i] = msm_state[i].add_state[3].point.y; - msm_collision_x1[i] = msm_state[i].add_state[0].collision_inverse; - msm_collision_x2[i] = msm_state[i].add_state[1].collision_inverse; - msm_collision_x3[i] = msm_state[i].add_state[2].collision_inverse; - msm_collision_x4[i] = msm_state[i].add_state[3].collision_inverse; - msm_lambda1[i] = msm_state[i].add_state[0].lambda; - msm_lambda2[i] = msm_state[i].add_state[1].lambda; - 
msm_lambda3[i] = msm_state[i].add_state[2].lambda; - msm_lambda4[i] = msm_state[i].add_state[3].lambda; - msm_slice1[i] = msm_state[i].add_state[0].slice; - msm_slice2[i] = msm_state[i].add_state[1].slice; - msm_slice3[i] = msm_state[i].add_state[2].slice; - msm_slice4[i] = msm_state[i].add_state[3].slice; + msm_transition[i] = static_cast(msm_rows[i].msm_transition); + msm_add[i] = static_cast(msm_rows[i].q_add); + msm_double[i] = static_cast(msm_rows[i].q_double); + msm_skew[i] = static_cast(msm_rows[i].q_skew); + msm_accumulator_x[i] = msm_rows[i].accumulator_x; + msm_accumulator_y[i] = msm_rows[i].accumulator_y; + msm_pc[i] = msm_rows[i].pc; + msm_size_of_msm[i] = msm_rows[i].msm_size; + msm_count[i] = msm_rows[i].msm_count; + msm_round[i] = msm_rows[i].msm_round; + msm_add1[i] = static_cast(msm_rows[i].add_state[0].add); + msm_add2[i] = static_cast(msm_rows[i].add_state[1].add); + msm_add3[i] = static_cast(msm_rows[i].add_state[2].add); + msm_add4[i] = static_cast(msm_rows[i].add_state[3].add); + msm_x1[i] = msm_rows[i].add_state[0].point.x; + msm_y1[i] = msm_rows[i].add_state[0].point.y; + msm_x2[i] = msm_rows[i].add_state[1].point.x; + msm_y2[i] = msm_rows[i].add_state[1].point.y; + msm_x3[i] = msm_rows[i].add_state[2].point.x; + msm_y3[i] = msm_rows[i].add_state[2].point.y; + msm_x4[i] = msm_rows[i].add_state[3].point.x; + msm_y4[i] = msm_rows[i].add_state[3].point.y; + msm_collision_x1[i] = msm_rows[i].add_state[0].collision_inverse; + msm_collision_x2[i] = msm_rows[i].add_state[1].collision_inverse; + msm_collision_x3[i] = msm_rows[i].add_state[2].collision_inverse; + msm_collision_x4[i] = msm_rows[i].add_state[3].collision_inverse; + msm_lambda1[i] = msm_rows[i].add_state[0].lambda; + msm_lambda2[i] = msm_rows[i].add_state[1].lambda; + msm_lambda3[i] = msm_rows[i].add_state[2].lambda; + msm_lambda4[i] = msm_rows[i].add_state[3].lambda; + msm_slice1[i] = msm_rows[i].add_state[0].slice; + msm_slice2[i] = msm_rows[i].add_state[1].slice; + msm_slice3[i] = 
msm_rows[i].add_state[2].slice; + msm_slice4[i] = msm_rows[i].add_state[3].slice; } }); this->set_shifted(); diff --git a/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp index 5572bab54ee..69f4871eb91 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/msm_builder.hpp @@ -13,13 +13,15 @@ class ECCVMMSMMBuilder { using FF = curve::Grumpkin::ScalarField; using Element = typename CycleGroup::element; using AffineElement = typename CycleGroup::affine_element; + using MSM = bb::eccvm::MSM; static constexpr size_t ADDITIONS_PER_ROW = bb::eccvm::ADDITIONS_PER_ROW; - static constexpr size_t NUM_SCALAR_BITS = bb::eccvm::NUM_SCALAR_BITS; - static constexpr size_t WNAF_SLICE_BITS = bb::eccvm::WNAF_SLICE_BITS; + static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = bb::eccvm::NUM_WNAF_DIGITS_PER_SCALAR; - struct alignas(64) MSMState { + struct alignas(64) MSMRow { + // counter over all half-length scalar muls used to compute the required MSMs uint32_t pc = 0; + // the number of points that will be scaled and summed uint32_t msm_size = 0; uint32_t msm_count = 0; uint32_t msm_round = 0; @@ -43,138 +45,138 @@ class ECCVMMSMMBuilder { FF accumulator_y = 0; }; - struct alignas(64) MSMRowTranscript { - std::array lambda_numerator; - std::array lambda_denominator; - Element accumulator_in; - Element accumulator_out; - }; - - struct alignas(64) AdditionTrace { - Element p1; - Element p2; - Element p3; - bool predicate; - bool is_double; - }; - /** * @brief Computes the row values for the Straus MSM columns of the ECCVM. * * For a detailed description of the Straus algorithm and its relation to the ECCVM, please see * https://hackmd.io/@aztec-network/rJ5xhuCsn * - * @param msms - * @param point_table_read_counts - * @param total_number_of_muls - * @return std::vector + * @param msms A vector of vectors of ScalarMuls. 
+ * @param point_table_read_counts Table of read counts to be populated. + * @param total_number_of_muls A mul op in the OpQueue adds up to two muls, one for each nonzero z_i (i=1,2). + * @param num_msm_rows + * @return std::vector */ - static std::vector compute_msm_state(const std::vector>& msms, - std::array, 2>& point_table_read_counts, - const uint32_t total_number_of_muls, - const size_t num_msm_rows) + static std::tuple, std::array, 2>> compute_rows( + const std::vector& msms, const uint32_t total_number_of_muls, const size_t num_msm_rows) { - // N.B. the following comments refer to a "point lookup table" frequently. - // To perform a scalar multiplicaiton of a point [P] by a scalar x, we compute multiples of [P] and store in a - // table: specifically: -15[P], -13[P], ..., -3[P], -[P], [P], 3[P], ..., 15[P] when we define our point lookup - // table, we have 2 write columns and 4 read columns when we perform a read on a given row, we need to increment - // the read count on the respective write column by 1 we can define the following struture: 1st write column = - // positive 2nd write column = negative the row number is a function of pc and slice value row = pc_delta * - // rows_per_point_table + some function of the slice value pc_delta = total_number_of_muls - pc - // std::vector point_table_read_counts; - const size_t table_rows = static_cast(total_number_of_muls) * 8; - point_table_read_counts[0].reserve(table_rows); - point_table_read_counts[1].reserve(table_rows); - for (size_t i = 0; i < table_rows; ++i) { + // To perform a scalar multiplication of a point P by a scalar x, we precompute a table of points + // -15P, -13P, ..., -3P, -P, P, 3P, ..., 15P + // When we perform a scalar multiplication, we decompose x into base-16 wNAF digits then look these precomputed + // values up digit-by-digit. 
We record read counts in a table with the following structure: + // 1st write column = positive wNAF digits + // 2nd write column = negative wNAF digits + // the row number is a function of pc and wnaf digit: + // point_idx = total_number_of_muls - pc + // row = point_idx * rows_per_point_table + (some function of the slice value) + // + // Illustration: + // Block Structure Table structure: + // | 0 | 1 | | Block_{0} | <-- pc = total_number_of_muls + // | - | - | | Block_{1} | <-- pc = total_number_of_muls-(num muls in msm 0) + // 1 | # | # | -1 | ... | ... + // 3 | # | # | -3 | Block_{total_number_of_muls-1} | <-- pc = num muls in last msm + // 5 | # | # | -5 + // 7 | # | # | -7 + // 9 | # | # | -9 + // 11 | # | # | -11 + // 13 | # | # | -13 + // 15 | # | # | -15 + + const size_t num_rows_in_read_counts_table = + static_cast(total_number_of_muls) * (eccvm::POINT_TABLE_SIZE >> 1); + std::array, 2> point_table_read_counts; + point_table_read_counts[0].reserve(num_rows_in_read_counts_table); + point_table_read_counts[1].reserve(num_rows_in_read_counts_table); + for (size_t i = 0; i < num_rows_in_read_counts_table; ++i) { point_table_read_counts[0].emplace_back(0); point_table_read_counts[1].emplace_back(0); } - const auto update_read_counts = [&](const size_t pc, const int slice) { - // When we compute our wnaf/point tables, we start with the point with the largest pc value. - // i.e. if we are reading a slice for point with a point counter value `pc`, - // its position in the wnaf/point table (relative to other points) will be `total_number_of_muls - pc` - const size_t pc_delta = total_number_of_muls - pc; - const size_t pc_offset = pc_delta * 8; - bool slice_negative = slice < 0; - const int slice_row = (slice + 15) / 2; - - const size_t column_index = slice_negative ? 
1 : 0; + const auto update_read_count = [&point_table_read_counts](const size_t point_idx, const int slice) { /** - * When computing `point_table_read_counts`, we need the *table index* that a given point belongs to. - * the slice value is in *compressed* windowed-non-adjacent-form format: - * A non-compressed WNAF slice is in the range: `-15, -13, ..., 15` - * In compressed form, tney become `0, ..., 15` + * The wNAF digits for base 16 lie in the range -15, -13, ..., 13, 15. * The *point table* format is the following: - * (for positive point table) T[0] = P, T[1] = PT, ..., T[7] = 15P + * (for positive point table) T[0] = P, T[1] = 3P, ..., T[7] = 15P * (for negative point table) T[0] = -P, T[1] = -3P, ..., T[15] = -15P * i.e. if the slice value is negative, we can use the compressed WNAF directly as the table index - * if the slice value is positive, we must take `15 - compressedWNAF` to get the table index + * if the slice value is positive, we must take 15 - (compressed wNAF) to get the table index */ - if (slice_negative) { - point_table_read_counts[column_index][pc_offset + static_cast(slice_row)]++; + const size_t row_index_offset = point_idx * 8; + const bool digit_is_negative = slice < 0; + const auto relative_row_idx = static_cast((slice + 15) / 2); + const size_t column_index = digit_is_negative ? 1 : 0; + + if (digit_is_negative) { + point_table_read_counts[column_index][row_index_offset + relative_row_idx]++; } else { - point_table_read_counts[column_index][pc_offset + 15 - static_cast(slice_row)]++; + point_table_read_counts[column_index][row_index_offset + 15 - relative_row_idx]++; } }; // compute which row index each multiscalar multiplication will start at. - // also compute the program counter index that each multiscalar multiplication will start at. 
- // we use this information to populate the MSM row data across multiple threads - std::vector msm_row_indices; - std::vector pc_indices; - msm_row_indices.reserve(msms.size() + 1); - pc_indices.reserve(msms.size() + 1); - - msm_row_indices.push_back(1); - pc_indices.push_back(total_number_of_muls); + std::vector msm_row_counts; + msm_row_counts.reserve(msms.size() + 1); + msm_row_counts.push_back(1); + // compute the program counter (i.e. the index among all single scalar muls) that each multiscalar + // multiplication will start at. + std::vector pc_values; + pc_values.reserve(msms.size() + 1); + pc_values.push_back(total_number_of_muls); for (const auto& msm : msms) { - const size_t rows = ECCOpQueue::get_msm_row_count_for_single_msm(msm.size()); - msm_row_indices.push_back(msm_row_indices.back() + rows); - pc_indices.push_back(pc_indices.back() - msm.size()); + const size_t num_rows_required = ECCOpQueue::num_eccvm_msm_rows(msm.size()); + msm_row_counts.push_back(msm_row_counts.back() + num_rows_required); + pc_values.push_back(pc_values.back() - msm.size()); } + ASSERT(pc_values.back() == 0); - static constexpr size_t num_rounds = NUM_SCALAR_BITS / WNAF_SLICE_BITS; - std::vector msm_state(num_msm_rows); - // start with empty row (shiftable polynomials must have 0 as first coefficient) - msm_state[0] = (MSMState{}); + // compute the MSM rows + std::vector msm_rows(num_msm_rows); + // start with empty row (shiftable polynomials must have 0 as first coefficient) + msm_rows[0] = (MSMRow{}); // compute "read counts" so that we can determine the number of times entries in our log-derivative lookup // tables are called. - // Note: this part is single-threaded. THe amount of compute is low, however, so this is likely not a big + // Note: this part is single-threaded. The amount of compute is low, however, so this is likely not a big // concern. 
- for (size_t i = 0; i < msms.size(); ++i) { - - for (size_t j = 0; j < num_rounds; ++j) { - uint32_t pc = static_cast(pc_indices[i]); - const auto& msm = msms[i]; + for (size_t msm_idx = 0; msm_idx < msms.size(); ++msm_idx) { + for (size_t digit_idx = 0; digit_idx < NUM_WNAF_DIGITS_PER_SCALAR; ++digit_idx) { + auto pc = static_cast(pc_values[msm_idx]); + const auto& msm = msms[msm_idx]; const size_t msm_size = msm.size(); - const size_t rows_per_round = - (msm_size / ADDITIONS_PER_ROW) + (msm_size % ADDITIONS_PER_ROW != 0 ? 1 : 0); - - for (size_t k = 0; k < rows_per_round; ++k) { - const size_t points_per_row = - (k + 1) * ADDITIONS_PER_ROW > msm_size ? msm_size % ADDITIONS_PER_ROW : ADDITIONS_PER_ROW; - const size_t idx = k * ADDITIONS_PER_ROW; - for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) { - bool add = points_per_row > m; + const size_t num_rows_per_digit = + (msm_size / ADDITIONS_PER_ROW) + ((msm_size % ADDITIONS_PER_ROW != 0) ? 1 : 0); + + for (size_t relative_row_idx = 0; relative_row_idx < num_rows_per_digit; ++relative_row_idx) { + const size_t num_points_in_row = (relative_row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? (msm_size % ADDITIONS_PER_ROW) + : ADDITIONS_PER_ROW; + const size_t offset = relative_row_idx * ADDITIONS_PER_ROW; + for (size_t relative_point_idx = 0; relative_point_idx < ADDITIONS_PER_ROW; ++relative_point_idx) { + const size_t point_idx = offset + relative_point_idx; + const bool add = num_points_in_row > relative_point_idx; if (add) { - int slice = add ? msm[idx + m].wnaf_slices[j] : 0; - update_read_counts(pc - idx - m, slice); + int slice = msm[point_idx].wnaf_digits[digit_idx]; + // pc starts at total_number_of_muls and decreases non-uniformly to 0 + update_read_count((total_number_of_muls - pc) + point_idx, slice); } } } - if (j == num_rounds - 1) { - for (size_t k = 0; k < rows_per_round; ++k) { - const size_t points_per_row = - (k + 1) * ADDITIONS_PER_ROW > msm_size ? 
msm_size % ADDITIONS_PER_ROW : ADDITIONS_PER_ROW; - const size_t idx = k * ADDITIONS_PER_ROW; - for (size_t m = 0; m < 4; ++m) { - bool add = points_per_row > m; - + if (digit_idx == NUM_WNAF_DIGITS_PER_SCALAR - 1) { + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + const size_t num_points_in_row = (row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? (msm_size % ADDITIONS_PER_ROW) + : ADDITIONS_PER_ROW; + const size_t offset = row_idx * ADDITIONS_PER_ROW; + for (size_t relative_point_idx = 0; relative_point_idx < ADDITIONS_PER_ROW; + ++relative_point_idx) { + bool add = num_points_in_row > relative_point_idx; + const size_t point_idx = offset + relative_point_idx; if (add) { - update_read_counts(pc - idx - m, msm[idx + m].wnaf_skew ? -1 : -15); + // pc starts at total_number_of_muls and decreases non-uniformly to 0 + int slice = msm[point_idx].wnaf_skew ? -1 : -15; + update_read_count((total_number_of_muls - pc) + point_idx, slice); } } } @@ -184,80 +186,84 @@ class ECCVMMSMMBuilder { // The execution trace data for the MSM columns requires knowledge of intermediate values from *affine* point // addition. The naive solution to compute this data requires 2 field inversions per in-circuit group addition - // evaluation. This is bad! To avoid this, we split the witness computation algorithm into 3 steps. Step 1: - // compute the execution trace group operations in *projective* coordinates Step 2: use batch inversion trick to - // convert all point traces into affine coordinates Step 3: populate the full execution trace, including the - // intermediate values from affine group operations This section sets up the data structures we need to store - // all intermediate ECC operations in projective form + // evaluation. This is bad! To avoid this, we split the witness computation algorithm into 3 steps.
+ // Step 1: compute the execution trace group operations in *projective* coordinates + // Step 2: use batch inversion trick to convert all points into affine coordinates + // Step 3: populate the full execution trace, including the intermediate values from affine group operations + // This section sets up the data structures we need to store all intermediate ECC operations in projective form const size_t num_point_adds_and_doubles = (num_msm_rows - 2) * 4; const size_t num_accumulators = num_msm_rows - 1; - const size_t num_points_in_trace = (num_point_adds_and_doubles * 3) + num_accumulators; + // In what follows, either p1 + p2 = p3, or p1.dbl() = p3 // We create 1 vector to store the entire point trace. We split into multiple containers using std::span // (we want 1 vector object to more efficiently batch normalize points) - std::vector point_trace(num_points_in_trace); - // the point traces record group operations. Either p1 + p2 = p3, or p1.dbl() = p3 - std::span p1_trace(&point_trace[0], num_point_adds_and_doubles); - std::span p2_trace(&point_trace[num_point_adds_and_doubles], num_point_adds_and_doubles); - std::span p3_trace(&point_trace[num_point_adds_and_doubles * 2], num_point_adds_and_doubles); + static constexpr size_t NUM_POINTS_IN_ADDITION_RELATION = 3; + const size_t num_points_to_normalize = + (num_point_adds_and_doubles * NUM_POINTS_IN_ADDITION_RELATION) + num_accumulators; + std::vector points_to_normalize(num_points_to_normalize); + std::span p1_trace(&points_to_normalize[0], num_point_adds_and_doubles); + std::span p2_trace(&points_to_normalize[num_point_adds_and_doubles], num_point_adds_and_doubles); + std::span p3_trace(&points_to_normalize[num_point_adds_and_doubles * 2], num_point_adds_and_doubles); // operation_trace records whether an entry in the p1/p2/p3 trace represents a point addition or doubling std::vector operation_trace(num_point_adds_and_doubles); // accumulator_trace tracks the value of the ECCVM accumulator for each row -
std::span accumulator_trace(&point_trace[num_point_adds_and_doubles * 3], num_accumulators); + std::span accumulator_trace(&points_to_normalize[num_point_adds_and_doubles * 3], num_accumulators); // we start the accumulator at the point at infinity accumulator_trace[0] = (CycleGroup::affine_point_at_infinity); // TODO(https://github.com/AztecProtocol/barretenberg/issues/973): Reinstate multitreading? - // populate point trace data, and the components of the MSM execution trace that do not relate to affine point + // populate point trace, and the components of the MSM execution trace that do not relate to affine point // operations - for (size_t i = 0; i < msms.size(); i++) { + for (size_t msm_idx = 0; msm_idx < msms.size(); msm_idx++) { Element accumulator = CycleGroup::affine_point_at_infinity; - const auto& msm = msms[i]; - size_t msm_row_index = msm_row_indices[i]; + const auto& msm = msms[msm_idx]; + size_t msm_row_index = msm_row_counts[msm_idx]; const size_t msm_size = msm.size(); - const size_t rows_per_round = (msm_size / ADDITIONS_PER_ROW) + (msm_size % ADDITIONS_PER_ROW != 0 ? 1 : 0); - size_t trace_index = (msm_row_indices[i] - 1) * 4; - - for (size_t j = 0; j < num_rounds; ++j) { - const uint32_t pc = static_cast(pc_indices[i]); - - for (size_t k = 0; k < rows_per_round; ++k) { - const size_t points_per_row = - (k + 1) * ADDITIONS_PER_ROW > msm_size ? msm_size % ADDITIONS_PER_ROW : ADDITIONS_PER_ROW; - auto& row = msm_state[msm_row_index]; - const size_t idx = k * ADDITIONS_PER_ROW; - row.msm_transition = (j == 0) && (k == 0); - for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) { - - auto& add_state = row.add_state[m]; - add_state.add = points_per_row > m; - int slice = add_state.add ? msm[idx + m].wnaf_slices[j] : 0; + const size_t num_rows_per_digit = + (msm_size / ADDITIONS_PER_ROW) + ((msm_size % ADDITIONS_PER_ROW != 0) ? 
1 : 0); + size_t trace_index = (msm_row_counts[msm_idx] - 1) * 4; + + for (size_t digit_idx = 0; digit_idx < NUM_WNAF_DIGITS_PER_SCALAR; ++digit_idx) { + const auto pc = static_cast(pc_values[msm_idx]); + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + const size_t num_points_in_row = (row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? (msm_size % ADDITIONS_PER_ROW) + : ADDITIONS_PER_ROW; + auto& row = msm_rows[msm_row_index]; + const size_t offset = row_idx * ADDITIONS_PER_ROW; + row.msm_transition = (digit_idx == 0) && (row_idx == 0); + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + + auto& add_state = row.add_state[point_idx]; + add_state.add = num_points_in_row > point_idx; + int slice = add_state.add ? msm[offset + point_idx].wnaf_digits[digit_idx] : 0; // In the MSM columns in the ECCVM circuit, we can add up to 4 points per row. - // if `row.add_state[m].add = 1`, this indicates that we want to add the `m`'th point in - // the MSM columns into the MSM accumulator `add_state.slice` = A 4-bit WNAF slice of - // the scalar multiplier associated with the point we are adding (the specific slice - // chosen depends on the value of msm_round) (WNAF = windowed-non-adjacent-form. Value - // range is `-15, -13, + // if `row.add_state[point_idx].add = 1`, this indicates that we want to add the + // `point_idx`'th point in the MSM columns into the MSM accumulator `add_state.slice` = A + // 4-bit WNAF slice of the scalar multiplier associated with the point we are adding (the + // specific slice chosen depends on the value of msm_round) (WNAF = + // windowed-non-adjacent-form. Value range is `-15, -13, // ..., 15`) If `add_state.add = 1`, we want `add_state.slice` to be the *compressed* // form of the WNAF slice value. (compressed = no gaps in the value range. i.e. -15, // -13, ..., 15 maps to 0, ... , 15) add_state.slice = add_state.add ? (slice + 15) / 2 : 0; - add_state.point = add_state.add - ? 
msm[idx + m].precomputed_table[static_cast(add_state.slice)] - : AffineElement{ 0, 0 }; + add_state.point = + add_state.add + ? msm[offset + point_idx].precomputed_table[static_cast(add_state.slice)] + : AffineElement{ 0, 0 }; // predicate logic: // add_predicate should normally equal add_state.add - // However! if j == 0 AND k == 0 AND m == 0 this implies we are examing the 1st point - // addition of a new MSM In this case, we do NOT add the 1st point into the accumulator, - // instead we SET the accumulator to equal the 1st point. add_predicate is used to - // determine whether we add the output of a point addition into the accumulator, - // therefore if j == 0 AND k == 0 AND m == 0, add_predicate = 0 even if add_state.add = - // true - bool add_predicate = (m == 0 ? (j != 0 || k != 0) : add_state.add); + // However! if digit_idx == 0 AND row_idx == 0 AND point_idx == 0 this implies we are + // examining the 1st point addition of a new MSM. In this case, we do NOT add the 1st point + // into the accumulator, instead we SET the accumulator to equal the 1st point. + // add_predicate is used to determine whether we add the output of a point addition into the + // accumulator, therefore if digit_idx == 0 AND row_idx == 0 AND point_idx == 0, + // add_predicate = 0 even if add_state.add = true + bool add_predicate = (point_idx == 0 ? (digit_idx != 0 || row_idx != 0) : add_state.add); - Element p1 = (m == 0) ? Element(add_state.point) : accumulator; - Element p2 = (m == 0) ? accumulator : Element(add_state.point); + Element p1 = (point_idx == 0) ? Element(add_state.point) : accumulator; + Element p2 = (point_idx == 0) ? accumulator : Element(add_state.point); accumulator = add_predicate ?
(accumulator + add_state.point) : Element(p1); p1_trace[trace_index] = p1; @@ -270,25 +276,24 @@ class ECCVMMSMMBuilder { row.q_add = true; row.q_double = false; row.q_skew = false; - row.msm_round = static_cast(j); + row.msm_round = static_cast(digit_idx); row.msm_size = static_cast(msm_size); - row.msm_count = static_cast(idx); + row.msm_count = static_cast(offset); row.pc = pc; msm_row_index++; } // doubling - if (j < num_rounds - 1) { - auto& row = msm_state[msm_row_index]; + if (digit_idx < NUM_WNAF_DIGITS_PER_SCALAR - 1) { + auto& row = msm_rows[msm_row_index]; row.msm_transition = false; - row.msm_round = static_cast(j + 1); + row.msm_round = static_cast(digit_idx + 1); row.msm_size = static_cast(msm_size); row.msm_count = static_cast(0); row.q_add = false; row.q_double = true; row.q_skew = false; - for (size_t m = 0; m < 4; ++m) { - - auto& add_state = row.add_state[m]; + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; add_state.add = false; add_state.slice = 0; add_state.point = { 0, 0 }; @@ -304,25 +309,25 @@ class ECCVMMSMMBuilder { accumulator_trace[msm_row_index] = accumulator; msm_row_index++; } else { - for (size_t k = 0; k < rows_per_round; ++k) { - auto& row = msm_state[msm_row_index]; + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + auto& row = msm_rows[msm_row_index]; - const size_t points_per_row = - (k + 1) * ADDITIONS_PER_ROW > msm_size ? msm_size % ADDITIONS_PER_ROW : ADDITIONS_PER_ROW; - const size_t idx = k * ADDITIONS_PER_ROW; + const size_t num_points_in_row = (row_idx + 1) * ADDITIONS_PER_ROW > msm_size + ? msm_size % ADDITIONS_PER_ROW + : ADDITIONS_PER_ROW; + const size_t offset = row_idx * ADDITIONS_PER_ROW; row.msm_transition = false; - Element acc_expected = accumulator; - - for (size_t m = 0; m < 4; ++m) { - auto& add_state = row.add_state[m]; - add_state.add = points_per_row > m; - add_state.slice = add_state.add ? msm[idx + m].wnaf_skew ? 
7 : 0 : 0; - - add_state.point = add_state.add - ? msm[idx + m].precomputed_table[static_cast(add_state.slice)] - : AffineElement{ 0, 0 }; - bool add_predicate = add_state.add ? msm[idx + m].wnaf_skew : false; + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; + add_state.add = num_points_in_row > point_idx; + add_state.slice = add_state.add ? msm[offset + point_idx].wnaf_skew ? 7 : 0 : 0; + + add_state.point = + add_state.add + ? msm[offset + point_idx].precomputed_table[static_cast(add_state.slice)] + : AffineElement{ 0, 0 }; + bool add_predicate = add_state.add ? msm[offset + point_idx].wnaf_skew : false; auto p1 = accumulator; accumulator = add_predicate ? accumulator + add_state.point : accumulator; p1_trace[trace_index] = p1; @@ -334,9 +339,9 @@ class ECCVMMSMMBuilder { row.q_add = false; row.q_double = false; row.q_skew = true; - row.msm_round = static_cast(j + 1); + row.msm_round = static_cast(digit_idx + 1); row.msm_size = static_cast(msm_size); - row.msm_count = static_cast(idx); + row.msm_count = static_cast(offset); row.pc = pc; accumulator_trace[msm_row_index] = accumulator; msm_row_index++; @@ -346,18 +351,18 @@ class ECCVMMSMMBuilder { } // Normalize the points in the point trace - run_loop_in_parallel(point_trace.size(), [&](size_t start, size_t end) { - Element::batch_normalize(&point_trace[start], end - start); + run_loop_in_parallel(points_to_normalize.size(), [&](size_t start, size_t end) { + Element::batch_normalize(&points_to_normalize[start], end - start); }); // inverse_trace is used to compute the value of the `collision_inverse` column in the ECCVM. 
std::vector inverse_trace(num_point_adds_and_doubles); run_loop_in_parallel(num_point_adds_and_doubles, [&](size_t start, size_t end) { - for (size_t i = start; i < end; ++i) { - if (operation_trace[i]) { - inverse_trace[i] = (p1_trace[i].y + p1_trace[i].y); + for (size_t operation_idx = start; operation_idx < end; ++operation_idx) { + if (operation_trace[operation_idx]) { + inverse_trace[operation_idx] = (p1_trace[operation_idx].y + p1_trace[operation_idx].y); } else { - inverse_trace[i] = (p2_trace[i].x - p1_trace[i].x); + inverse_trace[operation_idx] = (p2_trace[operation_idx].x - p1_trace[operation_idx].x); } } FF::batch_invert(&inverse_trace[start], end - start); @@ -366,28 +371,29 @@ class ECCVMMSMMBuilder { // complete the computation of the ECCVM execution trace, by adding the affine intermediate point data // i.e. row.accumulator_x, row.accumulator_y, row.add_state[0...3].collision_inverse, // row.add_state[0...3].lambda - for (size_t i = 0; i < msms.size(); i++) { - const auto& msm = msms[i]; - size_t trace_index = ((msm_row_indices[i] - 1) * ADDITIONS_PER_ROW); - size_t msm_row_index = msm_row_indices[i]; + for (size_t msm_idx = 0; msm_idx < msms.size(); msm_idx++) { + const auto& msm = msms[msm_idx]; + size_t trace_index = ((msm_row_counts[msm_idx] - 1) * ADDITIONS_PER_ROW); + size_t msm_row_index = msm_row_counts[msm_idx]; // 1st MSM row will have accumulator equal to the previous MSM output // (or point at infinity for 1st MSM) - size_t accumulator_index = msm_row_indices[i] - 1; + size_t accumulator_index = msm_row_counts[msm_idx] - 1; const size_t msm_size = msm.size(); - const size_t rows_per_round = (msm_size / ADDITIONS_PER_ROW) + (msm_size % ADDITIONS_PER_ROW != 0 ? 1 : 0); + const size_t num_rows_per_digit = + (msm_size / ADDITIONS_PER_ROW) + ((msm_size % ADDITIONS_PER_ROW != 0) ? 
1 : 0); - for (size_t j = 0; j < num_rounds; ++j) { - for (size_t k = 0; k < rows_per_round; ++k) { - auto& row = msm_state[msm_row_index]; + for (size_t digit_idx = 0; digit_idx < NUM_WNAF_DIGITS_PER_SCALAR; ++digit_idx) { + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + auto& row = msm_rows[msm_row_index]; const Element& normalized_accumulator = accumulator_trace[accumulator_index]; const FF& acc_x = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.x; const FF& acc_y = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.y; row.accumulator_x = acc_x; row.accumulator_y = acc_y; - for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) { - auto& add_state = row.add_state[m]; - bool add_predicate = (m == 0 ? (j != 0 || k != 0) : add_state.add); + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; + bool add_predicate = (point_idx == 0 ? (digit_idx != 0 || row_idx != 0) : add_state.add); const auto& inverse = inverse_trace[trace_index]; const auto& p1 = p1_trace[trace_index]; @@ -400,16 +406,15 @@ class ECCVMMSMMBuilder { msm_row_index++; } - if (j < num_rounds - 1) { - MSMState& row = msm_state[msm_row_index]; + if (digit_idx < NUM_WNAF_DIGITS_PER_SCALAR - 1) { + MSMRow& row = msm_rows[msm_row_index]; const Element& normalized_accumulator = accumulator_trace[accumulator_index]; const FF& acc_x = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.x; const FF& acc_y = normalized_accumulator.is_point_at_infinity() ? 
0 : normalized_accumulator.y; row.accumulator_x = acc_x; row.accumulator_y = acc_y; - - for (size_t m = 0; m < 4; ++m) { - auto& add_state = row.add_state[m]; + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; add_state.collision_inverse = 0; const FF& dx = p1_trace[trace_index].x; const FF& inverse = inverse_trace[trace_index]; @@ -419,20 +424,17 @@ class ECCVMMSMMBuilder { accumulator_index++; msm_row_index++; } else { - for (size_t k = 0; k < rows_per_round; ++k) { - MSMState& row = msm_state[msm_row_index]; + for (size_t row_idx = 0; row_idx < num_rows_per_digit; ++row_idx) { + MSMRow& row = msm_rows[msm_row_index]; const Element& normalized_accumulator = accumulator_trace[accumulator_index]; - - const size_t idx = k * ADDITIONS_PER_ROW; - + const size_t offset = row_idx * ADDITIONS_PER_ROW; const FF& acc_x = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.x; const FF& acc_y = normalized_accumulator.is_point_at_infinity() ? 0 : normalized_accumulator.y; row.accumulator_x = acc_x; row.accumulator_y = acc_y; - - for (size_t m = 0; m < ADDITIONS_PER_ROW; ++m) { - auto& add_state = row.add_state[m]; - bool add_predicate = add_state.add ? msm[idx + m].wnaf_skew : false; + for (size_t point_idx = 0; point_idx < ADDITIONS_PER_ROW; ++point_idx) { + auto& add_state = row.add_state[point_idx]; + bool add_predicate = add_state.add ? 
msm[offset + point_idx].wnaf_skew : false; const auto& inverse = inverse_trace[trace_index]; const auto& p1 = p1_trace[trace_index]; @@ -452,8 +454,8 @@ class ECCVMMSMMBuilder { // we always require 1 extra row at the end of the trace, because the accumulator x/y coordinates for row `i` // are present at row `i+1` Element final_accumulator(accumulator_trace.back()); - MSMState& final_row = msm_state.back(); - final_row.pc = static_cast(pc_indices.back()); + MSMRow& final_row = msm_rows.back(); + final_row.pc = static_cast(pc_values.back()); final_row.msm_transition = true; final_row.accumulator_x = final_accumulator.is_point_at_infinity() ? 0 : final_accumulator.x; final_row.accumulator_y = final_accumulator.is_point_at_infinity() ? 0 : final_accumulator.y; @@ -462,12 +464,12 @@ class ECCVMMSMMBuilder { final_row.q_add = false; final_row.q_double = false; final_row.q_skew = false; - final_row.add_state = { typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, - typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, - typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, - typename MSMState::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 } }; + final_row.add_state = { typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, + typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, + typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 }, + typename MSMRow::AddState{ false, 0, AffineElement{ 0, 0 }, 0, 0 } }; - return msm_state; + return { msm_rows, point_table_read_counts }; } }; } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp index ed77be8f6a6..c98e1d56b8b 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/precomputed_tables_builder.hpp @@ -4,18 +4,18 @@ namespace bb { -class 
ECCVMPrecomputedTablesBuilder { +class ECCVMPointTablePrecomputationBuilder { public: using CycleGroup = bb::g1; using FF = grumpkin::fr; using Element = typename CycleGroup::element; using AffineElement = typename CycleGroup::affine_element; - static constexpr size_t NUM_WNAF_SLICES = bb::eccvm::NUM_WNAF_SLICES; - static constexpr size_t WNAF_SLICES_PER_ROW = bb::eccvm::WNAF_SLICES_PER_ROW; - static constexpr size_t WNAF_SLICE_BITS = bb::eccvm::WNAF_SLICE_BITS; + static constexpr size_t NUM_WNAF_DIGITS_PER_SCALAR = bb::eccvm::NUM_WNAF_DIGITS_PER_SCALAR; + static constexpr size_t WNAF_DIGITS_PER_ROW = bb::eccvm::WNAF_DIGITS_PER_ROW; + static constexpr size_t NUM_WNAF_DIGIT_BITS = bb::eccvm::NUM_WNAF_DIGIT_BITS; - struct PrecomputeState { + struct PointTablePrecoputationRow { int s1 = 0; int s2 = 0; int s3 = 0; @@ -33,31 +33,31 @@ class ECCVMPrecomputedTablesBuilder { AffineElement precompute_double{ 0, 0 }; }; - static std::vector compute_precompute_state( + static std::vector compute_rows( const std::vector>& ecc_muls) { - static constexpr size_t num_rows_per_scalar = NUM_WNAF_SLICES / WNAF_SLICES_PER_ROW; + static constexpr size_t num_rows_per_scalar = NUM_WNAF_DIGITS_PER_SCALAR / WNAF_DIGITS_PER_ROW; const size_t num_precompute_rows = num_rows_per_scalar * ecc_muls.size() + 1; - std::vector precompute_state(num_precompute_rows); + std::vector precompute_state(num_precompute_rows); // start with empty row (shiftable polynomials must have 0 as first coefficient) - precompute_state[0] = PrecomputeState{}; + precompute_state[0] = PointTablePrecoputationRow{}; // current impl doesn't work if not 4 - static_assert(WNAF_SLICES_PER_ROW == 4); + static_assert(WNAF_DIGITS_PER_ROW == 4); run_loop_in_parallel(ecc_muls.size(), [&](size_t start, size_t end) { for (size_t j = start; j < end; j++) { const auto& entry = ecc_muls[j]; - const auto& slices = entry.wnaf_slices; + const auto& slices = entry.wnaf_digits; uint256_t scalar_sum = 0; for (size_t i = 0; i < 
num_rows_per_scalar; ++i) { - PrecomputeState row; - const int slice0 = slices[i * WNAF_SLICES_PER_ROW]; - const int slice1 = slices[i * WNAF_SLICES_PER_ROW + 1]; - const int slice2 = slices[i * WNAF_SLICES_PER_ROW + 2]; - const int slice3 = slices[i * WNAF_SLICES_PER_ROW + 3]; + PointTablePrecoputationRow row; + const int slice0 = slices[i * WNAF_DIGITS_PER_ROW]; + const int slice1 = slices[i * WNAF_DIGITS_PER_ROW + 1]; + const int slice2 = slices[i * WNAF_DIGITS_PER_ROW + 2]; + const int slice3 = slices[i * WNAF_DIGITS_PER_ROW + 3]; const int slice0base2 = (slice0 + 15) / 2; const int slice1base2 = (slice1 + 15) / 2; @@ -85,7 +85,7 @@ class ECCVMPrecomputedTablesBuilder { bool chunk_negative = row_chunk < 0; - scalar_sum = scalar_sum << (WNAF_SLICE_BITS * WNAF_SLICES_PER_ROW); + scalar_sum = scalar_sum << (NUM_WNAF_DIGIT_BITS * WNAF_DIGITS_PER_ROW); if (chunk_negative) { scalar_sum -= static_cast(-row_chunk); } else { diff --git a/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp b/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp index 106d83b5d4b..b3d93d3d1f8 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/transcript_builder.hpp @@ -11,7 +11,7 @@ class ECCVMTranscriptBuilder { using Element = typename CycleGroup::element; using AffineElement = typename CycleGroup::affine_element; - struct TranscriptState { + struct TranscriptRow { bool accumulator_empty = false; bool q_add = false; bool q_mul = false; @@ -57,12 +57,12 @@ class ECCVMTranscriptBuilder { return res; } }; - static std::vector compute_transcript_state( - const std::vector>& vm_operations, const uint32_t total_number_of_muls) + static std::vector compute_rows(const std::vector>& vm_operations, + const uint32_t total_number_of_muls) { const size_t num_transcript_entries = vm_operations.size() + 2; - std::vector transcript_state(num_transcript_entries); + std::vector 
transcript_state(num_transcript_entries); std::vector inverse_trace(num_transcript_entries - 2); VMState state{ .pc = total_number_of_muls, @@ -73,9 +73,9 @@ class ECCVMTranscriptBuilder { }; VMState updated_state; // add an empty row. 1st row all zeroes because of our shiftable polynomials - transcript_state[0] = (TranscriptState{}); + transcript_state[0] = (TranscriptRow{}); for (size_t i = 0; i < vm_operations.size(); ++i) { - TranscriptState& row = transcript_state[i + 1]; + TranscriptRow& row = transcript_state[i + 1]; const bb::eccvm::VMOperation& entry = vm_operations[i]; const bool is_mul = entry.mul; @@ -180,7 +180,7 @@ class ECCVMTranscriptBuilder { for (size_t i = 0; i < inverse_trace.size(); ++i) { transcript_state[i + 1].collision_check = inverse_trace[i]; } - TranscriptState& final_row = transcript_state.back(); + TranscriptRow& final_row = transcript_state.back(); final_row.pc = updated_state.pc; final_row.accumulator_x = (updated_state.accumulator.is_point_at_infinity()) ? 0 : updated_state.accumulator.x; final_row.accumulator_y = (updated_state.accumulator.is_point_at_infinity()) ? 0 : updated_state.accumulator.y; diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp index 4ef2ef12ef8..c3f04728cd3 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp @@ -261,18 +261,19 @@ class ECCOpQueue { } /** - * @brief Get the number of rows in the 'msm' column section o the ECCVM, associated with a single multiscalar mul + * @brief Get the number of rows in the 'msm' column section of the ECCVM associated with a single multiscalar + * multiplication. 
* - * @param msm_count + * @param msm_size * @return uint32_t */ - static uint32_t get_msm_row_count_for_single_msm(const size_t msm_count) + static uint32_t num_eccvm_msm_rows(const size_t msm_size) { - const size_t rows_per_round = - (msm_count / eccvm::ADDITIONS_PER_ROW) + (msm_count % eccvm::ADDITIONS_PER_ROW != 0 ? 1 : 0); - constexpr size_t num_rounds = eccvm::NUM_SCALAR_BITS / eccvm::WNAF_SLICE_BITS; - const size_t num_rows_for_all_rounds = (num_rounds + 1) * rows_per_round; // + 1 round for skew - const size_t num_double_rounds = num_rounds - 1; + const size_t rows_per_wnaf_digit = + (msm_size / eccvm::ADDITIONS_PER_ROW) + ((msm_size % eccvm::ADDITIONS_PER_ROW != 0) ? 1 : 0); + const size_t num_rows_for_all_rounds = + (eccvm::NUM_WNAF_DIGITS_PER_SCALAR + 1) * rows_per_wnaf_digit; // + 1 round for skew + const size_t num_double_rounds = eccvm::NUM_WNAF_DIGITS_PER_SCALAR - 1; const size_t num_rows_for_msm = num_rows_for_all_rounds + num_double_rounds; return static_cast(num_rows_for_msm); @@ -287,7 +288,7 @@ class ECCOpQueue { { size_t msm_rows = num_msm_rows + 2; if (cached_active_msm_count > 0) { - msm_rows += get_msm_row_count_for_single_msm(cached_active_msm_count); + msm_rows += num_eccvm_msm_rows(cached_active_msm_count); } return msm_rows; } @@ -305,7 +306,7 @@ class ECCOpQueue { // add 1 row to start of precompute table section size_t precompute_rows = num_precompute_table_rows + 1; if (cached_active_msm_count > 0) { - msm_rows += get_msm_row_count_for_single_msm(cached_active_msm_count); + msm_rows += num_eccvm_msm_rows(cached_active_msm_count); precompute_rows += get_precompute_table_row_count_for_single_msm(cached_active_msm_count); } @@ -323,7 +324,7 @@ class ECCOpQueue { accumulator = accumulator + to_add; // Construct and store the operation in the ultra op format - auto ultra_op = construct_and_populate_ultra_ops(ADD_ACCUM, to_add); + UltraOp ultra_op = construct_and_populate_ultra_ops(ADD_ACCUM, to_add); // Store the raw operation 
raw_ops.emplace_back(ECCVMOperation{ @@ -353,7 +354,7 @@ class ECCOpQueue { accumulator = accumulator + to_mul * scalar; // Construct and store the operation in the ultra op format - auto ultra_op = construct_and_populate_ultra_ops(MUL_ACCUM, to_mul, scalar); + UltraOp ultra_op = construct_and_populate_ultra_ops(MUL_ACCUM, to_mul, scalar); // Store the raw operation raw_ops.emplace_back(ECCVMOperation{ @@ -383,7 +384,7 @@ class ECCOpQueue { accumulator.self_set_infinity(); // Construct and store the operation in the ultra op format - auto ultra_op = construct_and_populate_ultra_ops(EQUALITY, expected); + UltraOp ultra_op = construct_and_populate_ultra_ops(EQUALITY, expected); // Store raw operation raw_ops.emplace_back(ECCVMOperation{ @@ -404,7 +405,9 @@ class ECCOpQueue { private: /** - * @brief when inserting operations, update the number of multiplications in the latest scalar mul + * @brief Update cached_active_msm_count or update other row counts and reset cached_active_msm_count. + * @details To the OpQueue, an MSM is a sequence of successive mul opcodes (note that mul might better be called + * mul_add--its effect on the accumulator is += scalar * point). 
* * @param op */ @@ -418,7 +421,7 @@ class ECCOpQueue { cached_active_msm_count++; } } else if (cached_active_msm_count != 0) { - num_msm_rows += get_msm_row_count_for_single_msm(cached_active_msm_count); + num_msm_rows += num_eccvm_msm_rows(cached_active_msm_count); num_precompute_table_rows += get_precompute_table_row_count_for_single_msm(cached_active_msm_count); cached_num_muls += cached_active_msm_count; cached_active_msm_count = 0; @@ -433,7 +436,8 @@ class ECCOpQueue { */ static uint32_t get_precompute_table_row_count_for_single_msm(const size_t msm_count) { - constexpr size_t num_precompute_rows_per_scalar = eccvm::NUM_WNAF_SLICES / eccvm::WNAF_SLICES_PER_ROW; + constexpr size_t num_precompute_rows_per_scalar = + eccvm::NUM_WNAF_DIGITS_PER_SCALAR / eccvm::WNAF_DIGITS_PER_ROW; const size_t num_rows_for_precompute_table = msm_count * num_precompute_rows_per_scalar; return static_cast(num_rows_for_precompute_table); } From f1195eaf6e213798644e5b3b12ce0611a675e210 Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 8 May 2024 13:58:12 -0400 Subject: [PATCH 02/43] hotfix(ci): concurrency --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 237feef58f2..d696c0d41bf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: with: runner_label: ${{ inputs.username || github.actor }}-x86 ebs_cache_size_gb: 256 - runner_concurrency: 20 + runner_concurrency: 50 subaction: ${{ inputs.runner_action || 'start' }} # Use instance types with low interruption rate in us-east-2 https://aws.amazon.com/ec2/spot/instance-advisor/ ec2_instance_type: r6in.32xlarge r6a.32xlarge i4i.32xlarge r7iz.32xlarge From 1ca0d28d4931e7461bcb00ef77d412b9ade02630 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Wed, 8 May 2024 15:45:39 -0300 Subject: [PATCH 03/43] docs: call types (#5472) Explanation of the different kinds of calls on Ethereum 
and the different ways to interact with contracts (from another contract, from a client), to eventually compare with how Aztec works. --- docs/docs/misc/glossary/call_types.md | 177 ++++++++++++++++++ .../misc/{glossary.md => glossary/main.md} | 0 docs/sidebars.js | 10 +- .../app_subscription_contract/src/main.nr | 2 + .../contracts/auth_contract/src/main.nr | 2 + .../contracts/fpc_contract/src/main.nr | 2 + .../contracts/lending_contract/src/main.nr | 2 + .../end-to-end/src/e2e_auth_contract.test.ts | 8 + .../end-to-end/src/e2e_card_game.test.ts | 2 + 9 files changed, 204 insertions(+), 1 deletion(-) create mode 100644 docs/docs/misc/glossary/call_types.md rename docs/docs/misc/{glossary.md => glossary/main.md} (100%) diff --git a/docs/docs/misc/glossary/call_types.md b/docs/docs/misc/glossary/call_types.md new file mode 100644 index 00000000000..3de6d61d834 --- /dev/null +++ b/docs/docs/misc/glossary/call_types.md @@ -0,0 +1,177 @@ +--- +title: Call Types +--- + +# Understanding Call Types + +## What is a Call + +We say that a smart contract is called when one of its functions is invoked and its code is run. This means there'll be: + +- a caller +- arguments +- return values +- a call status (successful or failed) + +There are multiple types of calls, and some of the naming can make things **very** confusing. This page lists the different call types and execution modes, pointing out key differences between them. + +## Ethereum Call Types + +Even though we're discussing Aztec, its design is heavily influenced by Ethereum and many of the APIs and concepts are quite similar. It is therefore worthwhile to briefly review how things work there and what naming conventions are used to provide context to the Aztec-specific concepts. + +Broadly speaking, Ethereum contracts can be thought of as executing as a result of three different things: running certain EVM opcodes, running Solidity code (which compiles to EVM opcodes), or via the node JSON-RPC interface (e.g.
when executing transactions). + +### EVM + +Certain opcodes allow contracts to make calls to other contracts, each with different semantics. We're particularly interested in `CALL` and `STATICCALL`, and how those relate to contract programming languages and client APIs. + +#### `CALL` + +This is the most common and basic type of call. It grants execution control to the caller until it eventually returns. No special semantics are in play here. Most Ethereum transactions spend the majority of their time in `CALL` contexts. + +#### `STATICCALL` + +This behaves almost exactly the same as `CALL`, with one key difference: any state-changing operations are forbidden and will immediately cause the call to fail. This includes writing to storage, emitting logs, or deploying new contracts. This call is used to query state on an external contract, e.g. to get data from a price oracle, check for access control permissions, etc. + +#### Others + +The `CREATE` and `CREATE2` opcodes (for contract deployment) also result in something similar to a `CALL` context, but all that's special about them has to do with how deployments work. `DELEGATECALL` (and `CALLCODE`) are somewhat complicated to understand but don't have any Aztec equivalents, so they are not worth covering. + +### Solidity + +Solidity (and other contract programming languages such as Vyper) compile down to EVM opcodes, but it is useful to understand how they map language concepts to the different call types. + +#### Mutating External Functions + +These are functions marked `payable` (which can receive ETH, which is a state change) or with no mutability declaration (sometimes called `nonpayable`). When one of these functions is called on a contract, the `CALL` opcode is emitted, meaning the callee can perform state changes, make further `CALL`s, etc. + +It is also possible to call such a function with `STATICCALL` manually (e.g. using assembly), but the execution will fail as soon as a state-changing opcode is executed. 
+ +#### `view` + +An external function marked `view` will not be able to mutate state (write to storage, etc.), it can only _view_ the state. Solidity will emit the `STATICCALL` opcode when calling these functions, since its restrictions provide added safety to the caller (e.g. no risk of reentrancy). + +Note that it is entirely possible to use `CALL` to call a `view` function, and the result will be the exact same as if `STATICCALL` had been used. The reason why `STATICCALL` exists is so that _untrusted or unknown_ contracts can be called while still being able to reason about correctness. From the [EIP](https://eips.ethereum.org/EIPS/eip-214): + +> '`STATICCALL` adds a way to call other contracts and restrict what they can do in the simplest way. It can be safely assumed that the state of all accounts is the same before and after a static call.' + +### JSON-RPC + +From outside the EVM, calls to contracts are made via [JSON-RPC](https://ethereum.org/en/developers/docs/apis/json-rpc/) methods, typically from some client library that is aware of contract ABIs, such as [ethers.js](https://docs.ethers.org/v5) or [viem](https://viem.sh/). + +#### `eth_sendTransaction` + +This method is how transactions are sent to a node to get them to be broadcast and eventually included in a block. The specified `to` address will be called in a `CALL` context, with some notable properties: + +- there are no return values, even if the contract function invoked does return some data +- there is no explicit caller: it is instead derived from a provided signature + +Some client libraries choose to automatically issue `eth_sendTransaction` when calling functions from a contract ABI that are not marked as `view` - [ethers is a good example](https://docs.ethers.org/v5/getting-started/#getting-started--writing). Notably, this means that any return value is lost and not available to the calling client - the library typically returns a transaction receipt instead. 
If the return value is required, then the only option is to simulate the call via `eth_call`. + +Note that it is possible to call non state-changing functions (i.e. `view`) with `eth_sendTransaction` - this is always meaningless. What transactions do is change the blockchain state, so all calling such a function achieves is for the caller to lose funds by paying for gas fees. The sole purpose of a `view` function is to return data, and `eth_sendTransaction` does not make the return value available. + +#### `eth_call` + +This method is the largest culprit of confusion around calls, but unfortunately requires understanding of all previous concepts in order to be explained. Its name is also quite unhelpful. + +What `eth_call` does is simulate a transaction (a call to a contract) given the current blockchain state. The behavior will be the exact same as `eth_sendTransaction`, except: + +- no actual transaction will be created +- while gas _will_ be measured, there'll be no transaction fees of any kind +- no signature is required: the `from` address is passed directly, and can be set to any value (even if the private key is unknown, or if they are contract addresses!) +- the return value of the called contract is available + +`eth_call` is typically used for one of the following: + +- query blockchain data, e.g. read token balances +- preview the state changes produced by a transaction, e.g. the transaction cost, token balance changes, etc + +Because some libraries ([such as ethers](https://docs.ethers.org/v5/getting-started/#getting-started--reading)) automatically use `eth_call` for `view` functions (which when called via Solidity result in the `STATICCALL` opcode), these concepts can be hard to tell apart. 
The following bears repeating: **an `eth_call`'s call context is the same as `eth_sendTransaction`, and it is a `CALL` context, not `STATICCALL`.** + +## Aztec Call Types + +Large parts of the Aztec Network's design are still not finalized, and the nitty-gritty of contract calls is no exception. This section won't therefore contain a thorough review of these, but rather list some of the main ways contracts can currently be interacted with, with analogies to Ethereum call types when applicable. + +While Ethereum contracts are defined by bytecode that runs on the EVM, Aztec contracts have multiple modes of execution depending on the function that is invoked. + +### Private Execution + +Contract functions marked with `#[aztec(private)]` can only be called privately, and as such 'run' in the user's device. Since they're circuits, their 'execution' is actually the generation of a zk-SNARK proof that'll later be sent to the sequencer for verification. + +#### Private Calls + +Private functions from other contracts can be called either regularly or statically by using the `.call()` and `.static_call` functions. They will also be 'executed' (i.e. proved) in the user's device, and `static_call` will fail if any state changes are attempted (like the EVM's `STATICCALL`). + +#include_code private_call /noir-projects/noir-contracts/contracts/lending_contract/src/main.nr rust + +Unlike the EVM however, private execution doesn't revert in the traditional way: in case of error (e.g. a failed assertion, a state changing operation in a static context, etc.) the proof generation simply fails and no transaction request is generated, spending no network gas or user funds. + +#### Public Calls + +Since public execution can only be performed by the sequencer, public functions cannot be executed in a private context. It is possible however to _enqueue_ a public function call during private execution, requesting the sequencer to run it during inclusion of the transaction. 
It will be [executed in public](#public-execution) normally, including the possibility to enqueue static public calls. + +Since the public call is made asynchronously, any return values or side effects are not available during private execution. If the public function fails once executed, the entire transaction is reverted including state changes caused by the private part, such as new notes or nullifiers. Note that this does result in gas being spent, like in the case of the EVM. + +#include_code enqueue_public /noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr rust + +It is also possible to create public functions that can _only_ be invoked by privately enqueuing a call from the same contract, which can be very useful to update public state after private execution (e.g. update a token's supply after privately minting). This is achieved by annotating functions with `#[aztec(internal)]`. + +A common pattern is to enqueue public calls to check some validity condition on public state, e.g. that a deadline has not expired or that some public value is set. + +#include_code call-check-deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust + +#include_code deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust + +:::warning +Calling public functions privately leaks some privacy! The caller of the function and all arguments will be revealed, so exercise care when mixing the private and public domains. To learn about alternative ways to access public state privately, look into [Shared State](../../developers/contracts/references/storage/shared_state.md). +::: + +### Public Execution + +Contract functions marked with `#[aztec(public)]` can only be called publicly, and are executed by the sequencer. The computation model is very similar to the EVM: all state, parameters, etc. are known to the entire network, and no data is private. 
Static execution like the EVM's `STATICCALL` is possible too, with similar semantics (state can be accessed but not modified, etc.). + +Since private calls are always run in a user's device, it is not possible to perform any private execution from a public context. A reasonably good mental model for public execution is that of an EVM in which some work has already been done privately, and all that is known about it is its correctness and side-effects (new notes and nullifiers, enqueued public calls, etc.). A reverted public execution will also revert the private side-effects. + +Public functions in other contracts can be called both regularly and statically, just like on the EVM. + +#include_code public_call /noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr rust + +:::note +This is the same function that was called by privately enqueuing a call to it! Public functions can be called either directly in a public context, or asynchronously by enqueuing in a private context. +::: + +### Top-level Unconstrained + +Contract functions with the `unconstrained` Noir keyword are a special type of function still under development, and their semantics will likely change in the near future. They are used to perform state queries from an off-chain client, and are never included in any transaction. No guarantees are made on the correctness of the result since they rely exclusively on unconstrained oracle calls. + +A reasonable mental model for them is that of a `view` Solidity function that is never called in any transaction, and is only ever invoked via `eth_call`. Note that in these the caller assumes that the node is acting honestly by executing the true contract bytecode with correct blockchain state, the same way the Aztec version assumes the oracles are returning legitimate data. 
 + +### aztec.js + +There are three different ways to execute an Aztec contract function using the `aztec.js` library, with close similarities to their [JSON-RPC counterparts](#json-rpc). + +#### `simulate` + +This is used to get a result out of an execution, either private or public. It creates no transaction and spends no gas. The mental model is fairly close to that of [`eth_call`](#eth_call), in that it can be used to call any type of function, simulate its execution and get a result out of it. `simulate` is also the only way to run [top-level unconstrained functions](#top-level-unconstrained). + +#include_code public_getter /noir-projects/noir-contracts/contracts/auth_contract/src/main.nr rust + +#include_code simulate_public_getter yarn-project/end-to-end/src/e2e_auth_contract.test.ts typescript + +:::warning +No correctness is guaranteed on the result of `simulate`! Correct execution is entirely optional and left up to the client that handles this request. +::: + +#### `prove` + +This creates and returns a transaction request, which includes proof of correct private execution and side-effects. The request is not broadcast however, and no gas is spent. It is typically used in testing contexts to inspect transaction parameters or to check for execution failure. + +#include_code local-tx-fails /yarn-project/end-to-end/src/guides/dapp_testing.test.ts typescript + +Like most Ethereum libraries, `prove` also simulates public execution to try to detect runtime errors that would only occur once the transaction is picked up by the sequencer. This makes `prove` very useful in testing environments, but users should be wary of both false positives and negatives in production environments, particularly if the node's data is stale. Public simulation can be skipped by setting the `skipPublicSimulation` flag. + +#### `send` + +This is the same as [`prove`](#prove) except it also broadcasts the transaction and returns a receipt. 
This is how transactions are sent, getting them to be included in blocks and spending gas. It is similar to [`eth_sendTransaction`](#eth_sendtransaction), except it also performs some work on the user's device, namely the production of the proof for the private part of the transaction. + +#include_code send_tx yarn-project/end-to-end/src/e2e_card_game.test.ts typescript diff --git a/docs/docs/misc/glossary.md b/docs/docs/misc/glossary/main.md similarity index 100% rename from docs/docs/misc/glossary.md rename to docs/docs/misc/glossary/main.md diff --git a/docs/sidebars.js b/docs/sidebars.js index b2272172ad8..65a63a31870 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -584,7 +584,15 @@ const sidebars = { defaultStyle: true, }, "misc/migration_notes", - "misc/glossary", + { + label: "Glossary", + type: "category", + link: { + type: "doc", + id: "misc/glossary/main", + }, + items: ["misc/glossary/call_types"], + }, { label: "Roadmap", type: "category", diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr index f049473ea57..2bf04c8628c 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr @@ -48,8 +48,10 @@ contract AppSubscription { note.remaining_txs -= 1; storage.subscriptions.at(user_address).replace(&mut note, true); + // docs:start:enqueue_public let gas_limit = storage.gas_token_limit_per_tx.read_private(); GasToken::at(storage.gas_token_address.read_private()).pay_fee(gas_limit).enqueue(&mut context); + // docs:end:enqueue_public context.end_setup(); diff --git a/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr b/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr index 836e01bb41d..0de4f7c2093 100644 --- a/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr +++ 
b/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr @@ -33,12 +33,14 @@ contract Auth { // docs:end:shared_mutable_schedule } + // docs:start:public_getter #[aztec(public)] fn get_authorized() -> AztecAddress { // docs:start:shared_mutable_get_current_public storage.authorized.get_current_value_in_public() // docs:end:shared_mutable_get_current_public } + // docs:end:public_getter #[aztec(public)] fn get_scheduled_authorized() -> AztecAddress { diff --git a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr index f7636711d0e..c877e8c7ff0 100644 --- a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr @@ -40,7 +40,9 @@ contract FPC { #[aztec(public)] #[aztec(internal)] fn pay_fee(refund_address: AztecAddress, amount: Field, asset: AztecAddress) { + // docs:start:public_call let refund = GasToken::at(storage.gas_token_address.read_public()).pay_fee(amount).call(&mut context); + // docs:end:public_call // Just do public refunds for the present Token::at(asset).transfer_public(context.this_address(), refund_address, refund, 0).call(&mut context); } diff --git a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr index 80d693340c6..909f0417849 100644 --- a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr @@ -236,7 +236,9 @@ contract Lending { stable_coin: AztecAddress ) { let on_behalf_of = compute_identifier(secret, on_behalf_of, context.msg_sender().to_field()); + // docs:start:private_call let _ = Token::at(stable_coin).burn(from, amount, nonce).call(&mut context); + // docs:end:private_call let _ = Lending::at(context.this_address())._repay(AztecAddress::from_field(on_behalf_of), amount, stable_coin).enqueue(&mut 
context); } diff --git a/yarn-project/end-to-end/src/e2e_auth_contract.test.ts b/yarn-project/end-to-end/src/e2e_auth_contract.test.ts index 2ccf9ff3493..4702797a4d2 100644 --- a/yarn-project/end-to-end/src/e2e_auth_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_auth_contract.test.ts @@ -51,6 +51,12 @@ describe('e2e_auth_contract', () => { expect(await contract.methods.get_authorized().simulate()).toEqual(AztecAddress.ZERO); }); + it('non-admin canoot set authorized', async () => { + await expect( + contract.withWallet(other).methods.set_authorized(authorized.getAddress()).send().wait(), + ).rejects.toThrow('caller is not admin'); + }); + it('admin sets authorized', async () => { await contract.withWallet(admin).methods.set_authorized(authorized.getAddress()).send().wait(); @@ -68,7 +74,9 @@ describe('e2e_auth_contract', () => { it('after a while the scheduled change is effective and can be used with max block restriction', async () => { await mineBlocks(DELAY); // This gets us past the block of change + // docs:start:simulate_public_getter expect(await contract.methods.get_authorized().simulate()).toEqual(authorized.getAddress()); + // docs:end:simulate_public_getter const interaction = contract.withWallet(authorized).methods.do_private_authorized_thing(); diff --git a/yarn-project/end-to-end/src/e2e_card_game.test.ts b/yarn-project/end-to-end/src/e2e_card_game.test.ts index 56f7a547f41..f0949b16663 100644 --- a/yarn-project/end-to-end/src/e2e_card_game.test.ts +++ b/yarn-project/end-to-end/src/e2e_card_game.test.ts @@ -144,7 +144,9 @@ describe('e2e_card_game', () => { it('should be able to buy packs', async () => { const seed = 27n; + // docs:start:send_tx await contract.methods.buy_pack(seed).send().wait(); + // docs:end:send_tx const collection = await contract.methods.view_collection_cards(firstPlayer, 0).simulate({ from: firstPlayer }); const expected = getPackedCards(0, seed); expect(unwrapOptions(collection)).toMatchObject(expected); From 
11cde4434060807e4ee5fcb39268c6e8dbcc4a45 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Wed, 8 May 2024 15:03:43 -0400 Subject: [PATCH 04/43] feat: Sync from noir (#6280) Automated pull of development from the [noir](https://github.com/noir-lang/noir) programming language, a dependency of Aztec. BEGIN_COMMIT_OVERRIDE feat: add `Not` trait to stdlib (https://github.com/noir-lang/noir/pull/4999) feat: increase default expression width to 4 (https://github.com/noir-lang/noir/pull/4995) chore: adding name shadowing tests template program (https://github.com/noir-lang/noir/pull/4799) feat: implement `ops` traits on `u16`/`i16` (https://github.com/noir-lang/noir/pull/4996) chore: disable `gates_report.yml` (https://github.com/noir-lang/noir/pull/4997) feat: Sync from aztec-packages (https://github.com/noir-lang/noir/pull/4993) fix: defer overflow checks for unsigned integers to acir-gen (https://github.com/noir-lang/noir/pull/4832) feat: add support for u16/i16 (https://github.com/noir-lang/noir/pull/4985) chore: split `ops` into `arith` and `bit` modules (https://github.com/noir-lang/noir/pull/4989) chore(ci): run clippy on benchmarks (https://github.com/noir-lang/noir/pull/4988) feat: remove query to backend to get expression width (https://github.com/noir-lang/noir/pull/4975) END_COMMIT_OVERRIDE --------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> Co-authored-by: Tom French --- .noir-sync-commit | 2 +- .../.github/workflows/formatting.yml | 2 +- .../.github/workflows/gates_report.yml | 144 +- noir/noir-repo/Cargo.lock | 1 - .../acvm-repo/acir/benches/serialization.rs | 2 +- .../optimizers/constant_backpropagation.rs | 2 +- .../compiler/optimizers/redundant_range.rs | 2 +- .../compiler/noirc_driver/src/lib.rs | 4 +- .../src/brillig/brillig_gen/brillig_block.rs | 87 +- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 70 +- .../src/ssa/opt/remove_bit_shifts.rs | 11 +- 
.../src/ssa/opt/remove_enable_side_effects.rs | 12 +- .../src/ssa/ssa_gen/context.rs | 49 +- .../compiler/noirc_frontend/src/ast/mod.rs | 3 + .../src/hir/comptime/interpreter.rs | 64 + .../noirc_frontend/src/hir/comptime/tests.rs | 13 + .../noirc_frontend/src/hir/comptime/value.rs | 13 + .../noirc_frontend/src/parser/parser.rs | 7 +- .../compiler/noirc_frontend/src/tests.rs | 2481 ++++++++--------- .../src/tests/name_shadowing.rs | 419 +++ noir/noir-repo/compiler/wasm/src/compile.rs | 21 +- .../compiler/wasm/src/compile_new.rs | 30 +- .../docs/noir/concepts/data_types/integers.md | 4 +- .../docs/docs/noir/standard_library/traits.md | 33 +- .../noir_stdlib/src/embedded_curve_ops.nr | 7 +- noir/noir-repo/noir_stdlib/src/ops.nr | 173 +- noir/noir-repo/noir_stdlib/src/ops/arith.nr | 103 + noir/noir-repo/noir_stdlib/src/ops/bit.nr | 109 + noir/noir-repo/noir_stdlib/src/uint128.nr | 30 +- .../execution_success/u16_support/Nargo.toml | 7 + .../execution_success/u16_support/Prover.toml | 1 + .../execution_success/u16_support/src/main.nr | 24 + .../tooling/backend_interface/Cargo.toml | 1 - .../tooling/backend_interface/src/cli/info.rs | 62 - .../tooling/backend_interface/src/cli/mod.rs | 2 - .../backend_interface/src/proof_system.rs | 25 +- .../mock_backend/src/info_cmd.rs | 40 - .../test-binaries/mock_backend/src/main.rs | 3 - .../tooling/bb_abstraction_leaks/build.rs | 2 +- .../tooling/nargo_cli/src/cli/check_cmd.rs | 7 +- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 3 +- .../tooling/nargo_cli/src/cli/compile_cmd.rs | 17 +- .../tooling/nargo_cli/src/cli/dap_cmd.rs | 18 +- .../tooling/nargo_cli/src/cli/debug_cmd.rs | 14 +- .../tooling/nargo_cli/src/cli/execute_cmd.rs | 14 +- .../tooling/nargo_cli/src/cli/export_cmd.rs | 7 +- .../tooling/nargo_cli/src/cli/info_cmd.rs | 23 +- .../tooling/nargo_cli/src/cli/lsp_cmd.rs | 8 +- .../tooling/nargo_cli/src/cli/mod.rs | 18 +- .../tooling/nargo_cli/src/cli/new_cmd.rs | 8 +- .../tooling/nargo_cli/src/cli/prove_cmd.rs | 7 +- 
.../tooling/nargo_cli/src/cli/test_cmd.rs | 8 +- .../tooling/nargo_cli/src/cli/verify_cmd.rs | 7 +- .../tooling/noir_js/test/node/execute.test.ts | 36 - 54 files changed, 2347 insertions(+), 1913 deletions(-) create mode 100644 noir/noir-repo/compiler/noirc_frontend/src/tests/name_shadowing.rs create mode 100644 noir/noir-repo/noir_stdlib/src/ops/arith.nr create mode 100644 noir/noir-repo/noir_stdlib/src/ops/bit.nr create mode 100644 noir/noir-repo/test_programs/execution_success/u16_support/Nargo.toml create mode 100644 noir/noir-repo/test_programs/execution_success/u16_support/Prover.toml create mode 100644 noir/noir-repo/test_programs/execution_success/u16_support/src/main.nr delete mode 100644 noir/noir-repo/tooling/backend_interface/src/cli/info.rs delete mode 100644 noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs diff --git a/.noir-sync-commit b/.noir-sync-commit index 61a3851ea0c..5fe0fbedd16 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -c49d3a9ded819b828cffdfc031e86614da21e329 +95d4d133d1eb5e0eb44cd928d8183d890e970a13 diff --git a/noir/noir-repo/.github/workflows/formatting.yml b/noir/noir-repo/.github/workflows/formatting.yml index 8166fb0f7c2..08c02af519f 100644 --- a/noir/noir-repo/.github/workflows/formatting.yml +++ b/noir/noir-repo/.github/workflows/formatting.yml @@ -44,7 +44,7 @@ jobs: save-if: ${{ github.event_name != 'merge_group' }} - name: Run `cargo clippy` - run: cargo clippy --workspace --locked --release + run: cargo clippy --all-targets --workspace --locked --release - name: Run `cargo fmt` run: cargo fmt --all --check diff --git a/noir/noir-repo/.github/workflows/gates_report.yml b/noir/noir-repo/.github/workflows/gates_report.yml index ba4cb600c59..3d4bef1940e 100644 --- a/noir/noir-repo/.github/workflows/gates_report.yml +++ b/noir/noir-repo/.github/workflows/gates_report.yml @@ -1,88 +1,88 @@ -name: Report gates diff +# name: Report gates diff -on: - push: - branches: - - master 
- pull_request: +# on: +# push: +# branches: +# - master +# pull_request: -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] +# jobs: +# build-nargo: +# runs-on: ubuntu-latest +# strategy: +# matrix: +# target: [x86_64-unknown-linux-gnu] - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 +# steps: +# - name: Checkout Noir repo +# uses: actions/checkout@v4 - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.74.1 +# - name: Setup toolchain +# uses: dtolnay/rust-toolchain@1.74.1 - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} +# - uses: Swatinem/rust-cache@v2 +# with: +# key: ${{ matrix.target }} +# cache-on-failure: true +# save-if: ${{ github.event_name != 'merge_group' }} - - name: Build Nargo - run: cargo build --package nargo_cli --release +# - name: Build Nargo +# run: cargo build --package nargo_cli --release - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz +# - name: Package artifacts +# run: | +# mkdir dist +# cp ./target/release/nargo ./dist/nargo +# 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: nargo - path: ./dist/* - retention-days: 3 +# - name: Upload artifact +# uses: actions/upload-artifact@v4 +# with: +# name: nargo +# path: ./dist/* +# retention-days: 3 - compare_gas_reports: - needs: [build-nargo] - runs-on: ubuntu-latest - permissions: - pull-requests: write +# compare_gas_reports: +# needs: [build-nargo] +# runs-on: ubuntu-latest +# permissions: +# pull-requests: write - steps: - - uses: actions/checkout@v4 +# steps: +# - uses: actions/checkout@v4 - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: 
./nargo +# - name: Download nargo binary +# uses: actions/download-artifact@v4 +# with: +# name: nargo +# path: ./nargo - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V +# - name: Set nargo on PATH +# run: | +# nargo_binary="${{ github.workspace }}/nargo/nargo" +# chmod +x $nargo_binary +# echo "$(dirname $nargo_binary)" >> $GITHUB_PATH +# export PATH="$PATH:$(dirname $nargo_binary)" +# nargo -V - - name: Generate gates report - working-directory: ./test_programs - run: | - ./gates_report.sh - mv gates_report.json ../gates_report.json +# - name: Generate gates report +# working-directory: ./test_programs +# run: | +# ./gates_report.sh +# mv gates_report.json ../gates_report.json - - name: Compare gates reports - id: gates_diff - uses: vezenovm/noir-gates-diff@acf12797860f237117e15c0d6e08d64253af52b6 - with: - report: gates_report.json - summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) +# - name: Compare gates reports +# id: gates_diff +# uses: vezenovm/noir-gates-diff@acf12797860f237117e15c0d6e08d64253af52b6 +# with: +# report: gates_report.json +# summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) - - name: Add gates diff to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - # delete the comment in case changes no longer impact circuit sizes - delete: ${{ !steps.gates_diff.outputs.markdown }} - message: ${{ steps.gates_diff.outputs.markdown }} +# - name: Add gates diff to sticky comment +# if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' +# uses: marocchino/sticky-pull-request-comment@v2 +# with: +# # delete the comment in 
case changes no longer impact circuit sizes +# delete: ${{ !steps.gates_diff.outputs.markdown }} +# message: ${{ steps.gates_diff.outputs.markdown }} diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index 859579c077f..a8c63c032aa 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -462,7 +462,6 @@ dependencies = [ "dirs", "flate2", "reqwest", - "serde", "serde_json", "tar", "tempfile", diff --git a/noir/noir-repo/acvm-repo/acir/benches/serialization.rs b/noir/noir-repo/acvm-repo/acir/benches/serialization.rs index e51726e3901..a7f32b4a4c7 100644 --- a/noir/noir-repo/acvm-repo/acir/benches/serialization.rs +++ b/noir/noir-repo/acvm-repo/acir/benches/serialization.rs @@ -33,7 +33,7 @@ fn sample_program(num_opcodes: usize) -> Program { functions: vec![Circuit { current_witness_index: 4000, opcodes: assert_zero_opcodes.to_vec(), - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, private_parameters: BTreeSet::from([Witness(1), Witness(2), Witness(3), Witness(4)]), public_parameters: PublicInputs(BTreeSet::from([Witness(5)])), return_values: PublicInputs(BTreeSet::from([Witness(6)])), diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs index 0e7d28104da..5b778f63f07 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs @@ -282,7 +282,7 @@ mod tests { fn test_circuit(opcodes: Vec) -> Circuit { Circuit { current_witness_index: 1, - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, opcodes, private_parameters: BTreeSet::new(), public_parameters: PublicInputs::default(), diff --git 
a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index c6ca18d30ae..0e1629717b3 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -164,7 +164,7 @@ mod tests { Circuit { current_witness_index: 1, - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, opcodes, private_parameters: BTreeSet::new(), public_parameters: PublicInputs::default(), diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index ef874d45f88..5f1985b0553 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -54,8 +54,8 @@ pub const NOIR_ARTIFACT_VERSION_STRING: &str = #[derive(Args, Clone, Debug, Default)] pub struct CompileOptions { /// Override the expression width requested by the backend. - #[arg(long, value_parser = parse_expression_width)] - pub expression_width: Option, + #[arg(long, value_parser = parse_expression_width, default_value = "4")] + pub expression_width: ExpressionWidth, /// Force a full recompilation. 
#[arg(long = "force")] diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 873ebe51e6f..f660c8e0b7a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1328,7 +1328,15 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.binary_instruction(left, right, result_variable, brillig_binary_op); - self.add_overflow_check(brillig_binary_op, left, right, result_variable, is_signed); + self.add_overflow_check( + brillig_binary_op, + left, + right, + result_variable, + binary, + dfg, + is_signed, + ); } /// Splits a two's complement signed integer in the sign bit and the absolute value. @@ -1481,15 +1489,20 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_single_addr(bias); } + #[allow(clippy::too_many_arguments)] fn add_overflow_check( &mut self, binary_operation: BrilligBinaryOp, left: SingleAddrVariable, right: SingleAddrVariable, result: SingleAddrVariable, + binary: &Binary, + dfg: &DataFlowGraph, is_signed: bool, ) { let bit_size = left.bit_size; + let max_lhs_bits = dfg.get_value_max_num_bits(binary.lhs); + let max_rhs_bits = dfg.get_value_max_num_bits(binary.rhs); if bit_size == FieldElement::max_num_bits() { return; @@ -1497,6 +1510,11 @@ impl<'block> BrilligBlock<'block> { match (binary_operation, is_signed) { (BrilligBinaryOp::Add, false) => { + if std::cmp::max(max_lhs_bits, max_rhs_bits) < bit_size { + // `left` and `right` have both been casted up from smaller types and so cannot overflow. 
+ return; + } + let condition = SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); // Check that lhs <= result @@ -1511,6 +1529,12 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_single_addr(condition); } (BrilligBinaryOp::Sub, false) => { + if dfg.is_constant(binary.lhs) && max_lhs_bits > max_rhs_bits { + // `left` is a fixed constant and `right` is restricted such that `left - right > 0` + // Note strict inequality as `right > left` while `max_lhs_bits == max_rhs_bits` is possible. + return; + } + let condition = SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); // Check that rhs <= lhs @@ -1527,39 +1551,36 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_single_addr(condition); } (BrilligBinaryOp::Mul, false) => { - // Multiplication overflow is only possible for bit sizes > 1 - if bit_size > 1 { - let is_right_zero = - SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); - let zero = - self.brillig_context.make_constant_instruction(0_usize.into(), bit_size); - self.brillig_context.binary_instruction( - zero, - right, - is_right_zero, - BrilligBinaryOp::Equals, - ); - self.brillig_context.codegen_if_not(is_right_zero.address, |ctx| { - let condition = SingleAddrVariable::new(ctx.allocate_register(), 1); - let division = SingleAddrVariable::new(ctx.allocate_register(), bit_size); - // Check that result / rhs == lhs - ctx.binary_instruction( - result, - right, - division, - BrilligBinaryOp::UnsignedDiv, - ); - ctx.binary_instruction(division, left, condition, BrilligBinaryOp::Equals); - ctx.codegen_constrain( - condition, - Some("attempt to multiply with overflow".to_string()), - ); - ctx.deallocate_single_addr(condition); - ctx.deallocate_single_addr(division); - }); - self.brillig_context.deallocate_single_addr(is_right_zero); - self.brillig_context.deallocate_single_addr(zero); + if bit_size == 1 || max_lhs_bits + max_rhs_bits <= bit_size { + // Either 
performing boolean multiplication (which cannot overflow), + // or `left` and `right` have both been casted up from smaller types and so cannot overflow. + return; } + + let is_right_zero = + SingleAddrVariable::new(self.brillig_context.allocate_register(), 1); + let zero = self.brillig_context.make_constant_instruction(0_usize.into(), bit_size); + self.brillig_context.binary_instruction( + zero, + right, + is_right_zero, + BrilligBinaryOp::Equals, + ); + self.brillig_context.codegen_if_not(is_right_zero.address, |ctx| { + let condition = SingleAddrVariable::new(ctx.allocate_register(), 1); + let division = SingleAddrVariable::new(ctx.allocate_register(), bit_size); + // Check that result / rhs == lhs + ctx.binary_instruction(result, right, division, BrilligBinaryOp::UnsignedDiv); + ctx.binary_instruction(division, left, condition, BrilligBinaryOp::Equals); + ctx.codegen_constrain( + condition, + Some("attempt to multiply with overflow".to_string()), + ); + ctx.deallocate_single_addr(condition); + ctx.deallocate_single_addr(division); + }); + self.brillig_context.deallocate_single_addr(is_right_zero); + self.brillig_context.deallocate_single_addr(zero); } _ => {} } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 8abb31e8276..0de0c28be75 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -1849,15 +1849,15 @@ impl<'a> Context<'a> { let binary_type = AcirType::from(binary_type); let bit_count = binary_type.bit_size(); - - match binary.operator { + let num_type = binary_type.to_numeric_type(); + let result = match binary.operator { BinaryOp::Add => self.acir_context.add_var(lhs, rhs), BinaryOp::Sub => self.acir_context.sub_var(lhs, rhs), BinaryOp::Mul => self.acir_context.mul_var(lhs, rhs), BinaryOp::Div => self.acir_context.div_var( lhs, rhs, - binary_type, + 
binary_type.clone(), self.current_side_effects_enabled_var, ), // Note: that this produces unnecessary constraints when @@ -1881,7 +1881,71 @@ impl<'a> Context<'a> { BinaryOp::Shl | BinaryOp::Shr => unreachable!( "ICE - bit shift operators do not exist in ACIR and should have been replaced" ), + }?; + + if let NumericType::Unsigned { bit_size } = &num_type { + // Check for integer overflow + self.check_unsigned_overflow( + result, + *bit_size, + binary.lhs, + binary.rhs, + dfg, + binary.operator, + )?; } + + Ok(result) + } + + /// Adds a range check against the bit size of the result of addition, subtraction or multiplication + fn check_unsigned_overflow( + &mut self, + result: AcirVar, + bit_size: u32, + lhs: ValueId, + rhs: ValueId, + dfg: &DataFlowGraph, + op: BinaryOp, + ) -> Result<(), RuntimeError> { + // We try to optimize away operations that are guaranteed not to overflow + let max_lhs_bits = dfg.get_value_max_num_bits(lhs); + let max_rhs_bits = dfg.get_value_max_num_bits(rhs); + + let msg = match op { + BinaryOp::Add => { + if std::cmp::max(max_lhs_bits, max_rhs_bits) < bit_size { + // `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. + return Ok(()); + } + "attempt to add with overflow".to_string() + } + BinaryOp::Sub => { + if dfg.is_constant(lhs) && max_lhs_bits > max_rhs_bits { + // `lhs` is a fixed constant and `rhs` is restricted such that `lhs - rhs > 0` + // Note strict inequality as `rhs > lhs` while `max_lhs_bits == max_rhs_bits` is possible. + return Ok(()); + } + "attempt to subtract with overflow".to_string() + } + BinaryOp::Mul => { + if bit_size == 1 || max_lhs_bits + max_rhs_bits <= bit_size { + // Either performing boolean multiplication (which cannot overflow), + // or `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. 
+ return Ok(()); + } + "attempt to multiply with overflow".to_string() + } + _ => return Ok(()), + }; + + let with_pred = self.acir_context.mul_var(result, self.current_side_effects_enabled_var)?; + self.acir_context.range_constrain_var( + with_pred, + &NumericType::Unsigned { bit_size }, + Some(msg), + )?; + Ok(()) } /// Operands in a binary operation are checked to have the same type. diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs index 42727054503..65a77552c79 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs @@ -109,7 +109,7 @@ impl Context<'_> { return InsertInstructionResult::SimplifiedTo(zero).first(); } } - let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ); + let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ.clone()); let max_lhs_bits = self.function.dfg.get_value_max_num_bits(lhs); @@ -123,15 +123,18 @@ impl Context<'_> { // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); let pow = self.pow(base, rhs_unsigned); - let pow = self.insert_cast(pow, typ); + let pow = self.insert_cast(pow, typ.clone()); (FieldElement::max_num_bits(), self.insert_binary(predicate, BinaryOp::Mul, pow)) }; if max_bit <= bit_size { self.insert_binary(lhs, BinaryOp::Mul, pow) } else { - let result = self.insert_binary(lhs, BinaryOp::Mul, pow); - self.insert_truncate(result, bit_size, max_bit) + let lhs_field = self.insert_cast(lhs, Type::field()); + let pow_field = self.insert_cast(pow, Type::field()); + let result = self.insert_binary(lhs_field, BinaryOp::Mul, pow_field); + let result = self.insert_truncate(result, bit_size, max_bit); + self.insert_cast(result, typ) } } diff --git 
a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index 02b9202b209..ea37d857e58 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -108,17 +108,19 @@ impl Context { fn responds_to_side_effects_var(dfg: &DataFlowGraph, instruction: &Instruction) -> bool { use Instruction::*; match instruction { - Binary(binary) => { - if matches!(binary.operator, BinaryOp::Div | BinaryOp::Mod) { + Binary(binary) => match binary.operator { + BinaryOp::Add | BinaryOp::Sub | BinaryOp::Mul => { + dfg.type_of_value(binary.lhs).is_unsigned() + } + BinaryOp::Div | BinaryOp::Mod => { if let Some(rhs) = dfg.get_numeric_constant(binary.rhs) { rhs == FieldElement::zero() } else { true } - } else { - false } - } + _ => false, + }, Cast(_, _) | Not(_) diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index f7ecdc8870d..ebcbfbabe73 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -304,7 +304,7 @@ impl<'a> FunctionContext<'a> { /// Insert constraints ensuring that the operation does not overflow the bit size of the result /// - /// If the result is unsigned, we simply range check against the bit size + /// If the result is unsigned, overflow will be checked during acir-gen (cf. issue #4456), except for bit-shifts, because we will convert them to field multiplication /// /// If the result is signed, we just prepare it for check_signed_overflow() by casting it to /// an unsigned value representing the signed integer. 
@@ -351,51 +351,12 @@ impl<'a> FunctionContext<'a> { } Type::Numeric(NumericType::Unsigned { bit_size }) => { let dfg = &self.builder.current_function.dfg; - - let max_lhs_bits = self.builder.current_function.dfg.get_value_max_num_bits(lhs); - let max_rhs_bits = self.builder.current_function.dfg.get_value_max_num_bits(rhs); + let max_lhs_bits = dfg.get_value_max_num_bits(lhs); match operator { - BinaryOpKind::Add => { - if std::cmp::max(max_lhs_bits, max_rhs_bits) < bit_size { - // `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. - return result; - } - - let message = "attempt to add with overflow".to_string(); - self.builder.set_location(location).insert_range_check( - result, - bit_size, - Some(message), - ); - } - BinaryOpKind::Subtract => { - if dfg.is_constant(lhs) && max_lhs_bits > max_rhs_bits { - // `lhs` is a fixed constant and `rhs` is restricted such that `lhs - rhs > 0` - // Note strict inequality as `rhs > lhs` while `max_lhs_bits == max_rhs_bits` is possible. - return result; - } - - let message = "attempt to subtract with overflow".to_string(); - self.builder.set_location(location).insert_range_check( - result, - bit_size, - Some(message), - ); - } - BinaryOpKind::Multiply => { - if bit_size == 1 || max_lhs_bits + max_rhs_bits <= bit_size { - // Either performing boolean multiplication (which cannot overflow), - // or `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. 
- return result; - } - - let message = "attempt to multiply with overflow".to_string(); - self.builder.set_location(location).insert_range_check( - result, - bit_size, - Some(message), - ); + BinaryOpKind::Add | BinaryOpKind::Subtract | BinaryOpKind::Multiply => { + // Overflow check is deferred to acir-gen + return result; } BinaryOpKind::ShiftLeft => { if let Some(rhs_const) = dfg.get_numeric_constant(rhs) { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs index 254ec4a7590..1c5a5c610aa 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs @@ -32,6 +32,7 @@ use iter_extended::vecmap; pub enum IntegerBitSize { One, Eight, + Sixteen, ThirtyTwo, SixtyFour, } @@ -48,6 +49,7 @@ impl From for u32 { match size { One => 1, Eight => 8, + Sixteen => 16, ThirtyTwo => 32, SixtyFour => 64, } @@ -64,6 +66,7 @@ impl TryFrom for IntegerBitSize { match value { 1 => Ok(One), 8 => Ok(Eight), + 16 => Ok(Sixteen), 32 => Ok(ThirtyTwo), 64 => Ok(SixtyFour), _ => Err(InvalidIntegerBitSizeError(value)), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 26b7c212a30..84df3a0a244 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -401,6 +401,14 @@ impl<'a> Interpreter<'a> { let value = if is_negative { 0u8.wrapping_sub(value) } else { value }; Ok(Value::U8(value)) } + (Signedness::Unsigned, IntegerBitSize::Sixteen) => { + let value: u16 = + value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( + InterpreterError::IntegerOutOfRangeForType { value, typ, location }, + )?; + let value = if is_negative { 0u16.wrapping_sub(value) } else { value }; + Ok(Value::U16(value)) + } (Signedness::Unsigned, 
IntegerBitSize::ThirtyTwo) => { let value: u32 = value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( @@ -430,6 +438,14 @@ impl<'a> Interpreter<'a> { let value = if is_negative { -value } else { value }; Ok(Value::I8(value)) } + (Signedness::Signed, IntegerBitSize::Sixteen) => { + let value: i16 = + value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( + InterpreterError::IntegerOutOfRangeForType { value, typ, location }, + )?; + let value = if is_negative { -value } else { value }; + Ok(Value::I16(value)) + } (Signedness::Signed, IntegerBitSize::ThirtyTwo) => { let value: i32 = value.try_to_u64().and_then(|value| value.try_into().ok()).ok_or( @@ -509,9 +525,11 @@ impl<'a> Interpreter<'a> { crate::ast::UnaryOp::Minus => match rhs { Value::Field(value) => Ok(Value::Field(FieldElement::zero() - value)), Value::I8(value) => Ok(Value::I8(-value)), + Value::I16(value) => Ok(Value::I16(-value)), Value::I32(value) => Ok(Value::I32(-value)), Value::I64(value) => Ok(Value::I64(-value)), Value::U8(value) => Ok(Value::U8(0 - value)), + Value::U16(value) => Ok(Value::U16(0 - value)), Value::U32(value) => Ok(Value::U32(0 - value)), Value::U64(value) => Ok(Value::U64(0 - value)), value => { @@ -523,9 +541,11 @@ impl<'a> Interpreter<'a> { crate::ast::UnaryOp::Not => match rhs { Value::Bool(value) => Ok(Value::Bool(!value)), Value::I8(value) => Ok(Value::I8(!value)), + Value::I16(value) => Ok(Value::I16(!value)), Value::I32(value) => Ok(Value::I32(!value)), Value::I64(value) => Ok(Value::I64(!value)), Value::U8(value) => Ok(Value::U8(!value)), + Value::U16(value) => Ok(Value::U16(!value)), Value::U32(value) => Ok(Value::U32(!value)), Value::U64(value) => Ok(Value::U64(!value)), value => { @@ -559,9 +579,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Add => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs + rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs + rhs)), + (Value::I16(lhs), Value::I16(rhs)) => 
Ok(Value::I16(lhs + rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs + rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs + rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs + rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs + rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs + rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs + rhs)), (lhs, rhs) => { @@ -572,9 +594,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Subtract => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs - rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs - rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs - rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs - rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs - rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs - rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs - rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs - rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs - rhs)), (lhs, rhs) => { @@ -585,9 +609,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Multiply => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs * rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs * rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs * rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs * rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs * rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs * rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs * rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs * rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs * rhs)), (lhs, rhs) => { @@ -598,9 +624,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Divide => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs / rhs)), 
(Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs / rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs / rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs / rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs / rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs / rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs / rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs / rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs / rhs)), (lhs, rhs) => { @@ -611,9 +639,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Equal => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs == rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs == rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs == rhs)), (lhs, rhs) => { @@ -624,9 +654,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::NotEqual => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs != rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs != rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs != rhs)), (lhs, rhs) => { @@ -637,9 +669,11 @@ impl<'a> 
Interpreter<'a> { BinaryOpKind::Less => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs < rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs < rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs < rhs)), (lhs, rhs) => { @@ -650,9 +684,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::LessEqual => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs <= rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs <= rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs <= rhs)), (lhs, rhs) => { @@ -663,9 +699,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Greater => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs > rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs > rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::U32(lhs), Value::U32(rhs)) => 
Ok(Value::Bool(lhs > rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs > rhs)), (lhs, rhs) => { @@ -676,9 +714,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::GreaterEqual => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::Bool(lhs >= rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::Bool(lhs >= rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs >= rhs)), (lhs, rhs) => { @@ -689,9 +729,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::And => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs & rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs & rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs & rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs & rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs & rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs & rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs & rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs & rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs & rhs)), (lhs, rhs) => { @@ -702,9 +744,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Or => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs | rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs | rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs | rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs | rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs | rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs | rhs)), + 
(Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs | rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs | rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs | rhs)), (lhs, rhs) => { @@ -715,9 +759,11 @@ impl<'a> Interpreter<'a> { BinaryOpKind::Xor => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs ^ rhs)), (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs ^ rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs ^ rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs ^ rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs ^ rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs ^ rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs ^ rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs ^ rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs ^ rhs)), (lhs, rhs) => { @@ -727,9 +773,11 @@ impl<'a> Interpreter<'a> { }, BinaryOpKind::ShiftRight => match (lhs, rhs) { (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs >> rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs >> rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs >> rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs >> rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs >> rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs >> rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs >> rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs >> rhs)), (lhs, rhs) => { @@ -739,9 +787,11 @@ impl<'a> Interpreter<'a> { }, BinaryOpKind::ShiftLeft => match (lhs, rhs) { (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs << rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs << rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs << rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs << rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs << rhs)), + (Value::U16(lhs), 
Value::U16(rhs)) => Ok(Value::U16(lhs << rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs << rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs << rhs)), (lhs, rhs) => { @@ -751,9 +801,11 @@ impl<'a> Interpreter<'a> { }, BinaryOpKind::Modulo => match (lhs, rhs) { (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs % rhs)), + (Value::I16(lhs), Value::I16(rhs)) => Ok(Value::I16(lhs % rhs)), (Value::I32(lhs), Value::I32(rhs)) => Ok(Value::I32(lhs % rhs)), (Value::I64(lhs), Value::I64(rhs)) => Ok(Value::I64(lhs % rhs)), (Value::U8(lhs), Value::U8(rhs)) => Ok(Value::U8(lhs % rhs)), + (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs % rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs % rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs % rhs)), (lhs, rhs) => { @@ -795,9 +847,11 @@ impl<'a> Interpreter<'a> { value.try_to_u64().expect("index could not fit into u64") as usize } Value::I8(value) => value as usize, + Value::I16(value) => value as usize, Value::I32(value) => value as usize, Value::I64(value) => value as usize, Value::U8(value) => value as usize, + Value::U16(value) => value as usize, Value::U32(value) => value as usize, Value::U64(value) => value as usize, value => { @@ -908,9 +962,11 @@ impl<'a> Interpreter<'a> { let (mut lhs, lhs_is_negative) = match self.evaluate(cast.lhs)? 
{ Value::Field(value) => (value, false), Value::U8(value) => ((value as u128).into(), false), + Value::U16(value) => ((value as u128).into(), false), Value::U32(value) => ((value as u128).into(), false), Value::U64(value) => ((value as u128).into(), false), Value::I8(value) => signed_int_to_field!(value), + Value::I16(value) => signed_int_to_field!(value), Value::I32(value) => signed_int_to_field!(value), Value::I64(value) => signed_int_to_field!(value), Value::Bool(value) => { @@ -946,6 +1002,9 @@ impl<'a> Interpreter<'a> { Err(InterpreterError::TypeUnsupported { typ: cast.r#type, location }) } (Signedness::Unsigned, IntegerBitSize::Eight) => cast_to_int!(lhs, to_u128, u8, U8), + (Signedness::Unsigned, IntegerBitSize::Sixteen) => { + cast_to_int!(lhs, to_u128, u16, U16) + } (Signedness::Unsigned, IntegerBitSize::ThirtyTwo) => { cast_to_int!(lhs, to_u128, u32, U32) } @@ -957,6 +1016,9 @@ impl<'a> Interpreter<'a> { Err(InterpreterError::TypeUnsupported { typ: cast.r#type, location }) } (Signedness::Signed, IntegerBitSize::Eight) => cast_to_int!(lhs, to_i128, i8, I8), + (Signedness::Signed, IntegerBitSize::Sixteen) => { + cast_to_int!(lhs, to_i128, i16, I16) + } (Signedness::Signed, IntegerBitSize::ThirtyTwo) => { cast_to_int!(lhs, to_i128, i32, I32) } @@ -1149,9 +1211,11 @@ impl<'a> Interpreter<'a> { let get_index = |this: &mut Self, expr| -> IResult<(_, fn(_) -> _)> { match this.evaluate(expr)? 
{ Value::I8(value) => Ok((value as i128, |i| Value::I8(i as i8))), + Value::I16(value) => Ok((value as i128, |i| Value::I16(i as i16))), Value::I32(value) => Ok((value as i128, |i| Value::I32(i as i32))), Value::I64(value) => Ok((value as i128, |i| Value::I64(i as i64))), Value::U8(value) => Ok((value as i128, |i| Value::U8(i as u8))), + Value::U16(value) => Ok((value as i128, |i| Value::U16(i as u16))), Value::U32(value) => Ok((value as i128, |i| Value::U32(i as u32))), Value::U64(value) => Ok((value as i128, |i| Value::U64(i as u64))), value => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs index 5a12eb7292c..41475d3ccf4 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs @@ -103,6 +103,19 @@ fn for_loop() { assert_eq!(result, Value::U8(15)); } +#[test] +fn for_loop_u16() { + let program = "fn main() -> pub u16 { + let mut x = 0; + for i in 0 .. 
6 { + x += i; + } + x + }"; + let result = interpret(program, vec!["main".into()]); + assert_eq!(result, Value::U16(15)); +} + #[test] fn for_loop_with_break() { let program = "unconstrained fn main() -> pub u32 { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs index 6845c6ac5a9..4e4a260871a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -22,9 +22,11 @@ pub enum Value { Bool(bool), Field(FieldElement), I8(i8), + I16(i16), I32(i32), I64(i64), U8(u8), + U16(u16), U32(u32), U64(u64), String(Rc), @@ -45,9 +47,11 @@ impl Value { Value::Bool(_) => Type::Bool, Value::Field(_) => Type::FieldElement, Value::I8(_) => Type::Integer(Signedness::Signed, IntegerBitSize::Eight), + Value::I16(_) => Type::Integer(Signedness::Signed, IntegerBitSize::Sixteen), Value::I32(_) => Type::Integer(Signedness::Signed, IntegerBitSize::ThirtyTwo), Value::I64(_) => Type::Integer(Signedness::Signed, IntegerBitSize::SixtyFour), Value::U8(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight), + Value::U16(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::Sixteen), Value::U32(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::ThirtyTwo), Value::U64(_) => Type::Integer(Signedness::Unsigned, IntegerBitSize::SixtyFour), Value::String(value) => { @@ -87,6 +91,12 @@ impl Value { let value = (value as u128).into(); HirExpression::Literal(HirLiteral::Integer(value, negative)) } + Value::I16(value) => { + let negative = value < 0; + let value = value.abs(); + let value = (value as u128).into(); + HirExpression::Literal(HirLiteral::Integer(value, negative)) + } Value::I32(value) => { let negative = value < 0; let value = value.abs(); @@ -102,6 +112,9 @@ impl Value { Value::U8(value) => { HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) } + Value::U16(value) 
=> { + HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) + } Value::U32(value) => { HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs index b627714d2a6..b527284d1a9 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs @@ -1374,7 +1374,7 @@ mod test { fresh_statement(), true, ), - vec!["x as u8", "0 as Field", "(x + 3) as [Field; 8]"], + vec!["x as u8", "x as u16", "0 as Field", "(x + 3) as [Field; 8]"], ); parse_all_failing( atom_or_right_unary( @@ -1546,7 +1546,10 @@ mod test { // Let statements are not type checked here, so the parser will accept as // long as it is a type. Other statements such as Public are type checked // Because for now, they can only have one type - parse_all(declaration(expression()), vec!["let _ = 42", "let x = y", "let x : u8 = y"]); + parse_all( + declaration(expression()), + vec!["let _ = 42", "let x = y", "let x : u8 = y", "let x: u16 = y"], + ); } #[test] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index 5f99e9e347a..6f7470807be 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -1,1236 +1,1214 @@ +#![cfg(test)] + +#[cfg(test)] +mod name_shadowing; + // XXX: These tests repeat a lot of code // what we should do is have test cases which are passed to a test harness // A test harness will allow for more expressive and readable tests -#[cfg(test)] -mod test { - - use core::panic; - use std::collections::BTreeMap; - - use fm::FileId; - - use iter_extended::vecmap; - use noirc_errors::Location; - - use crate::hir::def_collector::dc_crate::CompilationError; - use crate::hir::def_collector::errors::{DefCollectorErrorKind, 
DuplicateType}; - use crate::hir::def_map::ModuleData; - use crate::hir::resolution::errors::ResolverError; - use crate::hir::resolution::import::PathResolutionError; - use crate::hir::type_check::TypeCheckError; - use crate::hir::Context; - use crate::node_interner::{NodeInterner, StmtId}; - - use crate::hir::def_collector::dc_crate::DefCollector; - use crate::hir_def::expr::HirExpression; - use crate::hir_def::stmt::HirStatement; - use crate::monomorphization::monomorphize; - use crate::parser::ParserErrorReason; - use crate::ParsedModule; - use crate::{ - hir::def_map::{CrateDefMap, LocalModuleId}, - parse_program, - }; - use fm::FileManager; - use noirc_arena::Arena; +use core::panic; +use std::collections::BTreeMap; + +use fm::FileId; + +use iter_extended::vecmap; +use noirc_errors::Location; + +use crate::hir::def_collector::dc_crate::CompilationError; +use crate::hir::def_collector::errors::{DefCollectorErrorKind, DuplicateType}; +use crate::hir::def_map::ModuleData; +use crate::hir::resolution::errors::ResolverError; +use crate::hir::resolution::import::PathResolutionError; +use crate::hir::type_check::TypeCheckError; +use crate::hir::Context; +use crate::node_interner::{NodeInterner, StmtId}; + +use crate::hir::def_collector::dc_crate::DefCollector; +use crate::hir_def::expr::HirExpression; +use crate::hir_def::stmt::HirStatement; +use crate::monomorphization::monomorphize; +use crate::parser::ParserErrorReason; +use crate::ParsedModule; +use crate::{ + hir::def_map::{CrateDefMap, LocalModuleId}, + parse_program, +}; +use fm::FileManager; +use noirc_arena::Arena; + +pub(crate) fn has_parser_error(errors: &[(CompilationError, FileId)]) -> bool { + errors.iter().any(|(e, _f)| matches!(e, CompilationError::ParseError(_))) +} - pub(crate) fn has_parser_error(errors: &[(CompilationError, FileId)]) -> bool { - errors.iter().any(|(e, _f)| matches!(e, CompilationError::ParseError(_))) - } +pub(crate) fn remove_experimental_warnings(errors: &mut 
Vec<(CompilationError, FileId)>) { + errors.retain(|(error, _)| match error { + CompilationError::ParseError(error) => { + !matches!(error.reason(), Some(ParserErrorReason::ExperimentalFeature(..))) + } + _ => true, + }); +} - pub(crate) fn remove_experimental_warnings(errors: &mut Vec<(CompilationError, FileId)>) { - errors.retain(|(error, _)| match error { - CompilationError::ParseError(error) => { - !matches!(error.reason(), Some(ParserErrorReason::ExperimentalFeature(..))) - } - _ => true, - }); - } - - pub(crate) fn get_program( - src: &str, - ) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { - let root = std::path::Path::new("/"); - let fm = FileManager::new(root); - - let mut context = Context::new(fm, Default::default()); - context.def_interner.populate_dummy_operator_traits(); - let root_file_id = FileId::dummy(); - let root_crate_id = context.crate_graph.add_crate_root(root_file_id); - - let (program, parser_errors) = parse_program(src); - let mut errors = vecmap(parser_errors, |e| (e.into(), root_file_id)); - remove_experimental_warnings(&mut errors); - - if !has_parser_error(&errors) { - // Allocate a default Module for the root, giving it a ModuleId - let mut modules: Arena = Arena::default(); - let location = Location::new(Default::default(), root_file_id); - let root = modules.insert(ModuleData::new(None, location, false)); - - let def_map = CrateDefMap { - root: LocalModuleId(root), - modules, - krate: root_crate_id, - extern_prelude: BTreeMap::new(), - }; +pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { + let root = std::path::Path::new("/"); + let fm = FileManager::new(root); + + let mut context = Context::new(fm, Default::default()); + context.def_interner.populate_dummy_operator_traits(); + let root_file_id = FileId::dummy(); + let root_crate_id = context.crate_graph.add_crate_root(root_file_id); + + let (program, parser_errors) = parse_program(src); + let mut errors = 
vecmap(parser_errors, |e| (e.into(), root_file_id)); + remove_experimental_warnings(&mut errors); + + if !has_parser_error(&errors) { + // Allocate a default Module for the root, giving it a ModuleId + let mut modules: Arena = Arena::default(); + let location = Location::new(Default::default(), root_file_id); + let root = modules.insert(ModuleData::new(None, location, false)); + + let def_map = CrateDefMap { + root: LocalModuleId(root), + modules, + krate: root_crate_id, + extern_prelude: BTreeMap::new(), + }; - // Now we want to populate the CrateDefMap using the DefCollector - errors.extend(DefCollector::collect( - def_map, - &mut context, - program.clone().into_sorted(), - root_file_id, - &[], // No macro processors - )); - } - (program, context, errors) + // Now we want to populate the CrateDefMap using the DefCollector + errors.extend(DefCollector::collect( + def_map, + &mut context, + program.clone().into_sorted(), + root_file_id, + &[], // No macro processors + )); } + (program, context, errors) +} - pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { - get_program(src).2 - } +pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { + get_program(src).2 +} - #[test] - fn check_trait_implemented_for_all_t() { - let src = " - trait Default { - fn default() -> Self; - } - - trait Eq { - fn eq(self, other: Self) -> bool; +#[test] +fn check_trait_implemented_for_all_t() { + let src = " + trait Default { + fn default() -> Self; + } + + trait Eq { + fn eq(self, other: Self) -> bool; + } + + trait IsDefault { + fn is_default(self) -> bool; + } + + impl IsDefault for T where T: Default + Eq { + fn is_default(self) -> bool { + self.eq(T::default()) } - - trait IsDefault { - fn is_default(self) -> bool; + } + + struct Foo { + a: u64, + } + + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } + } + + impl Default for u64 { + fn default() -> Self { + 0 } - - impl IsDefault for T where T: Default 
+ Eq { - fn is_default(self) -> bool { - self.eq(T::default()) - } + } + + impl Default for Foo { + fn default() -> Self { + Foo { a: Default::default() } } - - struct Foo { - a: u64, + } + + fn main(a: Foo) -> pub bool { + a.is_default() + }"; + + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} + +#[test] +fn check_trait_implementation_duplicate_method() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Field; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + // Duplicate trait methods should not compile + fn default(x: Field, y: Field) -> Field { + y + 2 * x } - - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } + // Duplicate trait methods should not compile + fn default(x: Field, y: Field) -> Field { + x + 2 * y } - - impl Default for u64 { - fn default() -> Self { - 0 + } + + fn main() {}"; + + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { + typ, + first_def, + second_def, + }) => { + assert_eq!(typ, &DuplicateType::TraitAssociatedFunction); + assert_eq!(first_def, "default"); + assert_eq!(second_def, "default"); } - } - - impl Default for Foo { - fn default() -> Self { - Foo { a: Default::default() } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); } - } - - fn main(a: Foo) -> pub bool { - a.is_default() - }"; - - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + }; } +} - #[test] - fn check_trait_implementation_duplicate_method() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Field; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_method_return_type() { + let src = " + trait Default { + fn default() -> Self; + } + + struct Foo { + } + + impl Default for Foo { + fn default() -> Field { + 0 } - - impl Default for Foo { - // Duplicate trait methods should not compile - fn default(x: Field, y: Field) -> Field { - y + 2 * x + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, + expr_typ, + expr_span: _, + }) => { + assert_eq!(expected_typ, "Foo"); + assert_eq!(expr_typ, "Field"); } - // Duplicate trait methods should not compile - fn default(x: Field, y: Field) -> Field { - x + 2 * y + _ => { + panic!("No other errors are expected! Found = {:?}", err); } - } - - fn main() {}"; - - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { - typ, - first_def, - second_def, - }) => { - assert_eq!(typ, &DuplicateType::TraitAssociatedFunction); - assert_eq!(first_def, "default"); - assert_eq!(second_def, "default"); - } - _ => { - panic!("No other errors are expected! 
Found = {:?}", err); - } - }; - } + }; } +} - #[test] - fn check_trait_wrong_method_return_type() { - let src = " - trait Default { - fn default() -> Self; - } - - struct Foo { +#[test] +fn check_trait_wrong_method_return_type2() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, _y: Field) -> Field { + x } - - impl Default for Foo { - fn default() -> Field { - 0 + } + + fn main() { + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, + expr_typ, + expr_span: _, + }) => { + assert_eq!(expected_typ, "Foo"); + assert_eq!(expr_typ, "Field"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TypeMismatch { - expected_typ, - expr_typ, - expr_span: _, - }) => { - assert_eq!(expected_typ, "Foo"); - assert_eq!(expr_typ, "Field"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_method_return_type2() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_missing_implementation() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + + fn method2(x: Field) -> Field; + + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field, _y: Field) -> Field { - x + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::TraitMissingMethod { + trait_name, + method_name, + trait_impl_span: _, + }) => { + assert_eq!(trait_name, "Default"); + assert_eq!(method_name, "method2"); } - } - - fn main() { - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TypeMismatch { - expected_typ, - expr_typ, - expr_span: _, - }) => { - assert_eq!(expected_typ, "Foo"); - assert_eq!(expr_typ, "Field"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_missing_implementation() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - - fn method2(x: Field) -> Field; - - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_not_in_scope() { + let src = " + struct Foo { + bar: Field, + array: [Field; 2], + } + + // Default trait does not exist + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::TraitNotFound { + trait_path, + }) => { + assert_eq!(trait_path.as_string(), "Default"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::TraitMissingMethod { - trait_name, - method_name, - trait_impl_span: _, - }) => { - assert_eq!(trait_name, "Default"); - assert_eq!(method_name, "method2"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_not_in_scope() { - let src = " - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_method_name() { + let src = " + trait Default { + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + // wrong trait name method should not compile + impl Default for Foo { + fn does_not_exist(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - // Default trait does not exist - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + }"; + let compilation_errors = get_program_errors(src); + assert!(!has_parser_error(&compilation_errors)); + assert!( + compilation_errors.len() == 1, + "Expected 1 compilation error, got: {:?}", + compilation_errors + ); + + for (err, _file_id) in compilation_errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::MethodNotInTrait { + trait_name, + impl_method, + }) => { + assert_eq!(trait_name, "Default"); + assert_eq!(impl_method, "does_not_exist"); } - } - - fn main() { - } - - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::TraitNotFound { - trait_path, - }) => { - assert_eq!(trait_path.as_string(), "Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_method_name() { - let src = " - trait Default { - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_parameter() { + let src = " + trait Default { + fn default(x: Field) -> Self; + } + + struct Foo { + bar: u32, + } + + impl Default for Foo { + fn default(x: u32) -> Self { + Foo {bar: x} } - - // wrong trait name method should not compile - impl Default for Foo { - fn does_not_exist(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { + method_name, + expected_typ, + actual_typ, + .. + }) => { + assert_eq!(method_name, "default"); + assert_eq!(expected_typ, "Field"); + assert_eq!(actual_typ, "u32"); } - } - - fn main() { - }"; - let compilation_errors = get_program_errors(src); - assert!(!has_parser_error(&compilation_errors)); - assert!( - compilation_errors.len() == 1, - "Expected 1 compilation error, got: {:?}", - compilation_errors - ); - - for (err, _file_id) in compilation_errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::MethodNotInTrait { - trait_name, - impl_method, - }) => { - assert_eq!(trait_name, "Default"); - assert_eq!(impl_method, "does_not_exist"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter() { - let src = " - trait Default { - fn default(x: Field) -> Self; - } - - struct Foo { - bar: u32, +#[test] +fn check_trait_wrong_parameter2() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, y: Foo) -> Self { + Self { bar: x, array: [x, y.bar] } } - - impl Default for Foo { - fn default(x: u32) -> Self { - Foo {bar: x} + } + + fn main() { + }"; + + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { + method_name, + expected_typ, + actual_typ, + .. + }) => { + assert_eq!(method_name, "default"); + assert_eq!(expected_typ, "Field"); + assert_eq!(actual_typ, "Foo"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { - method_name, - expected_typ, - actual_typ, - .. - }) => { - assert_eq!(method_name, "default"); - assert_eq!(expected_typ, "Field"); - assert_eq!(actual_typ, "u32"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter2() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], - } - - impl Default for Foo { - fn default(x: Field, y: Foo) -> Self { - Self { bar: x, array: [x, y.bar] } +#[test] +fn check_trait_wrong_parameter_type() { + let src = " + trait Default { + fn default(x: Field, y: NotAType) -> Field; + } + + fn main(x: Field, y: Field) { + assert(y == x); + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::PathResolutionError( + PathResolutionError::Unresolved(ident), + )) => { + assert_eq!(ident, "NotAType"); } - } - - fn main() { - }"; - - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { - method_name, - expected_typ, - actual_typ, - .. - }) => { - assert_eq!(method_name, "default"); - assert_eq!(expected_typ, "Field"); - assert_eq!(actual_typ, "Foo"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter_type() { - let src = " - trait Default { - fn default(x: Field, y: NotAType) -> Field; - } - - fn main(x: Field, y: Field) { - assert(y == x); - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::PathResolutionError( - PathResolutionError::Unresolved(ident), - )) => { - assert_eq!(ident, "NotAType"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } +#[test] +fn check_trait_wrong_parameters_count() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; } - - #[test] - fn check_trait_wrong_parameters_count() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field) -> Self { + Self { bar: x, array: [x, x] } } - - impl Default for Foo { - fn default(x: Field) -> Self { - Self { bar: x, array: [x, x] } + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::MismatchTraitImplNumParameters { + actual_num_parameters, + expected_num_parameters, + trait_name, + method_name, + .. 
+ }) => { + assert_eq!(actual_num_parameters, &1_usize); + assert_eq!(expected_num_parameters, &2_usize); + assert_eq!(method_name, "default"); + assert_eq!(trait_name, "Default"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::MismatchTraitImplNumParameters { - actual_num_parameters, - expected_num_parameters, - trait_name, - method_name, - .. - }) => { - assert_eq!(actual_num_parameters, &1_usize); - assert_eq!(expected_num_parameters, &2_usize); - assert_eq!(method_name, "default"); - assert_eq!(trait_name, "Default"); - } - _ => { - panic!("No other errors are expected in this test case! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected in this test case! Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_impl_for_non_type() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Field; - } - - impl Default for main { - fn default(x: Field, y: Field) -> Field { - x + y - } - } +#[test] +fn check_trait_impl_for_non_type() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Field; + } - fn main() {} - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::Expected { - expected, got, .. - }) => { - assert_eq!(expected, "type"); - assert_eq!(got, "function"); - } - _ => { - panic!("No other errors are expected! 
Found = {:?}", err); - } - }; + impl Default for main { + fn default(x: Field, y: Field) -> Field { + x + y } } - #[test] - fn check_impl_struct_not_trait() { - let src = " - struct Foo { - bar: Field, - array: [Field; 2], - } - - struct Default { - x: Field, - z: Field, - } - - // Default is struct not a trait - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + fn main() {} + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::Expected { expected, got, .. }) => { + assert_eq!(expected, "type"); + assert_eq!(got, "function"); } - } - - fn main() { - } - - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::NotATrait { - not_a_trait_name, - }) => { - assert_eq!(not_a_trait_name.to_string(), "plain::Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_duplicate_declaration() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_impl_struct_not_trait() { + let src = " + struct Foo { + bar: Field, + array: [Field; 2], + } + + struct Default { + x: Field, + z: Field, + } + + // Default is struct not a trait + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field,y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::NotATrait { + not_a_trait_name, + }) => { + assert_eq!(not_a_trait_name.to_string(), "plain::Default"); } - } - - - trait Default { - fn default(x: Field) -> Self; - } - - fn main() { - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { - typ, - first_def, - second_def, - }) => { - assert_eq!(typ, &DuplicateType::Trait); - assert_eq!(first_def, "Default"); - assert_eq!(second_def, "Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_duplicate_implementation() { - let src = " - trait Default { - } - struct Foo { - bar: Field, - } - - impl Default for Foo { - } - impl Default for Foo { - } - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { - .. - }) => (), - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { - .. - }) => (), - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } +#[test] +fn check_trait_duplicate_declaration() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; } - - #[test] - fn check_trait_duplicate_implementation_with_alias() { - let src = " - trait Default { - } - - struct MyStruct { - } - - type MyType = MyStruct; - - impl Default for MyStruct { - } - - impl Default for MyType { - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { - .. - }) => (), - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { - .. - }) => (), - _ => { - panic!("No other errors are expected! 
Found = {:?}", err); - } - }; + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field,y: Field) -> Self { + Self { bar: x, array: [x,y] } } } + + + trait Default { + fn default(x: Field) -> Self; + } + + fn main() { + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { + typ, + first_def, + second_def, + }) => { + assert_eq!(typ, &DuplicateType::Trait); + assert_eq!(first_def, "Default"); + assert_eq!(second_def, "Default"); + } + _ => { + panic!("No other errors are expected! Found = {:?}", err); + } + }; + } +} - #[test] - fn test_impl_self_within_default_def() { - let src = " - trait Bar { - fn ok(self) -> Self; - - fn ref_ok(self) -> Self { - self.ok() +#[test] +fn check_trait_duplicate_implementation() { + let src = " + trait Default { + } + struct Foo { + bar: Field, + } + + impl Default for Foo { + } + impl Default for Foo { + } + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { + .. + }) => (), + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { + .. + }) => (), + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); } - } + }; + } +} - impl Bar for (T, T) where T: Bar { - fn ok(self) -> Self { - self +#[test] +fn check_trait_duplicate_implementation_with_alias() { + let src = " + trait Default { + } + + struct MyStruct { + } + + type MyType = MyStruct; + + impl Default for MyStruct { + } + + impl Default for MyType { + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { + .. + }) => (), + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { + .. + }) => (), + _ => { + panic!("No other errors are expected! Found = {:?}", err); } - }"; - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + }; } +} - #[test] - fn check_trait_as_type_as_fn_parameter() { - let src = " - trait Eq { - fn eq(self, other: Self) -> bool; - } +#[test] +fn test_impl_self_within_default_def() { + let src = " + trait Bar { + fn ok(self) -> Self; - struct Foo { - a: u64, + fn ref_ok(self) -> Self { + self.ok() } + } - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } + impl Bar for (T, T) where T: Bar { + fn ok(self) -> Self { + self } + }"; + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} - fn test_eq(x: impl Eq) -> bool { - x.eq(x) - } +#[test] +fn check_trait_as_type_as_fn_parameter() { + let src = " + trait Eq { + fn eq(self, other: Self) -> bool; + } - fn main(a: Foo) -> pub bool { - test_eq(a) - }"; + struct Foo { + a: u64, + } - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } } - #[test] - 
fn check_trait_as_type_as_two_fn_parameters() { - let src = " - trait Eq { - fn eq(self, other: Self) -> bool; - } + fn test_eq(x: impl Eq) -> bool { + x.eq(x) + } - trait Test { - fn test(self) -> bool; - } + fn main(a: Foo) -> pub bool { + test_eq(a) + }"; - struct Foo { - a: u64, - } + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } - } +#[test] +fn check_trait_as_type_as_two_fn_parameters() { + let src = " + trait Eq { + fn eq(self, other: Self) -> bool; + } - impl Test for u64 { - fn test(self) -> bool { self == self } - } + trait Test { + fn test(self) -> bool; + } - fn test_eq(x: impl Eq, y: impl Test) -> bool { - x.eq(x) == y.test() - } + struct Foo { + a: u64, + } - fn main(a: Foo, b: u64) -> pub bool { - test_eq(a, b) - }"; - - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); - } - - fn get_program_captures(src: &str) -> Vec> { - let (program, context, _errors) = get_program(src); - let interner = context.def_interner; - let mut all_captures: Vec> = Vec::new(); - for func in program.into_sorted().functions { - let func_id = interner.find_function(func.name()).unwrap(); - let hir_func = interner.function(&func_id); - // Iterate over function statements and apply filtering function - find_lambda_captures( - hir_func.block(&interner).statements(), - &interner, - &mut all_captures, - ); - } - all_captures - } - - fn find_lambda_captures( - stmts: &[StmtId], - interner: &NodeInterner, - result: &mut Vec>, - ) { - for stmt_id in stmts.iter() { - let hir_stmt = interner.statement(stmt_id); - let expr_id = match hir_stmt { - HirStatement::Expression(expr_id) => expr_id, - HirStatement::Let(let_stmt) => let_stmt.expression, - HirStatement::Assign(assign_stmt) => assign_stmt.expression, - HirStatement::Constrain(constr_stmt) => 
constr_stmt.0, - HirStatement::Semi(semi_expr) => semi_expr, - HirStatement::For(for_loop) => for_loop.block, - HirStatement::Error => panic!("Invalid HirStatement!"), - HirStatement::Break => panic!("Unexpected break"), - HirStatement::Continue => panic!("Unexpected continue"), - HirStatement::Comptime(_) => panic!("Unexpected comptime"), - }; - let expr = interner.expression(&expr_id); + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } + } - get_lambda_captures(expr, interner, result); // TODO: dyn filter function as parameter - } + impl Test for u64 { + fn test(self) -> bool { self == self } } - fn get_lambda_captures( - expr: HirExpression, - interner: &NodeInterner, - result: &mut Vec>, - ) { - if let HirExpression::Lambda(lambda_expr) = expr { - let mut cur_capture = Vec::new(); + fn test_eq(x: impl Eq, y: impl Test) -> bool { + x.eq(x) == y.test() + } - for capture in lambda_expr.captures.iter() { - cur_capture.push(interner.definition(capture.ident.id).name.clone()); - } - result.push(cur_capture); + fn main(a: Foo, b: u64) -> pub bool { + test_eq(a, b) + }"; - // Check for other captures recursively within the lambda body - let hir_body_expr = interner.expression(&lambda_expr.body); - if let HirExpression::Block(block_expr) = hir_body_expr { - find_lambda_captures(block_expr.statements(), interner, result); - } - } + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} + +fn get_program_captures(src: &str) -> Vec> { + let (program, context, _errors) = get_program(src); + let interner = context.def_interner; + let mut all_captures: Vec> = Vec::new(); + for func in program.into_sorted().functions { + let func_id = interner.find_function(func.name()).unwrap(); + let hir_func = interner.function(&func_id); + // Iterate over function statements and apply filtering function + find_lambda_captures(hir_func.block(&interner).statements(), &interner, &mut all_captures); 
} + all_captures +} - #[test] - fn resolve_empty_function() { - let src = " - fn main() { +fn find_lambda_captures(stmts: &[StmtId], interner: &NodeInterner, result: &mut Vec>) { + for stmt_id in stmts.iter() { + let hir_stmt = interner.statement(stmt_id); + let expr_id = match hir_stmt { + HirStatement::Expression(expr_id) => expr_id, + HirStatement::Let(let_stmt) => let_stmt.expression, + HirStatement::Assign(assign_stmt) => assign_stmt.expression, + HirStatement::Constrain(constr_stmt) => constr_stmt.0, + HirStatement::Semi(semi_expr) => semi_expr, + HirStatement::For(for_loop) => for_loop.block, + HirStatement::Error => panic!("Invalid HirStatement!"), + HirStatement::Break => panic!("Unexpected break"), + HirStatement::Continue => panic!("Unexpected continue"), + HirStatement::Comptime(_) => panic!("Unexpected comptime"), + }; + let expr = interner.expression(&expr_id); - } - "; - assert!(get_program_errors(src).is_empty()); - } - #[test] - fn resolve_basic_function() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(y == x); - } - "#; - assert!(get_program_errors(src).is_empty()); - } - #[test] - fn resolve_unused_var() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(x == x); - } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - // It should be regarding the unused variable - match &errors[0].0 { - CompilationError::ResolverError(ResolverError::UnusedVariable { ident }) => { - assert_eq!(&ident.0.contents, "y"); - } - _ => unreachable!("we should only have an unused var error"), - } + get_lambda_captures(expr, interner, result); // TODO: dyn filter function as parameter } +} - #[test] - fn resolve_unresolved_var() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(y == z); - } - "#; - let errors = get_program_errors(src); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - // It should be regarding the unresolved var `z` 
(Maybe change to undeclared and special case) - match &errors[0].0 { - CompilationError::ResolverError(ResolverError::VariableNotDeclared { - name, - span: _, - }) => assert_eq!(name, "z"), - _ => unimplemented!("we should only have an unresolved variable"), +fn get_lambda_captures( + expr: HirExpression, + interner: &NodeInterner, + result: &mut Vec>, +) { + if let HirExpression::Lambda(lambda_expr) = expr { + let mut cur_capture = Vec::new(); + + for capture in lambda_expr.captures.iter() { + cur_capture.push(interner.definition(capture.ident.id).name.clone()); + } + result.push(cur_capture); + + // Check for other captures recursively within the lambda body + let hir_body_expr = interner.expression(&lambda_expr.body); + if let HirExpression::Block(block_expr) = hir_body_expr { + find_lambda_captures(block_expr.statements(), interner, result); } } +} + +#[test] +fn resolve_empty_function() { + let src = " + fn main() { - #[test] - fn unresolved_path() { - let src = " - fn main(x : Field) { - let _z = some::path::to::a::func(x); - } - "; - let errors = get_program_errors(src); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (compilation_error, _file_id) in errors { - match compilation_error { - CompilationError::ResolverError(err) => { - match err { - ResolverError::PathResolutionError(PathResolutionError::Unresolved( - name, - )) => { - assert_eq!(name.to_string(), "some"); - } - _ => unimplemented!("we should only have an unresolved function"), - }; - } - _ => unimplemented!(), - } } + "; + assert!(get_program_errors(src).is_empty()); +} +#[test] +fn resolve_basic_function() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(y == x); + } + "#; + assert!(get_program_errors(src).is_empty()); +} +#[test] +fn resolve_unused_var() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(x == x); + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); 
+ // It should be regarding the unused variable + match &errors[0].0 { + CompilationError::ResolverError(ResolverError::UnusedVariable { ident }) => { + assert_eq!(&ident.0.contents, "y"); + } + _ => unreachable!("we should only have an unused var error"), } +} - #[test] - fn resolve_literal_expr() { - let src = r#" - fn main(x : Field) { - let y = 5; - assert(y == x); - } - "#; - assert!(get_program_errors(src).is_empty()); +#[test] +fn resolve_unresolved_var() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(y == z); + } + "#; + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + // It should be regarding the unresolved var `z` (Maybe change to undeclared and special case) + match &errors[0].0 { + CompilationError::ResolverError(ResolverError::VariableNotDeclared { name, span: _ }) => { + assert_eq!(name, "z"); + } + _ => unimplemented!("we should only have an unresolved variable"), } +} - #[test] - fn multiple_resolution_errors() { - let src = r#" - fn main(x : Field) { - let y = foo::bar(x); - let z = y + a; - } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 3, "Expected 3 errors, got: {:?}", errors); - - // Errors are: - // `a` is undeclared - // `z` is unused - // `foo::bar` does not exist - for (compilation_error, _file_id) in errors { - match compilation_error { - CompilationError::ResolverError(err) => { - match err { - ResolverError::UnusedVariable { ident } => { - assert_eq!(&ident.0.contents, "z"); - } - ResolverError::VariableNotDeclared { name, .. 
} => { - assert_eq!(name, "a"); - } - ResolverError::PathResolutionError(PathResolutionError::Unresolved( - name, - )) => { - assert_eq!(name.to_string(), "foo"); - } - _ => unimplemented!(), - }; - } - _ => unimplemented!(), +#[test] +fn unresolved_path() { + let src = " + fn main(x : Field) { + let _z = some::path::to::a::func(x); + } + "; + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (compilation_error, _file_id) in errors { + match compilation_error { + CompilationError::ResolverError(err) => { + match err { + ResolverError::PathResolutionError(PathResolutionError::Unresolved(name)) => { + assert_eq!(name.to_string(), "some"); + } + _ => unimplemented!("we should only have an unresolved function"), + }; } + _ => unimplemented!(), } } +} - #[test] - fn resolve_prefix_expr() { - let src = r#" - fn main(x : Field) { - let _y = -x; - } - "#; - assert!(get_program_errors(src).is_empty()); - } +#[test] +fn resolve_literal_expr() { + let src = r#" + fn main(x : Field) { + let y = 5; + assert(y == x); + } + "#; + assert!(get_program_errors(src).is_empty()); +} - #[test] - fn resolve_for_expr() { - let src = r#" - fn main(x : u64) { - for i in 1..20 { - let _z = x + i; +#[test] +fn multiple_resolution_errors() { + let src = r#" + fn main(x : Field) { + let y = foo::bar(x); + let z = y + a; + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 3, "Expected 3 errors, got: {:?}", errors); + + // Errors are: + // `a` is undeclared + // `z` is unused + // `foo::bar` does not exist + for (compilation_error, _file_id) in errors { + match compilation_error { + CompilationError::ResolverError(err) => { + match err { + ResolverError::UnusedVariable { ident } => { + assert_eq!(&ident.0.contents, "z"); + } + ResolverError::VariableNotDeclared { name, .. 
} => { + assert_eq!(name, "a"); + } + ResolverError::PathResolutionError(PathResolutionError::Unresolved(name)) => { + assert_eq!(name.to_string(), "foo"); + } + _ => unimplemented!(), }; } - "#; - assert!(get_program_errors(src).is_empty()); + _ => unimplemented!(), + } } +} - #[test] - fn resolve_call_expr() { - let src = r#" - fn main(x : Field) { - let _z = foo(x); - } +#[test] +fn resolve_prefix_expr() { + let src = r#" + fn main(x : Field) { + let _y = -x; + } + "#; + assert!(get_program_errors(src).is_empty()); +} - fn foo(x : Field) -> Field { - x - } - "#; - assert!(get_program_errors(src).is_empty()); - } - - #[test] - fn resolve_shadowing() { - let src = r#" - fn main(x : Field) { - let x = foo(x); - let x = x; - let (x, x) = (x, x); - let _ = x; - } +#[test] +fn resolve_for_expr() { + let src = r#" + fn main(x : u64) { + for i in 1..20 { + let _z = x + i; + }; + } + "#; + assert!(get_program_errors(src).is_empty()); +} - fn foo(x : Field) -> Field { - x - } - "#; - assert!(get_program_errors(src).is_empty()); - } +#[test] +fn resolve_call_expr() { + let src = r#" + fn main(x : Field) { + let _z = foo(x); + } - #[test] - fn resolve_basic_closure() { - let src = r#" - fn main(x : Field) -> pub Field { - let closure = |y| y + x; - closure(x) - } - "#; - assert!(get_program_errors(src).is_empty()); - } + fn foo(x : Field) -> Field { + x + } + "#; + assert!(get_program_errors(src).is_empty()); +} - #[test] - fn resolve_simplified_closure() { - // based on bug https://github.com/noir-lang/noir/issues/1088 +#[test] +fn resolve_shadowing() { + let src = r#" + fn main(x : Field) { + let x = foo(x); + let x = x; + let (x, x) = (x, x); + let _ = x; + } - let src = r#"fn do_closure(x: Field) -> Field { - let y = x; - let ret_capture = || { - y - }; - ret_capture() - } - - fn main(x: Field) { - assert(do_closure(x) == 100); - } - - "#; - let parsed_captures = get_program_captures(src); - let expected_captures = vec![vec!["y".to_string()]]; - 
assert_eq!(expected_captures, parsed_captures); - } - - #[test] - fn resolve_complex_closures() { - let src = r#" - fn main(x: Field) -> pub Field { - let closure_without_captures = |x: Field| -> Field { x + x }; - let a = closure_without_captures(1); - - let closure_capturing_a_param = |y: Field| -> Field { y + x }; - let b = closure_capturing_a_param(2); - - let closure_capturing_a_local_var = |y: Field| -> Field { y + b }; - let c = closure_capturing_a_local_var(3); - - let closure_with_transitive_captures = |y: Field| -> Field { - let d = 5; - let nested_closure = |z: Field| -> Field { - let doubly_nested_closure = |w: Field| -> Field { w + x + b }; - a + z + y + d + x + doubly_nested_closure(4) + x + y - }; - let res = nested_closure(5); - res + fn foo(x : Field) -> Field { + x + } + "#; + assert!(get_program_errors(src).is_empty()); +} + +#[test] +fn resolve_basic_closure() { + let src = r#" + fn main(x : Field) -> pub Field { + let closure = |y| y + x; + closure(x) + } + "#; + assert!(get_program_errors(src).is_empty()); +} + +#[test] +fn resolve_simplified_closure() { + // based on bug https://github.com/noir-lang/noir/issues/1088 + + let src = r#"fn do_closure(x: Field) -> Field { + let y = x; + let ret_capture = || { + y + }; + ret_capture() + } + + fn main(x: Field) { + assert(do_closure(x) == 100); + } + + "#; + let parsed_captures = get_program_captures(src); + let expected_captures = vec![vec!["y".to_string()]]; + assert_eq!(expected_captures, parsed_captures); +} + +#[test] +fn resolve_complex_closures() { + let src = r#" + fn main(x: Field) -> pub Field { + let closure_without_captures = |x: Field| -> Field { x + x }; + let a = closure_without_captures(1); + + let closure_capturing_a_param = |y: Field| -> Field { y + x }; + let b = closure_capturing_a_param(2); + + let closure_capturing_a_local_var = |y: Field| -> Field { y + b }; + let c = closure_capturing_a_local_var(3); + + let closure_with_transitive_captures = |y: Field| -> Field { + let d = 
5; + let nested_closure = |z: Field| -> Field { + let doubly_nested_closure = |w: Field| -> Field { w + x + b }; + a + z + y + d + x + doubly_nested_closure(4) + x + y }; + let res = nested_closure(5); + res + }; + + a + b + c + closure_with_transitive_captures(6) + } + "#; + assert!(get_program_errors(src).is_empty(), "there should be no errors"); + + let expected_captures = vec![ + vec![], + vec!["x".to_string()], + vec!["b".to_string()], + vec!["x".to_string(), "b".to_string(), "a".to_string()], + vec!["x".to_string(), "b".to_string(), "a".to_string(), "y".to_string(), "d".to_string()], + vec!["x".to_string(), "b".to_string()], + ]; + + let parsed_captures = get_program_captures(src); + + assert_eq!(expected_captures, parsed_captures); +} + +#[test] +fn resolve_fmt_strings() { + let src = r#" + fn main() { + let string = f"this is i: {i}"; + println(string); + + println(f"I want to print {0}"); + + let new_val = 10; + println(f"random_string{new_val}{new_val}"); + } + fn println(x : T) -> T { + x + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 5, "Expected 5 errors, got: {:?}", errors); - a + b + c + closure_with_transitive_captures(6) + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::VariableNotDeclared { + name, .. 
+ }) => { + assert_eq!(name, "i"); } - "#; - assert!(get_program_errors(src).is_empty(), "there should be no errors"); - - let expected_captures = vec![ - vec![], - vec!["x".to_string()], - vec!["b".to_string()], - vec!["x".to_string(), "b".to_string(), "a".to_string()], - vec![ - "x".to_string(), - "b".to_string(), - "a".to_string(), - "y".to_string(), - "d".to_string(), - ], - vec!["x".to_string(), "b".to_string()], - ]; - - let parsed_captures = get_program_captures(src); - - assert_eq!(expected_captures, parsed_captures); - } - - #[test] - fn resolve_fmt_strings() { - let src = r#" - fn main() { - let string = f"this is i: {i}"; - println(string); - - println(f"I want to print {0}"); - - let new_val = 10; - println(f"random_string{new_val}{new_val}"); + CompilationError::ResolverError(ResolverError::NumericConstantInFormatString { + name, + .. + }) => { + assert_eq!(name, "0"); } - fn println(x : T) -> T { - x + CompilationError::TypeError(TypeCheckError::UnusedResultError { + expr_type: _, + expr_span, + }) => { + let a = src.get(expr_span.start() as usize..expr_span.end() as usize).unwrap(); + assert!( + a == "println(string)" + || a == "println(f\"I want to print {0}\")" + || a == "println(f\"random_string{new_val}{new_val}\")" + ); } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 5, "Expected 5 errors, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::VariableNotDeclared { - name, - .. - }) => { - assert_eq!(name, "i"); - } - CompilationError::ResolverError(ResolverError::NumericConstantInFormatString { - name, - .. 
- }) => { - assert_eq!(name, "0"); - } - CompilationError::TypeError(TypeCheckError::UnusedResultError { - expr_type: _, - expr_span, - }) => { - let a = src.get(expr_span.start() as usize..expr_span.end() as usize).unwrap(); - assert!( - a == "println(string)" - || a == "println(f\"I want to print {0}\")" - || a == "println(f\"random_string{new_val}{new_val}\")" - ); - } - _ => unimplemented!(), - }; - } + _ => unimplemented!(), + }; } +} - fn check_rewrite(src: &str, expected: &str) { - let (_program, mut context, _errors) = get_program(src); - let main_func_id = context.def_interner.find_function("main").unwrap(); - let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); - assert!(format!("{}", program) == expected); - } +fn check_rewrite(src: &str, expected: &str) { + let (_program, mut context, _errors) = get_program(src); + let main_func_id = context.def_interner.find_function("main").unwrap(); + let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); + assert!(format!("{}", program) == expected); +} - #[test] - fn simple_closure_with_no_captured_variables() { - let src = r#" - fn main() -> pub Field { - let x = 1; - let closure = || x; - closure() - } - "#; +#[test] +fn simple_closure_with_no_captured_variables() { + let src = r#" + fn main() -> pub Field { + let x = 1; + let closure = || x; + closure() + } + "#; - let expected_rewrite = r#"fn main$f0() -> Field { + let expected_rewrite = r#"fn main$f0() -> Field { let x$0 = 1; let closure$3 = { let closure_variable$2 = { @@ -1248,167 +1226,154 @@ fn lambda$f1(mut env$l1: (Field)) -> Field { env$l1.0 } "#; - check_rewrite(src, expected_rewrite); - } - - #[test] - fn deny_mutually_recursive_structs() { - let src = r#" - struct Foo { bar: Bar } - struct Bar { foo: Foo } - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_cyclic_globals() { - let src = r#" - global A = B; - global B = A; - fn main() {} - "#; - 
assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_cyclic_type_aliases() { - let src = r#" - type A = B; - type B = A; - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn ensure_nested_type_aliases_type_check() { - let src = r#" - type A = B; - type B = u8; - fn main() { - let _a: A = 0 as u16; - } - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn type_aliases_in_entry_point() { - let src = r#" - type Foo = u8; - fn main(_x: Foo) {} - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - #[test] - fn operators_in_global_used_in_type() { - let src = r#" - global ONE = 1; - global COUNT = ONE + 2; - fn main() { - let _array: [Field; COUNT] = [1, 2, 3]; - } - "#; - assert_eq!(get_program_errors(src).len(), 0); - } + check_rewrite(src, expected_rewrite); +} - #[test] - fn break_and_continue_in_constrained_fn() { - let src = r#" - fn main() { - for i in 0 .. 10 { - if i == 2 { - continue; - } - if i == 5 { - break; - } +#[test] +fn deny_cyclic_globals() { + let src = r#" + global A = B; + global B = A; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn deny_cyclic_type_aliases() { + let src = r#" + type A = B; + type B = A; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn ensure_nested_type_aliases_type_check() { + let src = r#" + type A = B; + type B = u8; + fn main() { + let _a: A = 0 as u16; + } + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn type_aliases_in_entry_point() { + let src = r#" + type Foo = u8; + fn main(_x: Foo) {} + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn operators_in_global_used_in_type() { + let src = r#" + global ONE = 1; + global COUNT = ONE + 2; + fn main() { + let _array: [Field; COUNT] = [1, 2, 3]; + } + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn break_and_continue_in_constrained_fn() { + let src = r#" + 
fn main() { + for i in 0 .. 10 { + if i == 2 { + continue; + } + if i == 5 { + break; } } - "#; - assert_eq!(get_program_errors(src).len(), 2); - } + } + "#; + assert_eq!(get_program_errors(src).len(), 2); +} - #[test] - fn break_and_continue_outside_loop() { - let src = r#" - unconstrained fn main() { - continue; - break; - } - "#; - assert_eq!(get_program_errors(src).len(), 2); - } +#[test] +fn break_and_continue_outside_loop() { + let src = r#" + unconstrained fn main() { + continue; + break; + } + "#; + assert_eq!(get_program_errors(src).len(), 2); +} - // Regression for #2540 - #[test] - fn for_loop_over_array() { - let src = r#" - fn hello(_array: [u1; N]) { - for _ in 0..N {} - } +// Regression for #2540 +#[test] +fn for_loop_over_array() { + let src = r#" + fn hello(_array: [u1; N]) { + for _ in 0..N {} + } - fn main() { - let array: [u1; 2] = [0, 1]; - hello(array); - } - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - // Regression for #4545 - #[test] - fn type_aliases_in_main() { - let src = r#" - type Outer = [u8; N]; - fn main(_arg: Outer<1>) {} - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - #[test] - fn ban_mutable_globals() { - // Mutable globals are only allowed in a comptime context - let src = r#" - mut global FOO: Field = 0; - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_inline_attribute_on_unconstrained() { - let src = r#" - #[no_predicates] - unconstrained fn foo(x: Field, y: Field) { - assert(x != y); - } - "#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - assert!(matches!( - errors[0].0, - CompilationError::ResolverError( - ResolverError::NoPredicatesAttributeOnUnconstrained { .. 
} - ) - )); - } + fn main() { + let array: [u1; 2] = [0, 1]; + hello(array); + } + "#; + assert_eq!(get_program_errors(src).len(), 0); +} - #[test] - fn deny_fold_attribute_on_unconstrained() { - let src = r#" - #[fold] - unconstrained fn foo(x: Field, y: Field) { - assert(x != y); - } - "#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - assert!(matches!( - errors[0].0, - CompilationError::ResolverError(ResolverError::FoldAttributeOnUnconstrained { .. }) - )); - } +// Regression for #4545 +#[test] +fn type_aliases_in_main() { + let src = r#" + type Outer = [u8; N]; + fn main(_arg: Outer<1>) {} + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn ban_mutable_globals() { + // Mutable globals are only allowed in a comptime context + let src = r#" + mut global FOO: Field = 0; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn deny_inline_attribute_on_unconstrained() { + let src = r#" + #[no_predicates] + unconstrained fn foo(x: Field, y: Field) { + assert(x != y); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::NoPredicatesAttributeOnUnconstrained { .. }) + )); +} + +#[test] +fn deny_fold_attribute_on_unconstrained() { + let src = r#" + #[fold] + unconstrained fn foo(x: Field, y: Field) { + assert(x != y); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::FoldAttributeOnUnconstrained { .. 
}) + )); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/name_shadowing.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/name_shadowing.rs new file mode 100644 index 00000000000..b0d83510039 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/name_shadowing.rs @@ -0,0 +1,419 @@ +#![cfg(test)] +use super::get_program_errors; +use std::collections::HashSet; + +#[test] +fn test_name_shadowing() { + let src = " + trait Default { + fn default() -> Self; + } + + impl Default for bool { + fn default() -> bool { + false + } + } + + impl Default for Field { + fn default() -> Field { + 0 + } + } + + impl Default for [T; N] where T: Default { + fn default() -> [T; N] { + [Default::default(); N] + } + } + + impl Default for (T, U) where T: Default, U: Default { + fn default() -> (T, U) { + (Default::default(), Default::default()) + } + } + + fn drop_var(_x: T, y: U) -> U { y } + + mod local_module { + use crate::{Default, drop_var}; + + global LOCAL_GLOBAL_N: Field = 0; + + global LOCAL_GLOBAL_M: Field = 1; + + struct LocalStruct { + field1: A, + field2: B, + field3: [A; N], + field4: ([A; N], [B; M]), + field5: &mut A, + } + + impl Default for LocalStruct where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + field1: Default::default(), + field2: Default::default(), + field3: Default::default(), + field4: Default::default(), + field5: mut_field, + } + } + } + + trait DefinedInLocalModule1 { + fn trait_fn1(self, x: A); + fn trait_fn2(self, y: B); + fn trait_fn3(&mut self, x: A, y: B); + fn trait_fn4(self, x: [A; 0], y: [B]); + fn trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl DefinedInLocalModule1 for LocalStruct { + fn trait_fn1(self, _x: A) { drop_var(self, ()) } + fn trait_fn2(self, _y: B) { drop_var(self, ()) } + fn trait_fn3(&mut 
self, _x: A, _y: B) { drop_var(self, ()) } + fn trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + fn trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn local_fn4(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(LOCAL_GLOBAL_N != LOCAL_GLOBAL_M); + let x: Field = 0; + assert(x == 0); + let x: Field = 1; + assert(x == 1); + [] + } + } + + mod library { + use crate::{Default, drop_var}; + + mod library2 { + use crate::{Default, drop_var}; + + global IMPORT_GLOBAL_N_2: Field = 4; + + global IMPORT_GLOBAL_M_2: Field = 5; + + // When we re-export this type from another library and then use it in + // main, we get a panic + struct ReExportMeFromAnotherLib1 { + x : Field, + } + + struct PubLibLocalStruct3 { + pub_field1: A, + pub_field2: B, + pub_field3: [A; N], + pub_field4: ([A; N], [B; M]), + pub_field5: &mut A, + } + + impl Default for PubLibLocalStruct3 where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + pub_field1: Default::default(), + pub_field2: Default::default(), + pub_field3: Default::default(), + pub_field4: Default::default(), + pub_field5: mut_field, + } + } + } + + trait PubLibDefinedInLocalModule3 { + fn pub_trait_fn1(self, x: A); + fn pub_trait_fn2(self, y: B); + fn pub_trait_fn3(&mut self, x: A, y: B); + fn pub_trait_fn4(self, x: [A; 0], y: [B]); + fn pub_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn pub_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn pub_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl PubLibDefinedInLocalModule3 for PubLibLocalStruct3 { + fn pub_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn pub_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn pub_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn pub_trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + 
fn pub_trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + fn pub_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn PubLiblocal_fn3(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_2 != IMPORT_GLOBAL_M_2); + [] + } + } + + // Re-export + use library2::ReExportMeFromAnotherLib1; + + global IMPORT_GLOBAL_N_1: Field = 2; + + global IMPORT_GLOBAL_M_1: Field = 3; + + struct LibLocalStruct1 { + lib_field1: A, + lib_field2: B, + lib_field3: [A; N], + lib_field4: ([A; N], [B; M]), + lib_field5: &mut A, + } + + impl Default for LibLocalStruct1 where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + lib_field1: Default::default(), + lib_field2: Default::default(), + lib_field3: Default::default(), + lib_field4: Default::default(), + lib_field5: mut_field, + } + } + } + + trait LibDefinedInLocalModule1 { + fn lib_trait_fn1(self, x: A); + fn lib_trait_fn2(self, y: B); + fn lib_trait_fn3(&mut self, x: A, y: B); + fn lib_trait_fn4(self, x: [A; 0], y: [B]); + fn lib_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn lib_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn lib_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl LibDefinedInLocalModule1 for LibLocalStruct1 { + fn lib_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn lib_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn lib_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn lib_trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn lib_trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + fn lib_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn Liblocal_fn1(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_1 != IMPORT_GLOBAL_M_1); + [] + } + } + + mod library3 { + use crate::{Default, drop_var}; + + global 
IMPORT_GLOBAL_N_3: Field = 6; + + global IMPORT_GLOBAL_M_3: Field = 7; + + struct ReExportMeFromAnotherLib2 { + x : Field, + } + + struct PubCrateLibLocalStruct2 { + crate_field1: A, + crate_field2: B, + crate_field3: [A; N], + crate_field4: ([A; N], [B; M]), + crate_field5: &mut A, + } + + impl Default for PubCrateLibLocalStruct2 where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + crate_field1: Default::default(), + crate_field2: Default::default(), + crate_field3: Default::default(), + crate_field4: Default::default(), + crate_field5: mut_field, + } + } + } + + trait PubCrateLibDefinedInLocalModule2 { + fn crate_trait_fn1(self, x: A); + fn crate_trait_fn2(self, y: B); + fn crate_trait_fn3(&mut self, x: A, y: B); + fn crate_trait_fn4(self, x: [A; 0], y: [B]); + fn crate_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn crate_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn crate_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl PubCrateLibDefinedInLocalModule2 for PubCrateLibLocalStruct2 { + fn crate_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn crate_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn crate_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn crate_trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn crate_trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, ()); [] } + fn crate_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, ()); [] } + } + + pub(crate) fn PubCrateLiblocal_fn2(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_3 != IMPORT_GLOBAL_M_3); + [] + } + } + + + use crate::local_module::{local_fn4, LocalStruct, DefinedInLocalModule1, LOCAL_GLOBAL_N, LOCAL_GLOBAL_M}; + + use library::{ReExportMeFromAnotherLib1, LibLocalStruct1, LibDefinedInLocalModule1, Liblocal_fn1, IMPORT_GLOBAL_N_1, IMPORT_GLOBAL_M_1}; + + // overlapping + // use 
library::library2::ReExportMeFromAnotherLib1; + use crate::library::library2::{PubLibLocalStruct3, PubLibDefinedInLocalModule3, PubLiblocal_fn3, IMPORT_GLOBAL_N_2, IMPORT_GLOBAL_M_2}; + + use library3::{ReExportMeFromAnotherLib2, PubCrateLibLocalStruct2, PubCrateLibDefinedInLocalModule2, PubCrateLiblocal_fn2, IMPORT_GLOBAL_N_3, IMPORT_GLOBAL_M_3}; + + + fn main(_x: ReExportMeFromAnotherLib1, _y: ReExportMeFromAnotherLib2) { + assert(LOCAL_GLOBAL_N != LOCAL_GLOBAL_M); + assert(IMPORT_GLOBAL_N_1 != IMPORT_GLOBAL_M_1); + assert(IMPORT_GLOBAL_N_2 != IMPORT_GLOBAL_M_2); + assert(IMPORT_GLOBAL_N_3 != IMPORT_GLOBAL_M_3); + + let x: LocalStruct = Default::default(); + assert(drop_var(x.trait_fn5([0; LOCAL_GLOBAL_N], [false; LOCAL_GLOBAL_M]), true)); + assert(drop_var(x.trait_fn6([0; LOCAL_GLOBAL_N], [false; LOCAL_GLOBAL_M]), true)); + + let x: LibLocalStruct1 = Default::default(); + assert(drop_var(x.lib_trait_fn5([0; IMPORT_GLOBAL_N_1], [false; IMPORT_GLOBAL_M_1]), true)); + assert(drop_var(x.lib_trait_fn6([0; IMPORT_GLOBAL_N_1], [false; IMPORT_GLOBAL_M_1]), true)); + + let x: PubLibLocalStruct3 = Default::default(); + assert(drop_var(x.pub_trait_fn5([0; IMPORT_GLOBAL_N_2], [false; IMPORT_GLOBAL_M_2]), true)); + assert(drop_var(x.pub_trait_fn6([0; IMPORT_GLOBAL_N_2], [false; IMPORT_GLOBAL_M_2]), true)); + + let x: PubCrateLibLocalStruct2 = Default::default(); + assert(drop_var(x.crate_trait_fn5([0; IMPORT_GLOBAL_N_3], [false; IMPORT_GLOBAL_M_3]), true)); + assert(drop_var(x.crate_trait_fn6([0; IMPORT_GLOBAL_N_3], [false; IMPORT_GLOBAL_M_3]), true)); + + assert(drop_var(local_fn2((0, 1), [], []), true)); + assert(drop_var(Liblocal_fn1((0, 1), [], []), true)); + assert(drop_var(PubLiblocal_fn4((0, 1), [], []), true)); + assert(drop_var(PubCrateLiblocal_fn3((0, 1), [], []), true)); + }"; + + // NOTE: these names must be "replacement-unique", i.e. 
+ // replacing one in a distinct name should do nothing
+ let names_to_collapse = [
+ "DefinedInLocalModule1",
+ "IMPORT_GLOBAL_M_1",
+ "IMPORT_GLOBAL_M_2",
+ "IMPORT_GLOBAL_M_3",
+ "IMPORT_GLOBAL_N_1",
+ "IMPORT_GLOBAL_N_2",
+ "IMPORT_GLOBAL_N_3",
+ "LOCAL_GLOBAL_M",
+ "LOCAL_GLOBAL_N",
+ "LibDefinedInLocalModule1",
+ "LibLocalStruct1",
+ "Liblocal_fn1",
+ "LocalStruct",
+ "PubCrateLibDefinedInLocalModule2",
+ "PubCrateLibLocalStruct2",
+ "PubCrateLiblocal_fn2",
+ "PubLibDefinedInLocalModule3",
+ "PubLibLocalStruct3",
+ "PubLiblocal_fn3",
+ "ReExportMeFromAnotherLib1",
+ "ReExportMeFromAnotherLib2",
+ "local_fn4",
+ "crate_field1",
+ "crate_field2",
+ "crate_field3",
+ "crate_field4",
+ "crate_field5",
+ "crate_trait_fn1",
+ "crate_trait_fn2",
+ "crate_trait_fn3",
+ "crate_trait_fn4",
+ "crate_trait_fn5",
+ "crate_trait_fn6",
+ "crate_trait_fn7",
+ "field1",
+ "field2",
+ "field3",
+ "field4",
+ "field5",
+ "lib_field1",
+ "lib_field2",
+ "lib_field3",
+ "lib_field4",
+ "lib_field5",
+ "lib_trait_fn1",
+ "lib_trait_fn2",
+ "lib_trait_fn3",
+ "lib_trait_fn4",
+ "lib_trait_fn5",
+ "lib_trait_fn6",
+ "lib_trait_fn7",
+ "pub_field1",
+ "pub_field2",
+ "pub_field3",
+ "pub_field4",
+ "pub_field5",
+ "pub_trait_fn1",
+ "pub_trait_fn2",
+ "pub_trait_fn3",
+ "pub_trait_fn4",
+ "pub_trait_fn5",
+ "pub_trait_fn6",
+ "pub_trait_fn7",
+ ];
+
+ // TODO(https://github.com/noir-lang/noir/issues/4973):
+ // Name resolution panic from name shadowing test
+ let cases_to_skip = [
+ (1, 21),
+ (2, 11),
+ (2, 21),
+ (3, 11),
+ (3, 18),
+ (3, 21),
+ (4, 21),
+ (5, 11),
+ (5, 21),
+ (6, 11),
+ (6, 18),
+ (6, 21),
+ ];
+ let cases_to_skip: HashSet<(usize, usize)> = cases_to_skip.into_iter().collect();
+
+ for (i, x) in names_to_collapse.iter().enumerate() {
+ for (j, y) in names_to_collapse.iter().enumerate().filter(|(j, _)| i < *j) {
+ if !cases_to_skip.contains(&(i, j)) {
+ dbg!((i, 
j)); + + let modified_src = src.replace(x, y); + let errors = get_program_errors(&modified_src); + assert!(!errors.is_empty(), "Expected errors, got: {:?}", errors); + } + } + } +} diff --git a/noir/noir-repo/compiler/wasm/src/compile.rs b/noir/noir-repo/compiler/wasm/src/compile.rs index de157a1fe20..57b17a6f79e 100644 --- a/noir/noir-repo/compiler/wasm/src/compile.rs +++ b/noir/noir-repo/compiler/wasm/src/compile.rs @@ -1,3 +1,4 @@ +use acvm::acir::circuit::ExpressionWidth; use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; use js_sys::{JsString, Object}; @@ -169,9 +170,10 @@ pub fn compile_program( console_error_panic_hook::set_once(); let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; - let compile_options = CompileOptions::default(); - // For now we default to a bounded width of 3, though we can add it as a parameter - let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; + let compile_options = CompileOptions { + expression_width: ExpressionWidth::Bounded { width: 4 }, + ..CompileOptions::default() + }; let compiled_program = noirc_driver::compile_main(&mut context, crate_id, &compile_options, None) @@ -184,7 +186,8 @@ pub fn compile_program( })? 
.0; - let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); + let optimized_program = + nargo::ops::transform_program(compiled_program, compile_options.expression_width); let warnings = optimized_program.warnings.clone(); Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) @@ -199,9 +202,10 @@ pub fn compile_contract( console_error_panic_hook::set_once(); let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; - let compile_options = CompileOptions::default(); - // For now we default to a bounded width of 3, though we can add it as a parameter - let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; + let compile_options = CompileOptions { + expression_width: ExpressionWidth::Bounded { width: 4 }, + ..CompileOptions::default() + }; let compiled_contract = noirc_driver::compile_contract(&mut context, crate_id, &compile_options) @@ -214,7 +218,8 @@ pub fn compile_contract( })? 
.0; - let optimized_contract = nargo::ops::transform_contract(compiled_contract, expression_width); + let optimized_contract = + nargo::ops::transform_contract(compiled_contract, compile_options.expression_width); let functions = optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); diff --git a/noir/noir-repo/compiler/wasm/src/compile_new.rs b/noir/noir-repo/compiler/wasm/src/compile_new.rs index c187fe7f3de..4f11cafb975 100644 --- a/noir/noir-repo/compiler/wasm/src/compile_new.rs +++ b/noir/noir-repo/compiler/wasm/src/compile_new.rs @@ -3,6 +3,7 @@ use crate::compile::{ PathToFileSourceMap, }; use crate::errors::{CompileError, JsCompileError}; +use acvm::acir::circuit::ExpressionWidth; use nargo::artifacts::contract::{ContractArtifact, ContractFunctionArtifact}; use nargo::parse_all; use noirc_driver::{ @@ -96,11 +97,14 @@ impl CompilerContext { mut self, program_width: usize, ) -> Result { - let compile_options = CompileOptions::default(); - let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; + let expression_width = if program_width == 0 { + ExpressionWidth::Unbounded + } else { + ExpressionWidth::Bounded { width: 4 } + }; + let compile_options = CompileOptions { expression_width, ..CompileOptions::default() }; let root_crate_id = *self.context.root_crate_id(); - let compiled_program = compile_main(&mut self.context, root_crate_id, &compile_options, None) .map_err(|errs| { @@ -112,7 +116,8 @@ impl CompilerContext { })? 
.0; - let optimized_program = nargo::ops::transform_program(compiled_program, np_language); + let optimized_program = + nargo::ops::transform_program(compiled_program, compile_options.expression_width); let warnings = optimized_program.warnings.clone(); Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) @@ -122,10 +127,14 @@ impl CompilerContext { mut self, program_width: usize, ) -> Result { - let compile_options = CompileOptions::default(); - let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; - let root_crate_id = *self.context.root_crate_id(); + let expression_width = if program_width == 0 { + ExpressionWidth::Unbounded + } else { + ExpressionWidth::Bounded { width: 4 } + }; + let compile_options = CompileOptions { expression_width, ..CompileOptions::default() }; + let root_crate_id = *self.context.root_crate_id(); let compiled_contract = compile_contract(&mut self.context, root_crate_id, &compile_options) .map_err(|errs| { @@ -137,7 +146,8 @@ impl CompilerContext { })? 
.0; - let optimized_contract = nargo::ops::transform_contract(compiled_contract, np_language); + let optimized_contract = + nargo::ops::transform_contract(compiled_contract, compile_options.expression_width); let functions = optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); @@ -166,7 +176,7 @@ pub fn compile_program_( let compiler_context = prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; - let program_width = 3; + let program_width = 4; compiler_context.compile_program(program_width) } @@ -183,7 +193,7 @@ pub fn compile_contract_( let compiler_context = prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; - let program_width = 3; + let program_width = 4; compiler_context.compile_contract(program_width) } diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md index 1c6b375db49..6b2d3773912 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md @@ -5,7 +5,9 @@ keywords: [noir, integer types, methods, examples, arithmetic] sidebar_position: 1 --- -An integer type is a range constrained field type. The Noir frontend supports both unsigned and signed integer types. The allowed sizes are 1, 8, 32 and 64 bits. +An integer type is a range constrained field type. +The Noir frontend supports both unsigned and signed integer types. +The allowed sizes are 1, 8, 16, 32 and 64 bits. 
:::info diff --git a/noir/noir-repo/docs/docs/noir/standard_library/traits.md b/noir/noir-repo/docs/docs/noir/standard_library/traits.md index b32a2969563..96a7b8e2f22 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/traits.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/traits.md @@ -186,10 +186,10 @@ These traits abstract over addition, subtraction, multiplication, and division r Implementing these traits for a given type will also allow that type to be used with the corresponding operator for that trait (`+` for Add, etc) in addition to the normal method names. -#include_code add-trait noir_stdlib/src/ops.nr rust -#include_code sub-trait noir_stdlib/src/ops.nr rust -#include_code mul-trait noir_stdlib/src/ops.nr rust -#include_code div-trait noir_stdlib/src/ops.nr rust +#include_code add-trait noir_stdlib/src/ops/arith.nr rust +#include_code sub-trait noir_stdlib/src/ops/arith.nr rust +#include_code mul-trait noir_stdlib/src/ops/arith.nr rust +#include_code div-trait noir_stdlib/src/ops/arith.nr rust The implementations block below is given for the `Add` trait, but the same types that implement `Add` also implement `Sub`, `Mul`, and `Div`. @@ -211,7 +211,7 @@ impl Add for u64 { .. } ### `std::ops::Rem` -#include_code rem-trait noir_stdlib/src/ops.nr rust +#include_code rem-trait noir_stdlib/src/ops/arith.nr rust `Rem::rem(a, b)` is the remainder function returning the result of what is left after dividing `a` and `b`. Implementing `Rem` allows the `%` operator @@ -234,18 +234,27 @@ impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } ### `std::ops::Neg` -#include_code neg-trait noir_stdlib/src/ops.nr rust +#include_code neg-trait noir_stdlib/src/ops/arith.nr rust `Neg::neg` is equivalent to the unary negation operator `-`. 
Implementations: -#include_code neg-trait-impls noir_stdlib/src/ops.nr rust +#include_code neg-trait-impls noir_stdlib/src/ops/arith.nr rust + +### `std::ops::Not` + +#include_code not-trait noir_stdlib/src/ops/bit.nr rust + +`Not::not` is equivalent to the unary bitwise NOT operator `!`. + +Implementations: +#include_code not-trait-impls noir_stdlib/src/ops/bit.nr rust ### `std::ops::{ BitOr, BitAnd, BitXor }` -#include_code bitor-trait noir_stdlib/src/ops.nr rust -#include_code bitand-trait noir_stdlib/src/ops.nr rust -#include_code bitxor-trait noir_stdlib/src/ops.nr rust +#include_code bitor-trait noir_stdlib/src/ops/bit.nr rust +#include_code bitand-trait noir_stdlib/src/ops/bit.nr rust +#include_code bitxor-trait noir_stdlib/src/ops/bit.nr rust Traits for the bitwise operations `|`, `&`, and `^`. @@ -272,8 +281,8 @@ impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } ### `std::ops::{ Shl, Shr }` -#include_code shl-trait noir_stdlib/src/ops.nr rust -#include_code shr-trait noir_stdlib/src/ops.nr rust +#include_code shl-trait noir_stdlib/src/ops/bit.nr rust +#include_code shr-trait noir_stdlib/src/ops/bit.nr rust Traits for a bit shift left and bit shift right. 
diff --git a/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr b/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr index 6a1f17dae98..21d658db615 100644 --- a/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr +++ b/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr @@ -1,4 +1,4 @@ -use crate::ops::{Add, Sub, Neg}; +use crate::ops::arith::{Add, Sub, Neg}; // TODO(https://github.com/noir-lang/noir/issues/4931) struct EmbeddedCurvePoint { @@ -76,7 +76,4 @@ fn embedded_curve_add( } #[foreign(embedded_curve_add)] -fn embedded_curve_add_array_return( - _point1: EmbeddedCurvePoint, - _point2: EmbeddedCurvePoint -) -> [Field; 2] {} +fn embedded_curve_add_array_return(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> [Field; 2] {} diff --git a/noir/noir-repo/noir_stdlib/src/ops.nr b/noir/noir-repo/noir_stdlib/src/ops.nr index e0814267aea..8b1903cff0b 100644 --- a/noir/noir-repo/noir_stdlib/src/ops.nr +++ b/noir/noir-repo/noir_stdlib/src/ops.nr @@ -1,170 +1,5 @@ -// docs:start:add-trait -trait Add { - fn add(self, other: Self) -> Self; -} -// docs:end:add-trait - -impl Add for Field { fn add(self, other: Field) -> Field { self + other } } - -impl Add for u64 { fn add(self, other: u64) -> u64 { self + other } } -impl Add for u32 { fn add(self, other: u32) -> u32 { self + other } } -impl Add for u8 { fn add(self, other: u8) -> u8 { self + other } } - -impl Add for i8 { fn add(self, other: i8) -> i8 { self + other } } -impl Add for i32 { fn add(self, other: i32) -> i32 { self + other } } -impl Add for i64 { fn add(self, other: i64) -> i64 { self + other } } - -// docs:start:sub-trait -trait Sub { - fn sub(self, other: Self) -> Self; -} -// docs:end:sub-trait - -impl Sub for Field { fn sub(self, other: Field) -> Field { self - other } } - -impl Sub for u64 { fn sub(self, other: u64) -> u64 { self - other } } -impl Sub for u32 { fn sub(self, other: u32) -> u32 { self - other } } -impl Sub for u8 { fn sub(self, other: u8) -> u8 { self - other } } - -impl Sub 
for i8 { fn sub(self, other: i8) -> i8 { self - other } } -impl Sub for i32 { fn sub(self, other: i32) -> i32 { self - other } } -impl Sub for i64 { fn sub(self, other: i64) -> i64 { self - other } } - -// docs:start:mul-trait -trait Mul { - fn mul(self, other: Self) -> Self; -} -// docs:end:mul-trait - -impl Mul for Field { fn mul(self, other: Field) -> Field { self * other } } - -impl Mul for u64 { fn mul(self, other: u64) -> u64 { self * other } } -impl Mul for u32 { fn mul(self, other: u32) -> u32 { self * other } } -impl Mul for u8 { fn mul(self, other: u8) -> u8 { self * other } } - -impl Mul for i8 { fn mul(self, other: i8) -> i8 { self * other } } -impl Mul for i32 { fn mul(self, other: i32) -> i32 { self * other } } -impl Mul for i64 { fn mul(self, other: i64) -> i64 { self * other } } - -// docs:start:div-trait -trait Div { - fn div(self, other: Self) -> Self; -} -// docs:end:div-trait - -impl Div for Field { fn div(self, other: Field) -> Field { self / other } } - -impl Div for u64 { fn div(self, other: u64) -> u64 { self / other } } -impl Div for u32 { fn div(self, other: u32) -> u32 { self / other } } -impl Div for u8 { fn div(self, other: u8) -> u8 { self / other } } - -impl Div for i8 { fn div(self, other: i8) -> i8 { self / other } } -impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } -impl Div for i64 { fn div(self, other: i64) -> i64 { self / other } } - -// docs:start:rem-trait -trait Rem{ - fn rem(self, other: Self) -> Self; -} -// docs:end:rem-trait - -impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } -impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } -impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } - -impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } -impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } -impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } - -// docs:start:neg-trait -trait Neg { - fn neg(self) -> 
Self; -} -// docs:end:neg-trait - -// docs:start:neg-trait-impls -impl Neg for Field { fn neg(self) -> Field { -self } } - -impl Neg for i8 { fn neg(self) -> i8 { -self } } -impl Neg for i32 { fn neg(self) -> i32 { -self } } -impl Neg for i64 { fn neg(self) -> i64 { -self } } -// docs:end:neg-trait-impls - -// docs:start:bitor-trait -trait BitOr { - fn bitor(self, other: Self) -> Self; -} -// docs:end:bitor-trait - -impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } - -impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } -impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } -impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } - -impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } -impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } -impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } - -// docs:start:bitand-trait -trait BitAnd { - fn bitand(self, other: Self) -> Self; -} -// docs:end:bitand-trait - -impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } - -impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } -impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 { self & other } } -impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } - -impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } -impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } -impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } - -// docs:start:bitxor-trait -trait BitXor { - fn bitxor(self, other: Self) -> Self; -} -// docs:end:bitxor-trait - -impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } - -impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } -impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } -impl BitXor for u8 { fn bitxor(self, other: 
u8) -> u8 { self ^ other } } - -impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 { self ^ other } } -impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } -impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } - -// docs:start:shl-trait -trait Shl { - fn shl(self, other: u8) -> Self; -} -// docs:end:shl-trait - -impl Shl for u32 { fn shl(self, other: u8) -> u32 { self << other } } -impl Shl for u64 { fn shl(self, other: u8) -> u64 { self << other } } -impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } -impl Shl for u1 { fn shl(self, other: u8) -> u1 { self << other } } - -impl Shl for i8 { fn shl(self, other: u8) -> i8 { self << other } } -impl Shl for i32 { fn shl(self, other: u8) -> i32 { self << other } } -impl Shl for i64 { fn shl(self, other: u8) -> i64 { self << other } } - -// docs:start:shr-trait -trait Shr { - fn shr(self, other: u8) -> Self; -} -// docs:end:shr-trait - -impl Shr for u64 { fn shr(self, other: u8) -> u64 { self >> other } } -impl Shr for u32 { fn shr(self, other: u8) -> u32 { self >> other } } -impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } -impl Shr for u1 { fn shr(self, other: u8) -> u1 { self >> other } } - -impl Shr for i8 { fn shr(self, other: u8) -> i8 { self >> other } } -impl Shr for i32 { fn shr(self, other: u8) -> i32 { self >> other } } -impl Shr for i64 { fn shr(self, other: u8) -> i64 { self >> other } } +mod arith; +mod bit; +use arith::{Add, Sub, Mul, Div, Rem, Neg}; +use bit::{Not, BitOr, BitAnd, BitXor, Shl, Shr}; diff --git a/noir/noir-repo/noir_stdlib/src/ops/arith.nr b/noir/noir-repo/noir_stdlib/src/ops/arith.nr new file mode 100644 index 00000000000..df0ff978a7c --- /dev/null +++ b/noir/noir-repo/noir_stdlib/src/ops/arith.nr @@ -0,0 +1,103 @@ +// docs:start:add-trait +trait Add { + fn add(self, other: Self) -> Self; +} +// docs:end:add-trait + +impl Add for Field { fn add(self, other: Field) -> Field { self + other } } + +impl Add for 
u64 { fn add(self, other: u64) -> u64 { self + other } } +impl Add for u32 { fn add(self, other: u32) -> u32 { self + other } } +impl Add for u16 { fn add(self, other: u16) -> u16 { self + other } } +impl Add for u8 { fn add(self, other: u8) -> u8 { self + other } } + +impl Add for i8 { fn add(self, other: i8) -> i8 { self + other } } +impl Add for i16 { fn add(self, other: i16) -> i16 { self + other } } +impl Add for i32 { fn add(self, other: i32) -> i32 { self + other } } +impl Add for i64 { fn add(self, other: i64) -> i64 { self + other } } + +// docs:start:sub-trait +trait Sub { + fn sub(self, other: Self) -> Self; +} +// docs:end:sub-trait + +impl Sub for Field { fn sub(self, other: Field) -> Field { self - other } } + +impl Sub for u64 { fn sub(self, other: u64) -> u64 { self - other } } +impl Sub for u32 { fn sub(self, other: u32) -> u32 { self - other } } +impl Sub for u16 { fn sub(self, other: u16) -> u16 { self - other } } +impl Sub for u8 { fn sub(self, other: u8) -> u8 { self - other } } + +impl Sub for i8 { fn sub(self, other: i8) -> i8 { self - other } } +impl Sub for i16 { fn sub(self, other: i16) -> i16 { self - other } } +impl Sub for i32 { fn sub(self, other: i32) -> i32 { self - other } } +impl Sub for i64 { fn sub(self, other: i64) -> i64 { self - other } } + +// docs:start:mul-trait +trait Mul { + fn mul(self, other: Self) -> Self; +} +// docs:end:mul-trait + +impl Mul for Field { fn mul(self, other: Field) -> Field { self * other } } + +impl Mul for u64 { fn mul(self, other: u64) -> u64 { self * other } } +impl Mul for u32 { fn mul(self, other: u32) -> u32 { self * other } } +impl Mul for u16 { fn mul(self, other: u16) -> u16 { self * other } } +impl Mul for u8 { fn mul(self, other: u8) -> u8 { self * other } } + +impl Mul for i8 { fn mul(self, other: i8) -> i8 { self * other } } +impl Mul for i16 { fn mul(self, other: i16) -> i16 { self * other } } +impl Mul for i32 { fn mul(self, other: i32) -> i32 { self * other } } +impl Mul for i64 { fn 
mul(self, other: i64) -> i64 { self * other } } + +// docs:start:div-trait +trait Div { + fn div(self, other: Self) -> Self; +} +// docs:end:div-trait + +impl Div for Field { fn div(self, other: Field) -> Field { self / other } } + +impl Div for u64 { fn div(self, other: u64) -> u64 { self / other } } +impl Div for u32 { fn div(self, other: u32) -> u32 { self / other } } +impl Div for u16 { fn div(self, other: u16) -> u16 { self / other } } +impl Div for u8 { fn div(self, other: u8) -> u8 { self / other } } + +impl Div for i8 { fn div(self, other: i8) -> i8 { self / other } } +impl Div for i16 { fn div(self, other: i16) -> i16 { self / other } } +impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } +impl Div for i64 { fn div(self, other: i64) -> i64 { self / other } } + +// docs:start:rem-trait +trait Rem{ + fn rem(self, other: Self) -> Self; +} +// docs:end:rem-trait + +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } + +// docs:start:neg-trait +trait Neg { + fn neg(self) -> Self; +} +// docs:end:neg-trait + +// docs:start:neg-trait-impls +impl Neg for Field { fn neg(self) -> Field { -self } } + +impl Neg for i8 { fn neg(self) -> i8 { -self } } +impl Neg for i16 { fn neg(self) -> i16 { -self } } +impl Neg for i32 { fn neg(self) -> i32 { -self } } +impl Neg for i64 { fn neg(self) -> i64 { -self } } +// docs:end:neg-trait-impls + diff --git a/noir/noir-repo/noir_stdlib/src/ops/bit.nr b/noir/noir-repo/noir_stdlib/src/ops/bit.nr new file mode 
100644 index 00000000000..a31cfee878c --- /dev/null +++ b/noir/noir-repo/noir_stdlib/src/ops/bit.nr @@ -0,0 +1,109 @@ +// docs:start:not-trait +trait Not { + fn not(self: Self) -> Self; +} +// docs:end:not-trait + +// docs:start:not-trait-impls +impl Not for bool { fn not(self) -> bool { !self } } + +impl Not for u64 { fn not(self) -> u64 { !self } } +impl Not for u32 { fn not(self) -> u32 { !self } } +impl Not for u16 { fn not(self) -> u16 { !self } } +impl Not for u8 { fn not(self) -> u8 { !self } } +impl Not for u1 { fn not(self) -> u1 { !self } } + +impl Not for i8 { fn not(self) -> i8 { !self } } +impl Not for i16 { fn not(self) -> i16 { !self } } +impl Not for i32 { fn not(self) -> i32 { !self } } +impl Not for i64 { fn not(self) -> i64 { !self } } +// docs:end:not-trait-impls + +// docs:start:bitor-trait +trait BitOr { + fn bitor(self, other: Self) -> Self; +} +// docs:end:bitor-trait + +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } + +// docs:start:bitand-trait +trait BitAnd { + fn bitand(self, other: Self) -> Self; +} +// docs:end:bitand-trait + +impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } + +impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } +impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 { self & other } } +impl BitAnd for u16 { fn bitand(self, other: u16) -> u16 { 
self & other } } +impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } + +impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } +impl BitAnd for i16 { fn bitand(self, other: i16) -> i16 { self & other } } +impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } +impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } + +// docs:start:bitxor-trait +trait BitXor { + fn bitxor(self, other: Self) -> Self; +} +// docs:end:bitxor-trait + +impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } + +impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } +impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } +impl BitXor for u16 { fn bitxor(self, other: u16) -> u16 { self ^ other } } +impl BitXor for u8 { fn bitxor(self, other: u8) -> u8 { self ^ other } } + +impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 { self ^ other } } +impl BitXor for i16 { fn bitxor(self, other: i16) -> i16 { self ^ other } } +impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } +impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } + +// docs:start:shl-trait +trait Shl { + fn shl(self, other: u8) -> Self; +} +// docs:end:shl-trait + +impl Shl for u32 { fn shl(self, other: u8) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u8) -> u64 { self << other } } +impl Shl for u16 { fn shl(self, other: u8) -> u16 { self << other } } +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u1 { fn shl(self, other: u8) -> u1 { self << other } } + +impl Shl for i8 { fn shl(self, other: u8) -> i8 { self << other } } +impl Shl for i16 { fn shl(self, other: u8) -> i16 { self << other } } +impl Shl for i32 { fn shl(self, other: u8) -> i32 { self << other } } +impl Shl for i64 { fn shl(self, other: u8) -> i64 { self << other } } + +// docs:start:shr-trait +trait Shr { + fn 
shr(self, other: u8) -> Self; +} +// docs:end:shr-trait + +impl Shr for u64 { fn shr(self, other: u8) -> u64 { self >> other } } +impl Shr for u32 { fn shr(self, other: u8) -> u32 { self >> other } } +impl Shr for u16 { fn shr(self, other: u8) -> u16 { self >> other } } +impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } +impl Shr for u1 { fn shr(self, other: u8) -> u1 { self >> other } } + +impl Shr for i8 { fn shr(self, other: u8) -> i8 { self >> other } } +impl Shr for i16 { fn shr(self, other: u8) -> i16 { self >> other } } +impl Shr for i32 { fn shr(self, other: u8) -> i32 { self >> other } } +impl Shr for i64 { fn shr(self, other: u8) -> i64 { self >> other } } + diff --git a/noir/noir-repo/noir_stdlib/src/uint128.nr b/noir/noir-repo/noir_stdlib/src/uint128.nr index d0f38079e6f..9c61fc801f3 100644 --- a/noir/noir-repo/noir_stdlib/src/uint128.nr +++ b/noir/noir-repo/noir_stdlib/src/uint128.nr @@ -1,4 +1,4 @@ -use crate::ops::{Add, Sub, Mul, Div, Rem, BitOr, BitAnd, BitXor, Shl, Shr}; +use crate::ops::{Add, Sub, Mul, Div, Rem, Not, BitOr, BitAnd, BitXor, Shl, Shr}; use crate::cmp::{Eq, Ord, Ordering}; global pow64 : Field = 18446744073709551616; //2^64; @@ -228,11 +228,20 @@ impl Ord for U128 { } } +impl Not for U128 { + fn not(self) -> U128 { + U128 { + lo: (!(self.lo as u64)) as Field, + hi: (!(self.hi as u64)) as Field + } + } +} + impl BitOr for U128 { fn bitor(self, other: U128) -> U128 { U128 { lo: ((self.lo as u64) | (other.lo as u64)) as Field, - hi: ((self.hi as u64) | (other.hi as u64))as Field + hi: ((self.hi as u64) | (other.hi as u64)) as Field } } } @@ -284,3 +293,20 @@ impl Shr for U128 { self / U128::from_integer(y) } } + +mod test { + use crate::uint128::{U128, pow64}; + + #[test] + fn test_not() { + let num = U128::from_u64s_le(0, 0); + let not_num = num.not(); + + let max_u64: Field = pow64 - 1; + assert_eq(not_num.hi, max_u64); + assert_eq(not_num.lo, max_u64); + + let not_not_num = not_num.not(); + assert_eq(num, 
not_not_num); + } +} diff --git a/noir/noir-repo/test_programs/execution_success/u16_support/Nargo.toml b/noir/noir-repo/test_programs/execution_success/u16_support/Nargo.toml new file mode 100644 index 00000000000..1c6b58e01e8 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/u16_support/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "u16_support" +type = "bin" +authors = [""] +compiler_version = ">=0.29.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/u16_support/Prover.toml b/noir/noir-repo/test_programs/execution_success/u16_support/Prover.toml new file mode 100644 index 00000000000..a56a84e61a4 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/u16_support/Prover.toml @@ -0,0 +1 @@ +x = "2" diff --git a/noir/noir-repo/test_programs/execution_success/u16_support/src/main.nr b/noir/noir-repo/test_programs/execution_success/u16_support/src/main.nr new file mode 100644 index 00000000000..e8b418f16da --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/u16_support/src/main.nr @@ -0,0 +1,24 @@ +fn main(x: u16) { + test_u16(x); + test_u16_unconstrained(x); +} + +unconstrained fn test_u16_unconstrained(x: u16) { + test_u16(x) +} + +fn test_u16(x: u16) { + let t1: u16 = 1234; + let t2: u16 = 4321; + let t = t1 + t2; + + let t4 = t - t2; + assert(t4 == t1); + + let mut small_int = x as u16; + let shift = small_int << (x as u8); + assert(shift == 8); + assert(shift >> (x as u8) == small_int); + assert(shift >> 15 == 0); + assert(shift << 15 == 0); +} diff --git a/noir/noir-repo/tooling/backend_interface/Cargo.toml b/noir/noir-repo/tooling/backend_interface/Cargo.toml index b731c138c7d..f6b5d5d0132 100644 --- a/noir/noir-repo/tooling/backend_interface/Cargo.toml +++ b/noir/noir-repo/tooling/backend_interface/Cargo.toml @@ -13,7 +13,6 @@ license.workspace = true acvm.workspace = true dirs.workspace = true thiserror.workspace = true -serde.workspace = true 
serde_json.workspace = true bb_abstraction_leaks.workspace = true tracing.workspace = true diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/info.rs b/noir/noir-repo/tooling/backend_interface/src/cli/info.rs deleted file mode 100644 index 6e6603ce53e..00000000000 --- a/noir/noir-repo/tooling/backend_interface/src/cli/info.rs +++ /dev/null @@ -1,62 +0,0 @@ -use acvm::acir::circuit::ExpressionWidth; - -use serde::Deserialize; -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -pub(crate) struct InfoCommand { - pub(crate) crs_path: PathBuf, -} - -#[derive(Deserialize)] -struct InfoResponse { - language: LanguageResponse, -} - -#[derive(Deserialize)] -struct LanguageResponse { - name: String, - width: Option, -} - -impl InfoCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command.arg("info").arg("-c").arg(self.crs_path).arg("-o").arg("-"); - - let output = command.output()?; - - if !output.status.success() { - return Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))); - } - - let backend_info: InfoResponse = - serde_json::from_slice(&output.stdout).expect("Backend should return valid json"); - let expression_width: ExpressionWidth = match backend_info.language.name.as_str() { - "PLONK-CSAT" => { - let width = backend_info.language.width.unwrap(); - ExpressionWidth::Bounded { width } - } - "R1CS" => ExpressionWidth::Unbounded, - _ => panic!("Unknown Expression width configuration"), - }; - - Ok(expression_width) - } -} - -#[test] -fn info_command() -> Result<(), BackendError> { - let backend = crate::get_mock_backend()?; - let crs_path = backend.backend_directory(); - - let expression_width = InfoCommand { crs_path }.run(backend.binary_path())?; - - assert!(matches!(expression_width, ExpressionWidth::Bounded { width: 4 })); - - Ok(()) -} diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs 
b/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs index b4dec859839..df43bd5cc2f 100644 --- a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs +++ b/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs @@ -2,7 +2,6 @@ mod contract; mod gates; -mod info; mod proof_as_fields; mod prove; mod verify; @@ -12,7 +11,6 @@ mod write_vk; pub(crate) use contract::ContractCommand; pub(crate) use gates::GatesCommand; -pub(crate) use info::InfoCommand; pub(crate) use proof_as_fields::ProofAsFieldsCommand; pub(crate) use prove::ProveCommand; pub(crate) use verify::VerifyCommand; diff --git a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs b/noir/noir-repo/tooling/backend_interface/src/proof_system.rs index fa1f82a5722..20a6dcf70f1 100644 --- a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs +++ b/noir/noir-repo/tooling/backend_interface/src/proof_system.rs @@ -3,7 +3,7 @@ use std::io::Write; use std::path::Path; use acvm::acir::{ - circuit::{ExpressionWidth, Program}, + circuit::Program, native_types::{WitnessMap, WitnessStack}, }; use acvm::FieldElement; @@ -11,8 +11,8 @@ use tempfile::tempdir; use tracing::warn; use crate::cli::{ - GatesCommand, InfoCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, - VkAsFieldsCommand, WriteVkCommand, + GatesCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, VkAsFieldsCommand, + WriteVkCommand, }; use crate::{Backend, BackendError}; @@ -33,25 +33,6 @@ impl Backend { .run(binary_path) } - pub fn get_backend_info(&self) -> Result { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - InfoCommand { crs_path: self.crs_directory() }.run(binary_path) - } - - /// If we cannot get a valid backend, returns `ExpressionWidth::Bound { width: 4 }`` - /// The function also prints a message saying we could not find a backend - pub fn get_backend_info_or_default(&self) -> ExpressionWidth { - if let Ok(expression_width) = self.get_backend_info() { - expression_width 
- } else { - warn!( - "No valid backend found, ExpressionWidth defaulting to Bounded with a width of 4" - ); - ExpressionWidth::Bounded { width: 4 } - } - } - #[tracing::instrument(level = "trace", skip_all)] pub fn prove( &self, diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs deleted file mode 100644 index cdaebb95fc9..00000000000 --- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs +++ /dev/null @@ -1,40 +0,0 @@ -use clap::Args; -use std::io::Write; -use std::path::PathBuf; - -const INFO_RESPONSE: &str = r#"{ - "language": { - "name": "PLONK-CSAT", - "width": 4 - }, - "opcodes_supported": ["arithmetic", "directive", "brillig", "memory_init", "memory_op"], - "black_box_functions_supported": [ - "and", - "xor", - "range", - "sha256", - "blake2s", - "blake3", - "keccak256", - "schnorr_verify", - "pedersen", - "pedersen_hash", - "ecdsa_secp256k1", - "ecdsa_secp256r1", - "multi_scalar_mul", - "recursive_aggregation" - ] -}"#; - -#[derive(Debug, Clone, Args)] -pub(crate) struct InfoCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option, - - #[clap(short = 'o')] - pub(crate) info_path: Option, -} - -pub(crate) fn run(_args: InfoCommand) { - std::io::stdout().write_all(INFO_RESPONSE.as_bytes()).unwrap(); -} diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs index ef8819af94b..74ea82d28f8 100644 --- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs +++ b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/main.rs @@ -7,7 +7,6 @@ use clap::{Parser, Subcommand}; mod contract_cmd; mod gates_cmd; -mod info_cmd; mod prove_cmd; mod verify_cmd; mod write_vk_cmd; @@ -21,7 +20,6 @@ struct BackendCli { #[derive(Subcommand, Clone, 
Debug)] enum BackendCommand { - Info(info_cmd::InfoCommand), Contract(contract_cmd::ContractCommand), Gates(gates_cmd::GatesCommand), Prove(prove_cmd::ProveCommand), @@ -34,7 +32,6 @@ fn main() { let BackendCli { command } = BackendCli::parse(); match command { - BackendCommand::Info(args) => info_cmd::run(args), BackendCommand::Contract(args) => contract_cmd::run(args), BackendCommand::Gates(args) => gates_cmd::run(args), BackendCommand::Prove(args) => prove_cmd::run(args), diff --git a/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs b/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs index b3dfff9e94c..45da7f9d00c 100644 --- a/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs +++ b/noir/noir-repo/tooling/bb_abstraction_leaks/build.rs @@ -10,7 +10,7 @@ use const_format::formatcp; const USERNAME: &str = "AztecProtocol"; const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.35.1"; +const VERSION: &str = "0.38.0"; const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); const API_URL: &str = diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs index 2b729e44b8a..208379b098d 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs @@ -1,4 +1,3 @@ -use crate::backends::Backend; use crate::errors::CliError; use clap::Args; @@ -42,11 +41,7 @@ pub(crate) struct CheckCommand { compile_options: CompileOptions, } -pub(crate) fn run( - _backend: &Backend, - args: CheckCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: CheckCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs 
b/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index 259e209b65a..8c64d9cd935 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -44,7 +44,6 @@ pub(crate) fn run( insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); let parsed_files = parse_all(&workspace_file_manager); - let expression_width = backend.get_backend_info()?; let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { let compilation_result = compile_program( @@ -62,7 +61,7 @@ pub(crate) fn run( args.compile_options.silence_warnings, )?; - let program = nargo::ops::transform_program(program, expression_width); + let program = nargo::ops::transform_program(program, args.compile_options.expression_width); // TODO(https://github.com/noir-lang/noir/issues/4428): // We do not expect to have a smart contract verifier for a foldable program with multiple circuits. 
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs index 54e8535f094..2f878406939 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -20,7 +20,6 @@ use noirc_frontend::hir::ParsedFiles; use notify::{EventKind, RecursiveMode, Watcher}; use notify_debouncer_full::new_debouncer; -use crate::backends::Backend; use crate::errors::CliError; use super::fs::program::only_acir; @@ -47,11 +46,7 @@ pub(crate) struct CompileCommand { watch: bool, } -pub(crate) fn run( - backend: &Backend, - mut args: CompileCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; @@ -63,10 +58,6 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_owned()), )?; - if args.compile_options.expression_width.is_none() { - args.compile_options.expression_width = Some(backend.get_backend_info_or_default()); - }; - if args.watch { watch_workspace(&workspace, &args.compile_options) .map_err(|err| CliError::Generic(err.to_string()))?; @@ -128,8 +119,6 @@ fn compile_workspace_full( insert_all_files_for_workspace_into_file_manager(workspace, &mut workspace_file_manager); let parsed_files = parse_all(&workspace_file_manager); - let expression_width = - compile_options.expression_width.expect("expression width should have been set"); let compiled_workspace = compile_workspace(&workspace_file_manager, &parsed_files, workspace, compile_options); @@ -149,12 +138,12 @@ fn compile_workspace_full( // Save build artifacts to disk. 
let only_acir = compile_options.only_acir; for (package, program) in binary_packages.into_iter().zip(compiled_programs) { - let program = nargo::ops::transform_program(program, expression_width); + let program = nargo::ops::transform_program(program, compile_options.expression_width); save_program(program.clone(), &package, &workspace.target_directory_path(), only_acir); } let circuit_dir = workspace.target_directory_path(); for (package, contract) in contract_packages.into_iter().zip(compiled_contracts) { - let contract = nargo::ops::transform_contract(contract, expression_width); + let contract = nargo::ops::transform_contract(contract, compile_options.expression_width); save_contract(contract, &package, &circuit_dir); } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs index ba4f91609ef..124e30069ae 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -1,6 +1,5 @@ use acvm::acir::circuit::ExpressionWidth; use acvm::acir::native_types::WitnessMap; -use backend_interface::Backend; use clap::Args; use nargo::constants::PROVER_INPUT_FILE; use nargo::workspace::Workspace; @@ -29,8 +28,8 @@ use noir_debugger::errors::{DapError, LoadError}; #[derive(Debug, Clone, Args)] pub(crate) struct DapCommand { /// Override the expression width requested by the backend. 
- #[arg(long, value_parser = parse_expression_width)] - expression_width: Option, + #[arg(long, value_parser = parse_expression_width, default_value = "4")] + expression_width: ExpressionWidth, #[clap(long)] preflight_check: bool, @@ -249,14 +248,7 @@ fn run_preflight_check( Ok(()) } -pub(crate) fn run( - backend: &Backend, - args: DapCommand, - _config: NargoConfig, -) -> Result<(), CliError> { - let expression_width = - args.expression_width.unwrap_or_else(|| backend.get_backend_info_or_default()); - +pub(crate) fn run(args: DapCommand, _config: NargoConfig) -> Result<(), CliError> { // When the --preflight-check flag is present, we run Noir's DAP server in "pre-flight mode", which test runs // the DAP initialization code without actually starting the DAP server. // @@ -270,12 +262,12 @@ pub(crate) fn run( // the DAP loop is established, which otherwise are considered "out of band" by the maintainers of the DAP spec. // More details here: https://github.com/microsoft/vscode/issues/108138 if args.preflight_check { - return run_preflight_check(expression_width, args).map_err(CliError::DapError); + return run_preflight_check(args.expression_width, args).map_err(CliError::DapError); } let output = BufWriter::new(std::io::stdout()); let input = BufReader::new(std::io::stdin()); let server = Server::new(input, output); - loop_uninitialized_dap(server, expression_width).map_err(CliError::DapError) + loop_uninitialized_dap(server, args.expression_width).map_err(CliError::DapError) } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs index 7cb5cd7846b..f950cd0405c 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -24,7 +24,6 @@ use noirc_frontend::hir::ParsedFiles; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; -use crate::backends::Backend; use 
crate::errors::CliError; /// Executes a circuit in debug mode @@ -53,11 +52,7 @@ pub(crate) struct DebugCommand { skip_instrumentation: Option, } -pub(crate) fn run( - backend: &Backend, - args: DebugCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: DebugCommand, config: NargoConfig) -> Result<(), CliError> { let acir_mode = args.acir_mode; let skip_instrumentation = args.skip_instrumentation.unwrap_or(acir_mode); @@ -69,10 +64,6 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; let target_dir = &workspace.target_directory_path(); - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else { println!( @@ -89,7 +80,8 @@ pub(crate) fn run( args.compile_options.clone(), )?; - let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); + let compiled_program = + nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir) } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs index 854ad559012..68f902dfe33 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -18,7 +18,6 @@ use noirc_frontend::graph::CrateName; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; -use crate::backends::Backend; use crate::errors::CliError; /// Executes a circuit to calculate its return value @@ -48,11 +47,7 @@ pub(crate) struct ExecuteCommand { oracle_resolver: Option, } -pub(crate) fn run( - backend: &Backend, - args: ExecuteCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: ExecuteCommand, config: NargoConfig) 
-> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; @@ -68,10 +63,6 @@ pub(crate) fn run( insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); let parsed_files = parse_all(&workspace_file_manager); - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { let compilation_result = compile_program( @@ -89,7 +80,8 @@ pub(crate) fn run( args.compile_options.silence_warnings, )?; - let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); + let compiled_program = + nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); let (return_value, witness_stack) = execute_program_and_decode( compiled_program, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs index 044c2cb4ebb..a61f3ccfc02 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs @@ -20,7 +20,6 @@ use noirc_frontend::graph::CrateName; use clap::Args; -use crate::backends::Backend; use crate::errors::CliError; use super::check_cmd::check_crate_and_report_errors; @@ -43,11 +42,7 @@ pub(crate) struct ExportCommand { compile_options: CompileOptions, } -pub(crate) fn run( - _backend: &Backend, - args: ExportCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: ExportCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; diff --git 
a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs index 3695fb57d31..cac3c36f904 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs @@ -70,10 +70,6 @@ pub(crate) fn run( insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); let parsed_files = parse_all(&workspace_file_manager); - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); let compiled_workspace = compile_workspace( &workspace_file_manager, &parsed_files, @@ -89,10 +85,10 @@ pub(crate) fn run( )?; let compiled_programs = vecmap(compiled_programs, |program| { - nargo::ops::transform_program(program, expression_width) + nargo::ops::transform_program(program, args.compile_options.expression_width) }); let compiled_contracts = vecmap(compiled_contracts, |contract| { - nargo::ops::transform_contract(contract, expression_width) + nargo::ops::transform_contract(contract, args.compile_options.expression_width) }); if args.profile_info { @@ -122,13 +118,24 @@ pub(crate) fn run( let program_info = binary_packages .par_bridge() .map(|(package, program)| { - count_opcodes_and_gates_in_program(backend, program, package, expression_width) + count_opcodes_and_gates_in_program( + backend, + program, + package, + args.compile_options.expression_width, + ) }) .collect::>()?; let contract_info = compiled_contracts .into_par_iter() - .map(|contract| count_opcodes_and_gates_in_contract(backend, contract, expression_width)) + .map(|contract| { + count_opcodes_and_gates_in_contract( + backend, + contract, + args.compile_options.expression_width, + ) + }) .collect::>()?; let info_report = InfoReport { programs: program_info, contracts: contract_info }; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs index 
1428b8070c8..45ac02ea552 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/lsp_cmd.rs @@ -8,7 +8,6 @@ use noir_lsp::NargoLspService; use tower::ServiceBuilder; use super::NargoConfig; -use crate::backends::Backend; use crate::errors::CliError; /// Starts the Noir LSP server @@ -19,12 +18,7 @@ use crate::errors::CliError; #[derive(Debug, Clone, Args)] pub(crate) struct LspCommand; -pub(crate) fn run( - // Backend is currently unused, but we might want to use it to inform the lsp in the future - _backend: &Backend, - _args: LspCommand, - _config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(_args: LspCommand, _config: NargoConfig) -> Result<(), CliError> { use tokio::runtime::Builder; let runtime = Builder::new_current_thread().enable_all().build().unwrap(); diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs index e8e17893815..ad778549ac0 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs @@ -107,21 +107,21 @@ pub(crate) fn start_cli() -> eyre::Result<()> { let backend = crate::backends::Backend::new(active_backend); match command { - NargoCommand::New(args) => new_cmd::run(&backend, args, config), + NargoCommand::New(args) => new_cmd::run(args, config), NargoCommand::Init(args) => init_cmd::run(args, config), - NargoCommand::Check(args) => check_cmd::run(&backend, args, config), - NargoCommand::Compile(args) => compile_cmd::run(&backend, args, config), - NargoCommand::Debug(args) => debug_cmd::run(&backend, args, config), - NargoCommand::Execute(args) => execute_cmd::run(&backend, args, config), - NargoCommand::Export(args) => export_cmd::run(&backend, args, config), + NargoCommand::Check(args) => check_cmd::run(args, config), + NargoCommand::Compile(args) => compile_cmd::run(args, config), + NargoCommand::Debug(args) => debug_cmd::run(args, config), + 
NargoCommand::Execute(args) => execute_cmd::run(args, config), + NargoCommand::Export(args) => export_cmd::run(args, config), NargoCommand::Prove(args) => prove_cmd::run(&backend, args, config), NargoCommand::Verify(args) => verify_cmd::run(&backend, args, config), - NargoCommand::Test(args) => test_cmd::run(&backend, args, config), + NargoCommand::Test(args) => test_cmd::run(args, config), NargoCommand::Info(args) => info_cmd::run(&backend, args, config), NargoCommand::CodegenVerifier(args) => codegen_verifier_cmd::run(&backend, args, config), NargoCommand::Backend(args) => backend_cmd::run(args), - NargoCommand::Lsp(args) => lsp_cmd::run(&backend, args, config), - NargoCommand::Dap(args) => dap_cmd::run(&backend, args, config), + NargoCommand::Lsp(args) => lsp_cmd::run(args, config), + NargoCommand::Dap(args) => dap_cmd::run(args, config), NargoCommand::Fmt(args) => fmt_cmd::run(args, config), }?; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs index b4c823d0c1e..21951f27260 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/new_cmd.rs @@ -1,4 +1,3 @@ -use crate::backends::Backend; use crate::errors::CliError; use super::{init_cmd::initialize_project, NargoConfig}; @@ -30,12 +29,7 @@ pub(crate) struct NewCommand { pub(crate) contract: bool, } -pub(crate) fn run( - // Backend is currently unused, but we might want to use it to inform the "new" template in the future - _backend: &Backend, - args: NewCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: NewCommand, config: NargoConfig) -> Result<(), CliError> { let package_dir = config.program_dir.join(&args.path); if package_dir.exists() { diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs index b9e4bca9e69..47c71527fd8 100644 --- 
a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -69,10 +69,6 @@ pub(crate) fn run( insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); let parsed_files = parse_all(&workspace_file_manager); - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { let compilation_result = compile_program( @@ -90,7 +86,8 @@ pub(crate) fn run( args.compile_options.silence_warnings, )?; - let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); + let compiled_program = + nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); prove_package( backend, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs index 88a804d5cf4..967d4c87e6d 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs @@ -19,7 +19,7 @@ use noirc_frontend::{ use rayon::prelude::{IntoParallelIterator, ParallelBridge, ParallelIterator}; use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; -use crate::{backends::Backend, cli::check_cmd::check_crate_and_report_errors, errors::CliError}; +use crate::{cli::check_cmd::check_crate_and_report_errors, errors::CliError}; use super::NargoConfig; @@ -54,11 +54,7 @@ pub(crate) struct TestCommand { oracle_resolver: Option, } -pub(crate) fn run( - _backend: &Backend, - args: TestCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: TestCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { 
PackageSelection::DefaultOrAll }; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs index 7202a179aae..a6078f6c1d3 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -54,10 +54,6 @@ pub(crate) fn run( insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); let parsed_files = parse_all(&workspace_file_manager); - let expression_width = args - .compile_options - .expression_width - .unwrap_or_else(|| backend.get_backend_info_or_default()); let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { let compilation_result = compile_program( @@ -75,7 +71,8 @@ pub(crate) fn run( args.compile_options.silence_warnings, )?; - let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); + let compiled_program = + nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); verify_package(backend, &workspace, package, compiled_program, &args.verifier_name)?; } diff --git a/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts b/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts index b2e76e54efc..dcf9f489003 100644 --- a/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts +++ b/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts @@ -81,39 +81,3 @@ it('circuit with a raw assert payload should fail with the decoded payload', asy }); } }); - -it('successfully executes a program with multiple acir circuits', async () => { - const inputs = { - x: '10', - }; - try { - await new Noir(fold_fibonacci_program).execute(inputs); - } catch (error) { - const knownError = error as Error; - expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); - } -}); - -it('successfully executes a program with multiple acir circuits', async () 
=> { - const inputs = { - x: '10', - }; - try { - await new Noir(fold_fibonacci_program).execute(inputs); - } catch (error) { - const knownError = error as Error; - expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); - } -}); - -it('successfully executes a program with multiple acir circuits', async () => { - const inputs = { - x: '10', - }; - try { - await new Noir(fold_fibonacci_program).execute(inputs); - } catch (error) { - const knownError = error as Error; - expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); - } -}); From 27ac34497b08f6aae97152c8a0737d6c247e6db1 Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 8 May 2024 16:08:26 -0400 Subject: [PATCH 05/43] hotfix(ci): less runners --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d696c0d41bf..a4c566e1de9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: with: runner_label: ${{ inputs.username || github.actor }}-x86 ebs_cache_size_gb: 256 - runner_concurrency: 50 + runner_concurrency: 30 subaction: ${{ inputs.runner_action || 'start' }} # Use instance types with low interruption rate in us-east-2 https://aws.amazon.com/ec2/spot/instance-advisor/ ec2_instance_type: r6in.32xlarge r6a.32xlarge i4i.32xlarge r7iz.32xlarge From b2c019b6b11c3aaa98d8bbb79b77b42a5f87f0d0 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 8 May 2024 21:09:17 +0100 Subject: [PATCH 06/43] chore: simplify nargo CLI to read from artifacts (#6279) As we're shifting towards a more artifact-focused workflow, I've modified the nargo CLI to prioritise writing everything to file. These can then be read from again when these programs are needed. 
The aim is that as we remove these commands from the `nargo` binary, we can ensure that we have compatibility between nargo artifacts and `bb` --- .../tooling/nargo/src/artifacts/contract.rs | 6 +- .../tooling/nargo/src/artifacts/debug.rs | 27 +++++++ .../tooling/nargo/src/artifacts/program.rs | 2 +- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 29 ++----- .../tooling/nargo_cli/src/cli/compile_cmd.rs | 2 +- .../tooling/nargo_cli/src/cli/execute_cmd.rs | 35 +++------ .../tooling/nargo_cli/src/cli/fs/program.rs | 13 ++++ .../tooling/nargo_cli/src/cli/info_cmd.rs | 76 +++++++++---------- .../tooling/nargo_cli/src/cli/prove_cmd.rs | 50 ++++-------- .../tooling/nargo_cli/src/cli/verify_cmd.rs | 36 +++------ 10 files changed, 120 insertions(+), 156 deletions(-) diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs b/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs index 83bb4b94f82..a864da7c33c 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/contract.rs @@ -9,7 +9,7 @@ use std::collections::{BTreeMap, HashMap}; use fm::FileId; -#[derive(Serialize, Deserialize)] +#[derive(Clone, Serialize, Deserialize)] pub struct ContractOutputsArtifact { pub structs: HashMap>, pub globals: HashMap>, @@ -21,7 +21,7 @@ impl From for ContractOutputsArtifact { } } -#[derive(Serialize, Deserialize)] +#[derive(Clone, Serialize, Deserialize)] pub struct ContractArtifact { /// Version of noir used to compile this contract pub noir_version: String, @@ -51,7 +51,7 @@ impl From for ContractArtifact { /// /// A contract function unlike a regular Noir program however can have additional properties. /// One of these being a function type. 
-#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct ContractFunctionArtifact { pub name: String, diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs b/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs index 496896468cc..2570c3f5c9f 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/debug.rs @@ -9,6 +9,7 @@ use std::{ }; pub use super::debug_vars::{DebugVars, StackFrame}; +use super::{contract::ContractArtifact, program::ProgramArtifact}; use fm::{FileId, FileManager, PathString}; /// A Debug Artifact stores, for a given program, the debug info for every function @@ -128,6 +129,16 @@ impl From for DebugArtifact { } } +impl From for DebugArtifact { + fn from(program_artifact: ProgramArtifact) -> Self { + DebugArtifact { + debug_symbols: program_artifact.debug_symbols.debug_infos, + file_map: program_artifact.file_map, + warnings: Vec::new(), + } + } +} + impl From for DebugArtifact { fn from(compiled_artifact: CompiledContract) -> Self { let all_functions_debug: Vec = compiled_artifact @@ -144,6 +155,22 @@ impl From for DebugArtifact { } } +impl From for DebugArtifact { + fn from(compiled_artifact: ContractArtifact) -> Self { + let all_functions_debug: Vec = compiled_artifact + .functions + .into_iter() + .flat_map(|contract_function| contract_function.debug_symbols.debug_infos) + .collect(); + + DebugArtifact { + debug_symbols: all_functions_debug, + file_map: compiled_artifact.file_map, + warnings: Vec::new(), + } + } +} + impl<'a> Files<'a> for DebugArtifact { type FileId = FileId; type Name = PathString; diff --git a/noir/noir-repo/tooling/nargo/src/artifacts/program.rs b/noir/noir-repo/tooling/nargo/src/artifacts/program.rs index 67ac9f53ec8..3c25b9e3345 100644 --- a/noir/noir-repo/tooling/nargo/src/artifacts/program.rs +++ b/noir/noir-repo/tooling/nargo/src/artifacts/program.rs @@ -8,7 +8,7 @@ use noirc_driver::DebugFile; use 
noirc_errors::debug_info::ProgramDebugInfo; use serde::{Deserialize, Serialize}; -#[derive(Serialize, Deserialize, Debug)] +#[derive(Clone, Serialize, Deserialize, Debug)] pub struct ProgramArtifact { pub noir_version: String, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index 8c64d9cd935..04ed5c2b6b8 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -1,13 +1,13 @@ +use super::compile_cmd::compile_workspace_full; use super::fs::{create_named_dir, write_to_file}; use super::NargoConfig; use crate::backends::Backend; +use crate::cli::fs::program::read_program_from_file; use crate::errors::CliError; use clap::Args; -use nargo::ops::{compile_program, report_errors}; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; /// Generates a Solidity verifier smart contract for the program @@ -40,28 +40,13 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. 
+ compile_workspace_full(&workspace, &args.compile_options)?; let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let program = nargo::ops::transform_program(program, args.compile_options.expression_width); + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); // TODO(https://github.com/noir-lang/noir/issues/4428): // We do not expect to have a smart contract verifier for a foldable program with multiple circuits. diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs index 2f878406939..8f28e5d9388 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -111,7 +111,7 @@ fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> n Ok(()) } -fn compile_workspace_full( +pub(super) fn compile_workspace_full( workspace: &Workspace, compile_options: &CompileOptions, ) -> Result<(), CliError> { diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs index 68f902dfe33..862a46884ef 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -5,19 +5,18 @@ use clap::Args; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; -use nargo::ops::{compile_program, report_errors, DefaultForeignCallExecutor}; +use 
nargo::ops::DefaultForeignCallExecutor; use nargo::package::Package; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; +use super::compile_cmd::compile_workspace_full; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; +use crate::cli::fs::program::read_program_from_file; use crate::errors::CliError; /// Executes a circuit to calculate its return value @@ -59,32 +58,16 @@ pub(crate) fn run(args: ExecuteCommand, config: NargoConfig) -> Result<(), CliEr )?; let target_dir = &workspace.target_directory_path(); - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. 
+ compile_workspace_full(&workspace, &args.compile_options)?; let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = - nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); let (return_value, witness_stack) = execute_program_and_decode( - compiled_program, + program, package, &args.prover_name, args.oracle_resolver.as_deref(), diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs index 77005e8d5af..72d686b0b36 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/fs/program.rs @@ -60,3 +60,16 @@ pub(crate) fn read_program_from_file>( Ok(program) } + +pub(crate) fn read_contract_from_file>( + circuit_path: P, +) -> Result { + let file_path = circuit_path.as_ref().with_extension("json"); + + let input_string = + std::fs::read(&file_path).map_err(|_| FilesystemError::PathNotValid(file_path))?; + let contract = serde_json::from_slice(&input_string) + .map_err(|err| FilesystemError::ProgramSerializationError(err.to_string()))?; + + Ok(contract) +} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs index cac3c36f904..1ae2d5db104 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs @@ -5,14 +5,11 @@ use 
backend_interface::BackendError; use clap::Args; use iter_extended::vecmap; use nargo::{ - artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager, - ops::report_errors, package::Package, parse_all, + artifacts::{contract::ContractArtifact, debug::DebugArtifact, program::ProgramArtifact}, + package::Package, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledContract, CompiledProgram, - NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; use prettytable::{row, table, Row}; @@ -22,7 +19,11 @@ use serde::Serialize; use crate::backends::Backend; use crate::errors::CliError; -use super::{compile_cmd::compile_workspace, NargoConfig}; +use super::{ + compile_cmd::compile_workspace_full, + fs::program::{read_contract_from_file, read_program_from_file}, + NargoConfig, +}; /// Provides detailed information on each of a program's function (represented by a single circuit) /// @@ -66,35 +67,32 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); - - let compiled_workspace = compile_workspace( - &workspace_file_manager, - &parsed_files, - &workspace, - &args.compile_options, - ); - - let (compiled_programs, compiled_contracts) = report_errors( - compiled_workspace, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; + // Compile the full workspace in order to generate any build artifacts. 
+ compile_workspace_full(&workspace, &args.compile_options)?; - let compiled_programs = vecmap(compiled_programs, |program| { - nargo::ops::transform_program(program, args.compile_options.expression_width) - }); - let compiled_contracts = vecmap(compiled_contracts, |contract| { - nargo::ops::transform_contract(contract, args.compile_options.expression_width) - }); + let binary_packages: Vec<(Package, ProgramArtifact)> = workspace + .into_iter() + .filter(|package| package.is_binary()) + .map(|package| -> Result<(Package, ProgramArtifact), CliError> { + let program_artifact_path = workspace.package_build_path(package); + let program = read_program_from_file(program_artifact_path)?; + Ok((package.clone(), program)) + }) + .collect::>()?; + + let compiled_contracts: Vec = workspace + .into_iter() + .filter(|package| package.is_contract()) + .map(|package| { + let contract_artifact_path = workspace.package_build_path(package); + read_contract_from_file(contract_artifact_path) + }) + .collect::>()?; if args.profile_info { - for compiled_program in &compiled_programs { + for (_, compiled_program) in &binary_packages { let debug_artifact = DebugArtifact::from(compiled_program.clone()); - for function_debug in compiled_program.debug.iter() { + for function_debug in compiled_program.debug_symbols.debug_infos.iter() { let span_opcodes = function_debug.count_span_opcodes(); print_span_opcodes(span_opcodes, &debug_artifact); } @@ -104,7 +102,7 @@ pub(crate) fn run( let debug_artifact = DebugArtifact::from(compiled_contract.clone()); let functions = &compiled_contract.functions; for contract_function in functions { - for function_debug in contract_function.debug.iter() { + for function_debug in contract_function.debug_symbols.debug_infos.iter() { let span_opcodes = function_debug.count_span_opcodes(); print_span_opcodes(span_opcodes, &debug_artifact); } @@ -112,16 +110,14 @@ pub(crate) fn run( } } - let binary_packages = - workspace.into_iter().filter(|package| 
package.is_binary()).zip(compiled_programs); - let program_info = binary_packages + .into_iter() .par_bridge() .map(|(package, program)| { count_opcodes_and_gates_in_program( backend, program, - package, + &package, args.compile_options.expression_width, ) }) @@ -287,12 +283,12 @@ impl From for Vec { fn count_opcodes_and_gates_in_program( backend: &Backend, - compiled_program: CompiledProgram, + compiled_program: ProgramArtifact, package: &Package, expression_width: ExpressionWidth, ) -> Result { let functions = compiled_program - .program + .bytecode .functions .into_par_iter() .enumerate() @@ -314,7 +310,7 @@ fn count_opcodes_and_gates_in_program( fn count_opcodes_and_gates_in_contract( backend: &Backend, - contract: CompiledContract, + contract: ContractArtifact, expression_width: ExpressionWidth, ) -> Result { let functions = contract diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs index 47c71527fd8..6fb6e7269f7 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -1,16 +1,13 @@ use clap::Args; use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; -use nargo::ops::{compile_program, report_errors}; use nargo::package::Package; -use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; +use super::compile_cmd::compile_workspace_full; +use super::fs::program::read_program_from_file; use super::fs::{ inputs::{read_inputs_from_file, write_inputs_to_file}, proof::save_proof_to_dir, @@ -65,56 +62,39 @@ 
pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. + compile_workspace_full(&workspace, &args.compile_options)?; let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = - nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); - prove_package( + let proof = prove_package( backend, - &workspace, package, - compiled_program, + program, &args.prover_name, &args.verifier_name, args.verify, args.oracle_resolver.as_deref(), )?; + + save_proof_to_dir(&proof, &String::from(&package.name), workspace.proofs_directory_path())?; } Ok(()) } -#[allow(clippy::too_many_arguments)] -pub(crate) fn prove_package( +fn prove_package( backend: &Backend, - workspace: &Workspace, package: &Package, compiled_program: CompiledProgram, prover_name: &str, verifier_name: &str, check_proof: bool, foreign_call_resolver_url: Option<&str>, -) -> Result<(), CliError> { +) -> Result, CliError> { // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)?; @@ -148,7 +128,5 @@ pub(crate) fn 
prove_package( } } - save_proof_to_dir(&proof, &String::from(&package.name), workspace.proofs_directory_path())?; - - Ok(()) + Ok(proof) } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs index a6078f6c1d3..a7f2772330a 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -1,18 +1,16 @@ +use super::compile_cmd::compile_workspace_full; +use super::fs::program::read_program_from_file; use super::fs::{inputs::read_inputs_from_file, load_hex_data}; use super::NargoConfig; use crate::{backends::Backend, errors::CliError}; use clap::Args; use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE}; -use nargo::ops::{compile_program, report_errors}; use nargo::package::Package; use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; /// Given a proof and a program, verify whether the proof is valid @@ -50,31 +48,15 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. 
+ compile_workspace_full(&workspace, &args.compile_options)?; let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = - nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); - - verify_package(backend, &workspace, package, compiled_program, &args.verifier_name)?; + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); + + verify_package(backend, &workspace, package, program, &args.verifier_name)?; } Ok(()) From 4f887040473bf942f76042d0efced5257da1bf51 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 9 May 2024 02:13:20 +0000 Subject: [PATCH 07/43] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "a0f30c476" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "a0f30c476" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 02e8f6da718..3b68178f091 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 7ffbebd1eb8f60fb77145842a31358522ad161b9 - parent = 856657fbd1f82b7526b3ff0214e3e6758db214e3 + commit = a0f30c4760a4fe7db9680377d97cd7a75b048fdb + parent = b2c019b6b11c3aaa98d8bbb79b77b42a5f87f0d0 method = merge cmdver = 0.4.6 From 8330f70b6813d70f8a98d2d120185cf7420624f5 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 9 May 2024 02:13:52 +0000 Subject: [PATCH 08/43] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- noir-projects/aztec-nr/tests/Nargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..5e2e608edad 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.38.0", directory="noir-projects/noir-protocol-circuits/crates/types" } diff --git a/noir-projects/aztec-nr/tests/Nargo.toml b/noir-projects/aztec-nr/tests/Nargo.toml index 13404b37324..7f343e48f74 100644 --- a/noir-projects/aztec-nr/tests/Nargo.toml +++ b/noir-projects/aztec-nr/tests/Nargo.toml @@ -6,4 +6,4 @@ type = "lib" [dependencies] aztec = { path = "../aztec" } -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.38.0", 
directory="noir-projects/noir-protocol-circuits/crates/types" } From 884116010808bb9243e1d95496443377c0476aa8 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 9 May 2024 02:13:52 +0000 Subject: [PATCH 09/43] git_subrepo.sh: Fix parent in .gitrepo file. [skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index ef0a30f2bb4..e954fe3f080 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ commit = 425256e90b778e29913427d71bf0038187ca6bc7 method = merge cmdver = 0.4.6 - parent = 4b4187f4bd004a11710b1fdd0119e9c098ae969c + parent = 3b48234a94dec37da4276bd7eb2da71215d273b6 From 721bcf023b1af94491c5d3adac64a6bde5f1d2e3 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 9 May 2024 02:13:57 +0000 Subject: [PATCH 10/43] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "440d97fb9" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "440d97fb9" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- noir-projects/aztec-nr/tests/Nargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index e954fe3f080..5aa17568bc3 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 425256e90b778e29913427d71bf0038187ca6bc7 + commit = 440d97fb931948aa90fcd6a1ee0206abdc468745 method = merge cmdver = 0.4.6 - parent = 3b48234a94dec37da4276bd7eb2da71215d273b6 + parent = 7a81f4568348ceee1dde52ec2c93c5245420f880 diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 5e2e608edad..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.38.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } diff --git a/noir-projects/aztec-nr/tests/Nargo.toml b/noir-projects/aztec-nr/tests/Nargo.toml index 7f343e48f74..13404b37324 100644 --- a/noir-projects/aztec-nr/tests/Nargo.toml +++ b/noir-projects/aztec-nr/tests/Nargo.toml @@ -6,4 +6,4 @@ type = "lib" [dependencies] aztec = { path = "../aztec" } -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.38.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From 60104e9ff00ab5b39ee94310816f1e1098af6f53 Mon Sep 17 00:00:00 2001 From: ludamad Date: Thu, 9 May 2024 00:43:45 -0400 Subject: [PATCH 11/43] fix(ci): stop mass serialization (#6290) --- 
.github/workflows/ci.yml | 2 +- yarn-project/end-to-end/Earthfile | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a4c566e1de9..d696c0d41bf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,7 +23,7 @@ jobs: with: runner_label: ${{ inputs.username || github.actor }}-x86 ebs_cache_size_gb: 256 - runner_concurrency: 30 + runner_concurrency: 50 subaction: ${{ inputs.runner_action || 'start' }} # Use instance types with low interruption rate in us-east-2 https://aws.amazon.com/ec2/spot/instance-advisor/ ec2_instance_type: r6in.32xlarge r6a.32xlarge i4i.32xlarge r7iz.32xlarge diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index e6b2b72fd26..ea6fdbe5ae7 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -18,8 +18,8 @@ E2E_COMPOSE_TEST: # Let docker compose know about the pushed tags above ENV AZTEC_DOCKER_TAG=$(git rev-parse HEAD) # Optimize to not cause serial behavior if image already exists - IF ! docker image ls --format '{{.Repository}}:{{.Tag}}' | grep "aztecprotocol/aztec:$AZTEC_DOCKER_TAG" && \ - docker image ls --format '{{.Repository}}:{{.Tag}}' | grep "aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG" + IF ! docker image ls --format '{{.Repository}}:{{.Tag}}' | grep "aztecprotocol/aztec:$AZTEC_DOCKER_TAG" || \ + ! 
docker image ls --format '{{.Repository}}:{{.Tag}}' | grep "aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG" WAIT BUILD ../+export-e2e-test-images END From 5514143aab1db195aa466752e1e476d95a993a08 Mon Sep 17 00:00:00 2001 From: David Banks <47112877+dbanks12@users.noreply.github.com> Date: Thu, 9 May 2024 03:37:18 -0400 Subject: [PATCH 12/43] chore(avm-simulator): add U128 overflow tests to AVM simulator (#6281) --- .../contracts/avm_test_contract/src/main.nr | 13 ++++++ .../simulator/src/avm/avm_simulator.test.ts | 46 +++++++++++++------ 2 files changed, 45 insertions(+), 14 deletions(-) diff --git a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr index c26c4f2551a..94d70614a13 100644 --- a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr @@ -140,6 +140,19 @@ contract AvmTest { a + b } + #[aztec(public-vm)] + fn u128_addition_overflow() -> U128 { + let max_u128: U128 = U128::from_hex("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"); + let one: U128 = U128::from_integer(1); + max_u128 + one + } + + #[aztec(public-vm)] + fn u128_from_integer_overflow() -> U128 { + let should_overflow: Field = 2.pow_32(128); // U128::max() + 1; + U128::from_integer(should_overflow) + } + /************************************************************************ * Hashing functions ************************************************************************/ diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index 9e21712d017..1d0d7f3af50 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -85,22 +85,40 @@ describe('AVM simulator: transpiled Noir contracts', () => { expect(isAvmBytecode(bytecode)); }); - it('U128 addition', async () => { - const calldata: Fr[] = [ - // First 
U128 - new Fr(1), - new Fr(2), - // Second U128 - new Fr(3), - new Fr(4), - ]; - const context = initContext({ env: initExecutionEnvironment({ calldata }) }); + describe('U128 addition and overflows', () => { + it('U128 addition', async () => { + const calldata: Fr[] = [ + // First U128 + new Fr(1), + new Fr(2), + // Second U128 + new Fr(3), + new Fr(4), + ]; + const context = initContext({ env: initExecutionEnvironment({ calldata }) }); - const bytecode = getAvmTestContractBytecode('add_u128'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); + const bytecode = getAvmTestContractBytecode('add_u128'); + const results = await new AvmSimulator(context).executeBytecode(bytecode); - expect(results.reverted).toBe(false); - expect(results.output).toEqual([new Fr(4), new Fr(6)]); + expect(results.reverted).toBe(false); + expect(results.output).toEqual([new Fr(4), new Fr(6)]); + }); + + it('Expect failure on U128::add() overflow', async () => { + const bytecode = getAvmTestContractBytecode('u128_addition_overflow'); + const results = await new AvmSimulator(initContext()).executeBytecode(bytecode); + expect(results.reverted).toBe(true); + expect(results.revertReason?.message).toEqual('Reverted with output: attempt to add with overflow'); + }); + + it('Expect failure on U128::from_integer() overflow', async () => { + const bytecode = getAvmTestContractBytecode('u128_from_integer_overflow'); + const results = await new AvmSimulator(initContext()).executeBytecode(bytecode); + expect(results.reverted).toBe(true); + expect(results.revertReason?.message).toEqual(undefined); + // Note: compiler intrinsic messages (like below) are not known to the AVM + //expect(results.revertReason?.message).toEqual("Reverted with output: call to assert_max_bit_size 'self.__assert_max_bit_size(bit_size)'"); + }); }); it('Assertion message', async () => { From f51acfaade686ffab0bde7d91c97a13280b9e2c6 Mon Sep 17 00:00:00 2001 From: Facundo Date: Thu, 9 May 2024 10:12:30 
+0100 Subject: [PATCH 13/43] fix(avm-simulator): fix test expectation (#6293) --- yarn-project/simulator/src/avm/avm_simulator.test.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index 1d0d7f3af50..346e2861a8f 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -108,7 +108,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const bytecode = getAvmTestContractBytecode('u128_addition_overflow'); const results = await new AvmSimulator(initContext()).executeBytecode(bytecode); expect(results.reverted).toBe(true); - expect(results.revertReason?.message).toEqual('Reverted with output: attempt to add with overflow'); + expect(results.revertReason?.message).toEqual('Assertion failed: attempt to add with overflow'); }); it('Expect failure on U128::from_integer() overflow', async () => { @@ -117,7 +117,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { expect(results.reverted).toBe(true); expect(results.revertReason?.message).toEqual(undefined); // Note: compiler intrinsic messages (like below) are not known to the AVM - //expect(results.revertReason?.message).toEqual("Reverted with output: call to assert_max_bit_size 'self.__assert_max_bit_size(bit_size)'"); + //expect(results.revertReason?.message).toEqual("Assertion failed: call to assert_max_bit_size 'self.__assert_max_bit_size(bit_size)'"); }); }); From ef9cdde09d6cdd8a5deb0217fea1e828477f0c03 Mon Sep 17 00:00:00 2001 From: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> Date: Thu, 9 May 2024 10:35:24 +0100 Subject: [PATCH 14/43] feat: Proper padding in ts AES and constrained AES in body and header computations (#6269) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #6172. 
Also fixes an issue as the typescript AES was not doing proper padding. Battling noir, good with help from @Thunkar. --------- Co-authored-by: thunkar Co-authored-by: David Banks <47112877+dbanks12@users.noreply.github.com> Co-authored-by: Jan Beneš --- .../aztec-nr/aztec/src/encrypted_logs/body.nr | 60 +++++++++---------- .../aztec/src/encrypted_logs/header.nr | 19 +++--- .../contracts/test_contract/src/main.nr | 10 ++-- noir/noir-repo/noir_stdlib/src/aes128.nr | 3 + .../src/logs/encrypted_log_body.test.ts | 2 +- .../barretenberg/crypto/aes128/index.test.ts | 27 ++++++++- .../src/barretenberg/crypto/aes128/index.ts | 12 ++-- 7 files changed, 74 insertions(+), 59 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr index 07bd08b46e3..4393d9da16c 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/body.nr @@ -1,7 +1,7 @@ use crate::note::{note_interface::NoteInterface}; use dep::protocol_types::{grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint}; -use crate::oracle::encryption::aes128_encrypt; +use dep::std::aes128::aes128_encrypt_slice; use crate::keys::point_to_symmetric_key::point_to_symmetric_key; struct EncryptedLogBody { @@ -19,32 +19,30 @@ impl EncryptedLogBody { Self { storage_slot, note_type_id, note } } - pub fn compute_ciphertext( + pub fn compute_ciphertext( self, secret: GrumpkinPrivateKey, point: GrumpkinPoint - ) -> [u8; M] where Note: NoteInterface { - // We need 32 bytes for every field in the note, and then we have 2 extra fields (storage_slot and note_type_id) - let serialized_note: [Field; N] = Note::serialize_content(self.note); + ) -> [u8] where Note: NoteInterface { + let serialized_note: [Field; N] = self.note.serialize_content(); - // Work around not being able to use N directly beyond the size of the array above. 
- let N_ = serialized_note.len(); - - assert(N_ * 32 + 64 == M, "Invalid size of encrypted log body"); - - let mut buffer: [u8; M] = [0; M]; + let mut buffer_slice: [u8] = &[]; let storage_slot_bytes = self.storage_slot.to_be_bytes(32); let note_type_id_bytes = self.note_type_id.to_be_bytes(32); + + for i in 0..32 { + buffer_slice = buffer_slice.push_back(storage_slot_bytes[i]); + } + for i in 0..32 { - buffer[i] = storage_slot_bytes[i]; - buffer[32 + i] = note_type_id_bytes[i]; + buffer_slice = buffer_slice.push_back(note_type_id_bytes[i]); } - for i in 0..N_ { + for i in 0..serialized_note.len() { let bytes = serialized_note[i].to_be_bytes(32); for j in 0..32 { - buffer[64 + i * 32 + j] = bytes[j]; + buffer_slice = buffer_slice.push_back(bytes[j]); } } @@ -56,16 +54,16 @@ impl EncryptedLogBody { sym_key[i] = full_key[i]; iv[i] = full_key[i + 16]; } - - aes128_encrypt(buffer, iv, sym_key) + aes128_encrypt_slice(buffer_slice, iv, sym_key) } } -/* -// Test is semi broken, needs to be fixed along with #6172 mod test { use crate::encrypted_logs::body::EncryptedLogBody; - use dep::protocol_types::{address::AztecAddress, traits::Empty, constants::GENERATOR_INDEX__NOTE_NULLIFIER}; + use dep::protocol_types::{ + address::AztecAddress, traits::Empty, constants::GENERATOR_INDEX__NOTE_NULLIFIER, + grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint + }; use crate::{ note::{note_header::NoteHeader, note_interface::NoteInterface, utils::compute_note_hash_for_consumption}, @@ -73,8 +71,6 @@ mod test { context::PrivateContext, hash::poseidon2_hash }; - use dep::protocol_types::{address::AztecAddress, grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint}; - struct AddressNote { address: AztecAddress, owner: AztecAddress, @@ -82,9 +78,9 @@ mod test { header: NoteHeader, } - global BIB_BOB_ADDRESS_NOTE_LEN: Field = 3; + global ADDRESS_NOTE_LEN: Field = 3; - impl NoteInterface for AddressNote { + impl NoteInterface for AddressNote { fn 
compute_note_content_hash(self) -> Field {1} fn get_note_type_id() -> Field {2} @@ -99,9 +95,9 @@ mod test { fn broadcast(self, context: &mut PrivateContext, slot: Field) {} - fn serialize_content(self) -> [Field; BIB_BOB_ADDRESS_NOTE_LEN] { [self.address.to_field(), self.owner.to_field(), self.randomness]} + fn serialize_content(self) -> [Field; ADDRESS_NOTE_LEN] { [self.address.to_field(), self.owner.to_field(), self.randomness]} - fn deserialize_content(fields: [Field; BIB_BOB_ADDRESS_NOTE_LEN]) -> Self { + fn deserialize_content(fields: [Field; ADDRESS_NOTE_LEN]) -> Self { AddressNote { address: AztecAddress::from_field(fields[0]), owner: AztecAddress::from_field(fields[1]), randomness: fields[2], header: NoteHeader::empty() } } } @@ -110,10 +106,9 @@ mod test { pub fn new(address: AztecAddress, owner: AztecAddress, randomness: Field) -> Self { AddressNote { address, owner, randomness, header: NoteHeader::empty() } } - // docs:end:address_note_def } - // @todo Issue(#6172) This is to be run as a test. But it is currently using the AES oracle so will fail there. 
+ #[test] fn test_encrypted_log_body() { let note = AddressNote::new( AztecAddress::from_field(0x1), @@ -137,11 +132,12 @@ mod test { let ciphertext = body.compute_ciphertext(secret, point); let expected_body_ciphertext = [ - 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 36, 194, 14, 168, 0, 137, 126, 59, 151, 177, 136, 254, 153, 190, 92, 33, 40, 151, 178, 54, 34, 166, 124, 96, 117, 108, 168, 7, 147, 222, 81, 201, 254, 170, 244, 151, 60, 64, 226, 45, 156, 185, 53, 23, 121, 63, 243, 101, 134, 21, 167, 39, 226, 203, 162, 223, 28, 74, 244, 159, 54, 201, 192, 168, 19, 85, 103, 82, 148, 3, 153, 210, 89, 245, 171, 171, 12, 248, 40, 74, 199, 65, 96, 42, 84, 83, 48, 21, 188, 134, 45, 247, 134, 166, 109, 170, 68, 212, 99, 235, 74, 202, 162, 108, 130, 128, 122, 16, 79, 242, 30, 157, 26, 75, 57, 24, 18, 124, 217, 74, 155, 13, 171, 205, 194, 193, 103, 134, 224, 204, 46, 105, 135, 166, 192, 163, 186, 42, 71, 51, 156, 161, 8, 131 + 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 47, 232, 95, 17, 240, 230, 80, 129, 174, 158, 23, 76, 114, 185, 43, 18, 254, 148, 147, 230, 66, 216, 167, 62, 180, 213, 238, 33, 108, 29, 84, 139, 99, 206, 212, 253, 92, 116, 137, 31, 0, 104, 45, 91, 250, 109, 141, 114, 189, 53, 35, 60, 108, 156, 170, 206, 150, 114, 150, 187, 198, 13, 62, 153, 133, 13, 169, 167, 242, 221, 40, 168, 186, 203, 104, 82, 47, 238, 142, 179, 90, 37, 9, 70, 245, 176, 122, 247, 42, 87, 75, 7, 20, 89, 166, 123, 14, 26, 230, 156, 49, 94, 0, 94, 72, 58, 171, 239, 115, 174, 155, 7, 151, 17, 60, 206, 193, 134, 70, 87, 215, 88, 21, 194, 63, 26, 106, 105, 124, 213, 252, 152, 192, 71, 115, 13, 181, 5, 169, 15, 170, 196, 174, 228, 170, 192, 91, 76, 110, 220, 89, 47, 248, 144, 189, 251, 167, 149, 248, 226 ]; - assert_eq(ciphertext, expected_body_ciphertext); + for i in 0..expected_body_ciphertext.len() { + assert_eq(ciphertext[i], expected_body_ciphertext[i]); + } + assert_eq(expected_body_ciphertext.len(), ciphertext.len()); } } - -*/ diff 
--git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr index 03b5a33e3d1..04f6eb59691 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr @@ -1,8 +1,9 @@ use dep::protocol_types::{address::AztecAddress, grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint}; -use crate::oracle::encryption::aes128_encrypt; use crate::keys::point_to_symmetric_key::point_to_symmetric_key; +use dep::std::aes128::aes128_encrypt_slice; + struct EncryptedLogHeader { address: AztecAddress, } @@ -13,28 +14,22 @@ impl EncryptedLogHeader { } // @todo Issue(#5901) Figure out if we return the bytes or fields for the log - fn compute_ciphertext(self, secret: GrumpkinPrivateKey, point: GrumpkinPoint) -> [u8; 32] { + fn compute_ciphertext(self, secret: GrumpkinPrivateKey, point: GrumpkinPoint) -> [u8; 48] { let full_key = point_to_symmetric_key(secret, point); let mut sym_key = [0; 16]; let mut iv = [0; 16]; - let mut input = [0; 32]; - let input_slice = self.address.to_field().to_be_bytes(32); for i in 0..16 { sym_key[i] = full_key[i]; iv[i] = full_key[i + 16]; - - // We copy address on the following 2 lines in order to avoid having 2 loops - input[i] = input_slice[i]; - input[i + 16] = input_slice[i + 16]; } - // @todo Issue(#6172) This encryption is currently using an oracle. It is not actually constrained atm. - aes128_encrypt(input, iv, sym_key) + let input: [u8] = self.address.to_field().to_be_bytes(32); + aes128_encrypt_slice(input, iv, sym_key).as_array() } } -// @todo Issue(#6172) This is to be run as a test. But it is currently using the AES oracle so will fail there. 
+#[test] fn test_encrypted_log_header() { let address = AztecAddress::from_field(0xdeadbeef); let header = EncryptedLogHeader::new(address); @@ -50,7 +45,7 @@ fn test_encrypted_log_header() { let ciphertext = header.compute_ciphertext(secret, point); let expected_header_ciphertext = [ - 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 179, 36, 250, 95, 56, 167, 171, 16, 195, 164, 223, 57, 75, 5, 24, 119 + 131, 119, 105, 129, 244, 32, 151, 205, 12, 99, 93, 62, 10, 180, 72, 21, 179, 36, 250, 95, 56, 167, 171, 16, 195, 164, 223, 57, 75, 5, 24, 119, 198, 34, 99, 189, 193, 183, 227, 43, 79, 204, 214, 89, 221, 153, 246, 64 ]; assert_eq(ciphertext, expected_header_ciphertext); diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index 5ab03eabf18..97210ff7b09 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -337,7 +337,7 @@ contract Test { } #[aztec(private)] - fn encrypt(input: [u8; 64], iv: [u8; 16], key: [u8; 16]) -> [u8; 64] { + fn encrypt(input: [u8; 64], iv: [u8; 16], key: [u8; 16]) -> [u8; 80] { aes128_encrypt(input, iv, key) } @@ -347,20 +347,20 @@ contract Test { } #[aztec(private)] - fn compute_note_header_ciphertext(secret: GrumpkinPrivateKey, point: GrumpkinPoint) -> [u8; 32] { + fn compute_note_header_ciphertext(secret: GrumpkinPrivateKey, point: GrumpkinPoint) -> [u8; 48] { EncryptedLogHeader::new(context.this_address()).compute_ciphertext(secret, point) } - // 64 bytes + 32 * #fields = 96 bytes + // 64 bytes + 32 * #fields + 16 = 112 bytes #[aztec(private)] fn compute_note_body_ciphertext( secret: GrumpkinPrivateKey, point: GrumpkinPoint, storage_slot: Field, value: Field - ) -> [u8; 96] { + ) -> [u8; 112] { let note = TestNote::new(value); - EncryptedLogBody::new(storage_slot, TestNote::get_note_type_id(), note).compute_ciphertext(secret, 
point) + EncryptedLogBody::new(storage_slot, TestNote::get_note_type_id(), note).compute_ciphertext(secret, point).as_array() } #[aztec(public)] diff --git a/noir/noir-repo/noir_stdlib/src/aes128.nr b/noir/noir-repo/noir_stdlib/src/aes128.nr index e6e2a5e4997..cd61021a953 100644 --- a/noir/noir-repo/noir_stdlib/src/aes128.nr +++ b/noir/noir-repo/noir_stdlib/src/aes128.nr @@ -2,3 +2,6 @@ // docs:start:aes128 pub fn aes128_encrypt(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8] {} // docs:end:aes128 + +#[foreign(aes128_encrypt)] +pub fn aes128_encrypt_slice(input: [u8], iv: [u8; 16], key: [u8; 16]) -> [u8] {} diff --git a/yarn-project/circuit-types/src/logs/encrypted_log_body.test.ts b/yarn-project/circuit-types/src/logs/encrypted_log_body.test.ts index 170c26078b5..db814bc0d98 100644 --- a/yarn-project/circuit-types/src/logs/encrypted_log_body.test.ts +++ b/yarn-project/circuit-types/src/logs/encrypted_log_body.test.ts @@ -47,7 +47,7 @@ describe('encrypt log body', () => { const noteTypeId = new Fr(1); const storageSlot = new Fr(2); - const body = new EncryptedLogBody(noteTypeId, storageSlot, note); + const body = new EncryptedLogBody(storageSlot, noteTypeId, note); const encrypted = body.computeCiphertext(ephSecretKey, viewingPubKey); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts index 9b8afc328e2..2f6ca3d5e94 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts @@ -9,14 +9,33 @@ describe('aes128', () => { aes128 = new Aes128(); }); + // PKCS#7 padding + const pad = (data: Buffer): Buffer => { + const rawLength = data.length; + const numPaddingBytes = 16 - (rawLength % 16); + const paddingBuffer = Buffer.alloc(numPaddingBytes); + paddingBuffer.fill(numPaddingBytes); + return Buffer.concat([data, paddingBuffer]); + }; + + // PKCS#7 padding removal + 
const removePadding = (paddedBuffer: Buffer): Buffer => { + // We get padding length from the last byte - in PKCS#7 all the padded bytes contain padding length + // and there is always some padding. + const paddingToRemove = paddedBuffer[paddedBuffer.length - 1]; + return paddedBuffer.subarray(0, paddedBuffer.length - paddingToRemove); + }; + it('should correctly encrypt input', () => { const data = randomBytes(32); const key = randomBytes(16); const iv = randomBytes(16); + const paddedData = pad(data); + const cipher = createCipheriv('aes-128-cbc', key, iv); cipher.setAutoPadding(false); - const expected = Buffer.concat([cipher.update(data), cipher.final()]); + const expected = Buffer.concat([cipher.update(paddedData), cipher.final()]); const result: Buffer = aes128.encryptBufferCBC(data, iv, key); @@ -28,13 +47,15 @@ describe('aes128', () => { const key = randomBytes(16); const iv = randomBytes(16); + const paddedData = pad(data); + const cipher = createCipheriv('aes-128-cbc', key, iv); cipher.setAutoPadding(false); - const ciphertext = Buffer.concat([cipher.update(data), cipher.final()]); + const ciphertext = Buffer.concat([cipher.update(paddedData), cipher.final()]); const decipher = createDecipheriv('aes-128-cbc', key, iv); decipher.setAutoPadding(false); - const expected = Buffer.concat([decipher.update(ciphertext), decipher.final()]); + const expected = removePadding(Buffer.concat([decipher.update(ciphertext), decipher.final()])); const result: Buffer = aes128.decryptBufferCBC(ciphertext, iv, key); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts index cf3a8a5ddec..824e83b4b7e 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts @@ -15,13 +15,11 @@ export class Aes128 { */ public encryptBufferCBC(data: Uint8Array, iv: Uint8Array, key: Uint8Array) { const rawLength = 
data.length; - const numPaddingBytes = rawLength % 16 != 0 ? 16 - (rawLength % 16) : 0; + const numPaddingBytes = 16 - (rawLength % 16); const paddingBuffer = Buffer.alloc(numPaddingBytes); - // input num bytes needs to be a multiple of 16 + // input num bytes needs to be a multiple of 16 and at least 1 byte // node uses PKCS#7-Padding scheme, where padding byte value = the number of padding bytes - if (numPaddingBytes != 0) { - paddingBuffer.fill(numPaddingBytes); - } + paddingBuffer.fill(numPaddingBytes); const input = Buffer.concat([data, paddingBuffer]); const api = BarretenbergSync.getSingleton(); @@ -39,8 +37,10 @@ export class Aes128 { */ public decryptBufferCBC(data: Uint8Array, iv: Uint8Array, key: Uint8Array) { const api = BarretenbergSync.getSingleton(); - return Buffer.from( + const paddedBuffer = Buffer.from( api.aesDecryptBufferCbc(new RawBuffer(data), new RawBuffer(iv), new RawBuffer(key), data.length), ); + const paddingToRemove = paddedBuffer[paddedBuffer.length - 1]; + return paddedBuffer.subarray(0, paddedBuffer.length - paddingToRemove); } } From 89ab8eeab35dfeae36efbb1ae159c6600f40e059 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Thu, 9 May 2024 11:56:14 +0200 Subject: [PATCH 15/43] fix: `CombinedConstantData` not registered for serialization (#6292) Fixes: image I decided to not register `CombinedConstantData` directly on JsonRpc server and client since that would just make it all much less readable because CombinedConstantData is not a return value itself on any of AztecNode methods. And since I am a fan of nice readable encapsulated classes instead of the `Pick` type typescript freestyle I refactored ProcessOutput such that we can register that directly on the Json RPC server. 
--- .../src/aztec-node/http_rpc_server.ts | 10 +++- .../aztec-node/src/aztec-node/server.ts | 22 ++++---- .../src/aztec_node/rpc/aztec_node_client.ts | 13 ++++- .../src/interfaces/aztec-node.ts | 4 +- yarn-project/circuit-types/src/mocks.ts | 20 ++++---- yarn-project/circuit-types/src/tx/index.ts | 1 + .../src/tx/public_simulation_output.ts | 48 +++++++++++++++++ .../circuit-types/src/tx/simulated_tx.ts | 51 ++++--------------- 8 files changed, 102 insertions(+), 67 deletions(-) create mode 100644 yarn-project/circuit-types/src/tx/public_simulation_output.ts diff --git a/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts b/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts index 8270b171ffe..6234870fe42 100644 --- a/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts +++ b/yarn-project/aztec-node/src/aztec-node/http_rpc_server.ts @@ -6,6 +6,7 @@ import { LogId, NullifierMembershipWitness, PublicDataWitness, + PublicSimulationOutput, SiblingPath, Tx, TxEffect, @@ -41,7 +42,14 @@ export function createAztecNodeRpcServer(node: AztecNode) { PublicDataWitness, SiblingPath, }, - { Tx, TxReceipt, EncryptedL2BlockL2Logs, UnencryptedL2BlockL2Logs, NullifierMembershipWitness }, + { + PublicSimulationOutput, + Tx, + TxReceipt, + EncryptedL2BlockL2Logs, + UnencryptedL2BlockL2Logs, + NullifierMembershipWitness, + }, // disable methods not part of the AztecNode interface ['start', 'stop'], ); diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 980c9cf6df0..54b15609cd4 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -13,10 +13,10 @@ import { LogType, MerkleTreeId, NullifierMembershipWitness, - type ProcessOutput, type ProverClient, type ProverConfig, PublicDataWitness, + PublicSimulationOutput, type SequencerConfig, type SiblingPath, type Tx, @@ -634,7 +634,7 @@ export class AztecNodeService implements AztecNode { * 
Simulates the public part of a transaction with the current state. * @param tx - The transaction to simulate. **/ - public async simulatePublicCalls(tx: Tx): Promise { + public async simulatePublicCalls(tx: Tx): Promise { this.log.info(`Simulating tx ${tx.getTxHash()}`); const blockNumber = (await this.blockSource.getBlockNumber()) + 1; @@ -674,15 +674,15 @@ export class AztecNodeService implements AztecNode { } this.log.debug(`Simulated tx ${tx.getTxHash()} succeeds`); const [processedTx] = processedTxs; - return { - constants: processedTx.data.constants, - encryptedLogs: processedTx.encryptedLogs, - unencryptedLogs: processedTx.unencryptedLogs, - end: processedTx.data.end, - revertReason: processedTx.revertReason, - publicReturnValues: returns[0], - gasUsed: processedTx.gasUsed, - }; + return new PublicSimulationOutput( + processedTx.encryptedLogs, + processedTx.unencryptedLogs, + processedTx.revertReason, + processedTx.data.constants, + processedTx.data.end, + returns[0], + processedTx.gasUsed, + ); } public async setConfig(config: Partial): Promise { diff --git a/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts b/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts index 79f9795f9f4..8591c53795f 100644 --- a/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts +++ b/yarn-project/circuit-types/src/aztec_node/rpc/aztec_node_client.ts @@ -9,8 +9,9 @@ import { type AztecNode } from '../../interfaces/aztec-node.js'; import { NullifierMembershipWitness } from '../../interfaces/nullifier_tree.js'; import { L2Block } from '../../l2_block.js'; import { EncryptedL2BlockL2Logs, ExtendedUnencryptedL2Log, LogId, UnencryptedL2BlockL2Logs } from '../../logs/index.js'; +import { PublicDataWitness } from '../../public_data_witness.js'; import { SiblingPath } from '../../sibling_path/index.js'; -import { Tx, TxHash, TxReceipt } from '../../tx/index.js'; +import { PublicSimulationOutput, Tx, TxHash, TxReceipt } from 
'../../tx/index.js'; import { TxEffect } from '../../tx_effect.js'; /** @@ -34,9 +35,17 @@ export function createAztecNodeClient(url: string, fetch = defaultFetch): AztecN TxEffect, LogId, TxHash, + PublicDataWitness, SiblingPath, }, - { Tx, TxReceipt, EncryptedL2BlockL2Logs, UnencryptedL2BlockL2Logs, NullifierMembershipWitness }, + { + PublicSimulationOutput, + Tx, + TxReceipt, + EncryptedL2BlockL2Logs, + UnencryptedL2BlockL2Logs, + NullifierMembershipWitness, + }, false, 'node', fetch, diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index d59543943e8..bf1df022131 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -22,7 +22,7 @@ import { import { type MerkleTreeId } from '../merkle_tree_id.js'; import { type PublicDataWitness } from '../public_data_witness.js'; import { type SiblingPath } from '../sibling_path/index.js'; -import { type ProcessOutput, type Tx, type TxHash, type TxReceipt } from '../tx/index.js'; +import { type PublicSimulationOutput, type Tx, type TxHash, type TxReceipt } from '../tx/index.js'; import { type TxEffect } from '../tx_effect.js'; import { type SequencerConfig } from './configs.js'; import { type L2BlockNumber } from './l2_block_number.js'; @@ -283,7 +283,7 @@ export interface AztecNode { * This currently just checks that the transaction execution succeeds. * @param tx - The transaction to simulate. **/ - simulatePublicCalls(tx: Tx): Promise; + simulatePublicCalls(tx: Tx): Promise; /** * Updates the configuration of this node. 
diff --git a/yarn-project/circuit-types/src/mocks.ts b/yarn-project/circuit-types/src/mocks.ts index 9cda922037c..ba36cd68556 100644 --- a/yarn-project/circuit-types/src/mocks.ts +++ b/yarn-project/circuit-types/src/mocks.ts @@ -27,7 +27,7 @@ import { type ContractInstanceWithAddress, SerializableContractInstance } from ' import { EncryptedL2Log } from './logs/encrypted_l2_log.js'; import { EncryptedFunctionL2Logs, EncryptedTxL2Logs, Note, UnencryptedTxL2Logs } from './logs/index.js'; import { ExtendedNote } from './notes/index.js'; -import { type ProcessOutput, type ProcessReturnValues, SimulatedTx, Tx, TxHash } from './tx/index.js'; +import { type ProcessReturnValues, PublicSimulationOutput, SimulatedTx, Tx, TxHash } from './tx/index.js'; /** * Testing utility to create empty logs composed from a single empty log. @@ -129,15 +129,15 @@ export const mockTxForRollup = (seed = 1, { hasLogs = false }: { hasLogs?: boole export const mockSimulatedTx = (seed = 1, hasLogs = true) => { const tx = mockTx(seed, { hasLogs }); const dec: ProcessReturnValues = [new Fr(1n), new Fr(2n), new Fr(3n), new Fr(4n)]; - const output: ProcessOutput = { - constants: makeCombinedConstantData(), - encryptedLogs: tx.encryptedLogs, - unencryptedLogs: tx.unencryptedLogs, - end: makeCombinedAccumulatedData(), - revertReason: undefined, - publicReturnValues: dec, - gasUsed: {}, - }; + const output = new PublicSimulationOutput( + tx.encryptedLogs, + tx.unencryptedLogs, + undefined, + makeCombinedConstantData(), + makeCombinedAccumulatedData(), + dec, + {}, + ); return new SimulatedTx(tx, dec, output); }; diff --git a/yarn-project/circuit-types/src/tx/index.ts b/yarn-project/circuit-types/src/tx/index.ts index e113a56430e..6d69130adaf 100644 --- a/yarn-project/circuit-types/src/tx/index.ts +++ b/yarn-project/circuit-types/src/tx/index.ts @@ -3,4 +3,5 @@ export * from './simulated_tx.js'; export * from './tx_hash.js'; export * from './tx_receipt.js'; export * from './processed_tx.js'; +export * 
from './public_simulation_output.js'; export * from './tx_validator.js'; diff --git a/yarn-project/circuit-types/src/tx/public_simulation_output.ts b/yarn-project/circuit-types/src/tx/public_simulation_output.ts new file mode 100644 index 00000000000..24443814680 --- /dev/null +++ b/yarn-project/circuit-types/src/tx/public_simulation_output.ts @@ -0,0 +1,48 @@ +import { CombinedAccumulatedData, CombinedConstantData, Fr, Gas } from '@aztec/circuits.js'; +import { mapValues } from '@aztec/foundation/collection'; + +import { EncryptedTxL2Logs, UnencryptedTxL2Logs } from '../logs/tx_l2_logs.js'; +import { type SimulationError } from '../simulation_error.js'; +import { type PublicKernelType } from './processed_tx.js'; + +/** Return values of simulating a circuit. */ +export type ProcessReturnValues = Fr[] | undefined; + +/** + * Outputs of processing the public component of a transaction. + */ +export class PublicSimulationOutput { + constructor( + public encryptedLogs: EncryptedTxL2Logs, + public unencryptedLogs: UnencryptedTxL2Logs, + public revertReason: SimulationError | undefined, + public constants: CombinedConstantData, + public end: CombinedAccumulatedData, + public publicReturnValues: ProcessReturnValues, + public gasUsed: Partial>, + ) {} + + toJSON() { + return { + encryptedLogs: this.encryptedLogs.toJSON(), + unencryptedLogs: this.unencryptedLogs.toJSON(), + revertReason: this.revertReason, + constants: this.constants.toBuffer().toString('hex'), + end: this.end.toBuffer().toString('hex'), + publicReturnValues: this.publicReturnValues?.map(fr => fr.toString()), + gasUsed: mapValues(this.gasUsed, gas => gas?.toJSON()), + }; + } + + static fromJSON(json: any): PublicSimulationOutput { + return new PublicSimulationOutput( + EncryptedTxL2Logs.fromJSON(json.encryptedLogs), + UnencryptedTxL2Logs.fromJSON(json.unencryptedLogs), + json.revertReason, + CombinedConstantData.fromBuffer(Buffer.from(json.constants, 'hex')), + 
CombinedAccumulatedData.fromBuffer(Buffer.from(json.end, 'hex')), + json.publicReturnValues?.map(Fr.fromString), + mapValues(json.gasUsed, gas => (gas ? Gas.fromJSON(gas) : undefined)), + ); + } +} diff --git a/yarn-project/circuit-types/src/tx/simulated_tx.ts b/yarn-project/circuit-types/src/tx/simulated_tx.ts index 61883a5d1f5..45387f1664a 100644 --- a/yarn-project/circuit-types/src/tx/simulated_tx.ts +++ b/yarn-project/circuit-types/src/tx/simulated_tx.ts @@ -1,44 +1,9 @@ -import { CombinedAccumulatedData, CombinedConstantData, Fr, Gas } from '@aztec/circuits.js'; -import { mapValues } from '@aztec/foundation/collection'; +import { Fr, Gas } from '@aztec/circuits.js'; -import { EncryptedTxL2Logs, UnencryptedTxL2Logs } from '../logs/index.js'; -import { type ProcessedTx, PublicKernelType } from './processed_tx.js'; +import { PublicKernelType } from './processed_tx.js'; +import { type ProcessReturnValues, PublicSimulationOutput } from './public_simulation_output.js'; import { Tx } from './tx.js'; -/** Return values of simulating a circuit. */ -export type ProcessReturnValues = Fr[] | undefined; - -/** - * Outputs of processing the public component of a transaction. - * REFACTOR: Rename. 
- */ -export type ProcessOutput = Pick & - Pick & { publicReturnValues: ProcessReturnValues }; - -function processOutputToJSON(output: ProcessOutput) { - return { - encryptedLogs: output.encryptedLogs.toJSON(), - unencryptedLogs: output.unencryptedLogs.toJSON(), - revertReason: output.revertReason, - constants: output.constants.toBuffer().toString('hex'), - end: output.end.toBuffer().toString('hex'), - publicReturnValues: output.publicReturnValues?.map(fr => fr.toString()), - gasUsed: mapValues(output.gasUsed, gas => gas?.toJSON()), - }; -} - -function processOutputFromJSON(json: any): ProcessOutput { - return { - encryptedLogs: EncryptedTxL2Logs.fromJSON(json.encryptedLogs), - unencryptedLogs: UnencryptedTxL2Logs.fromJSON(json.unencryptedLogs), - revertReason: json.revertReason, - constants: CombinedConstantData.fromBuffer(Buffer.from(json.constants, 'hex')), - end: CombinedAccumulatedData.fromBuffer(Buffer.from(json.end, 'hex')), - publicReturnValues: json.publicReturnValues?.map(Fr.fromString), - gasUsed: mapValues(json.gasUsed, gas => (gas ? Gas.fromJSON(gas) : undefined)), - }; -} - // REFACTOR: Review what we need to expose to the user when running a simulation. // Eg tx already has encrypted and unencrypted logs, but those cover only the ones // emitted during private. We need the ones from ProcessOutput to include the public @@ -46,7 +11,11 @@ function processOutputFromJSON(json: any): ProcessOutput { // the public side of things. This also points at this class needing to be split into // two: one with just private simulation, and one that also includes public simulation. export class SimulatedTx { - constructor(public tx: Tx, public privateReturnValues?: ProcessReturnValues, public publicOutput?: ProcessOutput) {} + constructor( + public tx: Tx, + public privateReturnValues?: ProcessReturnValues, + public publicOutput?: PublicSimulationOutput, + ) {} /** * Returns suggested total and teardown gas limits for the simulated tx. 
@@ -79,7 +48,7 @@ export class SimulatedTx { return { tx: this.tx.toJSON(), privateReturnValues: this.privateReturnValues?.map(fr => fr.toString()), - publicOutput: this.publicOutput && processOutputToJSON(this.publicOutput), + publicOutput: this.publicOutput && this.publicOutput.toJSON(), }; } @@ -90,7 +59,7 @@ export class SimulatedTx { */ public static fromJSON(obj: any) { const tx = Tx.fromJSON(obj.tx); - const publicOutput = obj.publicOutput ? processOutputFromJSON(obj.publicOutput) : undefined; + const publicOutput = obj.publicOutput ? PublicSimulationOutput.fromJSON(obj.publicOutput) : undefined; const privateReturnValues = obj.privateReturnValues?.map(Fr.fromString); return new SimulatedTx(tx, privateReturnValues, publicOutput); From eae5822cfcf47d03739e09911c183ba9f4ced18b Mon Sep 17 00:00:00 2001 From: PhilWindle <60546371+PhilWindle@users.noreply.github.com> Date: Thu, 9 May 2024 11:24:04 +0100 Subject: [PATCH 16/43] feat: Private Kernel Recursion (#6278) This PR introduces recursive verification to the private kernel circuits. Both app circuit and previous kernel circuit proofs are verified. This closes #5978 The changes can be largely categorised as: 1. PXE modifications to pass proofs and verification keys from the output of a proving process as inputs to the next simulation/proving process. 2. Serialisation of `PrivateCircuitPublicInputs` and `PrivateKernelCircuitPublicInputs` structs to fields. 3. Aggregation of proofs using Noir's `verify_proof` api. Additional task create [here](https://github.com/AztecProtocol/aztec-packages/issues/6285) to prevent the specification of `pub` on arguments to private functions. 
--- .../src/core/libraries/ConstantsGen.sol | 29 ++++ .../schnorr_account_contract/src/main.nr | 2 +- .../crates/private-kernel-init/src/main.nr | 3 +- .../crates/private-kernel-inner/src/main.nr | 3 +- .../kernel_circuit_public_inputs_composer.nr | 2 +- .../src/private_kernel_init.nr | 7 +- .../src/private_kernel_inner.nr | 18 +-- .../src/private_kernel_tail.nr | 19 ++- .../src/private_kernel_tail_to_public.nr | 5 +- .../private-kernel-tail-to-public/src/main.nr | 3 +- .../crates/private-kernel-tail/src/main.nr | 3 +- .../src/public_kernel_tail.nr | 1 - .../rollup-lib/src/base/base_rollup_inputs.nr | 2 +- .../crates/types/src/abis.nr | 1 + .../combined_accumulated_data.nr | 28 +++- .../private_accumulated_data.nr | 45 +++++- .../crates/types/src/abis/call_request.nr | 19 ++- .../crates/types/src/abis/caller_context.nr | 16 +- .../types/src/abis/combined_constant_data.nr | 17 +- .../kernel_circuit_public_inputs.nr | 3 - .../private_kernel_circuit_public_inputs.nr | 20 ++- ...te_kernel_circuit_public_inputs_builder.nr | 5 - .../public_kernel_circuit_public_inputs.nr | 2 - ...ic_kernel_circuit_public_inputs_builder.nr | 6 +- .../crates/types/src/abis/kernel_data.nr | 24 +-- .../abis/private_kernel/private_call_data.nr | 19 ++- .../types/src/abis/private_kernel_data.nr | 34 ++++ .../crates/types/src/abis/public_data_read.nr | 10 +- .../src/abis/public_data_update_request.nr | 10 +- .../crates/types/src/abis/read_request.nr | 13 +- .../rollup_validation_requests.nr | 8 +- .../validation_requests.nr | 36 ++++- .../crates/types/src/constants.nr | 14 ++ .../crates/types/src/hash.nr | 2 +- .../crates/types/src/tests/fixture_builder.nr | 44 ++--- .../src/tests/private_call_data_builder.nr | 8 +- .../brillig/brillig_gen/brillig_black_box.rs | 4 +- yarn-project/circuits.js/src/constants.gen.ts | 36 +++++ .../src/structs/kernel/private_call_data.ts | 13 +- .../src/structs/kernel/private_kernel_data.ts | 18 +-- .../src/structs/verification_key.ts | 8 + 
.../circuits.js/src/tests/factories.ts | 4 +- .../client_prover_integration.test.ts | 2 +- .../src/type_conversion.ts | 21 +-- .../bb_prover/bb_native_proof_creator.ts | 153 +++++++++++------- .../kernel_prover/interface/proof_creator.ts | 32 +++- .../src/kernel_prover/kernel_prover.test.ts | 19 ++- .../pxe/src/kernel_prover/kernel_prover.ts | 39 ++--- .../src/kernel_prover/proving_data_oracle.ts | 4 +- .../kernel_prover/test/test_circuit_prover.ts | 48 +++--- .../pxe/src/pxe_service/pxe_service.ts | 2 +- 51 files changed, 617 insertions(+), 267 deletions(-) create mode 100644 noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel_data.nr diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index a026721c12f..ac060def61e 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -128,6 +128,7 @@ library Constants { uint256 internal constant NULLIFIER_LENGTH = 3; uint256 internal constant SCOPED_NULLIFIER_LENGTH = NULLIFIER_LENGTH + 1; uint256 internal constant SIDE_EFFECT_LENGTH = 2; + uint256 internal constant ROLLUP_VALIDATION_REQUESTS_LENGTH = MAX_BLOCK_NUMBER_LENGTH; uint256 internal constant STATE_REFERENCE_LENGTH = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH; uint256 internal constant TX_CONTEXT_LENGTH = 2 + GAS_SETTINGS_LENGTH; @@ -157,6 +158,34 @@ library Constants { + 1; uint256 internal constant PRIVATE_CALL_STACK_ITEM_LENGTH = AZTEC_ADDRESS_LENGTH + FUNCTION_DATA_LENGTH + PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH; + uint256 internal constant SCOPED_READ_REQUEST_LEN = READ_REQUEST_LENGTH + 1; + uint256 internal constant PUBLIC_DATA_READ_LENGTH = 2; + uint256 internal constant VALIDATION_REQUESTS_LENGTH = ROLLUP_VALIDATION_REQUESTS_LENGTH + + (SCOPED_READ_REQUEST_LEN * MAX_NOTE_HASH_READ_REQUESTS_PER_TX) + + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_READ_REQUESTS_PER_TX) + + (SCOPED_READ_REQUEST_LEN * 
MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX) + + (SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX) + + (PUBLIC_DATA_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_TX); + uint256 internal constant PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 2; + uint256 internal constant COMBINED_ACCUMULATED_DATA_LENGTH = MAX_NEW_NOTE_HASHES_PER_TX + + MAX_NEW_NULLIFIERS_PER_TX + MAX_NEW_L2_TO_L1_MSGS_PER_TX + 4 + + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + GAS_LENGTH; + uint256 internal constant COMBINED_CONSTANT_DATA_LENGTH = + HEADER_LENGTH + TX_CONTEXT_LENGTH + GLOBAL_VARIABLES_LENGTH; + uint256 internal constant CALLER_CONTEXT_LENGTH = 2 * AZTEC_ADDRESS_LENGTH; + uint256 internal constant CALL_REQUEST_LENGTH = + 1 + AZTEC_ADDRESS_LENGTH + CALLER_CONTEXT_LENGTH + 2; + uint256 internal constant PRIVATE_ACCUMULATED_DATA_LENGTH = ( + SCOPED_NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_TX + ) + (SCOPED_NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_TX) + + (MAX_NEW_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH) + + (SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX) + + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX) + 2 + + (CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX) + + (CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX); + uint256 internal constant PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 1 + + VALIDATION_REQUESTS_LENGTH + PRIVATE_ACCUMULATED_DATA_LENGTH + COMBINED_CONSTANT_DATA_LENGTH + + CALL_REQUEST_LENGTH; uint256 internal constant ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH = 2 + FUNCTION_DATA_LENGTH + CALL_CONTEXT_LENGTH; uint256 internal constant GET_NOTES_ORACLE_RETURN_LENGTH = 674; diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr index 1ec2152e7c3..d42ee2119d6 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr +++ 
b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr @@ -39,7 +39,7 @@ contract SchnorrAccount { // Note: If you globally change the entrypoint signature don't forget to update default_entrypoint.ts file #[aztec(private)] #[aztec(noinitcheck)] - fn entrypoint(app_payload: pub AppPayload, fee_payload: pub FeePayload) { + fn entrypoint(app_payload: AppPayload, fee_payload: FeePayload) { let actions = AccountActions::private( &mut context, storage.approved_actions.storage_slot, diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-init/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-init/src/main.nr index 635b9da54d9..76a82613767 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-init/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-init/src/main.nr @@ -1,6 +1,7 @@ use dep::private_kernel_lib::PrivateKernelInitCircuitPrivateInputs; use dep::types::PrivateKernelCircuitPublicInputs; -fn main(input: PrivateKernelInitCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { +#[recursive] +fn main(input: PrivateKernelInitCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { input.native_private_kernel_circuit_initial() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-inner/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-inner/src/main.nr index 861d229580c..686cce6b595 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-inner/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-inner/src/main.nr @@ -1,6 +1,7 @@ use dep::private_kernel_lib::PrivateKernelInnerCircuitPrivateInputs; use dep::types::PrivateKernelCircuitPublicInputs; -fn main(input: PrivateKernelInnerCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { +#[recursive] +fn main(input: PrivateKernelInnerCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { 
input.native_private_kernel_circuit_inner() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr index 5abc9c8f4f4..18115cc2ea6 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr @@ -1,7 +1,7 @@ use dep::reset_kernel_lib::verify_squashed_transient_note_hashes_and_nullifiers; use dep::types::{ abis::{ - kernel_data::PrivateKernelData, + private_kernel_data::PrivateKernelData, kernel_circuit_public_inputs::{KernelCircuitPublicInputs, PrivateKernelCircuitPublicInputsBuilder, PublicKernelCircuitPublicInputs}, note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::{SideEffect, Ordered}, gas::Gas }, diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr index 64a08cdc7b1..bb4a7db587c 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr @@ -1,7 +1,7 @@ use crate::{common, private_kernel_circuit_public_inputs_composer::PrivateKernelCircuitPublicInputsComposer}; use dep::types::{ abis::{ - private_kernel::private_call_data::PrivateCallData, + private_kernel::private_call_data::{PrivateCallData, verify_private_call}, kernel_circuit_public_inputs::PrivateKernelCircuitPublicInputs }, constants::MAX_NEW_NOTE_HASHES_PER_CALL, mocked::verify_private_function_proof, @@ -68,14 +68,15 @@ impl PrivateKernelInitCircuitPrivateInputs { pub fn native_private_kernel_circuit_initial(self) -> PrivateKernelCircuitPublicInputs { let 
private_call_public_inputs = self.private_call.call_stack_item.public_inputs; + // verify/aggregate the private call proof + verify_private_call(self.private_call); + self.validate_inputs(); common::validate_private_call_data(self.private_call); self.validate_this_private_call_against_tx_request(); - assert(verify_private_function_proof(self.private_call.proof), "Invalid private function proof."); - PrivateKernelCircuitPublicInputsComposer::new_from_tx_request(self.tx_request, private_call_public_inputs).compose( private_call_public_inputs, self.hints.note_hash_nullifier_counters, diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr index 6a291bafbfa..61a06ea345b 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr @@ -1,12 +1,12 @@ use crate::{common, private_kernel_circuit_public_inputs_composer::PrivateKernelCircuitPublicInputsComposer}; use dep::types::{ abis::{ - kernel_data::PrivateKernelData, private_kernel::private_call_data::PrivateCallData, + private_kernel_data::{PrivateKernelData, verify_previous_kernel_proof}, + private_kernel::private_call_data::{PrivateCallData, verify_private_call}, kernel_circuit_public_inputs::{PrivateKernelCircuitPublicInputs, PrivateKernelCircuitPublicInputsBuilder}, side_effect::SideEffect }, - constants::MAX_NEW_NOTE_HASHES_PER_CALL, mocked::verify_previous_kernel_state, - utils::arrays::array_length + constants::MAX_NEW_NOTE_HASHES_PER_CALL, utils::arrays::array_length }; struct PrivateKernelInnerHints { @@ -30,6 +30,12 @@ impl PrivateKernelInnerCircuitPrivateInputs { let private_call_public_inputs = self.private_call.call_stack_item.public_inputs; let previous_kernel_public_inputs = self.previous_kernel.public_inputs; + // 
verify/aggregate the private call proof + verify_private_call(self.private_call); + + // verify/aggregate the previous kernel + verify_previous_kernel_proof(self.previous_kernel); + common::validate_previous_kernel_values(previous_kernel_public_inputs.end); self.validate_inputs(); @@ -42,12 +48,6 @@ impl PrivateKernelInnerCircuitPrivateInputs { let call_request = private_call_stack[private_call_stack_size - 1]; common::validate_call_against_request(self.private_call, call_request); - let (is_previous_state_valid, _updated_aggregation_object) = verify_previous_kernel_state( - previous_kernel_public_inputs.aggregation_object, - self.private_call.proof - ); - assert(is_previous_state_valid); - PrivateKernelCircuitPublicInputsComposer::new_from_previous_kernel(self.previous_kernel.public_inputs).compose( private_call_public_inputs, self.hints.note_hash_nullifier_counters, diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr index 2471caad0be..51d6efd7a07 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr @@ -1,14 +1,14 @@ use crate::kernel_circuit_public_inputs_composer::KernelCircuitPublicInputsComposer; use dep::reset_kernel_lib::{NoteHashReadRequestHints, NullifierReadRequestHints, PrivateValidationRequestProcessor}; use dep::types::{ - abis::{ - kernel_data::PrivateKernelData, kernel_circuit_public_inputs::KernelCircuitPublicInputs, - note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::SideEffect -}, - constants::{ - MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, - MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX -}, + abis::{ + private_kernel_data::{PrivateKernelData, 
verify_previous_kernel_proof}, kernel_circuit_public_inputs::KernelCircuitPublicInputs, + note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::SideEffect + }, + constants::{ + MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, + MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX + }, grumpkin_private_key::GrumpkinPrivateKey, utils::arrays::array_length }; @@ -47,6 +47,9 @@ impl PrivateKernelTailCircuitPrivateInputs { array_length(previous_public_inputs.end.public_call_stack), 0, "Public call stack must be empty when executing the tail circuit" ); + // verify/aggregate the previous kernel + verify_previous_kernel_proof(self.previous_kernel); + let note_hash_tree_root = previous_public_inputs.constants.historical_header.state.partial.note_hash_tree.root; let nullifier_tree_root = previous_public_inputs.constants.historical_header.state.partial.nullifier_tree.root; PrivateValidationRequestProcessor { diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr index a16c3ea41ba..7b7e17eba88 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr @@ -2,7 +2,7 @@ use crate::kernel_circuit_public_inputs_composer::KernelCircuitPublicInputsCompo use dep::reset_kernel_lib::{NoteHashReadRequestHints, NullifierReadRequestHints, PrivateValidationRequestProcessor}; use dep::types::{ abis::{ - kernel_data::PrivateKernelData, kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, + private_kernel_data::{PrivateKernelData, verify_previous_kernel_proof}, kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, note_hash::ScopedNoteHash, 
nullifier::ScopedNullifier, side_effect::SideEffect }, constants::{ @@ -47,6 +47,9 @@ impl PrivateKernelTailToPublicCircuitPrivateInputs { array_length(previous_public_inputs.end.public_call_stack) != 0, "Public call stack must not be empty when exporting public kernel data from the tail circuit" ); + // verify/aggregate the previous kernel + verify_previous_kernel_proof(self.previous_kernel); + let note_hash_tree_root = previous_public_inputs.constants.historical_header.state.partial.note_hash_tree.root; let nullifier_tree_root = previous_public_inputs.constants.historical_header.state.partial.nullifier_tree.root; PrivateValidationRequestProcessor { diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public/src/main.nr index 85050fa143b..fd35f8397f3 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public/src/main.nr @@ -1,6 +1,7 @@ use dep::private_kernel_lib::PrivateKernelTailToPublicCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -fn main(input: PrivateKernelTailToPublicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +#[recursive] +fn main(input: PrivateKernelTailToPublicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.execute() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail/src/main.nr index 681eaacb72d..41485a79a2b 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail/src/main.nr @@ -1,6 +1,7 @@ use dep::private_kernel_lib::PrivateKernelTailCircuitPrivateInputs; use dep::types::KernelCircuitPublicInputs; -fn main(input: PrivateKernelTailCircuitPrivateInputs) 
-> pub KernelCircuitPublicInputs { +#[recursive] +fn main(input: PrivateKernelTailCircuitPrivateInputs) -> pub KernelCircuitPublicInputs { input.native_private_kernel_circuit_tail() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr index f8bc620c100..e92e4c791a4 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_tail.nr @@ -92,7 +92,6 @@ impl PublicKernelTailCircuitPrivateInputs { let end = self.propagate_accumulated_data(); KernelCircuitPublicInputs { - aggregation_object: previous_public_inputs.aggregation_object, rollup_validation_requests: previous_public_inputs.validation_requests.for_rollup, end, constants: previous_public_inputs.constants, diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr index 238cc1dbd13..704b1ac6e65 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/base_rollup_inputs.nr @@ -247,7 +247,7 @@ impl BaseRollupInputs { // TODO(Kev): This aggregate_proof method is duplicated in a lot of places fn aggregate_proofs(self) -> AggregationObject { // TODO: for now we simply return the aggregation object from the first proof - self.kernel_data.public_inputs.aggregation_object + AggregationObject {} } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr index 2c2a9325bcb..9b95b164f5a 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis.nr @@ -27,6 +27,7 @@ 
mod max_block_number; mod private_kernel; mod kernel_circuit_public_inputs; mod kernel_data; +mod private_kernel_data; mod call_request; mod private_call_stack_item; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr index 1a49b8de968..21e62582622 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/combined_accumulated_data.nr @@ -7,9 +7,9 @@ use crate::{ }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, COMBINED_ACCUMULATED_DATA_LENGTH }, - utils::arrays::array_merge, traits::Empty + utils::arrays::array_merge, traits::{Empty, Serialize} }; struct CombinedAccumulatedData { @@ -82,3 +82,27 @@ impl Empty for CombinedAccumulatedData { } } } + +impl Serialize for CombinedAccumulatedData { + fn serialize(self) -> [Field; COMBINED_ACCUMULATED_DATA_LENGTH] { + let mut fields: BoundedVec = BoundedVec::new(); + + fields.extend_from_array(self.new_note_hashes); + fields.extend_from_array(self.new_nullifiers); + fields.extend_from_array(self.new_l2_to_l1_msgs); + fields.push(self.encrypted_logs_hash); + fields.push(self.unencrypted_logs_hash); + fields.push(self.encrypted_log_preimages_length); + fields.push(self.unencrypted_log_preimages_length); + + for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX { + fields.extend_from_array(self.public_data_update_requests[i].serialize()); + } + + fields.extend_from_array(self.gas_used.serialize()); + + assert_eq(fields.len(), COMBINED_ACCUMULATED_DATA_LENGTH); + + fields.storage + } +} diff --git 
a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr index 12c19d640b1..502acaab910 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data.nr @@ -3,12 +3,12 @@ use crate::{ call_request::CallRequest, gas::Gas, note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::SideEffect }, - messaging::l2_to_l1_message::ScopedL2ToL1Message + traits::Serialize, messaging::l2_to_l1_message::ScopedL2ToL1Message }; use crate::constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, MAX_NEW_L2_TO_L1_MSGS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, - MAX_UNENCRYPTED_LOGS_PER_TX + MAX_UNENCRYPTED_LOGS_PER_TX, PRIVATE_ACCUMULATED_DATA_LENGTH }; struct PrivateAccumulatedData { @@ -27,3 +27,44 @@ struct PrivateAccumulatedData { private_call_stack: [CallRequest; MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX], public_call_stack: [CallRequest; MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX], } + +impl Serialize for PrivateAccumulatedData { + fn serialize(self) -> [Field; PRIVATE_ACCUMULATED_DATA_LENGTH] { + let mut fields: BoundedVec = BoundedVec::new(); + + for i in 0..MAX_NEW_NOTE_HASHES_PER_TX { + fields.extend_from_array(self.new_note_hashes[i].serialize()); + } + + for i in 0..MAX_NEW_NULLIFIERS_PER_TX { + fields.extend_from_array(self.new_nullifiers[i].serialize()); + } + + for i in 0..MAX_NEW_L2_TO_L1_MSGS_PER_TX { + fields.extend_from_array(self.new_l2_to_l1_msgs[i].serialize()); + } + + for i in 0..MAX_ENCRYPTED_LOGS_PER_TX { + fields.extend_from_array(self.encrypted_logs_hashes[i].serialize()); + } + + for i in 0..MAX_UNENCRYPTED_LOGS_PER_TX { + 
fields.extend_from_array(self.unencrypted_logs_hashes[i].serialize()); + } + + fields.push(self.encrypted_log_preimages_length); + fields.push(self.unencrypted_log_preimages_length); + + for i in 0..MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX { + fields.extend_from_array(self.private_call_stack[i].serialize()); + } + + for i in 0..MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX { + fields.extend_from_array(self.public_call_stack[i].serialize()); + } + + assert_eq(fields.len(), PRIVATE_ACCUMULATED_DATA_LENGTH); + + fields.storage + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/call_request.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/call_request.nr index 8eca6227d7c..140b1967ca7 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/call_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/call_request.nr @@ -1,7 +1,8 @@ use crate::address::AztecAddress; use dep::std::cmp::Eq; -use crate::traits::Empty; +use crate::traits::{Empty, Serialize}; use crate::abis::caller_context::CallerContext; +use crate::constants::CALL_REQUEST_LENGTH; struct CallRequest { hash: Field, @@ -38,3 +39,19 @@ impl CallRequest { self.hash == 0 } } + +impl Serialize for CallRequest { + fn serialize(self) -> [Field; CALL_REQUEST_LENGTH] { + let mut fields: BoundedVec = BoundedVec::new(); + + fields.push(self.hash); + fields.extend_from_array(self.caller_contract_address.serialize()); + fields.extend_from_array(self.caller_context.serialize()); + fields.push(self.start_side_effect_counter as Field); + fields.push(self.end_side_effect_counter as Field); + + assert_eq(fields.len(), CALL_REQUEST_LENGTH); + + fields.storage + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/caller_context.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/caller_context.nr index 829429e4e9e..70c929fc04a 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/caller_context.nr +++ 
b/noir-projects/noir-protocol-circuits/crates/types/src/abis/caller_context.nr @@ -1,6 +1,7 @@ use crate::address::AztecAddress; use dep::std::cmp::Eq; -use crate::traits::Empty; +use crate::traits::{Empty, Serialize}; +use crate::constants::CALLER_CONTEXT_LENGTH; struct CallerContext { msg_sender: AztecAddress, @@ -28,3 +29,16 @@ impl CallerContext { self.msg_sender.is_zero() & self.storage_contract_address.is_zero() } } + +impl Serialize for CallerContext { + fn serialize(self) -> [Field; CALLER_CONTEXT_LENGTH] { + let mut fields: BoundedVec = BoundedVec::new(); + + fields.extend_from_array(self.msg_sender.serialize()); + fields.extend_from_array(self.storage_contract_address.serialize()); + + assert_eq(fields.len(), CALLER_CONTEXT_LENGTH); + + fields.storage + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr index 0d823df58d2..2fcd910a23b 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr @@ -1,7 +1,8 @@ use crate::transaction::tx_context::TxContext; use crate::header::Header; -use crate::traits::Empty; +use crate::traits::{Empty, Serialize}; use crate::abis::global_variables::GlobalVariables; +use crate::constants::COMBINED_CONSTANT_DATA_LENGTH; struct CombinedConstantData { historical_header: Header, @@ -29,3 +30,17 @@ impl Empty for CombinedConstantData { } } } + +impl Serialize for CombinedConstantData { + fn serialize(self) -> [Field; COMBINED_CONSTANT_DATA_LENGTH] { + let mut fields: BoundedVec = BoundedVec::new(); + + fields.extend_from_array(self.historical_header.serialize()); + fields.extend_from_array(self.tx_context.serialize()); + fields.extend_from_array(self.global_variables.serialize()); + + assert_eq(fields.len(), COMBINED_CONSTANT_DATA_LENGTH); + + fields.storage + } +} 
diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/kernel_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/kernel_circuit_public_inputs.nr index 5584918af23..5256f275a3e 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/kernel_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/kernel_circuit_public_inputs.nr @@ -8,7 +8,6 @@ use crate::{ use crate::mocked::AggregationObject; struct KernelCircuitPublicInputs { - aggregation_object: AggregationObject, rollup_validation_requests: RollupValidationRequests, end: CombinedAccumulatedData, constants: CombinedConstantData, @@ -28,7 +27,6 @@ impl KernelCircuitPublicInputs { impl Empty for KernelCircuitPublicInputs { fn empty() -> Self { KernelCircuitPublicInputs { - aggregation_object: AggregationObject::empty(), rollup_validation_requests: RollupValidationRequests::empty(), end: CombinedAccumulatedData::empty(), constants: CombinedConstantData::empty(), @@ -56,7 +54,6 @@ mod tests { #[test] unconstrained fn non_empty_gas_and_fee() { let mut inputs = KernelCircuitPublicInputs { - aggregation_object: AggregationObject::empty(), rollup_validation_requests: RollupValidationRequests::empty(), end: CombinedAccumulatedData::empty(), constants: CombinedConstantData::empty(), diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs.nr index 6715590d341..e7aee9e9d52 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs.nr +++ 
b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs.nr @@ -2,13 +2,29 @@ use crate::abis::{ accumulated_data::PrivateAccumulatedData, combined_constant_data::CombinedConstantData, validation_requests::ValidationRequests, call_request::CallRequest }; -use crate::mocked::AggregationObject; +use crate::constants::PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH; +use crate::traits::Serialize; struct PrivateKernelCircuitPublicInputs { - aggregation_object: AggregationObject, min_revertible_side_effect_counter: u32, validation_requests: ValidationRequests, end: PrivateAccumulatedData, constants: CombinedConstantData, public_teardown_call_request: CallRequest, } + +impl Serialize for PrivateKernelCircuitPublicInputs { + fn serialize(self) -> [Field; PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH] { + let mut fields: BoundedVec = BoundedVec::new(); + + fields.push(self.min_revertible_side_effect_counter as Field); + fields.extend_from_array(self.validation_requests.serialize()); + fields.extend_from_array(self.end.serialize()); + fields.extend_from_array(self.constants.serialize()); + fields.extend_from_array(self.public_teardown_call_request.serialize()); + + assert_eq(fields.len(), PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH); + + fields.storage + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr index a6424f53c89..aa137a82225 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr @@ -17,7 +17,6 @@ use crate::{ // .finish_tail: 
KernelCircuitPublicInputs (from KernelCircuitPublicInputsComposer) // .finish_to_public: PublicKernelCircuitPublicInputs (from KernelCircuitPublicInputsComposer) struct PrivateKernelCircuitPublicInputsBuilder { - aggregation_object: AggregationObject, min_revertible_side_effect_counter: u32, validation_requests: ValidationRequestsBuilder, end: PrivateAccumulatedDataBuilder, @@ -28,7 +27,6 @@ struct PrivateKernelCircuitPublicInputsBuilder { impl PrivateKernelCircuitPublicInputsBuilder { pub fn finish(self) -> PrivateKernelCircuitPublicInputs { PrivateKernelCircuitPublicInputs { - aggregation_object: self.aggregation_object, min_revertible_side_effect_counter: self.min_revertible_side_effect_counter, validation_requests: self.validation_requests.finish(), end: self.end.finish(), @@ -39,7 +37,6 @@ impl PrivateKernelCircuitPublicInputsBuilder { pub fn finish_tail(self, teardown_gas: Gas) -> KernelCircuitPublicInputs { KernelCircuitPublicInputs { - aggregation_object: self.aggregation_object, rollup_validation_requests: self.validation_requests.to_rollup(), end: self.end.to_combined(teardown_gas), constants: self.constants, @@ -56,7 +53,6 @@ impl PrivateKernelCircuitPublicInputsBuilder { let (end_non_revertible, end) = self.end.split_to_public(min_revertible_side_effect_counter, teardown_gas); PublicKernelCircuitPublicInputs { - aggregation_object: self.aggregation_object, validation_requests: self.validation_requests.finish(), end_non_revertible, end, @@ -70,7 +66,6 @@ impl PrivateKernelCircuitPublicInputsBuilder { impl Empty for PrivateKernelCircuitPublicInputsBuilder { fn empty() -> Self { PrivateKernelCircuitPublicInputsBuilder { - aggregation_object: AggregationObject::empty(), min_revertible_side_effect_counter: 0 as u32, validation_requests: ValidationRequestsBuilder::empty(), end: PrivateAccumulatedDataBuilder::empty(), diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr 
b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr index 4687e3de77c..8e8e4d62045 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr @@ -2,10 +2,8 @@ use crate::abis::{ accumulated_data::PublicAccumulatedData, combined_constant_data::CombinedConstantData, validation_requests::{RollupValidationRequests, ValidationRequests}, call_request::CallRequest }; -use crate::mocked::AggregationObject; struct PublicKernelCircuitPublicInputs { - aggregation_object: AggregationObject, validation_requests: ValidationRequests, end_non_revertible: PublicAccumulatedData, end: PublicAccumulatedData, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr index 824f595430e..41f92bd5f22 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr @@ -4,12 +4,10 @@ use crate::{ combined_constant_data::CombinedConstantData, kernel_circuit_public_inputs::{public_kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs}, validation_requests::ValidationRequestsBuilder, call_request::CallRequest -}, - mocked::AggregationObject, traits::Empty +}, traits::Empty }; struct PublicKernelCircuitPublicInputsBuilder { - aggregation_object: AggregationObject, validation_requests: ValidationRequestsBuilder, end_non_revertible: PublicAccumulatedDataBuilder, 
end: PublicAccumulatedDataBuilder, @@ -21,7 +19,6 @@ struct PublicKernelCircuitPublicInputsBuilder { impl PublicKernelCircuitPublicInputsBuilder { pub fn finish(self) -> PublicKernelCircuitPublicInputs { PublicKernelCircuitPublicInputs { - aggregation_object: self.aggregation_object, // Note that we're including both the validation_requests AND the rollup_validation requests, because this // struct is used as an input for both the public kernel and base rollup circuits. In the near future the // base rollup will only receive rollup_validation_requests, and the public kernel only validation_requests. @@ -38,7 +35,6 @@ impl PublicKernelCircuitPublicInputsBuilder { impl Empty for PublicKernelCircuitPublicInputsBuilder { fn empty() -> Self { PublicKernelCircuitPublicInputsBuilder { - aggregation_object: AggregationObject::empty(), validation_requests: ValidationRequestsBuilder::empty(), end_non_revertible: PublicAccumulatedDataBuilder::empty(), end: PublicAccumulatedDataBuilder::empty(), diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_data.nr index f0dd35e98b1..6bdbbb4d4a5 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_data.nr @@ -1,28 +1,6 @@ use crate::mocked::{Proof, VerificationKey}; use crate::constants::VK_TREE_HEIGHT; -use crate::abis::kernel_circuit_public_inputs::{PrivateKernelCircuitPublicInputs, PublicKernelCircuitPublicInputs, KernelCircuitPublicInputs}; - -struct PrivateKernelData { - // TODO(David): Left a note asking if we need this due to it - // already being in the proof. 
- public_inputs: PrivateKernelCircuitPublicInputs, - - // TODO(David): Mentions the dichotomy between a proof created for the - // circuit, which is a sequence of field elements, versus a proof - // created for solidity/native verification which is a collection of bytes. - // Kev: I've been questioning if we _need_ the sequence of field elements. - // It makes verification cheaper, though I have not tested how much cheaper. - // Removing it would also reduce complexity on the Noir side, as we have - // special methods to convert "inner proofs" into sequence of field elements. - proof: Proof, - - vk: VerificationKey, - - // TODO(Mike): left a note saying : this index and path are meant to be those of a leaf within the tree of _kernel circuit_ vks; not the tree - // of functions within the contract tree. - vk_index: u32, - vk_path: [Field; VK_TREE_HEIGHT], -} +use crate::abis::kernel_circuit_public_inputs::{PublicKernelCircuitPublicInputs, KernelCircuitPublicInputs}; struct PublicKernelData { public_inputs: PublicKernelCircuitPublicInputs, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr index 760189375dd..7bca0c1d616 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr @@ -1,7 +1,10 @@ -use crate::abis::{call_request::CallRequest, private_call_stack_item::PrivateCallStackItem}; use crate::address::{SaltedInitializationHash, PublicKeysHash, EthAddress}; use crate::contract_class_id::ContractClassId; -use crate::mocked::{Proof, VerificationKey}; +use crate::recursion::{verification_key::VerificationKey, proof::RecursiveProof}; +use crate::abis::{ + call_request::CallRequest, private_call_stack_item::PrivateCallStackItem, + 
private_circuit_public_inputs::PrivateCircuitPublicInputs +}; use crate::constants::{MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, FUNCTION_TREE_HEIGHT}; use crate::merkle_tree::membership::MembershipWitness; @@ -11,7 +14,7 @@ struct PrivateCallData { private_call_stack: [CallRequest; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL], public_call_stack: [CallRequest; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], - proof: Proof, + proof: RecursiveProof, vk: VerificationKey, salted_initialization_hash: SaltedInitializationHash, @@ -22,3 +25,13 @@ struct PrivateCallData { acir_hash: Field, } + +fn verify_private_call(call: PrivateCallData) { + let inputs = PrivateCircuitPublicInputs::serialize(call.call_stack_item.public_inputs); + dep::std::verify_proof( + call.vk.key.as_slice(), + call.proof.fields.as_slice(), + inputs.as_slice(), + call.vk.hash + ); +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel_data.nr new file mode 100644 index 00000000000..9e9564ebf25 --- /dev/null +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel_data.nr @@ -0,0 +1,34 @@ +use crate::recursion::{verification_key::VerificationKey, proof::NestedRecursiveProof}; +use crate::constants::VK_TREE_HEIGHT; +use crate::abis::kernel_circuit_public_inputs::PrivateKernelCircuitPublicInputs; + +struct PrivateKernelData { + // TODO(David): Left a note asking if we need this due to it + // already being in the proof. + public_inputs: PrivateKernelCircuitPublicInputs, + + // TODO(David): Mentions the dichotomy between a proof created for the + // circuit, which is a sequence of field elements, versus a proof + // created for solidity/native verification which is a collection of bytes. + // Kev: I've been questioning if we _need_ the sequence of field elements. + // It makes verification cheaper, though I have not tested how much cheaper. 
+ // Removing it would also reduce complexity on the Noir side, as we have + // special methods to convert "inner proofs" into sequence of field elements. + proof: NestedRecursiveProof, + vk: VerificationKey, + + // TODO(Mike): left a note saying : this index and path are meant to be those of a leaf within the tree of _kernel circuit_ vks; not the tree + // of functions within the contract tree. + vk_index: u32, + vk_path: [Field; VK_TREE_HEIGHT], +} + +fn verify_previous_kernel_proof(previous_kernel: PrivateKernelData) { + let inputs = PrivateKernelCircuitPublicInputs::serialize(previous_kernel.public_inputs); + dep::std::verify_proof( + previous_kernel.vk.key.as_slice(), + previous_kernel.proof.fields.as_slice(), + inputs.as_slice(), + previous_kernel.vk.hash + ); +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_read.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_read.nr index f790fe142c8..a434580019b 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_read.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_read.nr @@ -1,6 +1,6 @@ -use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_READ; +use crate::constants::{GENERATOR_INDEX__PUBLIC_DATA_READ, PUBLIC_DATA_READ_LENGTH}; use dep::std::cmp::Eq; -use crate::traits::{Empty, Hash}; +use crate::traits::{Empty, Hash, Serialize}; struct PublicDataRead { leaf_slot : Field, @@ -36,3 +36,9 @@ impl PublicDataRead { (self.leaf_slot == 0) & (self.value == 0) } } + +impl Serialize for PublicDataRead { + fn serialize(self) -> [Field; PUBLIC_DATA_READ_LENGTH] { + [self.leaf_slot, self.value] + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_update_request.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_update_request.nr index 8177f389f18..ab887214acf 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_update_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/public_data_update_request.nr @@ -1,6 +1,6 @@ -use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST; +use crate::constants::{PUBLIC_DATA_UPDATE_REQUEST_LENGTH, GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST}; use dep::std::cmp::Eq; -use crate::traits::{Empty, Hash}; +use crate::traits::{Empty, Hash, Serialize}; struct PublicDataUpdateRequest { leaf_slot : Field, @@ -37,3 +37,9 @@ impl PublicDataUpdateRequest { (self.leaf_slot == 0) & (self.new_value == 0) } } + +impl Serialize for PublicDataUpdateRequest { + fn serialize(self) -> [Field; PUBLIC_DATA_UPDATE_REQUEST_LENGTH] { + [self.leaf_slot, self.new_value] + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr index 9cac3dc5c88..6d3663354b0 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/read_request.nr @@ -1,11 +1,10 @@ use crate::{ - traits::{Empty, Serialize, Deserialize}, address::AztecAddress, constants::READ_REQUEST_LENGTH, + traits::{Empty, Serialize, Deserialize}, address::AztecAddress, + constants::{READ_REQUEST_LENGTH, SCOPED_READ_REQUEST_LEN}, utils::{arrays::array_concat, reader::Reader} }; use dep::std::cmp::Eq; -global SCOPED_READ_REQUEST_SERIALIZED_LEN = READ_REQUEST_LENGTH + 1; - struct ReadRequest { value: Field, counter: u32, @@ -69,14 +68,14 @@ impl Empty for ScopedReadRequest { } } -impl Serialize for ScopedReadRequest { - fn serialize(self) -> [Field; SCOPED_READ_REQUEST_SERIALIZED_LEN] { +impl Serialize for ScopedReadRequest { + fn serialize(self) -> [Field; SCOPED_READ_REQUEST_LEN] { array_concat(self.read_request.serialize(), [self.contract_address.to_field()]) } } -impl Deserialize for ScopedReadRequest { - 
fn deserialize(values: [Field; SCOPED_READ_REQUEST_SERIALIZED_LEN]) -> Self { +impl Deserialize for ScopedReadRequest { + fn deserialize(values: [Field; SCOPED_READ_REQUEST_LEN]) -> Self { let mut reader = Reader::new(values); let res = Self { read_request: reader.read_struct(ReadRequest::deserialize), diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr index 258167f0bbc..1840668e1b3 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr @@ -1,4 +1,4 @@ -use crate::{abis::max_block_number::MaxBlockNumber, traits::Empty}; +use crate::{abis::max_block_number::MaxBlockNumber, traits::{Empty, Serialize}, constants::ROLLUP_VALIDATION_REQUESTS_LENGTH}; // These are validation requests that cannot be fulfilled in the current context (private or public), and must be // instead forwarded to the rollup for it to take care of them. 
@@ -14,3 +14,9 @@ impl Empty for RollupValidationRequests { } } +impl Serialize for RollupValidationRequests { + fn serialize(self) -> [Field; ROLLUP_VALIDATION_REQUESTS_LENGTH] { + MaxBlockNumber::serialize(self.max_block_number) + } +} + diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr index 850afddbade..8d56adb7ea6 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr @@ -8,8 +8,8 @@ use crate::{ constants::{ MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, - MAX_PUBLIC_DATA_READS_PER_TX -} + MAX_PUBLIC_DATA_READS_PER_TX, VALIDATION_REQUESTS_LENGTH +}, traits::Serialize }; // TODO - Use specific structs for private and public: PrivateValidationRequests vs PublicValidationRequests @@ -21,3 +21,35 @@ struct ValidationRequests { nullifier_key_validation_requests: [ScopedNullifierKeyValidationRequest; MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], public_data_reads: [PublicDataRead; MAX_PUBLIC_DATA_READS_PER_TX], } + +impl Serialize for ValidationRequests { + fn serialize(self) -> [Field; VALIDATION_REQUESTS_LENGTH] { + let mut fields: BoundedVec = BoundedVec::new(); + + fields.extend_from_array(self.for_rollup.serialize()); + + for i in 0..MAX_NOTE_HASH_READ_REQUESTS_PER_TX { + fields.extend_from_array(self.note_hash_read_requests[i].serialize()); + } + + for i in 0..MAX_NULLIFIER_READ_REQUESTS_PER_TX { + fields.extend_from_array(self.nullifier_read_requests[i].serialize()); + } + + for i in 0..MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX { + 
fields.extend_from_array(self.nullifier_non_existent_read_requests[i].serialize()); + } + + for i in 0..MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX { + fields.extend_from_array(self.nullifier_key_validation_requests[i].serialize()); + } + + for i in 0..MAX_PUBLIC_DATA_READS_PER_TX { + fields.extend_from_array(self.public_data_reads[i].serialize()); + } + + assert_eq(fields.len(), VALIDATION_REQUESTS_LENGTH); + + fields.storage + } +} \ No newline at end of file diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 5e1631cc24d..84f47f12d87 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -167,6 +167,7 @@ global SCOPED_NOTE_HASH_LENGTH = NOTE_HASH_LENGTH + 2; global NULLIFIER_LENGTH = 3; global SCOPED_NULLIFIER_LENGTH = NULLIFIER_LENGTH + 1; global SIDE_EFFECT_LENGTH = 2; +global ROLLUP_VALIDATION_REQUESTS_LENGTH = MAX_BLOCK_NUMBER_LENGTH; global STATE_REFERENCE_LENGTH: u64 = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH; global TX_CONTEXT_LENGTH: u64 = 2 + GAS_SETTINGS_LENGTH; global TX_REQUEST_LENGTH: u64 = 2 + TX_CONTEXT_LENGTH + FUNCTION_DATA_LENGTH; @@ -175,6 +176,19 @@ global PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + 3 + MAX global PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH: u64 = CALL_CONTEXT_LENGTH + 2 + (READ_REQUEST_LENGTH * MAX_NULLIFIER_READ_REQUESTS_PER_CALL) + (READ_REQUEST_LENGTH * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH * MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL) + (CONTRACT_STORAGE_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_CALL) + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL + (NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_CALL) + (NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_CALL) + (L2_TO_L1_MESSAGE_LENGTH * MAX_NEW_L2_TO_L1_MSGS_PER_CALL) + 2 + (SIDE_EFFECT_LENGTH * 
MAX_UNENCRYPTED_LOGS_PER_CALL) + 1 + HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + AZTEC_ADDRESS_LENGTH + /* revert_code */ 1 + 2 * GAS_LENGTH + /* transaction_fee */ 1; global PRIVATE_CALL_STACK_ITEM_LENGTH: u64 = AZTEC_ADDRESS_LENGTH + FUNCTION_DATA_LENGTH + PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH; +global SCOPED_READ_REQUEST_LEN = READ_REQUEST_LENGTH + 1; +global PUBLIC_DATA_READ_LENGTH = 2; +global VALIDATION_REQUESTS_LENGTH = ROLLUP_VALIDATION_REQUESTS_LENGTH + (SCOPED_READ_REQUEST_LEN * MAX_NOTE_HASH_READ_REQUESTS_PER_TX) + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_READ_REQUESTS_PER_TX) + (SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX) + (SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX) + (PUBLIC_DATA_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_TX); + +global PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 2; +global COMBINED_ACCUMULATED_DATA_LENGTH = MAX_NEW_NOTE_HASHES_PER_TX + MAX_NEW_NULLIFIERS_PER_TX + MAX_NEW_L2_TO_L1_MSGS_PER_TX + 4 + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH) + GAS_LENGTH; +global COMBINED_CONSTANT_DATA_LENGTH = HEADER_LENGTH + TX_CONTEXT_LENGTH + GLOBAL_VARIABLES_LENGTH; + +global CALLER_CONTEXT_LENGTH = 2 * AZTEC_ADDRESS_LENGTH; +global CALL_REQUEST_LENGTH = 1 + AZTEC_ADDRESS_LENGTH + CALLER_CONTEXT_LENGTH + 2; +global PRIVATE_ACCUMULATED_DATA_LENGTH = (SCOPED_NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_TX) + (SCOPED_NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_TX) + (MAX_NEW_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH) + (SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX) + (SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX) + 2 + (CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX) + (CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX); +global PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = 1 + VALIDATION_REQUESTS_LENGTH + PRIVATE_ACCUMULATED_DATA_LENGTH + COMBINED_CONSTANT_DATA_LENGTH + CALL_REQUEST_LENGTH; + global 
ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH: u64 = 2 + FUNCTION_DATA_LENGTH + CALL_CONTEXT_LENGTH; global GET_NOTES_ORACLE_RETURN_LENGTH: u64 = 674; global NOTE_HASHES_NUM_BYTES_PER_BASE_ROLLUP: Field = 2048; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr index efb7f6b38c3..b6efc83586f 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr @@ -1,5 +1,5 @@ use crate::address::{AztecAddress, EthAddress}; -use crate::mocked::VerificationKey; +use crate::recursion::verification_key::VerificationKey; use crate::abis::function_selector::FunctionSelector; use crate::abis::contract_class_function_leaf_preimage::ContractClassFunctionLeafPreimage; use crate::contract_class_id::ContractClassId; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr index 077007c2b5a..b0f043c80ab 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr @@ -5,8 +5,9 @@ use crate::{ accumulated_data::{CombinedAccumulatedData, PrivateAccumulatedData, PrivateAccumulatedDataBuilder, PublicAccumulatedData}, global_variables::GlobalVariables, combined_constant_data::CombinedConstantData, kernel_circuit_public_inputs::{KernelCircuitPublicInputs, PrivateKernelCircuitPublicInputs, PublicKernelCircuitPublicInputs}, - kernel_data::{PrivateKernelData, PublicKernelData, KernelData}, max_block_number::MaxBlockNumber, - note_hash::{NoteHash, ScopedNoteHash}, nullifier::{Nullifier, ScopedNullifier}, + kernel_data::{PublicKernelData, KernelData}, max_block_number::MaxBlockNumber, + private_kernel_data::PrivateKernelData, note_hash::{NoteHash, ScopedNoteHash}, + nullifier::{Nullifier, ScopedNullifier}, 
nullifier_key_validation_request::ScopedNullifierKeyValidationRequest, public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest, read_request::{ReadRequest, ScopedReadRequest}, side_effect::SideEffect, @@ -23,8 +24,8 @@ use crate::{ }, hash::silo_nullifier, header::Header, messaging::l2_to_l1_message::{L2ToL1Message, ScopedL2ToL1Message}, - mocked::{AggregationObject, Proof, VerificationKey}, partial_state_reference::PartialStateReference, - tests::fixtures, transaction::tx_context::TxContext, traits::Empty + partial_state_reference::PartialStateReference, tests::fixtures, transaction::tx_context::TxContext, + traits::Empty, recursion::{verification_key::VerificationKey, proof::NestedRecursiveProof} }; struct FixtureBuilder { @@ -62,7 +63,7 @@ struct FixtureBuilder { public_data_reads: BoundedVec, // Proof. - proof: Proof, + proof: NestedRecursiveProof, vk: VerificationKey, vk_index: u32, vk_path: [Field; VK_TREE_HEIGHT], @@ -103,8 +104,8 @@ impl FixtureBuilder { nullifier_non_existent_read_requests: BoundedVec::new(), nullifier_key_validation_requests: BoundedVec::new(), public_data_reads: BoundedVec::new(), - proof: Proof {}, - vk: VerificationKey {}, + proof: NestedRecursiveProof::empty(), + vk: VerificationKey::empty(), vk_index: 0, vk_path: [0; VK_TREE_HEIGHT], revert_code: 0, @@ -188,7 +189,6 @@ impl FixtureBuilder { let constants = self.to_constant_data(); PrivateKernelCircuitPublicInputs { - aggregation_object: AggregationObject {}, min_revertible_side_effect_counter: self.min_revertible_side_effect_counter, end, validation_requests, @@ -218,7 +218,6 @@ impl FixtureBuilder { let constants = self.to_constant_data(); PublicKernelCircuitPublicInputs { - aggregation_object: AggregationObject {}, end_non_revertible, end, validation_requests, @@ -230,7 +229,13 @@ impl FixtureBuilder { pub fn to_public_kernel_data(self, revertible: bool) -> PublicKernelData { let public_inputs = self.to_public_kernel_circuit_public_inputs(revertible); 
- PublicKernelData { public_inputs, proof: self.proof, vk: self.vk, vk_index: self.vk_index, vk_path: self.vk_path } + PublicKernelData { + public_inputs, + proof: crate::mocked::Proof::empty(), + vk: crate::mocked::VerificationKey::empty(), + vk_index: self.vk_index, + vk_path: self.vk_path + } } pub fn to_kernel_circuit_public_inputs(self) -> KernelCircuitPublicInputs { @@ -238,19 +243,18 @@ impl FixtureBuilder { let end = self.to_combined_accumulated_data(); let constants = self.to_constant_data(); - KernelCircuitPublicInputs { - aggregation_object: AggregationObject {}, - rollup_validation_requests, - end, - constants, - start_state: self.start_state, - revert_code: self.revert_code - } + KernelCircuitPublicInputs { rollup_validation_requests, end, constants, start_state: self.start_state, revert_code: self.revert_code } } pub fn to_kernel_data(self) -> KernelData { let public_inputs = self.to_kernel_circuit_public_inputs(); - KernelData { public_inputs, proof: self.proof, vk: self.vk, vk_index: self.vk_index, vk_path: self.vk_path } + KernelData { + public_inputs, + proof: crate::mocked::Proof::empty(), + vk: crate::mocked::VerificationKey::empty(), + vk_index: self.vk_index, + vk_path: self.vk_path + } } pub fn add_new_note_hash(&mut self, value: Field) { @@ -483,7 +487,7 @@ impl Empty for FixtureBuilder { nullifier_non_existent_read_requests: BoundedVec::new(), nullifier_key_validation_requests: BoundedVec::new(), public_data_reads: BoundedVec::new(), - proof: Proof::empty(), + proof: NestedRecursiveProof::empty(), vk: VerificationKey::empty(), vk_index: 0, vk_path: [0; VK_TREE_HEIGHT], diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr index a4c6a52930e..44d060051a9 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr +++ 
b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr @@ -7,7 +7,7 @@ use crate::{ }, merkle_tree::membership::MembershipWitness, address::{AztecAddress, EthAddress, SaltedInitializationHash, PublicKeysHash}, - mocked::{Proof, VerificationKey}, + recursion::{proof::RecursiveProof, verification_key::VerificationKey}, tests::{fixtures, private_circuit_public_inputs_builder::PrivateCircuitPublicInputsBuilder}, transaction::{tx_request::TxRequest, tx_context::TxContext} }; @@ -22,7 +22,7 @@ struct PrivateCallDataBuilder { // The rest of the values of PrivateCallData. private_call_stack: BoundedVec, public_call_stack: BoundedVec, - proof: Proof, + proof: RecursiveProof, vk: VerificationKey, salted_initialization_hash: SaltedInitializationHash, public_keys_hash: PublicKeysHash, @@ -48,8 +48,8 @@ impl PrivateCallDataBuilder { function_data, private_call_stack: BoundedVec::new(), public_call_stack: BoundedVec::new(), - proof: Proof {}, - vk: VerificationKey {}, + proof: RecursiveProof::empty(), + vk: VerificationKey::empty(), function_leaf_membership_witness: contract_function.membership_witness, salted_initialization_hash: contract_data.salted_initialization_hash, public_keys_hash: contract_data.public_keys_hash, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index d982d864d06..d587abc9463 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -233,9 +233,7 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::RANGE => unreachable!( "ICE: `BlackBoxFunc::RANGE` calls should be transformed into a `Instruction::Cast`" ), - BlackBoxFunc::RecursiveAggregation => unimplemented!( - "ICE: `BlackBoxFunc::RecursiveAggregation` is not implemented by the Brillig VM" - ), + 
BlackBoxFunc::RecursiveAggregation => {} BlackBoxFunc::BigIntAdd => { if let ( [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(rhs_modulus)], diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 8e143fd70ca..e15a83aa388 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -110,6 +110,7 @@ export const SCOPED_NOTE_HASH_LENGTH = NOTE_HASH_LENGTH + 2; export const NULLIFIER_LENGTH = 3; export const SCOPED_NULLIFIER_LENGTH = NULLIFIER_LENGTH + 1; export const SIDE_EFFECT_LENGTH = 2; +export const ROLLUP_VALIDATION_REQUESTS_LENGTH = MAX_BLOCK_NUMBER_LENGTH; export const STATE_REFERENCE_LENGTH = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + PARTIAL_STATE_REFERENCE_LENGTH; export const TX_CONTEXT_LENGTH = 2 + GAS_SETTINGS_LENGTH; export const TX_REQUEST_LENGTH = 2 + TX_CONTEXT_LENGTH + FUNCTION_DATA_LENGTH; @@ -156,6 +157,41 @@ export const PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = /* transaction_fee */ 1; export const PRIVATE_CALL_STACK_ITEM_LENGTH = AZTEC_ADDRESS_LENGTH + FUNCTION_DATA_LENGTH + PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH; +export const SCOPED_READ_REQUEST_LEN = READ_REQUEST_LENGTH + 1; +export const PUBLIC_DATA_READ_LENGTH = 2; +export const VALIDATION_REQUESTS_LENGTH = + ROLLUP_VALIDATION_REQUESTS_LENGTH + + SCOPED_READ_REQUEST_LEN * MAX_NOTE_HASH_READ_REQUESTS_PER_TX + + SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_READ_REQUESTS_PER_TX + + SCOPED_READ_REQUEST_LEN * MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX + + SCOPED_NULLIFIER_KEY_VALIDATION_REQUEST_LENGTH * MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX + + PUBLIC_DATA_READ_LENGTH * MAX_PUBLIC_DATA_READS_PER_TX; +export const PUBLIC_DATA_UPDATE_REQUEST_LENGTH = 2; +export const COMBINED_ACCUMULATED_DATA_LENGTH = + MAX_NEW_NOTE_HASHES_PER_TX + + MAX_NEW_NULLIFIERS_PER_TX + + MAX_NEW_L2_TO_L1_MSGS_PER_TX + + 4 + + 
MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_UPDATE_REQUEST_LENGTH + + GAS_LENGTH; +export const COMBINED_CONSTANT_DATA_LENGTH = HEADER_LENGTH + TX_CONTEXT_LENGTH + GLOBAL_VARIABLES_LENGTH; +export const CALLER_CONTEXT_LENGTH = 2 * AZTEC_ADDRESS_LENGTH; +export const CALL_REQUEST_LENGTH = 1 + AZTEC_ADDRESS_LENGTH + CALLER_CONTEXT_LENGTH + 2; +export const PRIVATE_ACCUMULATED_DATA_LENGTH = + SCOPED_NOTE_HASH_LENGTH * MAX_NEW_NOTE_HASHES_PER_TX + + SCOPED_NULLIFIER_LENGTH * MAX_NEW_NULLIFIERS_PER_TX + + MAX_NEW_L2_TO_L1_MSGS_PER_TX * SCOPED_L2_TO_L1_MESSAGE_LENGTH + + SIDE_EFFECT_LENGTH * MAX_ENCRYPTED_LOGS_PER_TX + + SIDE_EFFECT_LENGTH * MAX_UNENCRYPTED_LOGS_PER_TX + + 2 + + CALL_REQUEST_LENGTH * MAX_PRIVATE_CALL_STACK_LENGTH_PER_TX + + CALL_REQUEST_LENGTH * MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX; +export const PRIVATE_KERNEL_CIRCUIT_PUBLIC_INPUTS_LENGTH = + 1 + + VALIDATION_REQUESTS_LENGTH + + PRIVATE_ACCUMULATED_DATA_LENGTH + + COMBINED_CONSTANT_DATA_LENGTH + + CALL_REQUEST_LENGTH; export const ENQUEUE_PUBLIC_FUNCTION_CALL_RETURN_LENGTH = 2 + FUNCTION_DATA_LENGTH + CALL_CONTEXT_LENGTH; export const GET_NOTES_ORACLE_RETURN_LENGTH = 674; export const NOTE_HASHES_NUM_BYTES_PER_BASE_ROLLUP = 2048; diff --git a/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts b/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts index 689f2d1d297..1b492da9c87 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts @@ -6,12 +6,13 @@ import { FUNCTION_TREE_HEIGHT, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, + RECURSIVE_PROOF_LENGTH, } from '../../constants.gen.js'; import { CallRequest } from '../call_request.js'; import { MembershipWitness } from '../membership_witness.js'; import { PrivateCallStackItem } from '../private_call_stack_item.js'; -import { Proof } from '../proof.js'; -import { VerificationKey } from 
'../verification_key.js'; +import { RecursiveProof } from '../recursive_proof.js'; +import { VerificationKeyAsFields } from '../verification_key.js'; /** * Private call data. @@ -33,11 +34,11 @@ export class PrivateCallData { /** * The proof of the execution of this private call. */ - public proof: Proof, + public proof: RecursiveProof, /** * The verification key for the function being invoked. */ - public vk: VerificationKey, + public vk: VerificationKeyAsFields, /** * Artifact hash of the contract class for this private call. */ @@ -108,8 +109,8 @@ export class PrivateCallData { reader.readObject(PrivateCallStackItem), reader.readArray(MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, CallRequest), reader.readArray(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, CallRequest), - reader.readObject(Proof), - reader.readObject(VerificationKey), + RecursiveProof.fromBuffer(reader, RECURSIVE_PROOF_LENGTH), + reader.readObject(VerificationKeyAsFields), reader.readObject(Fr), reader.readObject(Fr), reader.readObject(Fr), diff --git a/yarn-project/circuits.js/src/structs/kernel/private_kernel_data.ts b/yarn-project/circuits.js/src/structs/kernel/private_kernel_data.ts index d7310402ee4..6293bfad56e 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_kernel_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_kernel_data.ts @@ -2,10 +2,10 @@ import { makeTuple } from '@aztec/foundation/array'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; -import { VK_TREE_HEIGHT } from '../../constants.gen.js'; -import { Proof, makeEmptyProof } from '../proof.js'; +import { NESTED_RECURSIVE_PROOF_LENGTH, VK_TREE_HEIGHT } from '../../constants.gen.js'; +import { RecursiveProof, makeRecursiveProof } from '../recursive_proof.js'; import { type UInt32 } from '../shared.js'; -import { VerificationKey } from '../verification_key.js'; +import { VerificationKeyAsFields } from 
'../verification_key.js'; import { PrivateKernelCircuitPublicInputs } from './private_kernel_circuit_public_inputs.js'; /** @@ -20,11 +20,11 @@ export class PrivateKernelData { /** * Proof of the previous kernel. */ - public proof: Proof, + public proof: RecursiveProof, /** * Verification key of the previous kernel. */ - public vk: VerificationKey, + public vk: VerificationKeyAsFields, /** * Index of the previous kernel's vk in a tree of vks. */ @@ -47,8 +47,8 @@ export class PrivateKernelData { const reader = BufferReader.asReader(buffer); return new this( reader.readObject(PrivateKernelCircuitPublicInputs), - reader.readObject(Proof), - reader.readObject(VerificationKey), + RecursiveProof.fromBuffer(reader, NESTED_RECURSIVE_PROOF_LENGTH), + reader.readObject(VerificationKeyAsFields), reader.readNumber(), reader.readArray(VK_TREE_HEIGHT, Fr), ); @@ -57,8 +57,8 @@ export class PrivateKernelData { static empty(): PrivateKernelData { return new PrivateKernelData( PrivateKernelCircuitPublicInputs.empty(), - makeEmptyProof(), - VerificationKey.makeFake(), + makeRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + VerificationKeyAsFields.makeFake(), 0, makeTuple(VK_TREE_HEIGHT, Fr.zero), ); diff --git a/yarn-project/circuits.js/src/structs/verification_key.ts b/yarn-project/circuits.js/src/structs/verification_key.ts index b7f47e23162..e617e48f7a9 100644 --- a/yarn-project/circuits.js/src/structs/verification_key.ts +++ b/yarn-project/circuits.js/src/structs/verification_key.ts @@ -108,6 +108,14 @@ export class VerificationKeyAsFields { static makeFake(seed = 1): VerificationKeyAsFields { return new VerificationKeyAsFields(makeTuple(VERIFICATION_KEY_LENGTH_IN_FIELDS, Fr.random, seed), Fr.random()); } + + /** + * Builds an 'empty' verification key + * @returns An 'empty' verification key + */ + static makeEmpty(): VerificationKeyAsFields { + return new VerificationKeyAsFields(makeTuple(VERIFICATION_KEY_LENGTH_IN_FIELDS, Fr.zero), Fr.zero()); + } } export class 
VerificationKey { diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 6dc5712a2fa..90208fa660a 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -729,8 +729,8 @@ export function makePrivateCallData(seed = 1): PrivateCallData { callStackItem: makePrivateCallStackItem(seed), privateCallStack: makeTuple(MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, makeCallRequest, seed + 0x10), publicCallStack: makeTuple(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, makeCallRequest, seed + 0x20), - proof: new Proof(Buffer.alloc(16).fill(seed + 0x50)), - vk: makeVerificationKey(), + proof: makeRecursiveProof(RECURSIVE_PROOF_LENGTH, seed + 0x50), + vk: makeVerificationKeyAsFields(), contractClassArtifactHash: fr(seed + 0x70), contractClassPublicBytecodeCommitment: fr(seed + 0x71), publicKeysHash: fr(seed + 0x72), diff --git a/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts b/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts index ed244b2da64..2cda2f3aa0c 100644 --- a/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts +++ b/yarn-project/end-to-end/src/client_prover_integration/client_prover_integration.test.ts @@ -5,7 +5,7 @@ import { type BBNativeProofCreator } from '@aztec/pxe'; import { ClientProverTest } from './client_prover_test.js'; async function verifyProof(circuitType: ClientProtocolArtifact, tx: Tx, proofCreator: BBNativeProofCreator) { - await expect(proofCreator.verifyProof(circuitType, tx.proof)).resolves.not.toThrow(); + await expect(proofCreator.verifyProofForProtocolCircuit(circuitType, tx.proof)).resolves.not.toThrow(); } describe('client_prover_integration', () => { diff --git a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts index 
ec844c1a3d6..659afa884a7 100644 --- a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts @@ -786,8 +786,8 @@ export function mapPrivateCallDataToNoir(privateCallData: PrivateCallData): Priv call_stack_item: mapPrivateCallStackItemToNoir(privateCallData.callStackItem), private_call_stack: mapTuple(privateCallData.privateCallStack, mapCallRequestToNoir), public_call_stack: mapTuple(privateCallData.publicCallStack, mapCallRequestToNoir), - proof: {}, - vk: {}, + proof: mapRecursiveProofToNoir(privateCallData.proof), + vk: mapVerificationKeyToNoir(privateCallData.vk), function_leaf_membership_witness: mapMembershipWitnessToNoir(privateCallData.functionLeafMembershipWitness), contract_class_artifact_hash: mapFieldToNoir(privateCallData.contractClassArtifactHash), contract_class_public_bytecode_commitment: mapFieldToNoir(privateCallData.contractClassPublicBytecodeCommitment), @@ -1235,7 +1235,6 @@ export function mapPublicKernelCircuitPublicInputsToNoir( inputs: PublicKernelCircuitPublicInputs, ): PublicKernelCircuitPublicInputsNoir { return { - aggregation_object: {}, constants: mapCombinedConstantDataToNoir(inputs.constants), validation_requests: mapValidationRequestsToNoir(inputs.validationRequests), end: mapPublicAccumulatedDataToNoir(inputs.end), @@ -1258,7 +1257,6 @@ export function mapKernelCircuitPublicInputsFromNoir(inputs: KernelCircuitPublic export function mapKernelCircuitPublicInputsToNoir(inputs: KernelCircuitPublicInputs): KernelCircuitPublicInputsNoir { return { - aggregation_object: {}, rollup_validation_requests: mapRollupValidationRequestsToNoir(inputs.rollupValidationRequests), constants: mapCombinedConstantDataToNoir(inputs.constants), end: mapCombinedAccumulatedDataToNoir(inputs.end), @@ -1316,7 +1314,6 @@ export function mapPrivateKernelCircuitPublicInputsToNoir( inputs: PrivateKernelCircuitPublicInputs, ): PrivateKernelCircuitPublicInputsNoir { return { - 
aggregation_object: {}, constants: mapCombinedConstantDataToNoir(inputs.constants), validation_requests: mapValidationRequestsToNoir(inputs.validationRequests), end: mapPrivateAccumulatedDataToNoir(inputs.end), @@ -1333,8 +1330,8 @@ export function mapPrivateKernelCircuitPublicInputsToNoir( export function mapPrivateKernelDataToNoir(privateKernelInnerData: PrivateKernelData): PrivateKernelDataNoir { return { public_inputs: mapPrivateKernelCircuitPublicInputsToNoir(privateKernelInnerData.publicInputs), - proof: {}, - vk: {}, + proof: mapRecursiveProofToNoir(privateKernelInnerData.proof), + vk: mapVerificationKeyToNoir(privateKernelInnerData.vk), vk_index: mapFieldToNoir(new Fr(privateKernelInnerData.vkIndex)), vk_path: mapTuple(privateKernelInnerData.vkPath, mapFieldToNoir), }; @@ -1733,17 +1730,11 @@ export function mapAppendOnlyTreeSnapshotToNoir(snapshot: AppendOnlyTreeSnapshot }; } -export function mapRootRollupRecursiveProofToNoir(proof: RecursiveProof) { - return { - fields: mapTuple(proof.proof, mapFieldToNoir), - }; -} - export function mapRootRollupParityInputToNoir( rootParityInput: RootParityInput, ): RootRollupParityInputNoir { return { - proof: mapRootRollupRecursiveProofToNoir(rootParityInput.proof), + proof: mapRecursiveProofToNoir(rootParityInput.proof), verification_key: mapVerificationKeyToNoir(rootParityInput.verificationKey), public_inputs: mapParityPublicInputsToNoir(rootParityInput.publicInputs), }; @@ -1771,7 +1762,7 @@ export function mapRootRollupInputsToNoir(rootRollupInputs: RootRollupInputs): R }; } -export function mapRecursiveProofToNoir(proof: RecursiveProof) { +export function mapRecursiveProofToNoir(proof: RecursiveProof) { return { fields: mapTuple(proof.proof, mapFieldToNoir), }; diff --git a/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts b/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts index 15eed0996a9..cfb7c5d8870 100644 --- 
a/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts +++ b/yarn-project/pxe/src/kernel_prover/bb_prover/bb_native_proof_creator.ts @@ -1,5 +1,6 @@ import { Fr, + NESTED_RECURSIVE_PROOF_LENGTH, type PrivateCircuitPublicInputs, type PrivateKernelCircuitPublicInputs, type PrivateKernelInitCircuitPrivateInputs, @@ -7,7 +8,10 @@ import { type PrivateKernelTailCircuitPrivateInputs, type PrivateKernelTailCircuitPublicInputs, Proof, + RECURSIVE_PROOF_LENGTH, + RecursiveProof, type VERIFICATION_KEY_LENGTH_IN_FIELDS, + VerificationKeyAsFields, } from '@aztec/circuits.js'; import { siloNoteHash } from '@aztec/circuits.js/hash'; import { randomBytes, sha256 } from '@aztec/foundation/crypto'; @@ -34,7 +38,7 @@ import { serializeWitness } from '@noir-lang/noirc_abi'; import * as proc from 'child_process'; import * as fs from 'fs/promises'; -import { type ProofCreator, type ProofOutput } from '../interface/proof_creator.js'; +import { type AppCircuitProofOutput, type KernelProofOutput, type ProofCreator } from '../interface/proof_creator.js'; /** * Temporary implementation of ProofCreator using the native bb binary. @@ -44,9 +48,9 @@ import { type ProofCreator, type ProofOutput } from '../interface/proof_creator. 
const VK_FILENAME = 'vk'; const VK_FIELDS_FILENAME = 'vk_fields.json'; const PROOF_FILENAME = 'proof'; -//const PROOF_FIELDS_FILENAME = 'proof_fields.json'; +const PROOF_FIELDS_FILENAME = 'proof_fields.json'; -//const AGGREGATION_OBJECT_SIZE = 16; +const AGGREGATION_OBJECT_SIZE = 16; const CIRCUIT_SIZE_INDEX = 3; const CIRCUIT_PUBLIC_INPUTS_INDEX = 4; const CIRCUIT_RECURSIVE_INDEX = 5; @@ -452,21 +456,21 @@ export class BBNativeProofCreator implements ProofCreator { public async createProofInit( inputs: PrivateKernelInitCircuitPrivateInputs, - ): Promise> { + ): Promise> { const witnessMap = convertPrivateKernelInitInputsToWitnessMap(inputs); return await this.createSafeProof(witnessMap, 'PrivateKernelInitArtifact'); } public async createProofInner( inputs: PrivateKernelInnerCircuitPrivateInputs, - ): Promise> { + ): Promise> { const witnessMap = convertPrivateKernelInnerInputsToWitnessMap(inputs); return await this.createSafeProof(witnessMap, 'PrivateKernelInnerArtifact'); } public async createProofTail( inputs: PrivateKernelTailCircuitPrivateInputs, - ): Promise> { + ): Promise> { if (!inputs.isForPublic()) { const witnessMap = convertPrivateKernelTailInputsToWitnessMap(inputs); return await this.createSafeProof(witnessMap, 'PrivateKernelTailArtifact'); @@ -475,14 +479,25 @@ export class BBNativeProofCreator implements ProofCreator { return await this.createSafeProof(witnessMap, 'PrivateKernelTailToPublicArtifact'); } - public async createAppCircuitProof(partialWitness: Map, bytecode: Buffer): Promise { + public async createAppCircuitProof( + partialWitness: Map, + bytecode: Buffer, + ): Promise { const directory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(directory, { recursive: true }); this.log.debug(`Created directory: ${directory}`); try { this.log.debug(`Proving app circuit`); - const proof = await this.createProof(directory, partialWitness, bytecode, 'App'); - return new Proof(proof); + const proofOutput = await 
this.createProof(directory, partialWitness, bytecode, 'App'); + if (proofOutput.proof.proof.length != RECURSIVE_PROOF_LENGTH) { + throw new Error(`Incorrect proof length`); + } + const proof = proofOutput.proof as RecursiveProof; + const output: AppCircuitProofOutput = { + proof, + verificationKey: proofOutput.verificationKey, + }; + return output; } finally { await fs.rm(directory, { recursive: true, force: true }); this.log.debug(`Deleted directory: ${directory}`); @@ -494,27 +509,16 @@ export class BBNativeProofCreator implements ProofCreator { * @param circuitType - The type of circuit whose proof is to be verified * @param proof - The proof to be verified */ - public async verifyProof(circuitType: ClientProtocolArtifact, proof: Proof) { - // Create random directory to be used for temp files - const bbWorkingDirectory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; - await fs.mkdir(bbWorkingDirectory, { recursive: true }); - - const proofFileName = `${bbWorkingDirectory}/proof`; - const verificationKeyPath = `${bbWorkingDirectory}/vk`; + public async verifyProofForProtocolCircuit(circuitType: ClientProtocolArtifact, proof: Proof) { const verificationKey = await this.getVerificationKeyDataForCircuit(circuitType); this.log.debug(`Verifying with key: ${verificationKey.hash.toString()}`); - await fs.writeFile(proofFileName, proof.buffer); - await fs.writeFile(verificationKeyPath, verificationKey.keyAsBytes); - const logFunction = (message: string) => { this.log.debug(`${circuitType} BB out - ${message}`); }; - const result = await verifyProof(this.bbBinaryPath, proofFileName, verificationKeyPath!, logFunction); - - await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); + const result = await this.verifyProofFromKey(verificationKey.keyAsBytes, proof, logFunction); if (result.status === BB_RESULT.FAILURE) { const errorMessage = `Failed to verify ${circuitType} proof!`; @@ -524,6 +528,28 @@ export class BBNativeProofCreator implements 
ProofCreator { this.log.info(`Successfully verified ${circuitType} proof in ${result.duration} ms`); } + private async verifyProofFromKey( + verificationKey: Buffer, + proof: Proof, + logFunction: (message: string) => void = () => {}, + ) { + // Create random directory to be used for temp files + const bbWorkingDirectory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; + await fs.mkdir(bbWorkingDirectory, { recursive: true }); + + const proofFileName = `${bbWorkingDirectory}/proof`; + const verificationKeyPath = `${bbWorkingDirectory}/vk`; + + await fs.writeFile(proofFileName, proof.buffer); + await fs.writeFile(verificationKeyPath, verificationKey); + + try { + return await verifyProof(this.bbBinaryPath, proofFileName, verificationKeyPath!, logFunction); + } finally { + await fs.rm(bbWorkingDirectory, { recursive: true, force: true }); + } + } + /** * Returns the verification key data for a circuit, will generate and cache it if not cached internally * @param circuitType - The type of circuit for which the verification key is required @@ -588,10 +614,13 @@ export class BBNativeProofCreator implements ProofCreator { this.log.debug(`Updated verification key for circuit: ${circuitType}`); this.verificationKeys.set(circuitType, promise); } - await promise; + return await promise; } - private async createSafeProof(inputs: WitnessMap, circuitType: ClientProtocolArtifact): Promise> { + private async createSafeProof( + inputs: WitnessMap, + circuitType: ClientProtocolArtifact, + ): Promise> { const directory = `${this.bbWorkingDirectory}/${randomBytes(8).toString('hex')}`; await fs.mkdir(directory, { recursive: true }); this.log.debug(`Created directory: ${directory}`); @@ -607,7 +636,7 @@ export class BBNativeProofCreator implements ProofCreator { inputs: WitnessMap, circuitType: ClientProtocolArtifact, directory: string, - ): Promise> { + ): Promise> { this.log.debug(`Generating witness for ${circuitType}`); const compiledCircuit: NoirCompiledCircuit = 
ClientCircuitArtifacts[circuitType]; @@ -617,18 +646,23 @@ export class BBNativeProofCreator implements ProofCreator { const publicInputs = KernelArtifactMapping[circuitType].convertOutputs(outputWitness) as T; - const proofBuffer = await this.createProof( + const proofOutput = await this.createProof( directory, outputWitness, Buffer.from(compiledCircuit.bytecode, 'base64'), circuitType, ); + if (proofOutput.proof.proof.length != NESTED_RECURSIVE_PROOF_LENGTH) { + throw new Error(`Incorrect proof length`); + } + const nestedProof = proofOutput.proof as RecursiveProof; - const proofOutput: ProofOutput = { + const kernelOutput: KernelProofOutput = { publicInputs, - proof: new Proof(proofBuffer), + proof: nestedProof, + verificationKey: proofOutput.verificationKey, }; - return proofOutput; + return kernelOutput; } private async createProof( @@ -636,7 +670,10 @@ export class BBNativeProofCreator implements ProofCreator { partialWitness: WitnessMap, bytecode: Buffer, circuitType: ClientProtocolArtifact | 'App', - ) { + ): Promise<{ + proof: RecursiveProof | RecursiveProof; + verificationKey: VerificationKeyAsFields; + }> { const compressedBincodedWitness = serializeWitness(partialWitness); const inputsWitnessFile = `${directory}/witness.gz`; @@ -659,11 +696,15 @@ export class BBNativeProofCreator implements ProofCreator { throw new Error(provingResult.reason); } - if (circuitType !== 'App') { - await this.updateVerificationKeyAfterProof(directory, circuitType); + if (circuitType === 'App') { + const vkData = await this.convertVk(directory); + const proof = await this.readProofAsFields(directory, circuitType, vkData); + return { proof, verificationKey: new VerificationKeyAsFields(vkData.keyAsFields, vkData.hash) }; } - const proofFile = `${directory}/${PROOF_FILENAME}`; - return await fs.readFile(proofFile); + + const vkData = await this.updateVerificationKeyAfterProof(directory, circuitType); + const proof = await this.readProofAsFields(directory, circuitType, vkData); 
+ return { proof, verificationKey: new VerificationKeyAsFields(vkData.keyAsFields, vkData.hash) }; } /** @@ -672,28 +713,24 @@ export class BBNativeProofCreator implements ProofCreator { * @param circuitType - The type of circuit proven * @returns The proof */ - // private async readProofAsFields( - // filePath: string, - // circuitType: ClientProtocolArtifact, - // ): Promise> { - // const [binaryProof, proofString] = await Promise.all([ - // fs.readFile(`${filePath}/${PROOF_FILENAME}`), - // fs.readFile(`${filePath}/${PROOF_FIELDS_FILENAME}`, { encoding: 'utf-8' }), - // ]); - // const json = JSON.parse(proofString); - // const fields = json.map(Fr.fromString); - // const vkData = await this.verificationKeys.get(circuitType); - // if (!vkData) { - // throw new Error(`Invalid verification key for ${circuitType}`); - // } - // const numPublicInputs = CIRCUITS_WITHOUT_AGGREGATION.has(circuitType) - // ? vkData.numPublicInputs - // : vkData.numPublicInputs - AGGREGATION_OBJECT_SIZE; - // const fieldsWithoutPublicInputs = fields.slice(numPublicInputs); - // logger.debug( - // `Circuit type: ${circuitType}, complete proof length: ${fields.length}, without public inputs: ${fieldsWithoutPublicInputs.length}, num public inputs: ${numPublicInputs}, circuit size: ${vkData.circuitSize}, is recursive: ${vkData.isRecursive}, raw length: ${binaryProof.length}`, - // ); - // const proof = new RecursiveProof(fieldsWithoutPublicInputs, new Proof(binaryProof)); - // return proof; - // } + private async readProofAsFields( + filePath: string, + circuitType: ClientProtocolArtifact | 'App', + vkData: VerificationKeyData, + ): Promise> { + const [binaryProof, proofString] = await Promise.all([ + fs.readFile(`${filePath}/${PROOF_FILENAME}`), + fs.readFile(`${filePath}/${PROOF_FIELDS_FILENAME}`, { encoding: 'utf-8' }), + ]); + const json = JSON.parse(proofString); + const fields = json.map(Fr.fromString); + const numPublicInputs = + circuitType === 'App' ? 
vkData.numPublicInputs : vkData.numPublicInputs - AGGREGATION_OBJECT_SIZE; + const fieldsWithoutPublicInputs = fields.slice(numPublicInputs); + this.log.debug( + `Circuit type: ${circuitType}, complete proof length: ${fields.length}, without public inputs: ${fieldsWithoutPublicInputs.length}, num public inputs: ${numPublicInputs}, circuit size: ${vkData.circuitSize}, is recursive: ${vkData.isRecursive}, raw length: ${binaryProof.length}`, + ); + const proof = new RecursiveProof(fieldsWithoutPublicInputs, new Proof(binaryProof)); + return proof; + } } diff --git a/yarn-project/pxe/src/kernel_prover/interface/proof_creator.ts b/yarn-project/pxe/src/kernel_prover/interface/proof_creator.ts index 5e085ae3b34..5b93d698058 100644 --- a/yarn-project/pxe/src/kernel_prover/interface/proof_creator.ts +++ b/yarn-project/pxe/src/kernel_prover/interface/proof_creator.ts @@ -1,11 +1,14 @@ import { + type NESTED_RECURSIVE_PROOF_LENGTH, type PrivateCircuitPublicInputs, type PrivateKernelCircuitPublicInputs, type PrivateKernelInitCircuitPrivateInputs, type PrivateKernelInnerCircuitPrivateInputs, type PrivateKernelTailCircuitPrivateInputs, type PrivateKernelTailCircuitPublicInputs, - type Proof, + type RECURSIVE_PROOF_LENGTH, + type RecursiveProof, + type VerificationKeyAsFields, } from '@aztec/circuits.js'; import { type Fr } from '@aztec/foundation/fields'; import { type ACVMField } from '@aztec/simulator'; @@ -14,7 +17,7 @@ import { type ACVMField } from '@aztec/simulator'; * Represents the output of the proof creation process for init and inner private kernel circuit. * Contains the public inputs required for the init and inner private kernel circuit and the generated proof. */ -export type ProofOutput = { +export type KernelProofOutput = { /** * The public inputs required for the proof generation process. */ @@ -22,7 +25,22 @@ export type ProofOutput = { /** * The zk-SNARK proof for the kernel execution. 
*/ - proof: Proof; + proof: RecursiveProof; + + verificationKey: VerificationKeyAsFields; +}; + +/** + * Represents the output of the proof creation process for init and inner private kernel circuit. + * Contains the public inputs required for the init and inner private kernel circuit and the generated proof. + */ +export type AppCircuitProofOutput = { + /** + * The zk-SNARK proof for the kernel execution. + */ + proof: RecursiveProof; + + verificationKey: VerificationKeyAsFields; }; /** @@ -46,7 +64,7 @@ export interface ProofCreator { */ createProofInit( privateKernelInputsInit: PrivateKernelInitCircuitPrivateInputs, - ): Promise>; + ): Promise>; /** * Creates a proof output for a given previous kernel data and private call data for an inner iteration. @@ -56,7 +74,7 @@ export interface ProofCreator { */ createProofInner( privateKernelInputsInner: PrivateKernelInnerCircuitPrivateInputs, - ): Promise>; + ): Promise>; /** * Creates a proof output based on the last inner kernel iteration kernel data for the final ordering iteration. @@ -66,7 +84,7 @@ export interface ProofCreator { */ createProofTail( privateKernelInputsTail: PrivateKernelTailCircuitPrivateInputs, - ): Promise>; + ): Promise>; /** * Creates a proof for an app circuit. 
@@ -75,5 +93,5 @@ export interface ProofCreator { * @param bytecode - The circuit bytecode in gzipped bincode format * @returns A Promise resolving to a Proof object */ - createAppCircuitProof(partialWitness: Map, bytecode: Buffer): Promise; + createAppCircuitProof(partialWitness: Map, bytecode: Buffer): Promise; } diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts index 58f29d4ceda..6a52d2c7a67 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts @@ -5,16 +5,19 @@ import { MAX_NEW_NOTE_HASHES_PER_CALL, MAX_NEW_NOTE_HASHES_PER_TX, MembershipWitness, + NESTED_RECURSIVE_PROOF_LENGTH, NoteHash, PrivateCallStackItem, PrivateCircuitPublicInputs, PrivateKernelCircuitPublicInputs, PrivateKernelTailCircuitPublicInputs, + RECURSIVE_PROOF_LENGTH, ScopedNoteHash, type TxRequest, VK_TREE_HEIGHT, VerificationKey, - makeEmptyProof, + VerificationKeyAsFields, + makeRecursiveProof, } from '@aztec/circuits.js'; import { makeTxRequest } from '@aztec/circuits.js/testing'; import { makeTuple } from '@aztec/foundation/array'; @@ -91,7 +94,8 @@ describe('Kernel Prover', () => { publicInputs.end.newNoteHashes = noteHashes; return { publicInputs, - proof: makeEmptyProof(), + proof: makeRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + verificationKey: VerificationKeyAsFields.makeEmpty(), }; }; @@ -105,7 +109,15 @@ describe('Kernel Prover', () => { return { publicInputs, - proof: makeEmptyProof(), + proof: makeRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + verificationKey: VerificationKeyAsFields.makeEmpty(), + }; + }; + + const createAppCircuitProofOutput = () => { + return { + proof: makeRecursiveProof(RECURSIVE_PROOF_LENGTH), + verificationKey: VerificationKeyAsFields.makeEmpty(), }; }; @@ -151,6 +163,7 @@ describe('Kernel Prover', () => { proofCreator.createProofInit.mockResolvedValue(createProofOutput([])); 
proofCreator.createProofInner.mockResolvedValue(createProofOutput([])); proofCreator.createProofTail.mockResolvedValue(createProofOutputFinal([])); + proofCreator.createAppCircuitProof.mockResolvedValue(createAppCircuitProofOutput()); prover = new KernelProver(oracle, proofCreator); }); diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts index 73612e9e751..e0f2a0fad4a 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts @@ -3,6 +3,7 @@ import { Fr, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, + NESTED_RECURSIVE_PROOF_LENGTH, PrivateCallData, PrivateKernelCircuitPublicInputs, PrivateKernelData, @@ -10,11 +11,12 @@ import { PrivateKernelInnerCircuitPrivateInputs, PrivateKernelTailCircuitPrivateInputs, type PrivateKernelTailCircuitPublicInputs, - type Proof, + type RECURSIVE_PROOF_LENGTH, + type RecursiveProof, type TxRequest, VK_TREE_HEIGHT, - VerificationKey, - makeEmptyProof, + VerificationKeyAsFields, + makeRecursiveProof, } from '@aztec/circuits.js'; import { padArrayEnd } from '@aztec/foundation/collection'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -22,7 +24,7 @@ import { assertLength } from '@aztec/foundation/serialize'; import { pushTestData } from '@aztec/foundation/testing'; import { type ExecutionResult, collectNoteHashLeafIndexMap, collectNullifiedNoteHashCounters } from '@aztec/simulator'; -import { type ProofCreator, type ProofOutput } from './interface/proof_creator.js'; +import { type KernelProofOutput, type ProofCreator } from './interface/proof_creator.js'; import { buildPrivateKernelInnerHints, buildPrivateKernelTailHints, @@ -54,14 +56,14 @@ export class KernelProver { async prove( txRequest: TxRequest, executionResult: ExecutionResult, - ): Promise> { + ): Promise> { const executionStack = [executionResult]; let firstIteration = true; - let 
previousVerificationKey = VerificationKey.makeFake(); - let output: ProofOutput = { + let output: KernelProofOutput = { publicInputs: PrivateKernelCircuitPublicInputs.empty(), - proof: makeEmptyProof(), + proof: makeRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + verificationKey: VerificationKeyAsFields.makeEmpty(), }; const noteHashLeafIndexMap = collectNoteHashLeafIndexMap(executionResult); @@ -76,7 +78,7 @@ export class KernelProver { ); const publicCallRequests = currentExecution.enqueuedPublicFunctionCalls.map(result => result.toCallRequest()); - const proof = await this.proofCreator.createAppCircuitProof( + const proofOutput = await this.proofCreator.createAppCircuitProof( currentExecution.partialWitness, currentExecution.acir, ); @@ -85,7 +87,8 @@ export class KernelProver { currentExecution, privateCallRequests, publicCallRequests, - proof, + proofOutput.proof, + proofOutput.verificationKey, ); const hints = buildPrivateKernelInnerHints( @@ -98,11 +101,11 @@ export class KernelProver { pushTestData('private-kernel-inputs-init', proofInput); output = await this.proofCreator.createProofInit(proofInput); } else { - const previousVkMembershipWitness = await this.oracle.getVkMembershipWitness(previousVerificationKey); + const previousVkMembershipWitness = await this.oracle.getVkMembershipWitness(output.verificationKey); const previousKernelData = new PrivateKernelData( output.publicInputs, output.proof, - previousVerificationKey, + output.verificationKey, Number(previousVkMembershipWitness.leafIndex), assertLength(previousVkMembershipWitness.siblingPath, VK_TREE_HEIGHT), ); @@ -111,14 +114,13 @@ export class KernelProver { output = await this.proofCreator.createProofInner(proofInput); } firstIteration = false; - previousVerificationKey = privateCallData.vk; } - const previousVkMembershipWitness = await this.oracle.getVkMembershipWitness(previousVerificationKey); + const previousVkMembershipWitness = await 
this.oracle.getVkMembershipWitness(output.verificationKey); const previousKernelData = new PrivateKernelData( output.publicInputs, output.proof, - previousVerificationKey, + output.verificationKey, Number(previousVkMembershipWitness.leafIndex), assertLength(previousVkMembershipWitness.siblingPath, VK_TREE_HEIGHT), ); @@ -138,10 +140,11 @@ export class KernelProver { } private async createPrivateCallData( - { callStackItem, vk }: ExecutionResult, + { callStackItem }: ExecutionResult, privateCallRequests: CallRequest[], publicCallRequests: CallRequest[], - proof: Proof, + proof: RecursiveProof, + vk: VerificationKeyAsFields, ) { const { contractAddress, functionData } = callStackItem; @@ -172,7 +175,7 @@ export class KernelProver { privateCallStack, publicCallStack, proof, - vk: VerificationKey.fromBuffer(vk), + vk, publicKeysHash, contractClassArtifactHash, contractClassPublicBytecodeCommitment, diff --git a/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts b/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts index ba408e4cf77..04af3cad3ed 100644 --- a/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts +++ b/yarn-project/pxe/src/kernel_prover/proving_data_oracle.ts @@ -8,7 +8,7 @@ import { type NOTE_HASH_TREE_HEIGHT, type Point, type VK_TREE_HEIGHT, - type VerificationKey, + type VerificationKeyAsFields, } from '@aztec/circuits.js'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; @@ -50,7 +50,7 @@ export interface ProvingDataOracle { * @param vk - The VerificationKey for which the membership witness is needed. * @returns A Promise that resolves to the MembershipWitness instance. */ - getVkMembershipWitness(vk: VerificationKey): Promise>; + getVkMembershipWitness(vk: VerificationKeyAsFields): Promise>; /** * Get the note membership witness for a note in the note hash tree at the given leaf index. 
diff --git a/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts b/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts index 4880e937eeb..6b3a29e72b8 100644 --- a/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts +++ b/yarn-project/pxe/src/kernel_prover/test/test_circuit_prover.ts @@ -1,20 +1,22 @@ import { type CircuitSimulationStats } from '@aztec/circuit-types/stats'; import { + NESTED_RECURSIVE_PROOF_LENGTH, type PrivateCircuitPublicInputs, type PrivateKernelCircuitPublicInputs, type PrivateKernelInitCircuitPrivateInputs, type PrivateKernelInnerCircuitPrivateInputs, type PrivateKernelTailCircuitPrivateInputs, type PrivateKernelTailCircuitPublicInputs, - Proof, - makeEmptyProof, + RECURSIVE_PROOF_LENGTH, + VerificationKeyAsFields, + makeRecursiveProof, } from '@aztec/circuits.js'; import { siloNoteHash } from '@aztec/circuits.js/hash'; import { createDebugLogger } from '@aztec/foundation/log'; import { elapsed } from '@aztec/foundation/timer'; import { executeInit, executeInner, executeTail, executeTailForPublic } from '@aztec/noir-protocol-circuits-types'; -import { type ProofCreator, type ProofOutput } from '../interface/proof_creator.js'; +import { type AppCircuitProofOutput, type KernelProofOutput, type ProofCreator } from '../interface/proof_creator.js'; /** * Test Proof Creator executes circuit simulations and provides fake proofs. 
@@ -32,7 +34,7 @@ export class TestProofCreator implements ProofCreator { public async createProofInit( privateInputs: PrivateKernelInitCircuitPrivateInputs, - ): Promise> { + ): Promise> { const [duration, result] = await elapsed(() => executeInit(privateInputs)); this.log.debug(`Simulated private kernel init`, { eventName: 'circuit-simulation', @@ -41,17 +43,12 @@ export class TestProofCreator implements ProofCreator { inputSize: privateInputs.toBuffer().length, outputSize: result.toBuffer().length, } satisfies CircuitSimulationStats); - const proof = makeEmptyProof(); - - return { - publicInputs: result, - proof: proof, - }; + return this.makeEmptyKernelProofOutput(result); } public async createProofInner( privateInputs: PrivateKernelInnerCircuitPrivateInputs, - ): Promise> { + ): Promise> { const [duration, result] = await elapsed(() => executeInner(privateInputs)); this.log.debug(`Simulated private kernel inner`, { eventName: 'circuit-simulation', @@ -60,17 +57,12 @@ export class TestProofCreator implements ProofCreator { inputSize: privateInputs.toBuffer().length, outputSize: result.toBuffer().length, } satisfies CircuitSimulationStats); - const proof = makeEmptyProof(); - - return { - publicInputs: result, - proof: proof, - }; + return this.makeEmptyKernelProofOutput(result); } public async createProofTail( privateInputs: PrivateKernelTailCircuitPrivateInputs, - ): Promise> { + ): Promise> { const isForPublic = privateInputs.isForPublic(); const [duration, result] = await elapsed(() => isForPublic ? 
executeTailForPublic(privateInputs) : executeTail(privateInputs), @@ -82,15 +74,23 @@ export class TestProofCreator implements ProofCreator { inputSize: privateInputs.toBuffer().length, outputSize: result.toBuffer().length, } satisfies CircuitSimulationStats); - const proof = makeEmptyProof(); + return this.makeEmptyKernelProofOutput(result); + } - return { - publicInputs: result, - proof: proof, + createAppCircuitProof(_1: Map, _2: Buffer): Promise { + const appCircuitProofOutput: AppCircuitProofOutput = { + proof: makeRecursiveProof(RECURSIVE_PROOF_LENGTH), + verificationKey: VerificationKeyAsFields.makeEmpty(), }; + return Promise.resolve(appCircuitProofOutput); } - createAppCircuitProof(_1: Map, _2: Buffer): Promise { - return Promise.resolve(new Proof(Buffer.alloc(0))); + private makeEmptyKernelProofOutput(publicInputs: PublicInputsType) { + const kernelProofOutput: KernelProofOutput = { + publicInputs, + proof: makeRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + verificationKey: VerificationKeyAsFields.makeEmpty(), + }; + return kernelProofOutput; } } diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 1f0e859fa8f..02ff03a95ee 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -678,7 +678,7 @@ export class PXEService implements PXE { const tx = new Tx( publicInputs, - proof, + proof.binaryProof, encryptedLogs, unencryptedLogs, enqueuedPublicFunctions, From 8079f601a23219ddd96f01064d0c31c6e8109471 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Thu, 9 May 2024 12:12:33 +0100 Subject: [PATCH 17/43] chore(dsl): Update backend gateCount command to query a Program in a single request (#6228) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Resolves https://github.com/AztecProtocol/aztec-packages/issues/6168 This PR does a very minor update to simply now go through the entire list of 
functions contained inside of a Program and generate a basic circuit report for each functions. Currently `nargo info` now takes in a JSON report that it works with instead of an individual printed gate count. This PR also does some initial work on making a gate report that is ready for noir-lang/noir-gates-diff. This is yet to be updated but has most of the initial skeleton needed to get a gates report for an entire workspace. Also, once https://github.com/noir-lang/noir/pull/4975 is merged and synced into this repo we can remove the `bb info` command and rename `bb gates` -> `bb info` Nargo info still works as expected: Screenshot 2024-05-08 at 2 55 32 PM --------- Co-authored-by: Tom French --- barretenberg/cpp/src/barretenberg/bb/main.cpp | 38 ++++++++++++++---- .../dsl/acir_format/acir_format.hpp | 2 + .../dsl/acir_format/acir_format.test.cpp | 6 +++ .../acir_format/acir_to_constraint_buf.hpp | 1 + .../acir_format/bigint_constraint.test.cpp | 5 +++ .../dsl/acir_format/block_constraint.test.cpp | 1 + .../dsl/acir_format/ec_operations.test.cpp | 1 + .../dsl/acir_format/ecdsa_secp256k1.test.cpp | 3 ++ .../dsl/acir_format/ecdsa_secp256r1.test.cpp | 4 ++ .../acir_format/poseidon2_constraint.test.cpp | 1 + .../acir_format/recursion_constraint.test.cpp | 2 + .../acir_format/sha256_constraint.test.cpp | 1 + noir-projects/gates_report.sh | 39 +++++++++++++++++++ noir/noir-repo/Cargo.lock | 1 + .../tooling/backend_interface/Cargo.toml | 1 + .../backend_interface/src/cli/gates.rs | 32 ++++++++------- .../tooling/backend_interface/src/cli/mod.rs | 2 + .../backend_interface/src/proof_system.rs | 9 +++-- .../mock_backend/src/gates_cmd.rs | 4 +- .../tooling/nargo_cli/src/cli/info_cmd.rs | 18 +++++---- 20 files changed, 139 insertions(+), 32 deletions(-) create mode 100755 noir-projects/gates_report.sh diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 9db639ea0a5..820cc522ad7 100644 --- 
a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -296,19 +296,43 @@ void prove(const std::string& bytecodePath, const std::string& witnessPath, cons * @brief Computes the number of Barretenberg specific gates needed to create a proof for the specific ACIR circuit * * Communication: - * - stdout: The number of gates is written to stdout + * - stdout: A JSON string of the number of ACIR opcodes and final backend circuit size * * @param bytecodePath Path to the file containing the serialized circuit */ void gateCount(const std::string& bytecodePath) { - auto constraint_system = get_constraint_system(bytecodePath); - acir_proofs::AcirComposer acir_composer(0, verbose); - acir_composer.create_circuit(constraint_system); - auto gate_count = acir_composer.get_total_circuit_size(); + // All circuit reports will be built into the string below + std::string functions_string = "{\"functions\": [\n "; + auto constraint_systems = get_constraint_systems(bytecodePath); + size_t i = 0; + for (auto constraint_system : constraint_systems) { + acir_proofs::AcirComposer acir_composer(0, verbose); + acir_composer.create_circuit(constraint_system); + auto circuit_size = acir_composer.get_total_circuit_size(); + + // Build individual circuit report + auto result_string = format("{\n \"acir_opcodes\": ", + constraint_system.num_acir_opcodes, + ",\n \"circuit_size\": ", + circuit_size, + "\n }"); + + // Attach a comma if we still circuit reports to generate + if (i != (constraint_systems.size() - 1)) { + result_string = format(result_string, ","); + } - writeUint64AsRawBytesToStdout(static_cast(gate_count)); - vinfo("gate count: ", gate_count); + functions_string = format(functions_string, result_string); + + i++; + } + functions_string = format(functions_string, "\n]}"); + + const char* jsonData = functions_string.c_str(); + size_t length = strlen(jsonData); + std::vector data(jsonData, jsonData + length); + 
writeRawBytesToStdout(data); } /** diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp index 8b7823260d0..9add17a1451 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp @@ -32,6 +32,8 @@ struct AcirFormat { // to be able to verify SNARKs on Ethereum. bool recursive; + uint32_t num_acir_opcodes; + std::vector public_inputs; std::vector logic_constraints; diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp index 2d23b057c64..038db2a28f9 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp @@ -32,6 +32,7 @@ TEST_F(AcirFormatTests, TestASingleConstraintNoPubInputs) AcirFormat constraint_system{ .varnum = 4, .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, @@ -149,6 +150,7 @@ TEST_F(AcirFormatTests, TestLogicGateFromNoirCircuit) AcirFormat constraint_system{ .varnum = 6, .recursive = false, + .num_acir_opcodes = 7, .public_inputs = { 1 }, .logic_constraints = { logic_constraint }, .range_constraints = { range_a, range_b }, @@ -218,6 +220,7 @@ TEST_F(AcirFormatTests, TestSchnorrVerifyPass) }; AcirFormat constraint_system{ .varnum = 81, .recursive = false, + .num_acir_opcodes = 75, .public_inputs = {}, .logic_constraints = {}, .range_constraints = range_constraints, @@ -314,6 +317,7 @@ TEST_F(AcirFormatTests, TestSchnorrVerifySmallRange) AcirFormat constraint_system{ .varnum = 81, .recursive = false, + .num_acir_opcodes = 75, .public_inputs = {}, .logic_constraints = {}, .range_constraints = range_constraints, @@ -429,6 +433,7 @@ TEST_F(AcirFormatTests, TestVarKeccak) AcirFormat constraint_system{ .varnum 
= 36, .recursive = false, + .num_acir_opcodes = 6, .public_inputs = {}, .logic_constraints = {}, .range_constraints = { range_a, range_b, range_c, range_d }, @@ -477,6 +482,7 @@ TEST_F(AcirFormatTests, TestKeccakPermutation) AcirFormat constraint_system{ .varnum = 51, .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp index 110087d40af..3e77b60d689 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp @@ -477,6 +477,7 @@ AcirFormat circuit_serde_to_acir_format(Program::Circuit const& circuit) // `varnum` is the true number of variables, thus we add one to the index which starts at zero af.varnum = circuit.current_witness_index + 1; af.recursive = circuit.recursive; + af.num_acir_opcodes = static_cast(circuit.opcodes.size()); af.public_inputs = join({ map(circuit.public_parameters.value, [](auto e) { return e.value; }), map(circuit.return_values.value, [](auto e) { return e.value; }) }); std::map block_id_to_block_constraint; diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp index 863737703ef..1cc86262bd1 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp @@ -169,6 +169,7 @@ TEST_F(BigIntTests, TestBigIntConstraintMultiple) AcirFormat constraint_system{ .varnum = static_cast(witness.size() + 1), .recursive = false, + .num_acir_opcodes = 5, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, @@ -238,6 +239,7 @@ TEST_F(BigIntTests, TestBigIntConstraintSimple) 
AcirFormat constraint_system{ .varnum = 5, .recursive = false, + .num_acir_opcodes = 3, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, @@ -292,6 +294,7 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse) AcirFormat constraint_system{ .varnum = static_cast(witness.size() + 1), .recursive = false, + .num_acir_opcodes = 5, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, @@ -350,6 +353,7 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse2) AcirFormat constraint_system{ .varnum = static_cast(witness.size() + 1), .recursive = false, + .num_acir_opcodes = 5, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, @@ -429,6 +433,7 @@ TEST_F(BigIntTests, TestBigIntDIV) AcirFormat constraint_system{ .varnum = 5, .recursive = false, + .num_acir_opcodes = 4, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp index 7cb3e5955bd..5d649d8feb3 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp @@ -111,6 +111,7 @@ TEST_F(UltraPlonkRAM, TestBlockConstraint) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 7, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp index fb676af0a8b..65be4aaae55 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp @@ -51,6 +51,7 @@ TEST_F(EcOperations, TestECOperations) AcirFormat constraint_system{ .varnum = static_cast(num_variables + 1), .recursive 
= false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp index 20dddfe4abe..61782002c85 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp @@ -91,6 +91,7 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintSucceed) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, @@ -141,6 +142,7 @@ TEST_F(ECDSASecp256k1, TestECDSACompilesForVerifier) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, @@ -186,6 +188,7 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintFail) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp index 6217149fdf0..de1d0931d8c 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp @@ -125,6 +125,7 @@ TEST(ECDSASecp256r1, test_hardcoded) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, @@ -177,6 +178,7 @@ TEST(ECDSASecp256r1, TestECDSAConstraintSucceed) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + 
.num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, @@ -227,6 +229,7 @@ TEST(ECDSASecp256r1, TestECDSACompilesForVerifier) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, @@ -272,6 +275,7 @@ TEST(ECDSASecp256r1, TestECDSAConstraintFail) AcirFormat constraint_system{ .varnum = static_cast(num_variables), .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp index d35a9d36974..4922c63cd69 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp @@ -31,6 +31,7 @@ TEST_F(Poseidon2Tests, TestPoseidon2Permutation) AcirFormat constraint_system{ .varnum = 9, .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp index 0b12a411951..b837f94ba2a 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp @@ -83,6 +83,7 @@ Builder create_inner_circuit() AcirFormat constraint_system{ .varnum = 6, .recursive = true, + .num_acir_opcodes = 7, .public_inputs = { 1, 2 }, .logic_constraints = { logic_constraint }, .range_constraints = { range_a, range_b }, @@ -241,6 +242,7 @@ Builder create_outer_circuit(std::vector& inner_circuits) AcirFormat constraint_system{ .varnum = static_cast(witness.size()), 
.recursive = false, + .num_acir_opcodes = static_cast(recursion_constraints.size()), .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp index 4b78a9550e7..5af032bedd1 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp @@ -33,6 +33,7 @@ TEST_F(Sha256Tests, TestSha256Compression) AcirFormat constraint_system{ .varnum = 34, .recursive = false, + .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, .range_constraints = {}, diff --git a/noir-projects/gates_report.sh b/noir-projects/gates_report.sh new file mode 100755 index 00000000000..affbf07d1f4 --- /dev/null +++ b/noir-projects/gates_report.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash +set -eu + +# TODO(https://github.com/noir-lang/noir/issues/4962): This script is still yet to be integrated with noir-lang/noir-gates-diff +# The script needs some slight updating as `nargo info` expects a complete JSON object, while this script expects a single object field +# representing a list of circuit reports for a program. +# The ACIR tests in barretenberg also expect every target bytecode to have the name `acir.gz` while this script expects the same name of the package +echo "Compile noir-protocol-circuits for gates report..." 
+cd noir-protocol-circuits +PROTOCOL_CIRCUITS_DIR=$PWD + +# Compile programs into artifacts that the backend expects +NARGO=${NARGO:-../../noir/noir-repo/target/release/nargo} +$NARGO compile --only-acir + +BB_BIN=${BB_BIN:-../../barretenberg/cpp/build/bin/bb} + +echo "{\"programs\": [" > gates_report.json + +# Bound for checking where to place last parentheses +NUM_ARTIFACTS=$(ls -1q "$PROTOCOL_CIRCUITS_DIR/target"/*.gz | wc -l) + +ITER="1" +for pathname in "$PROTOCOL_CIRCUITS_DIR/target"/*.gz; do + ARTIFACT_NAME=$(basename -s .gz "$pathname") + + echo "{\"package_name\": \"$ARTIFACT_NAME\"," >> gates_report.json + $BB_BIN gates -b "./target/$ARTIFACT_NAME.gz" >> gates_report.json + + if (($ITER == $NUM_ARTIFACTS)); then + echo "}" >> gates_report.json + else + echo "}, " >> gates_report.json + fi + + ITER=$(( $ITER + 1 )) +done + +echo "]}" >> gates_report.json \ No newline at end of file diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index a8c63c032aa..859579c077f 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -462,6 +462,7 @@ dependencies = [ "dirs", "flate2", "reqwest", + "serde", "serde_json", "tar", "tempfile", diff --git a/noir/noir-repo/tooling/backend_interface/Cargo.toml b/noir/noir-repo/tooling/backend_interface/Cargo.toml index f6b5d5d0132..b731c138c7d 100644 --- a/noir/noir-repo/tooling/backend_interface/Cargo.toml +++ b/noir/noir-repo/tooling/backend_interface/Cargo.toml @@ -13,6 +13,7 @@ license.workspace = true acvm.workspace = true dirs.workspace = true thiserror.workspace = true +serde.workspace = true serde_json.workspace = true bb_abstraction_leaks.workspace = true tracing.workspace = true diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/gates.rs b/noir/noir-repo/tooling/backend_interface/src/cli/gates.rs index aca05f0232a..9e12596bfd7 100644 --- a/noir/noir-repo/tooling/backend_interface/src/cli/gates.rs +++ b/noir/noir-repo/tooling/backend_interface/src/cli/gates.rs @@ -1,3 +1,4 @@ 
+use serde::Deserialize; use std::path::{Path, PathBuf}; use crate::BackendError; @@ -12,8 +13,19 @@ pub(crate) struct GatesCommand { pub(crate) bytecode_path: PathBuf, } +#[derive(Deserialize)] +struct GatesResponse { + functions: Vec, +} + +#[derive(Deserialize)] +pub struct CircuitReport { + pub acir_opcodes: u32, + pub circuit_size: u32, +} + impl GatesCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { + pub(crate) fn run(self, binary_path: &Path) -> Result, BackendError> { let output = std::process::Command::new(binary_path) .arg("gates") .arg("-c") @@ -25,19 +37,11 @@ impl GatesCommand { if !output.status.success() { return Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))); } - // Note: barretenberg includes the newline, so that subsequent prints to stdout - // are not on the same line as the gates output. - - const EXPECTED_BYTES: usize = 8; - let gates_bytes: [u8; EXPECTED_BYTES] = - output.stdout.as_slice().try_into().map_err(|_| { - BackendError::UnexpectedNumberOfBytes(EXPECTED_BYTES, output.stdout.clone()) - })?; - // Convert bytes to u64 in little-endian format - let value = u64::from_le_bytes(gates_bytes); + let gates_info: GatesResponse = + serde_json::from_slice(&output.stdout).expect("Backend should return valid json"); - Ok(value as u32) + Ok(gates_info.functions) } } @@ -58,7 +62,9 @@ fn gate_command() -> Result<(), BackendError> { let output = gate_command.run(backend.binary_path())?; // Mock backend always returns zero gates. 
- assert_eq!(output, 0); + assert_eq!(output.len(), 1); + assert_eq!(output[0].acir_opcodes, 123); + assert_eq!(output[0].circuit_size, 125); Ok(()) } diff --git a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs b/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs index df43bd5cc2f..16a9517e129 100644 --- a/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs +++ b/noir/noir-repo/tooling/backend_interface/src/cli/mod.rs @@ -18,6 +18,8 @@ pub(crate) use version::VersionCommand; pub(crate) use vk_as_fields::VkAsFieldsCommand; pub(crate) use write_vk::WriteVkCommand; +pub(crate) use gates::CircuitReport; + #[test] fn no_command_provided_works() -> Result<(), crate::BackendError> { // This is a simple test to check that the binaries work diff --git a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs b/noir/noir-repo/tooling/backend_interface/src/proof_system.rs index 20a6dcf70f1..ffd46acef0e 100644 --- a/noir/noir-repo/tooling/backend_interface/src/proof_system.rs +++ b/noir/noir-repo/tooling/backend_interface/src/proof_system.rs @@ -11,13 +11,16 @@ use tempfile::tempdir; use tracing::warn; use crate::cli::{ - GatesCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, VkAsFieldsCommand, - WriteVkCommand, + CircuitReport, GatesCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, + VkAsFieldsCommand, WriteVkCommand, }; use crate::{Backend, BackendError}; impl Backend { - pub fn get_exact_circuit_size(&self, program: &Program) -> Result { + pub fn get_exact_circuit_sizes( + &self, + program: &Program, + ) -> Result, BackendError> { let binary_path = self.assert_binary_exists()?; self.assert_correct_version()?; diff --git a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs index 3cc397d3292..0cebfbca42d 100644 --- a/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs +++ 
b/noir/noir-repo/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs @@ -14,5 +14,7 @@ pub(crate) struct GatesCommand { pub(crate) fn run(args: GatesCommand) { assert!(args.bytecode_path.is_file(), "Could not find bytecode file at provided path"); - std::io::stdout().write_all(&0u64.to_le_bytes()).unwrap(); + let response: &str = r#"{ "functions": [{"acir_opcodes": 123, "circuit_size": 125 }] }"#; + + std::io::stdout().write_all(response.as_bytes()).unwrap(); } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs index 1ae2d5db104..f8f645d3c3a 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs @@ -1,6 +1,6 @@ use std::collections::HashMap; -use acvm::acir::circuit::{ExpressionWidth, Program}; +use acvm::acir::circuit::ExpressionWidth; use backend_interface::BackendError; use clap::Args; use iter_extended::vecmap; @@ -283,10 +283,15 @@ impl From for Vec { fn count_opcodes_and_gates_in_program( backend: &Backend, - compiled_program: ProgramArtifact, + mut compiled_program: ProgramArtifact, package: &Package, expression_width: ExpressionWidth, ) -> Result { + // Unconstrained functions do not matter to a backend circuit count so we clear them + // before sending a serialized program to the backend + compiled_program.bytecode.unconstrained_functions.clear(); + + let program_circuit_sizes = backend.get_exact_circuit_sizes(&compiled_program.bytecode)?; let functions = compiled_program .bytecode .functions @@ -295,12 +300,9 @@ fn count_opcodes_and_gates_in_program( .map(|(i, function)| -> Result<_, BackendError> { Ok(FunctionInfo { name: compiled_program.names[i].clone(), + // Required while mock backend doesn't return correct circuit size. 
acir_opcodes: function.opcodes.len(), - // Unconstrained functions do not matter to a backend circuit count so we pass nothing here - circuit_size: backend.get_exact_circuit_size(&Program { - functions: vec![function], - unconstrained_functions: Vec::new(), - })?, + circuit_size: program_circuit_sizes[i].circuit_size, }) }) .collect::>()?; @@ -321,7 +323,7 @@ fn count_opcodes_and_gates_in_contract( name: function.name, // TODO(https://github.com/noir-lang/noir/issues/4720) acir_opcodes: function.bytecode.functions[0].opcodes.len(), - circuit_size: backend.get_exact_circuit_size(&function.bytecode)?, + circuit_size: backend.get_exact_circuit_sizes(&function.bytecode)?[0].circuit_size, }) }) .collect::>()?; From 8e111f8bab5a0348fe8c7185f89e979541f91a67 Mon Sep 17 00:00:00 2001 From: Ilyas Ridhuan Date: Thu, 9 May 2024 12:16:05 +0100 Subject: [PATCH 18/43] feat: div opcode (#6053) Please read [contributing guidelines](CONTRIBUTING.md) and remove this line. --------- Co-authored-by: Jean M <132435771+jeanmon@users.noreply.github.com> --- barretenberg/cpp/pil/avm/avm_alu.pil | 123 +++++- barretenberg/cpp/pil/avm/avm_main.pil | 35 +- .../relations/generated/avm/avm_alu.hpp | 318 +++++++++++-- .../relations/generated/avm/avm_main.hpp | 28 +- .../relations/generated/avm/declare_views.hpp | 44 ++ .../generated/avm/lookup_div_u16_0.hpp | 166 +++++++ .../generated/avm/lookup_div_u16_1.hpp | 166 +++++++ .../generated/avm/lookup_div_u16_2.hpp | 166 +++++++ .../generated/avm/lookup_div_u16_3.hpp | 166 +++++++ .../generated/avm/lookup_div_u16_4.hpp | 166 +++++++ .../generated/avm/lookup_div_u16_5.hpp | 166 +++++++ .../generated/avm/lookup_div_u16_6.hpp | 166 +++++++ .../generated/avm/lookup_div_u16_7.hpp | 166 +++++++ .../vm/avm_trace/avm_alu_trace.cpp | 111 ++++- .../vm/avm_trace/avm_alu_trace.hpp | 16 + .../vm/avm_trace/avm_execution.cpp | 7 + .../barretenberg/vm/avm_trace/avm_trace.cpp | 120 ++++- .../barretenberg/vm/avm_trace/avm_trace.hpp | 3 + 
.../vm/generated/avm_circuit_builder.hpp | 116 ++++- .../barretenberg/vm/generated/avm_flavor.hpp | 417 +++++++++++++++++- .../barretenberg/vm/generated/avm_prover.cpp | 79 ++++ .../vm/generated/avm_verifier.cpp | 68 +++ .../vm/tests/avm_arithmetic.test.cpp | 90 +++- .../vm/tests/avm_bitwise.test.cpp | 158 +++---- .../vm/tests/avm_comparison.test.cpp | 11 - .../barretenberg/vm/tests/helpers.test.cpp | 12 + .../barretenberg/vm/tests/helpers.test.hpp | 4 + 27 files changed, 2897 insertions(+), 191 deletions(-) create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_0.hpp create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_1.hpp create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_2.hpp create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_3.hpp create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_4.hpp create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_5.hpp create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_6.hpp create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_7.hpp diff --git a/barretenberg/cpp/pil/avm/avm_alu.pil b/barretenberg/cpp/pil/avm/avm_alu.pil index 35771b3fb48..5d6db0544ba 100644 --- a/barretenberg/cpp/pil/avm/avm_alu.pil +++ b/barretenberg/cpp/pil/avm/avm_alu.pil @@ -64,7 +64,7 @@ namespace avm_alu(256); pol commit cf; // Compute predicate telling whether there is a row entry in the ALU table. 
- alu_sel = op_add + op_sub + op_mul + op_not + op_eq + op_cast + op_lt + op_lte + op_shr + op_shl; + alu_sel = op_add + op_sub + op_mul + op_not + op_eq + op_cast + op_lt + op_lte + op_shr + op_shl + op_div; cmp_sel = op_lt + op_lte; shift_sel = op_shl + op_shr; @@ -317,9 +317,9 @@ namespace avm_alu(256); // First condition is if borrow = 0, second condition is if borrow = 1 // This underflow check is done by the 128-bit check that is performed on each of these lo and hi limbs. #[SUB_LO_1] - (p_sub_a_lo - (53438638232309528389504892708671455232 - a_lo + p_a_borrow * 2 ** 128)) * (cmp_sel + op_cast) = 0; + (p_sub_a_lo - (53438638232309528389504892708671455232 - a_lo + p_a_borrow * 2 ** 128)) * (cmp_sel + op_cast + op_div_std) = 0; #[SUB_HI_1] - (p_sub_a_hi - (64323764613183177041862057485226039389 - a_hi - p_a_borrow)) * (cmp_sel + op_cast) = 0; + (p_sub_a_hi - (64323764613183177041862057485226039389 - a_hi - p_a_borrow)) * (cmp_sel + op_cast + op_div_std) = 0; pol commit p_sub_b_lo; pol commit p_sub_b_hi; @@ -438,13 +438,13 @@ namespace avm_alu(256); cmp_rng_ctr * ((1 - rng_chk_sel) * (1 - op_eq_diff_inv) + op_eq_diff_inv) - rng_chk_sel = 0; // We perform a range check if we have some range checks remaining or we are performing a comparison op - pol RNG_CHK_OP = rng_chk_sel + cmp_sel + op_cast + op_cast_prev + shift_lt_bit_len; + pol RNG_CHK_OP = rng_chk_sel + cmp_sel + op_cast + op_cast_prev + shift_lt_bit_len + op_div; pol commit rng_chk_lookup_selector; // TODO: Possible optimisation here if we swap the op_shl and op_shr with shift_lt_bit_len. // Shift_lt_bit_len is a more restrictive form therefore we can avoid performing redundant range checks when we know the result == 0. 
#[RNG_CHK_LOOKUP_SELECTOR]
-    rng_chk_lookup_selector' = cmp_sel' + rng_chk_sel' + op_add' + op_sub' + op_mul' + op_mul * u128_tag + op_cast' + op_cast_prev' + op_shl' + op_shr';
+    rng_chk_lookup_selector' = cmp_sel' + rng_chk_sel' + op_add' + op_sub' + op_mul' + op_mul * u128_tag + op_cast' + op_cast_prev' + op_shl' + op_shr' + op_div';
 
     // Perform 128-bit range check on lo part
     #[LOWER_CMP_RNG_CHK]
@@ -622,3 +622,116 @@
     #[SHL_OUTPUT]
     op_shl * (ic - (b_lo * two_pow_s * shift_lt_bit_len)) = 0;
+    // ========= INTEGER DIVISION ===============================
+    // Operands: ia contains the dividend, ib contains the divisor, and ic contains the quotient (i.e. the result).
+    // All operands are restricted to be up to 128 bits.
+    // The logic for integer division is to assert the correctness of this relationship:
+    // dividend - remainder = divisor * quotient ==> ia - remainder = ib * ic; where remainder < ib
+    // We do this using the following steps
+    // (1) The only non-trivial division is the situation where ia > ib && ib > 0
+    //     (a) if ia == ib => ic = 1 and remainder = 0 --> we can handle this as part of the standard division
+    //     (b) if ia < ib => ic = 0 and remainder = ia --> isolating this case eliminates the risk of ia - remainder underflowing as remainder < ib < ia
+    //     (c) if ib == 0 => error_tag = 1 --> Handled in main trace
+    // (2) Given ib and ic are restricted to U128, at most ib * ic will produce a 256-bit number.
+    // (3) We use the primality check from cmp to check that this product has not overflowed the field.
+    //     The Primality check takes a field element as input and outputs two 128-bit limbs.
+    //     i.e. it checks that the field element, represented with two 128-bit limbs lies in [0, p).
+ // (a) Given x, PC(x) -> [x_lo, x_hi], where x_lo < 2**128 && x_hi < 2**128 && x == x_lo + x_hi * 2**128
+ // (b) Additionally produces a witness that x < (p - 1)
+ // p_sub_x_lo = p_lo - x_lo + borrow * 2**128 < 2**128
+ // p_sub_x_hi = p_hi - x_hi - borrow < 2**128
+ // (c) Range checks over 128-bits are applied to x_lo, x_hi, p_sub_x_lo, and p_sub_x_hi.
+
+ // Range check the remainder < divisor.
+ pol commit remainder;
+ // The op_div boolean must be set based on which division case it is.
+ op_div = op_div_std + op_div_a_lt_b;
+
+ // ======= Handling ia < ib =====
+ // Boolean if ia < ib ==> ic = 0;
+ pol commit op_div_a_lt_b;
+ op_div_a_lt_b * (1 - op_div_a_lt_b) = 0;
+ // To show this, we constrain ib - ia - 1 to be within 128 bits.
+ // Since we need a range check we use the existing a_lo column that is range checked over 128 bits.
+ op_div_a_lt_b * (a_lo - (ib - ia - 1)) = 0;
+ op_div_a_lt_b * ic = 0; // ic = 0
+ op_div_a_lt_b * (ia - remainder) = 0; // remainder = ia, might not be needed.
+
+
+ // ====== Handling ia >= ib =====
+ pol commit op_div_std;
+ op_div_std * (1 - op_div_std) = 0;
+ pol commit divisor_lo; // b
+ pol commit divisor_hi;
+ op_div_std * (ib - divisor_lo - 2**64 * divisor_hi) = 0;
+ pol commit quotient_lo; // c
+ pol commit quotient_hi;
+ op_div_std * (ic - quotient_lo - 2**64 * quotient_hi) = 0;
+
+ // Multiplying the limbs gives us the following relations.
+ // (1) divisor_lo * quotient_lo --> Represents the bottom 128 bits of the result, i.e. values between [0, 2**128).
+ // (2) divisor_lo * quotient_hi + quotient_lo * divisor_hi --> Represents the middle 128 bits of the result, i.e. values between [2**64, 2**192)
+ // (3) divisor_hi * quotient_hi --> Represents the topmost 128 bits of the result, i.e. values between [2**128, 2**256).
+
+ // We simplify (2) by further decomposing it into two limbs of 64 bits and adding the upper 64 bits to (3)
+ // divisor_lo * quotient_hi + quotient_lo * divisor_hi = partial_prod_lo + 2**64 * partial_prod_hi
+ // Need to range check that these are 64 bits
+ pol commit partial_prod_lo;
+ pol commit partial_prod_hi;
+ divisor_hi * quotient_lo + divisor_lo * quotient_hi = partial_prod_lo + 2**64 * partial_prod_hi;
+
+ pol PRODUCT = divisor_lo * quotient_lo + 2**64 * partial_prod_lo + 2**128 * (partial_prod_hi + divisor_hi * quotient_hi);
+
+ // a_lo and a_hi contain the lo and hi limbs of PRODUCT
+ // p_sub_a_lo and p_sub_a_hi contain the primality checks
+ #[ALU_PROD_DIV]
+ op_div_std * (PRODUCT - (a_lo + 2 ** 128 * a_hi)) = 0;
+ // Range checks already performed via a_lo and a_hi
+ // Primality checks already performed above via p_sub_a_lo and p_sub_a_hi
+
+ // Range check remainder < ib and put the value in b_hi, it has to fit into a 128 bit range check
+ #[REMAINDER_RANGE_CHK]
+ op_div_std * (b_hi - (ib - remainder - 1)) = 0;
+
+ // We need to perform 3 x 256-bit range checks: (a_lo, a_hi), (b_lo, b_hi), and (p_sub_a_lo, p_sub_a_hi)
+ // One range check happens in-line with the division
+ #[CMP_CTR_REL_3]
+ (cmp_rng_ctr' - 2) * op_div_std = 0;
+
+ // If we have more range checks left we cannot do more division operations that might truncate the steps
+ rng_chk_sel * op_div_std = 0;
+
+ // Check PRODUCT = ia - remainder
+ #[DIVISION_RELATION]
+ op_div_std * (PRODUCT - (ia - remainder)) = 0;
+
+ // === DIVISION 64-BIT RANGE CHECKS
+ // 64-bit decompositions and implicit 64-bit range checks for each limb,
+ // TODO: We need extra slice registers because we are performing an additional 64-bit range check in the same row, look into re-using old columns or refactoring
+ // range checks to be more modular.
+ // boolean to account for the division-specific 64-bit range checks.
+ pol commit div_rng_chk_selector;
+ div_rng_chk_selector * (1 - div_rng_chk_selector) = 0;
+ // div_rng_chk_selector && div_rng_chk_selector' = 1 if op_div_std = 1
+ div_rng_chk_selector * div_rng_chk_selector' = op_div_std;
+
+ pol commit div_u16_r0;
+ pol commit div_u16_r1;
+ pol commit div_u16_r2;
+ pol commit div_u16_r3;
+ pol commit div_u16_r4;
+ pol commit div_u16_r5;
+ pol commit div_u16_r6;
+ pol commit div_u16_r7;
+
+ divisor_lo = op_div_std * (div_u16_r0 + div_u16_r1 * 2**16 + div_u16_r2 * 2**32 + div_u16_r3 * 2**48);
+ divisor_hi = op_div_std * (div_u16_r4 + div_u16_r5 * 2**16 + div_u16_r6 * 2**32 + div_u16_r7 * 2**48);
+ quotient_lo = op_div_std * (div_u16_r0' + div_u16_r1' * 2**16 + div_u16_r2' * 2**32 + div_u16_r3' * 2**48);
+ quotient_hi = op_div_std * (div_u16_r4' + div_u16_r5' * 2**16 + div_u16_r6' * 2**32 + div_u16_r7' * 2**48);
+
+ // We need an extra 128 bits to do 2 more 64-bit range checks. We use b_lo (128 bits) to store partial_prod_lo (64 bits) and partial_prod_hi (64 bits).
+ // Use a shift to access the slices (b_lo is moved into the alu slice registers on the next row anyways as part of the SHIFT_RELS_0 relations)
+ pol NEXT_SUM_64_LO = u8_r0' + u8_r1' * 2**8 + u16_r0' * 2**16 + u16_r1' * 2**32 + u16_r2' * 2**48;
+ pol NEXT_SUM_128_HI = u16_r3' + u16_r4' * 2**16 + u16_r5' * 2**32 + u16_r6' * 2**48;
+ partial_prod_lo = op_div_std * NEXT_SUM_64_LO;
+ partial_prod_hi = op_div_std * NEXT_SUM_128_HI;
diff --git a/barretenberg/cpp/pil/avm/avm_main.pil b/barretenberg/cpp/pil/avm/avm_main.pil
index 4306643acf3..8d9f3010ec4 100644
--- a/barretenberg/cpp/pil/avm/avm_main.pil
+++ b/barretenberg/cpp/pil/avm/avm_main.pil
@@ -197,15 +197,16 @@ namespace avm_main(256);
#[SUBOP_FDIV]
sel_op_fdiv * (1 - op_err) * (ic * ib - ia) = 0;
- // When sel_op_fdiv == 1, we want ib == 0 <==> op_err == 1
+ // When sel_op_fdiv == 1 or sel_op_div == 1, we want ib == 0 <==> op_err == 1
// This can be achieved with the 2 following relations.
// inv is an extra witness to show that we can invert ib, i.e., inv = ib^(-1) // If ib == 0, we have to set inv = 1 to satisfy the second relation, // because op_err == 1 from the first relation. + // TODO: Update the name of these relations once negative tests are updated #[SUBOP_FDIV_ZERO_ERR1] - sel_op_fdiv * (ib * inv - 1 + op_err) = 0; + (sel_op_fdiv + sel_op_div) * (ib * inv - 1 + op_err) = 0; #[SUBOP_FDIV_ZERO_ERR2] - sel_op_fdiv * op_err * (1 - inv) = 0; + (sel_op_fdiv + sel_op_div) * op_err * (1 - inv) = 0; // Enforcement that instruction tags are FF (tag constant 6). // TODO: These 2 conditions might be removed and enforced through @@ -222,7 +223,7 @@ namespace avm_main(256); // that exactly one sel_op_XXX must be true. // At this time, we have only division producing an error. #[SUBOP_ERROR_RELEVANT_OP] - op_err * (sel_op_fdiv - 1) = 0; + op_err * ((sel_op_fdiv + sel_op_div) - 1) = 0; // TODO: constraint that we stop execution at the first error (tag_err or op_err) // An error can only happen at the last sub-operation row. @@ -322,7 +323,7 @@ namespace avm_main(256); // Predicate to activate the copy of intermediate registers to ALU table. If tag_err == 1, // the operation is not copied to the ALU table. 
- alu_sel = ALU_ALL_SEL * (1 - tag_err); + alu_sel = ALU_ALL_SEL * (1 - tag_err) * (1 - op_err); // Dispatch the correct in_tag for alu ALU_R_TAG_SEL * (alu_in_tag - r_in_tag) = 0; @@ -472,3 +473,27 @@ namespace avm_main(256); #[LOOKUP_U16_14] avm_alu.rng_chk_lookup_selector {avm_alu.u16_r14 } in sel_rng_16 { clk }; + // ==== Additional row range checks for division + #[LOOKUP_DIV_U16_0] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r0} in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_1] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r1 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_2] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r2 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_3] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r3 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_4] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r4 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_5] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r5 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_6] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r6 } in sel_rng_16 { clk }; + + #[LOOKUP_DIV_U16_7] + avm_alu.div_rng_chk_selector {avm_alu.div_u16_r7 } in sel_rng_16 { clk }; diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_alu.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_alu.hpp index 95ced4b652b..2022d640185 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_alu.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_alu.hpp @@ -23,6 +23,26 @@ template struct Avm_aluRow { FF avm_alu_cmp_rng_ctr_shift{}; FF avm_alu_cmp_sel{}; FF avm_alu_cmp_sel_shift{}; + FF avm_alu_div_rng_chk_selector{}; + FF avm_alu_div_rng_chk_selector_shift{}; + FF avm_alu_div_u16_r0{}; + FF avm_alu_div_u16_r0_shift{}; + FF avm_alu_div_u16_r1{}; + FF avm_alu_div_u16_r1_shift{}; + FF avm_alu_div_u16_r2{}; + FF avm_alu_div_u16_r2_shift{}; + FF avm_alu_div_u16_r3{}; + FF avm_alu_div_u16_r3_shift{}; + FF avm_alu_div_u16_r4{}; + FF 
avm_alu_div_u16_r4_shift{}; + FF avm_alu_div_u16_r5{}; + FF avm_alu_div_u16_r5_shift{}; + FF avm_alu_div_u16_r6{}; + FF avm_alu_div_u16_r6_shift{}; + FF avm_alu_div_u16_r7{}; + FF avm_alu_div_u16_r7_shift{}; + FF avm_alu_divisor_hi{}; + FF avm_alu_divisor_lo{}; FF avm_alu_ff_tag{}; FF avm_alu_ia{}; FF avm_alu_ib{}; @@ -34,6 +54,10 @@ template struct Avm_aluRow { FF avm_alu_op_cast_prev{}; FF avm_alu_op_cast_prev_shift{}; FF avm_alu_op_cast_shift{}; + FF avm_alu_op_div{}; + FF avm_alu_op_div_a_lt_b{}; + FF avm_alu_op_div_shift{}; + FF avm_alu_op_div_std{}; FF avm_alu_op_eq{}; FF avm_alu_op_eq_diff_inv{}; FF avm_alu_op_lt{}; @@ -57,6 +81,11 @@ template struct Avm_aluRow { FF avm_alu_p_sub_b_hi_shift{}; FF avm_alu_p_sub_b_lo{}; FF avm_alu_p_sub_b_lo_shift{}; + FF avm_alu_partial_prod_hi{}; + FF avm_alu_partial_prod_lo{}; + FF avm_alu_quotient_hi{}; + FF avm_alu_quotient_lo{}; + FF avm_alu_remainder{}; FF avm_alu_res_hi{}; FF avm_alu_res_lo{}; FF avm_alu_rng_chk_lookup_selector_shift{}; @@ -228,6 +257,18 @@ inline std::string get_relation_label_avm_alu(int index) case 64: return "SHL_OUTPUT"; + + case 74: + return "ALU_PROD_DIV"; + + case 75: + return "REMAINDER_RANGE_CHK"; + + case 76: + return "CMP_CTR_REL_3"; + + case 78: + return "DIVISION_RELATION"; } return std::to_string(index); } @@ -236,9 +277,10 @@ template class avm_aluImpl { public: using FF = FF_; - static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ - 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 5, 5, 5, 5, 6, 6, 8, 3, 4, 4, 5, 4, 4, 3, 4, 3, 3, 4, 3, 6, - 5, 3, 3, 3, 3, 4, 3, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 2, 5, 3, 3, 4, 4, 4, 4, 4, 3, 5, 5, 4, 5, 5, + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ + 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 5, 5, 5, 5, 6, 6, 8, 3, 4, 4, 5, 4, 4, 3, 4, 3, + 3, 4, 3, 6, 5, 3, 3, 3, 3, 4, 3, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 2, 5, 3, 3, 4, 4, 4, 4, + 4, 3, 5, 5, 4, 5, 5, 2, 3, 3, 3, 3, 3, 4, 4, 3, 5, 3, 3, 3, 5, 3, 3, 4, 4, 4, 4, 4, 4, }; template @@ -252,13 +294,15 @@ 
template class avm_aluImpl { { Avm_DECLARE_VIEWS(0); - auto tmp = (avm_alu_alu_sel - - (((((((((avm_alu_op_add + avm_alu_op_sub) + avm_alu_op_mul) + avm_alu_op_not) + avm_alu_op_eq) + - avm_alu_op_cast) + - avm_alu_op_lt) + - avm_alu_op_lte) + - avm_alu_op_shr) + - avm_alu_op_shl)); + auto tmp = + (avm_alu_alu_sel - + ((((((((((avm_alu_op_add + avm_alu_op_sub) + avm_alu_op_mul) + avm_alu_op_not) + avm_alu_op_eq) + + avm_alu_op_cast) + + avm_alu_op_lt) + + avm_alu_op_lte) + + avm_alu_op_shr) + + avm_alu_op_shl) + + avm_alu_op_div)); tmp *= scaling_factor; std::get<0>(evals) += tmp; } @@ -575,7 +619,7 @@ template class avm_aluImpl { auto tmp = ((avm_alu_p_sub_a_lo - ((-avm_alu_a_lo + FF(uint256_t{ 4891460686036598784UL, 2896914383306846353UL, 0UL, 0UL })) + (avm_alu_p_a_borrow * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL })))) * - (avm_alu_cmp_sel + avm_alu_op_cast)); + ((avm_alu_cmp_sel + avm_alu_op_cast) + avm_alu_op_div_std)); tmp *= scaling_factor; std::get<27>(evals) += tmp; } @@ -586,7 +630,7 @@ template class avm_aluImpl { auto tmp = ((avm_alu_p_sub_a_hi - ((-avm_alu_a_hi + FF(uint256_t{ 13281191951274694749UL, 3486998266802970665UL, 0UL, 0UL })) - avm_alu_p_a_borrow)) * - (avm_alu_cmp_sel + avm_alu_op_cast)); + ((avm_alu_cmp_sel + avm_alu_op_cast) + avm_alu_op_div_std)); tmp *= scaling_factor; std::get<28>(evals) += tmp; } @@ -694,14 +738,15 @@ template class avm_aluImpl { Avm_DECLARE_VIEWS(39); auto tmp = (avm_alu_rng_chk_lookup_selector_shift - - (((((((((avm_alu_cmp_sel_shift + avm_alu_rng_chk_sel_shift) + avm_alu_op_add_shift) + - avm_alu_op_sub_shift) + - avm_alu_op_mul_shift) + - (avm_alu_op_mul * avm_alu_u128_tag)) + - avm_alu_op_cast_shift) + - avm_alu_op_cast_prev_shift) + - avm_alu_op_shl_shift) + - avm_alu_op_shr_shift)); + ((((((((((avm_alu_cmp_sel_shift + avm_alu_rng_chk_sel_shift) + avm_alu_op_add_shift) + + avm_alu_op_sub_shift) + + avm_alu_op_mul_shift) + + (avm_alu_op_mul * avm_alu_u128_tag)) + + avm_alu_op_cast_shift) + + avm_alu_op_cast_prev_shift) + 
+ avm_alu_op_shl_shift) + + avm_alu_op_shr_shift) + + avm_alu_op_div_shift)); tmp *= scaling_factor; std::get<39>(evals) += tmp; } @@ -709,16 +754,17 @@ template class avm_aluImpl { { Avm_DECLARE_VIEWS(40); - auto tmp = - (avm_alu_a_lo - (((((((((avm_alu_u8_r0 + (avm_alu_u8_r1 * FF(256))) + (avm_alu_u16_r0 * FF(65536))) + - (avm_alu_u16_r1 * FF(4294967296UL))) + - (avm_alu_u16_r2 * FF(281474976710656UL))) + - (avm_alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + - (avm_alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + - (avm_alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + - (avm_alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))) * - ((((avm_alu_rng_chk_sel + avm_alu_cmp_sel) + avm_alu_op_cast) + avm_alu_op_cast_prev) + - avm_alu_shift_lt_bit_len))); + auto tmp = (avm_alu_a_lo - + (((((((((avm_alu_u8_r0 + (avm_alu_u8_r1 * FF(256))) + (avm_alu_u16_r0 * FF(65536))) + + (avm_alu_u16_r1 * FF(4294967296UL))) + + (avm_alu_u16_r2 * FF(281474976710656UL))) + + (avm_alu_u16_r3 * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + (avm_alu_u16_r4 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + + (avm_alu_u16_r5 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + + (avm_alu_u16_r6 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))) * + (((((avm_alu_rng_chk_sel + avm_alu_cmp_sel) + avm_alu_op_cast) + avm_alu_op_cast_prev) + + avm_alu_shift_lt_bit_len) + + avm_alu_op_div))); tmp *= scaling_factor; std::get<40>(evals) += tmp; } @@ -733,8 +779,9 @@ template class avm_aluImpl { (avm_alu_u16_r12 * FF(uint256_t{ 0UL, 65536UL, 0UL, 0UL }))) + (avm_alu_u16_r13 * FF(uint256_t{ 0UL, 4294967296UL, 0UL, 0UL }))) + (avm_alu_u16_r14 * FF(uint256_t{ 0UL, 281474976710656UL, 0UL, 0UL }))) * - ((((avm_alu_rng_chk_sel + avm_alu_cmp_sel) + avm_alu_op_cast) + avm_alu_op_cast_prev) + - avm_alu_shift_lt_bit_len))); + (((((avm_alu_rng_chk_sel + avm_alu_cmp_sel) + avm_alu_op_cast) + avm_alu_op_cast_prev) + + avm_alu_shift_lt_bit_len) + + avm_alu_op_div))); tmp *= 
scaling_factor; std::get<41>(evals) += tmp; } @@ -958,6 +1005,213 @@ template class avm_aluImpl { tmp *= scaling_factor; std::get<64>(evals) += tmp; } + // Contribution 65 + { + Avm_DECLARE_VIEWS(65); + + auto tmp = (avm_alu_op_div - (avm_alu_op_div_std + avm_alu_op_div_a_lt_b)); + tmp *= scaling_factor; + std::get<65>(evals) += tmp; + } + // Contribution 66 + { + Avm_DECLARE_VIEWS(66); + + auto tmp = (avm_alu_op_div_a_lt_b * (-avm_alu_op_div_a_lt_b + FF(1))); + tmp *= scaling_factor; + std::get<66>(evals) += tmp; + } + // Contribution 67 + { + Avm_DECLARE_VIEWS(67); + + auto tmp = (avm_alu_op_div_a_lt_b * (avm_alu_a_lo - ((avm_alu_ib - avm_alu_ia) - FF(1)))); + tmp *= scaling_factor; + std::get<67>(evals) += tmp; + } + // Contribution 68 + { + Avm_DECLARE_VIEWS(68); + + auto tmp = (avm_alu_op_div_a_lt_b * avm_alu_ic); + tmp *= scaling_factor; + std::get<68>(evals) += tmp; + } + // Contribution 69 + { + Avm_DECLARE_VIEWS(69); + + auto tmp = (avm_alu_op_div_a_lt_b * (avm_alu_ia - avm_alu_remainder)); + tmp *= scaling_factor; + std::get<69>(evals) += tmp; + } + // Contribution 70 + { + Avm_DECLARE_VIEWS(70); + + auto tmp = (avm_alu_op_div_std * (-avm_alu_op_div_std + FF(1))); + tmp *= scaling_factor; + std::get<70>(evals) += tmp; + } + // Contribution 71 + { + Avm_DECLARE_VIEWS(71); + + auto tmp = (avm_alu_op_div_std * ((avm_alu_ib - avm_alu_divisor_lo) - + (avm_alu_divisor_hi * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL })))); + tmp *= scaling_factor; + std::get<71>(evals) += tmp; + } + // Contribution 72 + { + Avm_DECLARE_VIEWS(72); + + auto tmp = (avm_alu_op_div_std * ((avm_alu_ic - avm_alu_quotient_lo) - + (avm_alu_quotient_hi * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL })))); + tmp *= scaling_factor; + std::get<72>(evals) += tmp; + } + // Contribution 73 + { + Avm_DECLARE_VIEWS(73); + + auto tmp = (((avm_alu_divisor_hi * avm_alu_quotient_lo) + (avm_alu_divisor_lo * avm_alu_quotient_hi)) - + (avm_alu_partial_prod_lo + (avm_alu_partial_prod_hi * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL 
})))); + tmp *= scaling_factor; + std::get<73>(evals) += tmp; + } + // Contribution 74 + { + Avm_DECLARE_VIEWS(74); + + auto tmp = (avm_alu_op_div_std * ((((avm_alu_divisor_lo * avm_alu_quotient_lo) + + (avm_alu_partial_prod_lo * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + ((avm_alu_partial_prod_hi + (avm_alu_divisor_hi * avm_alu_quotient_hi)) * + FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - + (avm_alu_a_lo + (avm_alu_a_hi * FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))))); + tmp *= scaling_factor; + std::get<74>(evals) += tmp; + } + // Contribution 75 + { + Avm_DECLARE_VIEWS(75); + + auto tmp = (avm_alu_op_div_std * (avm_alu_b_hi - ((avm_alu_ib - avm_alu_remainder) - FF(1)))); + tmp *= scaling_factor; + std::get<75>(evals) += tmp; + } + // Contribution 76 + { + Avm_DECLARE_VIEWS(76); + + auto tmp = ((avm_alu_cmp_rng_ctr_shift - FF(2)) * avm_alu_op_div_std); + tmp *= scaling_factor; + std::get<76>(evals) += tmp; + } + // Contribution 77 + { + Avm_DECLARE_VIEWS(77); + + auto tmp = (avm_alu_rng_chk_sel * avm_alu_op_div_std); + tmp *= scaling_factor; + std::get<77>(evals) += tmp; + } + // Contribution 78 + { + Avm_DECLARE_VIEWS(78); + + auto tmp = (avm_alu_op_div_std * ((((avm_alu_divisor_lo * avm_alu_quotient_lo) + + (avm_alu_partial_prod_lo * FF(uint256_t{ 0UL, 1UL, 0UL, 0UL }))) + + ((avm_alu_partial_prod_hi + (avm_alu_divisor_hi * avm_alu_quotient_hi)) * + FF(uint256_t{ 0UL, 0UL, 1UL, 0UL }))) - + (avm_alu_ia - avm_alu_remainder))); + tmp *= scaling_factor; + std::get<78>(evals) += tmp; + } + // Contribution 79 + { + Avm_DECLARE_VIEWS(79); + + auto tmp = (avm_alu_div_rng_chk_selector * (-avm_alu_div_rng_chk_selector + FF(1))); + tmp *= scaling_factor; + std::get<79>(evals) += tmp; + } + // Contribution 80 + { + Avm_DECLARE_VIEWS(80); + + auto tmp = ((avm_alu_div_rng_chk_selector * avm_alu_div_rng_chk_selector_shift) - avm_alu_op_div_std); + tmp *= scaling_factor; + std::get<80>(evals) += tmp; + } + // Contribution 81 + { + Avm_DECLARE_VIEWS(81); + + auto tmp = + 
(avm_alu_divisor_lo - (avm_alu_op_div_std * (((avm_alu_div_u16_r0 + (avm_alu_div_u16_r1 * FF(65536))) + + (avm_alu_div_u16_r2 * FF(4294967296UL))) + + (avm_alu_div_u16_r3 * FF(281474976710656UL))))); + tmp *= scaling_factor; + std::get<81>(evals) += tmp; + } + // Contribution 82 + { + Avm_DECLARE_VIEWS(82); + + auto tmp = + (avm_alu_divisor_hi - (avm_alu_op_div_std * (((avm_alu_div_u16_r4 + (avm_alu_div_u16_r5 * FF(65536))) + + (avm_alu_div_u16_r6 * FF(4294967296UL))) + + (avm_alu_div_u16_r7 * FF(281474976710656UL))))); + tmp *= scaling_factor; + std::get<82>(evals) += tmp; + } + // Contribution 83 + { + Avm_DECLARE_VIEWS(83); + + auto tmp = (avm_alu_quotient_lo - + (avm_alu_op_div_std * (((avm_alu_div_u16_r0_shift + (avm_alu_div_u16_r1_shift * FF(65536))) + + (avm_alu_div_u16_r2_shift * FF(4294967296UL))) + + (avm_alu_div_u16_r3_shift * FF(281474976710656UL))))); + tmp *= scaling_factor; + std::get<83>(evals) += tmp; + } + // Contribution 84 + { + Avm_DECLARE_VIEWS(84); + + auto tmp = (avm_alu_quotient_hi - + (avm_alu_op_div_std * (((avm_alu_div_u16_r4_shift + (avm_alu_div_u16_r5_shift * FF(65536))) + + (avm_alu_div_u16_r6_shift * FF(4294967296UL))) + + (avm_alu_div_u16_r7_shift * FF(281474976710656UL))))); + tmp *= scaling_factor; + std::get<84>(evals) += tmp; + } + // Contribution 85 + { + Avm_DECLARE_VIEWS(85); + + auto tmp = + (avm_alu_partial_prod_lo - + (avm_alu_op_div_std * + ((((avm_alu_u8_r0_shift + (avm_alu_u8_r1_shift * FF(256))) + (avm_alu_u16_r0_shift * FF(65536))) + + (avm_alu_u16_r1_shift * FF(4294967296UL))) + + (avm_alu_u16_r2_shift * FF(281474976710656UL))))); + tmp *= scaling_factor; + std::get<85>(evals) += tmp; + } + // Contribution 86 + { + Avm_DECLARE_VIEWS(86); + + auto tmp = (avm_alu_partial_prod_hi - + (avm_alu_op_div_std * (((avm_alu_u16_r3_shift + (avm_alu_u16_r4_shift * FF(65536))) + + (avm_alu_u16_r5_shift * FF(4294967296UL))) + + (avm_alu_u16_r6_shift * FF(281474976710656UL))))); + tmp *= scaling_factor; + std::get<86>(evals) += tmp; 
+ } } }; diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_main.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_main.hpp index 681210ee41d..27319832d00 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_main.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/avm_main.hpp @@ -131,7 +131,7 @@ template class avm_mainImpl { static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 5, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, + 3, 3, 5, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 4, 4, 3, 3, 3, 3, 3, 4, 3, 3, 3, 2, }; template @@ -451,7 +451,8 @@ template class avm_mainImpl { { Avm_DECLARE_VIEWS(38); - auto tmp = (avm_main_sel_op_fdiv * (((avm_main_ib * avm_main_inv) - FF(1)) + avm_main_op_err)); + auto tmp = ((avm_main_sel_op_fdiv + avm_main_sel_op_div) * + (((avm_main_ib * avm_main_inv) - FF(1)) + avm_main_op_err)); tmp *= scaling_factor; std::get<38>(evals) += tmp; } @@ -459,7 +460,7 @@ template class avm_mainImpl { { Avm_DECLARE_VIEWS(39); - auto tmp = ((avm_main_sel_op_fdiv * avm_main_op_err) * (-avm_main_inv + FF(1))); + auto tmp = (((avm_main_sel_op_fdiv + avm_main_sel_op_div) * avm_main_op_err) * (-avm_main_inv + FF(1))); tmp *= scaling_factor; std::get<39>(evals) += tmp; } @@ -483,7 +484,7 @@ template class avm_mainImpl { { Avm_DECLARE_VIEWS(42); - auto tmp = (avm_main_op_err * (avm_main_sel_op_fdiv - FF(1))); + auto tmp = (avm_main_op_err * ((avm_main_sel_op_fdiv + avm_main_sel_op_div) - FF(1))); tmp *= scaling_factor; std::get<42>(evals) += tmp; } @@ -676,15 +677,16 @@ template class avm_mainImpl { auto tmp = (avm_main_alu_sel - - (((((((((((avm_main_sel_op_add + avm_main_sel_op_sub) + avm_main_sel_op_mul) + avm_main_sel_op_div) + - avm_main_sel_op_not) + - avm_main_sel_op_eq) + - avm_main_sel_op_lt) + 
- avm_main_sel_op_lte) + - avm_main_sel_op_shr) + - avm_main_sel_op_shl) + - avm_main_sel_op_cast) * - (-avm_main_tag_err + FF(1)))); + ((((((((((((avm_main_sel_op_add + avm_main_sel_op_sub) + avm_main_sel_op_mul) + avm_main_sel_op_div) + + avm_main_sel_op_not) + + avm_main_sel_op_eq) + + avm_main_sel_op_lt) + + avm_main_sel_op_lte) + + avm_main_sel_op_shr) + + avm_main_sel_op_shl) + + avm_main_sel_op_cast) * + (-avm_main_tag_err + FF(1))) * + (-avm_main_op_err + FF(1)))); tmp *= scaling_factor; std::get<64>(evals) += tmp; } diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp index 5225e83adfa..8b595c5ab27 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp @@ -14,6 +14,17 @@ [[maybe_unused]] auto avm_alu_clk = View(new_term.avm_alu_clk); \ [[maybe_unused]] auto avm_alu_cmp_rng_ctr = View(new_term.avm_alu_cmp_rng_ctr); \ [[maybe_unused]] auto avm_alu_cmp_sel = View(new_term.avm_alu_cmp_sel); \ + [[maybe_unused]] auto avm_alu_div_rng_chk_selector = View(new_term.avm_alu_div_rng_chk_selector); \ + [[maybe_unused]] auto avm_alu_div_u16_r0 = View(new_term.avm_alu_div_u16_r0); \ + [[maybe_unused]] auto avm_alu_div_u16_r1 = View(new_term.avm_alu_div_u16_r1); \ + [[maybe_unused]] auto avm_alu_div_u16_r2 = View(new_term.avm_alu_div_u16_r2); \ + [[maybe_unused]] auto avm_alu_div_u16_r3 = View(new_term.avm_alu_div_u16_r3); \ + [[maybe_unused]] auto avm_alu_div_u16_r4 = View(new_term.avm_alu_div_u16_r4); \ + [[maybe_unused]] auto avm_alu_div_u16_r5 = View(new_term.avm_alu_div_u16_r5); \ + [[maybe_unused]] auto avm_alu_div_u16_r6 = View(new_term.avm_alu_div_u16_r6); \ + [[maybe_unused]] auto avm_alu_div_u16_r7 = View(new_term.avm_alu_div_u16_r7); \ + [[maybe_unused]] auto avm_alu_divisor_hi = View(new_term.avm_alu_divisor_hi); \ + [[maybe_unused]] 
auto avm_alu_divisor_lo = View(new_term.avm_alu_divisor_lo); \ [[maybe_unused]] auto avm_alu_ff_tag = View(new_term.avm_alu_ff_tag); \ [[maybe_unused]] auto avm_alu_ia = View(new_term.avm_alu_ia); \ [[maybe_unused]] auto avm_alu_ib = View(new_term.avm_alu_ib); \ @@ -23,6 +34,8 @@ [[maybe_unused]] auto avm_alu_op_cast = View(new_term.avm_alu_op_cast); \ [[maybe_unused]] auto avm_alu_op_cast_prev = View(new_term.avm_alu_op_cast_prev); \ [[maybe_unused]] auto avm_alu_op_div = View(new_term.avm_alu_op_div); \ + [[maybe_unused]] auto avm_alu_op_div_a_lt_b = View(new_term.avm_alu_op_div_a_lt_b); \ + [[maybe_unused]] auto avm_alu_op_div_std = View(new_term.avm_alu_op_div_std); \ [[maybe_unused]] auto avm_alu_op_eq = View(new_term.avm_alu_op_eq); \ [[maybe_unused]] auto avm_alu_op_eq_diff_inv = View(new_term.avm_alu_op_eq_diff_inv); \ [[maybe_unused]] auto avm_alu_op_lt = View(new_term.avm_alu_op_lt); \ @@ -38,6 +51,11 @@ [[maybe_unused]] auto avm_alu_p_sub_a_lo = View(new_term.avm_alu_p_sub_a_lo); \ [[maybe_unused]] auto avm_alu_p_sub_b_hi = View(new_term.avm_alu_p_sub_b_hi); \ [[maybe_unused]] auto avm_alu_p_sub_b_lo = View(new_term.avm_alu_p_sub_b_lo); \ + [[maybe_unused]] auto avm_alu_partial_prod_hi = View(new_term.avm_alu_partial_prod_hi); \ + [[maybe_unused]] auto avm_alu_partial_prod_lo = View(new_term.avm_alu_partial_prod_lo); \ + [[maybe_unused]] auto avm_alu_quotient_hi = View(new_term.avm_alu_quotient_hi); \ + [[maybe_unused]] auto avm_alu_quotient_lo = View(new_term.avm_alu_quotient_lo); \ + [[maybe_unused]] auto avm_alu_remainder = View(new_term.avm_alu_remainder); \ [[maybe_unused]] auto avm_alu_res_hi = View(new_term.avm_alu_res_hi); \ [[maybe_unused]] auto avm_alu_res_lo = View(new_term.avm_alu_res_lo); \ [[maybe_unused]] auto avm_alu_rng_chk_lookup_selector = View(new_term.avm_alu_rng_chk_lookup_selector); \ @@ -215,6 +233,14 @@ [[maybe_unused]] auto lookup_u16_12 = View(new_term.lookup_u16_12); \ [[maybe_unused]] auto lookup_u16_13 = 
View(new_term.lookup_u16_13); \ [[maybe_unused]] auto lookup_u16_14 = View(new_term.lookup_u16_14); \ + [[maybe_unused]] auto lookup_div_u16_0 = View(new_term.lookup_div_u16_0); \ + [[maybe_unused]] auto lookup_div_u16_1 = View(new_term.lookup_div_u16_1); \ + [[maybe_unused]] auto lookup_div_u16_2 = View(new_term.lookup_div_u16_2); \ + [[maybe_unused]] auto lookup_div_u16_3 = View(new_term.lookup_div_u16_3); \ + [[maybe_unused]] auto lookup_div_u16_4 = View(new_term.lookup_div_u16_4); \ + [[maybe_unused]] auto lookup_div_u16_5 = View(new_term.lookup_div_u16_5); \ + [[maybe_unused]] auto lookup_div_u16_6 = View(new_term.lookup_div_u16_6); \ + [[maybe_unused]] auto lookup_div_u16_7 = View(new_term.lookup_div_u16_7); \ [[maybe_unused]] auto lookup_byte_lengths_counts = View(new_term.lookup_byte_lengths_counts); \ [[maybe_unused]] auto lookup_byte_operations_counts = View(new_term.lookup_byte_operations_counts); \ [[maybe_unused]] auto incl_main_tag_err_counts = View(new_term.incl_main_tag_err_counts); \ @@ -240,6 +266,14 @@ [[maybe_unused]] auto lookup_u16_12_counts = View(new_term.lookup_u16_12_counts); \ [[maybe_unused]] auto lookup_u16_13_counts = View(new_term.lookup_u16_13_counts); \ [[maybe_unused]] auto lookup_u16_14_counts = View(new_term.lookup_u16_14_counts); \ + [[maybe_unused]] auto lookup_div_u16_0_counts = View(new_term.lookup_div_u16_0_counts); \ + [[maybe_unused]] auto lookup_div_u16_1_counts = View(new_term.lookup_div_u16_1_counts); \ + [[maybe_unused]] auto lookup_div_u16_2_counts = View(new_term.lookup_div_u16_2_counts); \ + [[maybe_unused]] auto lookup_div_u16_3_counts = View(new_term.lookup_div_u16_3_counts); \ + [[maybe_unused]] auto lookup_div_u16_4_counts = View(new_term.lookup_div_u16_4_counts); \ + [[maybe_unused]] auto lookup_div_u16_5_counts = View(new_term.lookup_div_u16_5_counts); \ + [[maybe_unused]] auto lookup_div_u16_6_counts = View(new_term.lookup_div_u16_6_counts); \ + [[maybe_unused]] auto lookup_div_u16_7_counts = 
View(new_term.lookup_div_u16_7_counts); \ [[maybe_unused]] auto avm_alu_a_hi_shift = View(new_term.avm_alu_a_hi_shift); \ [[maybe_unused]] auto avm_alu_a_lo_shift = View(new_term.avm_alu_a_lo_shift); \ [[maybe_unused]] auto avm_alu_alu_sel_shift = View(new_term.avm_alu_alu_sel_shift); \ @@ -247,9 +281,19 @@ [[maybe_unused]] auto avm_alu_b_lo_shift = View(new_term.avm_alu_b_lo_shift); \ [[maybe_unused]] auto avm_alu_cmp_rng_ctr_shift = View(new_term.avm_alu_cmp_rng_ctr_shift); \ [[maybe_unused]] auto avm_alu_cmp_sel_shift = View(new_term.avm_alu_cmp_sel_shift); \ + [[maybe_unused]] auto avm_alu_div_rng_chk_selector_shift = View(new_term.avm_alu_div_rng_chk_selector_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r0_shift = View(new_term.avm_alu_div_u16_r0_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r1_shift = View(new_term.avm_alu_div_u16_r1_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r2_shift = View(new_term.avm_alu_div_u16_r2_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r3_shift = View(new_term.avm_alu_div_u16_r3_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r4_shift = View(new_term.avm_alu_div_u16_r4_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r5_shift = View(new_term.avm_alu_div_u16_r5_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r6_shift = View(new_term.avm_alu_div_u16_r6_shift); \ + [[maybe_unused]] auto avm_alu_div_u16_r7_shift = View(new_term.avm_alu_div_u16_r7_shift); \ [[maybe_unused]] auto avm_alu_op_add_shift = View(new_term.avm_alu_op_add_shift); \ [[maybe_unused]] auto avm_alu_op_cast_prev_shift = View(new_term.avm_alu_op_cast_prev_shift); \ [[maybe_unused]] auto avm_alu_op_cast_shift = View(new_term.avm_alu_op_cast_shift); \ + [[maybe_unused]] auto avm_alu_op_div_shift = View(new_term.avm_alu_op_div_shift); \ [[maybe_unused]] auto avm_alu_op_mul_shift = View(new_term.avm_alu_op_mul_shift); \ [[maybe_unused]] auto avm_alu_op_shl_shift = View(new_term.avm_alu_op_shl_shift); \ [[maybe_unused]] auto avm_alu_op_shr_shift = 
View(new_term.avm_alu_op_shr_shift); \ diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_0.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_0.hpp new file mode 100644 index 00000000000..67284e42972 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_0.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_0_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. 
This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. 
+ * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_0, + in.lookup_div_u16_0_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r0, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_0, + in.lookup_div_u16_0_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r0, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_0_relation = GenericLookupRelation; +template using lookup_div_u16_0 = GenericLookup; + +} // 
namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_1.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_1.hpp new file mode 100644 index 00000000000..38c6fd614f8 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_1.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_1_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. 
This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. 
+ * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_1, + in.lookup_div_u16_1_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r1, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_1, + in.lookup_div_u16_1_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r1, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_1_relation = GenericLookupRelation; +template using lookup_div_u16_1 = GenericLookup; + +} // 
namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_2.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_2.hpp new file mode 100644 index 00000000000..36c347a5ba9 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_2.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_2_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. 
This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. 
+ * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_2, + in.lookup_div_u16_2_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r2, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_2, + in.lookup_div_u16_2_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r2, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_2_relation = GenericLookupRelation; +template using lookup_div_u16_2 = GenericLookup; + +} // 
namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_3.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_3.hpp new file mode 100644 index 00000000000..e167bae69bb --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_3.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_3_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. 
This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. 
+ * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_3, + in.lookup_div_u16_3_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r3, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_3, + in.lookup_div_u16_3_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r3, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_3_relation = GenericLookupRelation; +template using lookup_div_u16_3 = GenericLookup; + +} // 
namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_4.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_4.hpp new file mode 100644 index 00000000000..6248bc098d6 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_4.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_4_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. 
This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. 
+ * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_4, + in.lookup_div_u16_4_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r4, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_4, + in.lookup_div_u16_4_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r4, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_4_relation = GenericLookupRelation; +template using lookup_div_u16_4 = GenericLookup; + +} // 
namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_5.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_5.hpp new file mode 100644 index 00000000000..052eafcaa3b --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_5.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_5_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. 
This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. 
+ * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_5, + in.lookup_div_u16_5_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r5, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_5, + in.lookup_div_u16_5_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r5, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_5_relation = GenericLookupRelation; +template using lookup_div_u16_5 = GenericLookup; + +} // 
namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_6.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_6.hpp new file mode 100644 index 00000000000..c52d71bdb99 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_6.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_6_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. 
This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. 
+ * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_6, + in.lookup_div_u16_6_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r6, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_6, + in.lookup_div_u16_6_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r6, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_6_relation = GenericLookupRelation; +template using lookup_div_u16_6 = GenericLookup; + +} // 
namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_7.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_7.hpp new file mode 100644 index 00000000000..dde1e6f54b4 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/lookup_div_u16_7.hpp @@ -0,0 +1,166 @@ + + +#pragma once + +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" + +#include +#include + +namespace bb { + +/** + * @brief This class contains an example of how to set LookupSettings classes used by the + * GenericLookupRelationImpl class to specify a scaled lookup + * + * @details To create your own lookup: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your lookup + * 3) Update "DECLARE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_LOOKUP_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to + * include the new settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class lookup_div_u16_7_lookup_settings { + public: + /** + * @brief The number of read terms (how many lookups we perform) in each row + * + */ + static constexpr size_t READ_TERMS = 1; + /** + * @brief The number of write terms (how many additions to the lookup table we make) in each row + * + */ + static constexpr size_t WRITE_TERMS = 1; + + /** + * @brief The type of READ_TERM used for each read index (basic and scaled) + * + */ + static constexpr size_t READ_TERM_TYPES[READ_TERMS] = { 0 }; + + /** + * @brief They type of WRITE_TERM used for each write index + * + */ + static constexpr size_t WRITE_TERM_TYPES[WRITE_TERMS] = { 0 }; + + /** + * @brief How many values represent a single lookup object. 
This value is used by the automatic read term + * implementation in the relation in case the lookup is a basic or scaled tuple and in the write term if it's a + * basic tuple + * + */ + static constexpr size_t LOOKUP_TUPLE_SIZE = 1; + + /** + * @brief The polynomial degree of the relation telling us if the inverse polynomial value needs to be computed + * + */ + static constexpr size_t INVERSE_EXISTS_POLYNOMIAL_DEGREE = 4; + + /** + * @brief The degree of the read term if implemented arbitrarily. This value is not used by basic and scaled read + * terms, but will cause compilation error if not defined + * + */ + static constexpr size_t READ_TERM_DEGREE = 0; + + /** + * @brief The degree of the write term if implemented arbitrarily. This value is not used by the basic write + * term, but will cause compilation error if not defined + * + */ + + static constexpr size_t WRITE_TERM_DEGREE = 0; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial exists at this index. + * Otherwise the value needs to be set to zero. 
+ * + * @details If this is true then the lookup takes place in this row + * + */ + + template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.avm_alu_div_rng_chk_selector == 1 || in.avm_main_sel_rng_16 == 1); + } + + /** + * @brief Subprocedure for computing the value deciding if the inverse polynomial value needs to be checked in this + * row + * + * @tparam Accumulator Type specified by the lookup relation + * @tparam AllEntities Values/Univariates of all entities row + * @param in Value/Univariate of all entities at row/edge + * @return Accumulator + */ + + template + static inline auto compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + const auto is_operation = View(in.avm_alu_div_rng_chk_selector); + const auto is_table_entry = View(in.avm_main_sel_rng_16); + return (is_operation + is_table_entry - is_operation * is_table_entry); + } + + /** + * @brief Get all the entities for the lookup when need to update them + * + * @details The generic structure of this tuple is described in ./generic_lookup_relation.hpp . 
The following is + description for the current case: + The entities are returned as a tuple of references in the following order (this is for ): + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that specifies how many times the lookup table entry at this row has been looked up + * - READ_TERMS entities/polynomials that enable individual lookup operations + * - The entity/polynomial that enables adding an entry to the lookup table in this row + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the basic tuple being looked up as the first read term + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the previous accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the shifts in the second read term (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing the current accumulators in the second read term + (scaled tuple) + * - LOOKUP_TUPLE_SIZE entities/polynomials representing basic tuples added to the table + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_7, + in.lookup_div_u16_7_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r7, + in.avm_main_clk); + } + + /** + * @brief Get all the entities for the lookup when we only need to read them + * @details Same as in get_const_entities, but nonconst + * + * @return All the entities needed for the lookup + */ + + template static inline auto get_nonconst_entities(AllEntities& in) + { + + return std::forward_as_tuple(in.lookup_div_u16_7, + in.lookup_div_u16_7_counts, + in.avm_alu_div_rng_chk_selector, + in.avm_main_sel_rng_16, + in.avm_alu_div_u16_r7, + in.avm_main_clk); + } +}; + +template using lookup_div_u16_7_relation = GenericLookupRelation; +template using lookup_div_u16_7 = GenericLookup; + +} // 
namespace bb diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.cpp index 497d4143f44..9a055c79aba 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.cpp @@ -1,4 +1,5 @@ #include "avm_alu_trace.hpp" +#include "barretenberg/numeric/uint256/uint256.hpp" namespace bb::avm_trace { @@ -50,7 +51,7 @@ bool AvmAluTraceBuilder::is_range_check_required() const bool AvmAluTraceBuilder::is_alu_row_enabled(AvmAluTraceBuilder::AluTraceEntry const& r) { return (r.alu_op_add || r.alu_op_sub || r.alu_op_mul || r.alu_op_eq || r.alu_op_not || r.alu_op_lt || - r.alu_op_lte || r.alu_op_shr || r.alu_op_shl || r.alu_op_cast); + r.alu_op_lte || r.alu_op_shr || r.alu_op_shl || r.alu_op_cast || r.alu_op_div); } /** @@ -468,11 +469,11 @@ std::tuple> AvmAluTraceBuilder::to_al } /** - * @brief This is a helper function that is used to generate the range check entries for the comparison operation - * (LT/LTE opcodes). This additionally increments the counts for the corresponding range lookups entries. + * @brief This is a helper function that is used to generate the range check entries for operations that require + * multi-row range checks This additionally increments the counts for the corresponding range lookups entries. * @param row The initial row where the comparison operation was performed * @param hi_lo_limbs The vector of 128-bit limbs hi and lo pairs of limbs that will be range checked. - * @return A vector of AluTraceEntry rows for the range checks for the comparison operation. + * @return A vector of AluTraceEntry rows for the range checks for the operation. 
 */
 std::vector AvmAluTraceBuilder::cmp_range_check_helper(
 AvmAluTraceBuilder::AluTraceEntry row, std::vector hi_lo_limbs)
@@ -544,7 +545,7 @@ std::tuple gt_witness(uint256_t const& a, uint256_t
 // where q = 1 if a > b and q = 0 if a <= b
 std::tuple gt_or_lte_witness(uint256_t const& a, uint256_t const& b)
 {
-    uint256_t two_pow_128 = uint256_t(1) << uint256_t(128);
+    uint256_t two_pow_128 = uint256_t(1) << uint256_t(128);
     auto [a_lo, a_hi] = decompose(a, 128);
     auto [b_lo, b_hi] = decompose(b, 128);
     bool isGT = a > b;
@@ -553,7 +554,7 @@ std::tuple gt_or_lte_witness(uint256_t const& a, uin
 }
     bool borrow = b_lo < a_lo;
     auto borrow_u256 = uint256_t(static_cast(borrow));
-    uint256_t r_lo = b_lo - a_lo + borrow_u256 * two_pow_128;
+    uint256_t r_lo = b_lo - a_lo + borrow_u256 * two_pow_128;
     uint256_t r_hi = b_hi - a_hi - borrow_u256;
     return std::make_tuple(r_lo, r_hi, borrow);
 }
@@ -963,4 +964,102 @@ FF AvmAluTraceBuilder::op_shl(FF const& a, FF const& b, AvmMemoryTag in_tag, uin
     });
     return c;
 }
+FF AvmAluTraceBuilder::op_div(FF const& a, FF const& b, AvmMemoryTag in_tag, uint32_t clk)
+{
+    uint256_t a_u256{ a };
+    uint256_t b_u256{ b };
+    uint256_t c_u256 = a_u256 / b_u256;
+    uint256_t rem_u256 = a_u256 % b_u256;
+
+    // If dividing by zero, don't add any rows in the ALU, the error will be handled in the main trace
+    if (b_u256 == 0) {
+        return 0;
+    }
+
+    if (a_u256 < b_u256) {
+        // If a < b, the result is trivially 0
+        uint256_t rng_chk_lo = b_u256 - a_u256 - 1;
+        auto [u8_r0, u8_r1, u16_reg] = to_alu_slice_registers(rng_chk_lo);
+        alu_trace.push_back(AvmAluTraceBuilder::AluTraceEntry({
+            .alu_clk = clk,
+            .alu_op_div = true,
+            .alu_u8_tag = in_tag == AvmMemoryTag::U8,
+            .alu_u16_tag = in_tag == AvmMemoryTag::U16,
+            .alu_u32_tag = in_tag == AvmMemoryTag::U32,
+            .alu_u64_tag = in_tag == AvmMemoryTag::U64,
+            .alu_u128_tag = in_tag == AvmMemoryTag::U128,
+            .alu_ia = a,
+            .alu_ib = b,
+            .alu_ic = 0,
+            .alu_u8_r0 = u8_r0,
+            .alu_u8_r1 = u8_r1,
+            .alu_u16_reg = u16_reg,
+
.hi_lo_limbs = { rng_chk_lo, 0, 0, 0, 0, 0 }, + .remainder = a, + + })); + return 0; + } + // Decompose a and primality check that b*c < p when a is a 256-bit integer + auto [a_lo, a_hi] = decompose(b_u256 * c_u256, 128); + auto [p_sub_a_lo, p_sub_a_hi, p_a_borrow] = gt_witness(FF::modulus, b_u256 * c_u256); + // Decompose the divisor + auto [divisor_lo, divisor_hi] = decompose(b_u256, 64); + // Decompose the quotient + auto [quotient_lo, quotient_hi] = decompose(c_u256, 64); + uint256_t partial_prod = divisor_lo * quotient_hi + divisor_hi * quotient_lo; + // Decompose the partial product + auto [partial_prod_lo, partial_prod_hi] = decompose(partial_prod, 64); + + FF b_hi = b_u256 - rem_u256 - 1; + + // 64 bit range checks for the divisor and quotient limbs + // Spread over two rows + std::array div_u64_rng_chk; + std::array div_u64_rng_chk_shifted; + for (size_t i = 0; i < 4; i++) { + div_u64_rng_chk.at(i) = uint16_t(divisor_lo >> (16 * i)); + div_u64_rng_chk.at(i + 4) = uint16_t(divisor_hi >> (16 * i)); + div_u64_range_chk_counters[i][uint16_t(divisor_lo >> (16 * i))]++; + div_u64_range_chk_counters[i + 4][uint16_t(divisor_hi >> (16 * i))]++; + + div_u64_rng_chk_shifted.at(i) = uint16_t(quotient_lo >> (16 * i)); + div_u64_rng_chk_shifted.at(i + 4) = uint16_t(quotient_hi >> (16 * i)); + div_u64_range_chk_counters[i][uint16_t(quotient_lo >> (16 * i))]++; + div_u64_range_chk_counters[i + 4][uint16_t(quotient_hi >> (16 * i))]++; + } + + // Each hi and lo limb is range checked over 128 bits + // Load the range check values into the ALU registers + auto hi_lo_limbs = std::vector{ a_lo, a_hi, partial_prod, b_hi, p_sub_a_lo, p_sub_a_hi }; + AvmAluTraceBuilder::AluTraceEntry row{ + .alu_clk = clk, + .alu_op_div = true, + .alu_u8_tag = in_tag == AvmMemoryTag::U8, + .alu_u16_tag = in_tag == AvmMemoryTag::U16, + .alu_u32_tag = in_tag == AvmMemoryTag::U32, + .alu_u64_tag = in_tag == AvmMemoryTag::U64, + .alu_u128_tag = in_tag == AvmMemoryTag::U128, + .alu_ia = a, + .alu_ib = 
b, + .alu_ic = FF{ c_u256 }, + .remainder = rem_u256, + .divisor_lo = divisor_lo, + .divisor_hi = divisor_hi, + .quotient_lo = quotient_lo, + .quotient_hi = quotient_hi, + .partial_prod_lo = partial_prod_lo, + .partial_prod_hi = partial_prod_hi, + .div_u64_range_chk_sel = true, + .div_u64_range_chk = div_u64_rng_chk, + + }; + // We perform the range checks here + std::vector rows = cmp_range_check_helper(row, hi_lo_limbs); + // Add the range checks for the quotient limbs in the row after the division operation + rows.at(1).div_u64_range_chk = div_u64_rng_chk_shifted; + rows.at(1).div_u64_range_chk_sel = true; + alu_trace.insert(alu_trace.end(), rows.begin(), rows.end()); + return c_u256; +} } // namespace bb::avm_trace diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.hpp index e01e8e53b4b..42d2a550fea 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_alu_trace.hpp @@ -21,6 +21,7 @@ class AvmAluTraceBuilder { bool alu_op_cast_prev = false; bool alu_op_shr = false; bool alu_op_shl = false; + bool alu_op_div = false; bool alu_ff_tag = false; bool alu_u8_tag = false; @@ -55,11 +56,25 @@ class AvmAluTraceBuilder { uint8_t mem_tag_bits = 0; uint8_t mem_tag_sub_shift = 0; bool shift_lt_bit_len = true; + FF quot_div_rem_lo{}; + FF quot_div_rem_hi{}; + + // Div Operations + FF remainder{}; + FF divisor_lo{}; + FF divisor_hi{}; + FF quotient_lo{}; + FF quotient_hi{}; + FF partial_prod_lo{}; + FF partial_prod_hi{}; + bool div_u64_range_chk_sel = false; + std::array div_u64_range_chk{}; }; std::array, 2> u8_range_chk_counters; std::array, 2> u8_pow_2_counters; std::array, 15> u16_range_chk_counters; + std::array, 8> div_u64_range_chk_counters; AvmAluTraceBuilder(); void reset(); @@ -75,6 +90,7 @@ class AvmAluTraceBuilder { FF op_cast(FF const& a, AvmMemoryTag in_tag, uint32_t clk); FF op_shr(FF const& 
a, FF const& b, AvmMemoryTag in_tag, uint32_t clk); FF op_shl(FF const& a, FF const& b, AvmMemoryTag in_tag, uint32_t clk); + FF op_div(FF const& a, FF const& b, AvmMemoryTag in_tag, uint32_t clk); bool is_range_check_required() const; static bool is_alu_row_enabled(AvmAluTraceBuilder::AluTraceEntry const& r); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp index 7cf6154fac8..c472af776a3 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp @@ -110,6 +110,13 @@ std::vector Execution::gen_trace(std::vector const& instructio std::get(inst.operands.at(2)), std::get(inst.operands.at(3))); break; + case OpCode::DIV: + trace_builder.op_div(std::get(inst.operands.at(0)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(4)), + std::get(inst.operands.at(1))); + break; // Compute - Comparators case OpCode::EQ: trace_builder.op_eq(std::get(inst.operands.at(0)), diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp index 39f54fe80b8..6ec6782aa6a 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp @@ -1118,6 +1118,84 @@ void AvmTraceBuilder::op_cast(uint8_t indirect, uint32_t a_offset, uint32_t dst_ .avm_main_w_in_tag = FF(static_cast(dst_tag)), }); } +/** + * @brief Integer division with direct or indirect memory access. + * + * @param indirect A byte encoding information about indirect/direct memory access. + * @param a_offset An index in memory pointing to the first operand of the division. + * @param b_offset An index in memory pointing to the second operand of the division. + * @param dst_offset An index in memory pointing to the output of the division. 
+ * @param in_tag The instruction memory tag of the operands. + */ +void AvmTraceBuilder::op_div( + uint8_t indirect, uint32_t a_offset, uint32_t b_offset, uint32_t dst_offset, AvmMemoryTag in_tag) +{ + auto clk = static_cast(main_trace.size()); + + auto const res = resolve_ind_three(clk, indirect, a_offset, b_offset, dst_offset); + bool tag_match = res.tag_match; + + // Reading from memory and loading into ia resp. ib. + auto read_a = + mem_trace_builder.read_and_load_from_memory(clk, IntermRegister::IA, res.direct_a_offset, in_tag, in_tag); + auto read_b = + mem_trace_builder.read_and_load_from_memory(clk, IntermRegister::IB, res.direct_b_offset, in_tag, in_tag); + tag_match = read_a.tag_match && read_b.tag_match; + + // a / b = c + FF a = read_a.val; + FF b = read_b.val; + + // In case of a memory tag error, we do not perform the computation. + // Therefore, we do not create any entry in ALU table and store the value 0 as + // output (c) in memory. + FF c; + FF inv; + FF error; + + if (!b.is_zero()) { + // If b is not zero, we prove it is not by providing its inverse as well + inv = b.invert(); + c = tag_match ? alu_trace_builder.op_div(a, b, in_tag, clk) : FF(0); + error = 0; + } else { + inv = 1; + c = 0; + error = 1; + } + + // Write into memory value c from intermediate register ic. + mem_trace_builder.write_into_memory(clk, IntermRegister::IC, res.direct_c_offset, c, in_tag, in_tag); + + main_trace.push_back(Row{ + .avm_main_clk = clk, + .avm_main_alu_in_tag = FF(static_cast(in_tag)), + .avm_main_ia = a, + .avm_main_ib = b, + .avm_main_ic = c, + .avm_main_ind_a = res.indirect_flag_a ? FF(a_offset) : FF(0), + .avm_main_ind_b = res.indirect_flag_b ? FF(b_offset) : FF(0), + .avm_main_ind_c = res.indirect_flag_c ? 
FF(dst_offset) : FF(0), + .avm_main_ind_op_a = FF(static_cast(res.indirect_flag_a)), + .avm_main_ind_op_b = FF(static_cast(res.indirect_flag_b)), + .avm_main_ind_op_c = FF(static_cast(res.indirect_flag_c)), + .avm_main_internal_return_ptr = FF(internal_return_ptr), + .avm_main_inv = tag_match ? inv : FF(1), + .avm_main_mem_idx_a = FF(res.direct_a_offset), + .avm_main_mem_idx_b = FF(res.direct_b_offset), + .avm_main_mem_idx_c = FF(res.direct_c_offset), + .avm_main_mem_op_a = FF(1), + .avm_main_mem_op_b = FF(1), + .avm_main_mem_op_c = FF(1), + .avm_main_op_err = tag_match ? error : FF(1), + .avm_main_pc = FF(pc++), + .avm_main_r_in_tag = FF(static_cast(in_tag)), + .avm_main_rwc = FF(1), + .avm_main_sel_op_div = FF(1), + .avm_main_tag_err = FF(static_cast(!tag_match)), + .avm_main_w_in_tag = FF(static_cast(in_tag)), + }); +} /** * @brief CALLDATACOPY opcode with direct or indirect memory access, i.e., @@ -1657,6 +1735,7 @@ std::vector AvmTraceBuilder::finalize() dest.avm_alu_rng_chk_sel = FF(static_cast(src.rng_chk_sel)); dest.avm_alu_op_shr = FF(static_cast(src.alu_op_shr)); dest.avm_alu_op_shl = FF(static_cast(src.alu_op_shl)); + dest.avm_alu_op_div = FF(static_cast(src.alu_op_div)); dest.avm_alu_ff_tag = FF(static_cast(src.alu_ff_tag)); dest.avm_alu_u8_tag = FF(static_cast(src.alu_u8_tag)); @@ -1694,6 +1773,15 @@ std::vector AvmTraceBuilder::finalize() dest.avm_alu_u16_r13 = FF(src.alu_u16_reg.at(13)); dest.avm_alu_u16_r14 = FF(src.alu_u16_reg.at(14)); + dest.avm_alu_div_rng_chk_selector = FF(static_cast(src.div_u64_range_chk_sel)); + dest.avm_alu_div_u16_r0 = FF(src.div_u64_range_chk.at(0)); + dest.avm_alu_div_u16_r1 = FF(src.div_u64_range_chk.at(1)); + dest.avm_alu_div_u16_r2 = FF(src.div_u64_range_chk.at(2)); + dest.avm_alu_div_u16_r3 = FF(src.div_u64_range_chk.at(3)); + dest.avm_alu_div_u16_r4 = FF(src.div_u64_range_chk.at(4)); + dest.avm_alu_div_u16_r5 = FF(src.div_u64_range_chk.at(5)); + dest.avm_alu_div_u16_r6 = FF(src.div_u64_range_chk.at(6)); + 
dest.avm_alu_div_u16_r7 = FF(src.div_u64_range_chk.at(7)); dest.avm_alu_op_eq_diff_inv = FF(src.alu_op_eq_diff_inv); // Not all rows in ALU are enabled with a selector. For instance, @@ -1716,10 +1804,27 @@ std::vector AvmTraceBuilder::finalize() dest.avm_alu_p_a_borrow = FF(static_cast(src.p_a_borrow)); dest.avm_alu_p_b_borrow = FF(static_cast(src.p_b_borrow)); dest.avm_alu_borrow = FF(static_cast(src.borrow)); - dest.avm_alu_rng_chk_sel = FF(static_cast(src.rng_chk_sel)); dest.avm_alu_cmp_rng_ctr = FF(static_cast(src.cmp_rng_ctr)); dest.avm_alu_rng_chk_lookup_selector = FF(1); } + if (dest.avm_alu_op_div == FF(1)) { + dest.avm_alu_op_div_std = uint256_t(src.alu_ia) >= uint256_t(src.alu_ib); + dest.avm_alu_op_div_a_lt_b = uint256_t(src.alu_ia) < uint256_t(src.alu_ib); + dest.avm_alu_rng_chk_lookup_selector = FF(1); + dest.avm_alu_a_lo = FF(src.hi_lo_limbs.at(0)); + dest.avm_alu_a_hi = FF(src.hi_lo_limbs.at(1)); + dest.avm_alu_b_lo = FF(src.hi_lo_limbs.at(2)); + dest.avm_alu_b_hi = FF(src.hi_lo_limbs.at(3)); + dest.avm_alu_p_sub_a_lo = FF(src.hi_lo_limbs.at(4)); + dest.avm_alu_p_sub_a_hi = FF(src.hi_lo_limbs.at(5)); + dest.avm_alu_remainder = src.remainder; + dest.avm_alu_divisor_lo = src.divisor_lo; + dest.avm_alu_divisor_hi = src.divisor_hi; + dest.avm_alu_quotient_lo = src.quotient_lo; + dest.avm_alu_quotient_hi = src.quotient_hi; + dest.avm_alu_partial_prod_lo = src.partial_prod_lo; + dest.avm_alu_partial_prod_hi = src.partial_prod_hi; + } if (dest.avm_alu_op_add == FF(1) || dest.avm_alu_op_sub == FF(1) || dest.avm_alu_op_mul == FF(1)) { dest.avm_alu_rng_chk_lookup_selector = FF(1); @@ -1763,8 +1868,8 @@ std::vector AvmTraceBuilder::finalize() if ((r.avm_main_sel_op_add == FF(1) || r.avm_main_sel_op_sub == FF(1) || r.avm_main_sel_op_mul == FF(1) || r.avm_main_sel_op_eq == FF(1) || r.avm_main_sel_op_not == FF(1) || r.avm_main_sel_op_lt == FF(1) || r.avm_main_sel_op_lte == FF(1) || r.avm_main_sel_op_cast == FF(1) || r.avm_main_sel_op_shr == FF(1) || - 
r.avm_main_sel_op_shl == FF(1)) && - r.avm_main_tag_err == FF(0)) { + r.avm_main_sel_op_shl == FF(1) || r.avm_main_sel_op_div == FF(1)) && + r.avm_main_tag_err == FF(0) && r.avm_main_op_err == FF(0)) { r.avm_main_alu_sel = FF(1); } @@ -1800,6 +1905,15 @@ std::vector AvmTraceBuilder::finalize() r.lookup_mem_rng_chk_hi_counts = mem_rng_check_hi_counts[static_cast(i)]; r.lookup_mem_rng_chk_lo_counts = mem_rng_check_lo_counts[static_cast(i)]; + r.lookup_div_u16_0_counts = alu_trace_builder.div_u64_range_chk_counters[0][static_cast(i)]; + r.lookup_div_u16_1_counts = alu_trace_builder.div_u64_range_chk_counters[1][static_cast(i)]; + r.lookup_div_u16_2_counts = alu_trace_builder.div_u64_range_chk_counters[2][static_cast(i)]; + r.lookup_div_u16_3_counts = alu_trace_builder.div_u64_range_chk_counters[3][static_cast(i)]; + r.lookup_div_u16_4_counts = alu_trace_builder.div_u64_range_chk_counters[4][static_cast(i)]; + r.lookup_div_u16_5_counts = alu_trace_builder.div_u64_range_chk_counters[5][static_cast(i)]; + r.lookup_div_u16_6_counts = alu_trace_builder.div_u64_range_chk_counters[6][static_cast(i)]; + r.lookup_div_u16_7_counts = alu_trace_builder.div_u64_range_chk_counters[7][static_cast(i)]; + r.avm_main_clk = FF(static_cast(i)); r.avm_main_sel_rng_16 = FF(1); } diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp index d6ba959df17..88b3ced5578 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp @@ -82,6 +82,9 @@ class AvmTraceBuilder { // store the result in address given by dst_offset. void op_cast(uint8_t indirect, uint32_t a_offset, uint32_t dst_offset, AvmMemoryTag dst_tag); + // Integer Division with direct or indirect memory access. + void op_div(uint8_t indirect, uint32_t a_offset, uint32_t b_offset, uint32_t dst_offset, AvmMemoryTag in_tag); + // Jump to a given program counter. 
void jump(uint32_t jmp_dest); diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp index 8e46d8e00f4..f8b40095206 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp @@ -19,6 +19,14 @@ #include "barretenberg/relations/generated/avm/incl_mem_tag_err.hpp" #include "barretenberg/relations/generated/avm/lookup_byte_lengths.hpp" #include "barretenberg/relations/generated/avm/lookup_byte_operations.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_0.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_1.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_2.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_3.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_4.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_5.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_6.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_7.hpp" #include "barretenberg/relations/generated/avm/lookup_mem_rng_chk_hi.hpp" #include "barretenberg/relations/generated/avm/lookup_mem_rng_chk_lo.hpp" #include "barretenberg/relations/generated/avm/lookup_pow_2_0.hpp" @@ -67,6 +75,17 @@ template struct AvmFullRow { FF avm_alu_clk{}; FF avm_alu_cmp_rng_ctr{}; FF avm_alu_cmp_sel{}; + FF avm_alu_div_rng_chk_selector{}; + FF avm_alu_div_u16_r0{}; + FF avm_alu_div_u16_r1{}; + FF avm_alu_div_u16_r2{}; + FF avm_alu_div_u16_r3{}; + FF avm_alu_div_u16_r4{}; + FF avm_alu_div_u16_r5{}; + FF avm_alu_div_u16_r6{}; + FF avm_alu_div_u16_r7{}; + FF avm_alu_divisor_hi{}; + FF avm_alu_divisor_lo{}; FF avm_alu_ff_tag{}; FF avm_alu_ia{}; FF avm_alu_ib{}; @@ -76,6 +95,8 @@ template struct AvmFullRow { FF avm_alu_op_cast{}; FF avm_alu_op_cast_prev{}; FF avm_alu_op_div{}; + FF 
avm_alu_op_div_a_lt_b{}; + FF avm_alu_op_div_std{}; FF avm_alu_op_eq{}; FF avm_alu_op_eq_diff_inv{}; FF avm_alu_op_lt{}; @@ -91,6 +112,11 @@ template struct AvmFullRow { FF avm_alu_p_sub_a_lo{}; FF avm_alu_p_sub_b_hi{}; FF avm_alu_p_sub_b_lo{}; + FF avm_alu_partial_prod_hi{}; + FF avm_alu_partial_prod_lo{}; + FF avm_alu_quotient_hi{}; + FF avm_alu_quotient_lo{}; + FF avm_alu_remainder{}; FF avm_alu_res_hi{}; FF avm_alu_res_lo{}; FF avm_alu_rng_chk_lookup_selector{}; @@ -268,6 +294,14 @@ template struct AvmFullRow { FF lookup_u16_12{}; FF lookup_u16_13{}; FF lookup_u16_14{}; + FF lookup_div_u16_0{}; + FF lookup_div_u16_1{}; + FF lookup_div_u16_2{}; + FF lookup_div_u16_3{}; + FF lookup_div_u16_4{}; + FF lookup_div_u16_5{}; + FF lookup_div_u16_6{}; + FF lookup_div_u16_7{}; FF lookup_byte_lengths_counts{}; FF lookup_byte_operations_counts{}; FF incl_main_tag_err_counts{}; @@ -293,6 +327,14 @@ template struct AvmFullRow { FF lookup_u16_12_counts{}; FF lookup_u16_13_counts{}; FF lookup_u16_14_counts{}; + FF lookup_div_u16_0_counts{}; + FF lookup_div_u16_1_counts{}; + FF lookup_div_u16_2_counts{}; + FF lookup_div_u16_3_counts{}; + FF lookup_div_u16_4_counts{}; + FF lookup_div_u16_5_counts{}; + FF lookup_div_u16_6_counts{}; + FF lookup_div_u16_7_counts{}; FF avm_alu_a_hi_shift{}; FF avm_alu_a_lo_shift{}; FF avm_alu_alu_sel_shift{}; @@ -300,9 +342,19 @@ template struct AvmFullRow { FF avm_alu_b_lo_shift{}; FF avm_alu_cmp_rng_ctr_shift{}; FF avm_alu_cmp_sel_shift{}; + FF avm_alu_div_rng_chk_selector_shift{}; + FF avm_alu_div_u16_r0_shift{}; + FF avm_alu_div_u16_r1_shift{}; + FF avm_alu_div_u16_r2_shift{}; + FF avm_alu_div_u16_r3_shift{}; + FF avm_alu_div_u16_r4_shift{}; + FF avm_alu_div_u16_r5_shift{}; + FF avm_alu_div_u16_r6_shift{}; + FF avm_alu_div_u16_r7_shift{}; FF avm_alu_op_add_shift{}; FF avm_alu_op_cast_prev_shift{}; FF avm_alu_op_cast_shift{}; + FF avm_alu_op_div_shift{}; FF avm_alu_op_mul_shift{}; FF avm_alu_op_shl_shift{}; FF avm_alu_op_shr_shift{}; @@ -347,8 
+399,8 @@ class AvmCircuitBuilder { using Polynomial = Flavor::Polynomial; using ProverPolynomials = Flavor::ProverPolynomials; - static constexpr size_t num_fixed_columns = 280; - static constexpr size_t num_polys = 238; + static constexpr size_t num_fixed_columns = 324; + static constexpr size_t num_polys = 272; std::vector rows; void set_trace(std::vector&& trace) { rows = std::move(trace); } @@ -376,6 +428,17 @@ class AvmCircuitBuilder { polys.avm_alu_clk[i] = rows[i].avm_alu_clk; polys.avm_alu_cmp_rng_ctr[i] = rows[i].avm_alu_cmp_rng_ctr; polys.avm_alu_cmp_sel[i] = rows[i].avm_alu_cmp_sel; + polys.avm_alu_div_rng_chk_selector[i] = rows[i].avm_alu_div_rng_chk_selector; + polys.avm_alu_div_u16_r0[i] = rows[i].avm_alu_div_u16_r0; + polys.avm_alu_div_u16_r1[i] = rows[i].avm_alu_div_u16_r1; + polys.avm_alu_div_u16_r2[i] = rows[i].avm_alu_div_u16_r2; + polys.avm_alu_div_u16_r3[i] = rows[i].avm_alu_div_u16_r3; + polys.avm_alu_div_u16_r4[i] = rows[i].avm_alu_div_u16_r4; + polys.avm_alu_div_u16_r5[i] = rows[i].avm_alu_div_u16_r5; + polys.avm_alu_div_u16_r6[i] = rows[i].avm_alu_div_u16_r6; + polys.avm_alu_div_u16_r7[i] = rows[i].avm_alu_div_u16_r7; + polys.avm_alu_divisor_hi[i] = rows[i].avm_alu_divisor_hi; + polys.avm_alu_divisor_lo[i] = rows[i].avm_alu_divisor_lo; polys.avm_alu_ff_tag[i] = rows[i].avm_alu_ff_tag; polys.avm_alu_ia[i] = rows[i].avm_alu_ia; polys.avm_alu_ib[i] = rows[i].avm_alu_ib; @@ -385,6 +448,8 @@ class AvmCircuitBuilder { polys.avm_alu_op_cast[i] = rows[i].avm_alu_op_cast; polys.avm_alu_op_cast_prev[i] = rows[i].avm_alu_op_cast_prev; polys.avm_alu_op_div[i] = rows[i].avm_alu_op_div; + polys.avm_alu_op_div_a_lt_b[i] = rows[i].avm_alu_op_div_a_lt_b; + polys.avm_alu_op_div_std[i] = rows[i].avm_alu_op_div_std; polys.avm_alu_op_eq[i] = rows[i].avm_alu_op_eq; polys.avm_alu_op_eq_diff_inv[i] = rows[i].avm_alu_op_eq_diff_inv; polys.avm_alu_op_lt[i] = rows[i].avm_alu_op_lt; @@ -400,6 +465,11 @@ class AvmCircuitBuilder { polys.avm_alu_p_sub_a_lo[i] = 
rows[i].avm_alu_p_sub_a_lo; polys.avm_alu_p_sub_b_hi[i] = rows[i].avm_alu_p_sub_b_hi; polys.avm_alu_p_sub_b_lo[i] = rows[i].avm_alu_p_sub_b_lo; + polys.avm_alu_partial_prod_hi[i] = rows[i].avm_alu_partial_prod_hi; + polys.avm_alu_partial_prod_lo[i] = rows[i].avm_alu_partial_prod_lo; + polys.avm_alu_quotient_hi[i] = rows[i].avm_alu_quotient_hi; + polys.avm_alu_quotient_lo[i] = rows[i].avm_alu_quotient_lo; + polys.avm_alu_remainder[i] = rows[i].avm_alu_remainder; polys.avm_alu_res_hi[i] = rows[i].avm_alu_res_hi; polys.avm_alu_res_lo[i] = rows[i].avm_alu_res_lo; polys.avm_alu_rng_chk_lookup_selector[i] = rows[i].avm_alu_rng_chk_lookup_selector; @@ -567,6 +637,14 @@ class AvmCircuitBuilder { polys.lookup_u16_12_counts[i] = rows[i].lookup_u16_12_counts; polys.lookup_u16_13_counts[i] = rows[i].lookup_u16_13_counts; polys.lookup_u16_14_counts[i] = rows[i].lookup_u16_14_counts; + polys.lookup_div_u16_0_counts[i] = rows[i].lookup_div_u16_0_counts; + polys.lookup_div_u16_1_counts[i] = rows[i].lookup_div_u16_1_counts; + polys.lookup_div_u16_2_counts[i] = rows[i].lookup_div_u16_2_counts; + polys.lookup_div_u16_3_counts[i] = rows[i].lookup_div_u16_3_counts; + polys.lookup_div_u16_4_counts[i] = rows[i].lookup_div_u16_4_counts; + polys.lookup_div_u16_5_counts[i] = rows[i].lookup_div_u16_5_counts; + polys.lookup_div_u16_6_counts[i] = rows[i].lookup_div_u16_6_counts; + polys.lookup_div_u16_7_counts[i] = rows[i].lookup_div_u16_7_counts; } polys.avm_alu_a_hi_shift = Polynomial(polys.avm_alu_a_hi.shifted()); @@ -576,9 +654,19 @@ class AvmCircuitBuilder { polys.avm_alu_b_lo_shift = Polynomial(polys.avm_alu_b_lo.shifted()); polys.avm_alu_cmp_rng_ctr_shift = Polynomial(polys.avm_alu_cmp_rng_ctr.shifted()); polys.avm_alu_cmp_sel_shift = Polynomial(polys.avm_alu_cmp_sel.shifted()); + polys.avm_alu_div_rng_chk_selector_shift = Polynomial(polys.avm_alu_div_rng_chk_selector.shifted()); + polys.avm_alu_div_u16_r0_shift = Polynomial(polys.avm_alu_div_u16_r0.shifted()); + 
polys.avm_alu_div_u16_r1_shift = Polynomial(polys.avm_alu_div_u16_r1.shifted()); + polys.avm_alu_div_u16_r2_shift = Polynomial(polys.avm_alu_div_u16_r2.shifted()); + polys.avm_alu_div_u16_r3_shift = Polynomial(polys.avm_alu_div_u16_r3.shifted()); + polys.avm_alu_div_u16_r4_shift = Polynomial(polys.avm_alu_div_u16_r4.shifted()); + polys.avm_alu_div_u16_r5_shift = Polynomial(polys.avm_alu_div_u16_r5.shifted()); + polys.avm_alu_div_u16_r6_shift = Polynomial(polys.avm_alu_div_u16_r6.shifted()); + polys.avm_alu_div_u16_r7_shift = Polynomial(polys.avm_alu_div_u16_r7.shifted()); polys.avm_alu_op_add_shift = Polynomial(polys.avm_alu_op_add.shifted()); polys.avm_alu_op_cast_prev_shift = Polynomial(polys.avm_alu_op_cast_prev.shifted()); polys.avm_alu_op_cast_shift = Polynomial(polys.avm_alu_op_cast.shifted()); + polys.avm_alu_op_div_shift = Polynomial(polys.avm_alu_op_div.shifted()); polys.avm_alu_op_mul_shift = Polynomial(polys.avm_alu_op_mul.shifted()); polys.avm_alu_op_shl_shift = Polynomial(polys.avm_alu_op_shl.shifted()); polys.avm_alu_op_shr_shift = Polynomial(polys.avm_alu_op_shr.shifted()); @@ -805,6 +893,30 @@ class AvmCircuitBuilder { if (!evaluate_logderivative.template operator()>("LOOKUP_U16_14")) { return false; } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_0")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_1")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_2")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_3")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_4")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_5")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_6")) { + return false; + } + if (!evaluate_logderivative.template operator()>("LOOKUP_DIV_U16_7")) { + return false; + } return 
true; } diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp index 08c3cffd783..1921397837f 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp @@ -21,6 +21,14 @@ #include "barretenberg/relations/generated/avm/incl_mem_tag_err.hpp" #include "barretenberg/relations/generated/avm/lookup_byte_lengths.hpp" #include "barretenberg/relations/generated/avm/lookup_byte_operations.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_0.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_1.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_2.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_3.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_4.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_5.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_6.hpp" +#include "barretenberg/relations/generated/avm/lookup_div_u16_7.hpp" #include "barretenberg/relations/generated/avm/lookup_mem_rng_chk_hi.hpp" #include "barretenberg/relations/generated/avm/lookup_mem_rng_chk_lo.hpp" #include "barretenberg/relations/generated/avm/lookup_pow_2_0.hpp" @@ -73,11 +81,11 @@ class AvmFlavor { using RelationSeparator = FF; static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 2; - static constexpr size_t NUM_WITNESS_ENTITIES = 236; + static constexpr size_t NUM_WITNESS_ENTITIES = 270; static constexpr size_t NUM_WIRES = NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES; // We have two copies of the witness entities, so we subtract the number of fixed ones (they have no shift), one for // the unshifted and one for the shifted - static constexpr size_t NUM_ALL_ENTITIES = 280; + static constexpr size_t NUM_ALL_ENTITIES = 324; using GrandProductRelations = std::tuple, perm_main_bin_relation, @@ -113,7 +121,15 @@ class 
AvmFlavor { lookup_u16_11_relation, lookup_u16_12_relation, lookup_u16_13_relation, - lookup_u16_14_relation>; + lookup_u16_14_relation, + lookup_div_u16_0_relation, + lookup_div_u16_1_relation, + lookup_div_u16_2_relation, + lookup_div_u16_3_relation, + lookup_div_u16_4_relation, + lookup_div_u16_5_relation, + lookup_div_u16_6_relation, + lookup_div_u16_7_relation>; using Relations = std::tuple, Avm_vm::avm_binary, @@ -153,7 +169,15 @@ class AvmFlavor { lookup_u16_11_relation, lookup_u16_12_relation, lookup_u16_13_relation, - lookup_u16_14_relation>; + lookup_u16_14_relation, + lookup_div_u16_0_relation, + lookup_div_u16_1_relation, + lookup_div_u16_2_relation, + lookup_div_u16_3_relation, + lookup_div_u16_4_relation, + lookup_div_u16_5_relation, + lookup_div_u16_6_relation, + lookup_div_u16_7_relation>; static constexpr size_t MAX_PARTIAL_RELATION_LENGTH = compute_max_partial_relation_length(); @@ -197,6 +221,17 @@ class AvmFlavor { avm_alu_clk, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, + avm_alu_divisor_hi, + avm_alu_divisor_lo, avm_alu_ff_tag, avm_alu_ia, avm_alu_ib, @@ -206,6 +241,8 @@ class AvmFlavor { avm_alu_op_cast, avm_alu_op_cast_prev, avm_alu_op_div, + avm_alu_op_div_a_lt_b, + avm_alu_op_div_std, avm_alu_op_eq, avm_alu_op_eq_diff_inv, avm_alu_op_lt, @@ -221,6 +258,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo, avm_alu_p_sub_b_hi, avm_alu_p_sub_b_lo, + avm_alu_partial_prod_hi, + avm_alu_partial_prod_lo, + avm_alu_quotient_hi, + avm_alu_quotient_lo, + avm_alu_remainder, avm_alu_res_hi, avm_alu_res_lo, avm_alu_rng_chk_lookup_selector, @@ -398,6 +440,14 @@ class AvmFlavor { lookup_u16_12, lookup_u16_13, lookup_u16_14, + lookup_div_u16_0, + lookup_div_u16_1, + lookup_div_u16_2, + lookup_div_u16_3, + lookup_div_u16_4, + lookup_div_u16_5, + lookup_div_u16_6, + 
lookup_div_u16_7, lookup_byte_lengths_counts, lookup_byte_operations_counts, incl_main_tag_err_counts, @@ -422,7 +472,15 @@ class AvmFlavor { lookup_u16_11_counts, lookup_u16_12_counts, lookup_u16_13_counts, - lookup_u16_14_counts) + lookup_u16_14_counts, + lookup_div_u16_0_counts, + lookup_div_u16_1_counts, + lookup_div_u16_2_counts, + lookup_div_u16_3_counts, + lookup_div_u16_4_counts, + lookup_div_u16_5_counts, + lookup_div_u16_6_counts, + lookup_div_u16_7_counts) RefVector get_wires() { @@ -436,6 +494,17 @@ class AvmFlavor { avm_alu_clk, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, + avm_alu_divisor_hi, + avm_alu_divisor_lo, avm_alu_ff_tag, avm_alu_ia, avm_alu_ib, @@ -445,6 +514,8 @@ class AvmFlavor { avm_alu_op_cast, avm_alu_op_cast_prev, avm_alu_op_div, + avm_alu_op_div_a_lt_b, + avm_alu_op_div_std, avm_alu_op_eq, avm_alu_op_eq_diff_inv, avm_alu_op_lt, @@ -460,6 +531,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo, avm_alu_p_sub_b_hi, avm_alu_p_sub_b_lo, + avm_alu_partial_prod_hi, + avm_alu_partial_prod_lo, + avm_alu_quotient_hi, + avm_alu_quotient_lo, + avm_alu_remainder, avm_alu_res_hi, avm_alu_res_lo, avm_alu_rng_chk_lookup_selector, @@ -637,6 +713,14 @@ class AvmFlavor { lookup_u16_12, lookup_u16_13, lookup_u16_14, + lookup_div_u16_0, + lookup_div_u16_1, + lookup_div_u16_2, + lookup_div_u16_3, + lookup_div_u16_4, + lookup_div_u16_5, + lookup_div_u16_6, + lookup_div_u16_7, lookup_byte_lengths_counts, lookup_byte_operations_counts, incl_main_tag_err_counts, @@ -661,7 +745,15 @@ class AvmFlavor { lookup_u16_11_counts, lookup_u16_12_counts, lookup_u16_13_counts, - lookup_u16_14_counts }; + lookup_u16_14_counts, + lookup_div_u16_0_counts, + lookup_div_u16_1_counts, + lookup_div_u16_2_counts, + lookup_div_u16_3_counts, + lookup_div_u16_4_counts, + 
lookup_div_u16_5_counts, + lookup_div_u16_6_counts, + lookup_div_u16_7_counts }; }; }; @@ -680,6 +772,17 @@ class AvmFlavor { avm_alu_clk, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, + avm_alu_divisor_hi, + avm_alu_divisor_lo, avm_alu_ff_tag, avm_alu_ia, avm_alu_ib, @@ -689,6 +792,8 @@ class AvmFlavor { avm_alu_op_cast, avm_alu_op_cast_prev, avm_alu_op_div, + avm_alu_op_div_a_lt_b, + avm_alu_op_div_std, avm_alu_op_eq, avm_alu_op_eq_diff_inv, avm_alu_op_lt, @@ -704,6 +809,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo, avm_alu_p_sub_b_hi, avm_alu_p_sub_b_lo, + avm_alu_partial_prod_hi, + avm_alu_partial_prod_lo, + avm_alu_quotient_hi, + avm_alu_quotient_lo, + avm_alu_remainder, avm_alu_res_hi, avm_alu_res_lo, avm_alu_rng_chk_lookup_selector, @@ -881,6 +991,14 @@ class AvmFlavor { lookup_u16_12, lookup_u16_13, lookup_u16_14, + lookup_div_u16_0, + lookup_div_u16_1, + lookup_div_u16_2, + lookup_div_u16_3, + lookup_div_u16_4, + lookup_div_u16_5, + lookup_div_u16_6, + lookup_div_u16_7, lookup_byte_lengths_counts, lookup_byte_operations_counts, incl_main_tag_err_counts, @@ -906,6 +1024,14 @@ class AvmFlavor { lookup_u16_12_counts, lookup_u16_13_counts, lookup_u16_14_counts, + lookup_div_u16_0_counts, + lookup_div_u16_1_counts, + lookup_div_u16_2_counts, + lookup_div_u16_3_counts, + lookup_div_u16_4_counts, + lookup_div_u16_5_counts, + lookup_div_u16_6_counts, + lookup_div_u16_7_counts, avm_alu_a_hi_shift, avm_alu_a_lo_shift, avm_alu_alu_sel_shift, @@ -913,9 +1039,19 @@ class AvmFlavor { avm_alu_b_lo_shift, avm_alu_cmp_rng_ctr_shift, avm_alu_cmp_sel_shift, + avm_alu_div_rng_chk_selector_shift, + avm_alu_div_u16_r0_shift, + avm_alu_div_u16_r1_shift, + avm_alu_div_u16_r2_shift, + avm_alu_div_u16_r3_shift, + avm_alu_div_u16_r4_shift, + avm_alu_div_u16_r5_shift, + 
avm_alu_div_u16_r6_shift, + avm_alu_div_u16_r7_shift, avm_alu_op_add_shift, avm_alu_op_cast_prev_shift, avm_alu_op_cast_shift, + avm_alu_op_div_shift, avm_alu_op_mul_shift, avm_alu_op_shl_shift, avm_alu_op_shr_shift, @@ -963,6 +1099,17 @@ class AvmFlavor { avm_alu_clk, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, + avm_alu_divisor_hi, + avm_alu_divisor_lo, avm_alu_ff_tag, avm_alu_ia, avm_alu_ib, @@ -972,6 +1119,8 @@ class AvmFlavor { avm_alu_op_cast, avm_alu_op_cast_prev, avm_alu_op_div, + avm_alu_op_div_a_lt_b, + avm_alu_op_div_std, avm_alu_op_eq, avm_alu_op_eq_diff_inv, avm_alu_op_lt, @@ -987,6 +1136,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo, avm_alu_p_sub_b_hi, avm_alu_p_sub_b_lo, + avm_alu_partial_prod_hi, + avm_alu_partial_prod_lo, + avm_alu_quotient_hi, + avm_alu_quotient_lo, + avm_alu_remainder, avm_alu_res_hi, avm_alu_res_lo, avm_alu_rng_chk_lookup_selector, @@ -1164,6 +1318,14 @@ class AvmFlavor { lookup_u16_12, lookup_u16_13, lookup_u16_14, + lookup_div_u16_0, + lookup_div_u16_1, + lookup_div_u16_2, + lookup_div_u16_3, + lookup_div_u16_4, + lookup_div_u16_5, + lookup_div_u16_6, + lookup_div_u16_7, lookup_byte_lengths_counts, lookup_byte_operations_counts, incl_main_tag_err_counts, @@ -1189,6 +1351,14 @@ class AvmFlavor { lookup_u16_12_counts, lookup_u16_13_counts, lookup_u16_14_counts, + lookup_div_u16_0_counts, + lookup_div_u16_1_counts, + lookup_div_u16_2_counts, + lookup_div_u16_3_counts, + lookup_div_u16_4_counts, + lookup_div_u16_5_counts, + lookup_div_u16_6_counts, + lookup_div_u16_7_counts, avm_alu_a_hi_shift, avm_alu_a_lo_shift, avm_alu_alu_sel_shift, @@ -1196,9 +1366,19 @@ class AvmFlavor { avm_alu_b_lo_shift, avm_alu_cmp_rng_ctr_shift, avm_alu_cmp_sel_shift, + avm_alu_div_rng_chk_selector_shift, + avm_alu_div_u16_r0_shift, + 
avm_alu_div_u16_r1_shift, + avm_alu_div_u16_r2_shift, + avm_alu_div_u16_r3_shift, + avm_alu_div_u16_r4_shift, + avm_alu_div_u16_r5_shift, + avm_alu_div_u16_r6_shift, + avm_alu_div_u16_r7_shift, avm_alu_op_add_shift, avm_alu_op_cast_prev_shift, avm_alu_op_cast_shift, + avm_alu_op_div_shift, avm_alu_op_mul_shift, avm_alu_op_shl_shift, avm_alu_op_shr_shift, @@ -1246,6 +1426,17 @@ class AvmFlavor { avm_alu_clk, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, + avm_alu_divisor_hi, + avm_alu_divisor_lo, avm_alu_ff_tag, avm_alu_ia, avm_alu_ib, @@ -1255,6 +1446,8 @@ class AvmFlavor { avm_alu_op_cast, avm_alu_op_cast_prev, avm_alu_op_div, + avm_alu_op_div_a_lt_b, + avm_alu_op_div_std, avm_alu_op_eq, avm_alu_op_eq_diff_inv, avm_alu_op_lt, @@ -1270,6 +1463,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo, avm_alu_p_sub_b_hi, avm_alu_p_sub_b_lo, + avm_alu_partial_prod_hi, + avm_alu_partial_prod_lo, + avm_alu_quotient_hi, + avm_alu_quotient_lo, + avm_alu_remainder, avm_alu_res_hi, avm_alu_res_lo, avm_alu_rng_chk_lookup_selector, @@ -1447,6 +1645,14 @@ class AvmFlavor { lookup_u16_12, lookup_u16_13, lookup_u16_14, + lookup_div_u16_0, + lookup_div_u16_1, + lookup_div_u16_2, + lookup_div_u16_3, + lookup_div_u16_4, + lookup_div_u16_5, + lookup_div_u16_6, + lookup_div_u16_7, lookup_byte_lengths_counts, lookup_byte_operations_counts, incl_main_tag_err_counts, @@ -1471,7 +1677,15 @@ class AvmFlavor { lookup_u16_11_counts, lookup_u16_12_counts, lookup_u16_13_counts, - lookup_u16_14_counts }; + lookup_u16_14_counts, + lookup_div_u16_0_counts, + lookup_div_u16_1_counts, + lookup_div_u16_2_counts, + lookup_div_u16_3_counts, + lookup_div_u16_4_counts, + lookup_div_u16_5_counts, + lookup_div_u16_6_counts, + lookup_div_u16_7_counts }; }; RefVector get_to_be_shifted() { @@ -1482,9 +1696,19 @@ class 
AvmFlavor { avm_alu_b_lo, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, avm_alu_op_add, avm_alu_op_cast_prev, avm_alu_op_cast, + avm_alu_op_div, avm_alu_op_mul, avm_alu_op_shl, avm_alu_op_shr, @@ -1527,9 +1751,19 @@ class AvmFlavor { avm_alu_b_lo_shift, avm_alu_cmp_rng_ctr_shift, avm_alu_cmp_sel_shift, + avm_alu_div_rng_chk_selector_shift, + avm_alu_div_u16_r0_shift, + avm_alu_div_u16_r1_shift, + avm_alu_div_u16_r2_shift, + avm_alu_div_u16_r3_shift, + avm_alu_div_u16_r4_shift, + avm_alu_div_u16_r5_shift, + avm_alu_div_u16_r6_shift, + avm_alu_div_u16_r7_shift, avm_alu_op_add_shift, avm_alu_op_cast_prev_shift, avm_alu_op_cast_shift, + avm_alu_op_div_shift, avm_alu_op_mul_shift, avm_alu_op_shl_shift, avm_alu_op_shr_shift, @@ -1582,9 +1816,19 @@ class AvmFlavor { avm_alu_b_lo, avm_alu_cmp_rng_ctr, avm_alu_cmp_sel, + avm_alu_div_rng_chk_selector, + avm_alu_div_u16_r0, + avm_alu_div_u16_r1, + avm_alu_div_u16_r2, + avm_alu_div_u16_r3, + avm_alu_div_u16_r4, + avm_alu_div_u16_r5, + avm_alu_div_u16_r6, + avm_alu_div_u16_r7, avm_alu_op_add, avm_alu_op_cast_prev, avm_alu_op_cast, + avm_alu_op_div, avm_alu_op_mul, avm_alu_op_shl, avm_alu_op_shr, @@ -1693,6 +1937,22 @@ class AvmFlavor { prover_polynomials, relation_parameters, this->circuit_size); bb::compute_logderivative_inverse>( prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + 
prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); + bb::compute_logderivative_inverse>( + prover_polynomials, relation_parameters, this->circuit_size); } }; @@ -1765,13 +2025,6 @@ class AvmFlavor { * @details During folding and sumcheck, the prover evaluates the relations on these univariates. */ template using ProverUnivariates = AllEntities>; - /** - * @brief A container for univariates used during Protogalaxy folding and sumcheck with some of the computation - * optmistically ignored - * @details During folding and sumcheck, the prover evaluates the relations on these univariates. - */ - template - using OptimisedProverUnivariates = AllEntities>; /** * @brief A container for univariates produced during the hot loop in sumcheck. @@ -1804,6 +2057,17 @@ class AvmFlavor { Base::avm_alu_clk = "AVM_ALU_CLK"; Base::avm_alu_cmp_rng_ctr = "AVM_ALU_CMP_RNG_CTR"; Base::avm_alu_cmp_sel = "AVM_ALU_CMP_SEL"; + Base::avm_alu_div_rng_chk_selector = "AVM_ALU_DIV_RNG_CHK_SELECTOR"; + Base::avm_alu_div_u16_r0 = "AVM_ALU_DIV_U16_R0"; + Base::avm_alu_div_u16_r1 = "AVM_ALU_DIV_U16_R1"; + Base::avm_alu_div_u16_r2 = "AVM_ALU_DIV_U16_R2"; + Base::avm_alu_div_u16_r3 = "AVM_ALU_DIV_U16_R3"; + Base::avm_alu_div_u16_r4 = "AVM_ALU_DIV_U16_R4"; + Base::avm_alu_div_u16_r5 = "AVM_ALU_DIV_U16_R5"; + Base::avm_alu_div_u16_r6 = "AVM_ALU_DIV_U16_R6"; + Base::avm_alu_div_u16_r7 = "AVM_ALU_DIV_U16_R7"; + Base::avm_alu_divisor_hi = "AVM_ALU_DIVISOR_HI"; + Base::avm_alu_divisor_lo = "AVM_ALU_DIVISOR_LO"; Base::avm_alu_ff_tag = "AVM_ALU_FF_TAG"; Base::avm_alu_ia = "AVM_ALU_IA"; Base::avm_alu_ib = "AVM_ALU_IB"; @@ -1813,6 +2077,8 @@ class AvmFlavor { Base::avm_alu_op_cast = "AVM_ALU_OP_CAST"; Base::avm_alu_op_cast_prev = "AVM_ALU_OP_CAST_PREV"; Base::avm_alu_op_div = "AVM_ALU_OP_DIV"; + 
Base::avm_alu_op_div_a_lt_b = "AVM_ALU_OP_DIV_A_LT_B"; + Base::avm_alu_op_div_std = "AVM_ALU_OP_DIV_STD"; Base::avm_alu_op_eq = "AVM_ALU_OP_EQ"; Base::avm_alu_op_eq_diff_inv = "AVM_ALU_OP_EQ_DIFF_INV"; Base::avm_alu_op_lt = "AVM_ALU_OP_LT"; @@ -1828,6 +2094,11 @@ class AvmFlavor { Base::avm_alu_p_sub_a_lo = "AVM_ALU_P_SUB_A_LO"; Base::avm_alu_p_sub_b_hi = "AVM_ALU_P_SUB_B_HI"; Base::avm_alu_p_sub_b_lo = "AVM_ALU_P_SUB_B_LO"; + Base::avm_alu_partial_prod_hi = "AVM_ALU_PARTIAL_PROD_HI"; + Base::avm_alu_partial_prod_lo = "AVM_ALU_PARTIAL_PROD_LO"; + Base::avm_alu_quotient_hi = "AVM_ALU_QUOTIENT_HI"; + Base::avm_alu_quotient_lo = "AVM_ALU_QUOTIENT_LO"; + Base::avm_alu_remainder = "AVM_ALU_REMAINDER"; Base::avm_alu_res_hi = "AVM_ALU_RES_HI"; Base::avm_alu_res_lo = "AVM_ALU_RES_LO"; Base::avm_alu_rng_chk_lookup_selector = "AVM_ALU_RNG_CHK_LOOKUP_SELECTOR"; @@ -2005,6 +2276,14 @@ class AvmFlavor { Base::lookup_u16_12 = "LOOKUP_U16_12"; Base::lookup_u16_13 = "LOOKUP_U16_13"; Base::lookup_u16_14 = "LOOKUP_U16_14"; + Base::lookup_div_u16_0 = "LOOKUP_DIV_U16_0"; + Base::lookup_div_u16_1 = "LOOKUP_DIV_U16_1"; + Base::lookup_div_u16_2 = "LOOKUP_DIV_U16_2"; + Base::lookup_div_u16_3 = "LOOKUP_DIV_U16_3"; + Base::lookup_div_u16_4 = "LOOKUP_DIV_U16_4"; + Base::lookup_div_u16_5 = "LOOKUP_DIV_U16_5"; + Base::lookup_div_u16_6 = "LOOKUP_DIV_U16_6"; + Base::lookup_div_u16_7 = "LOOKUP_DIV_U16_7"; Base::lookup_byte_lengths_counts = "LOOKUP_BYTE_LENGTHS_COUNTS"; Base::lookup_byte_operations_counts = "LOOKUP_BYTE_OPERATIONS_COUNTS"; Base::incl_main_tag_err_counts = "INCL_MAIN_TAG_ERR_COUNTS"; @@ -2030,6 +2309,14 @@ class AvmFlavor { Base::lookup_u16_12_counts = "LOOKUP_U16_12_COUNTS"; Base::lookup_u16_13_counts = "LOOKUP_U16_13_COUNTS"; Base::lookup_u16_14_counts = "LOOKUP_U16_14_COUNTS"; + Base::lookup_div_u16_0_counts = "LOOKUP_DIV_U16_0_COUNTS"; + Base::lookup_div_u16_1_counts = "LOOKUP_DIV_U16_1_COUNTS"; + Base::lookup_div_u16_2_counts = "LOOKUP_DIV_U16_2_COUNTS"; + 
Base::lookup_div_u16_3_counts = "LOOKUP_DIV_U16_3_COUNTS"; + Base::lookup_div_u16_4_counts = "LOOKUP_DIV_U16_4_COUNTS"; + Base::lookup_div_u16_5_counts = "LOOKUP_DIV_U16_5_COUNTS"; + Base::lookup_div_u16_6_counts = "LOOKUP_DIV_U16_6_COUNTS"; + Base::lookup_div_u16_7_counts = "LOOKUP_DIV_U16_7_COUNTS"; }; }; @@ -2059,6 +2346,17 @@ class AvmFlavor { Commitment avm_alu_clk; Commitment avm_alu_cmp_rng_ctr; Commitment avm_alu_cmp_sel; + Commitment avm_alu_div_rng_chk_selector; + Commitment avm_alu_div_u16_r0; + Commitment avm_alu_div_u16_r1; + Commitment avm_alu_div_u16_r2; + Commitment avm_alu_div_u16_r3; + Commitment avm_alu_div_u16_r4; + Commitment avm_alu_div_u16_r5; + Commitment avm_alu_div_u16_r6; + Commitment avm_alu_div_u16_r7; + Commitment avm_alu_divisor_hi; + Commitment avm_alu_divisor_lo; Commitment avm_alu_ff_tag; Commitment avm_alu_ia; Commitment avm_alu_ib; @@ -2068,6 +2366,8 @@ class AvmFlavor { Commitment avm_alu_op_cast; Commitment avm_alu_op_cast_prev; Commitment avm_alu_op_div; + Commitment avm_alu_op_div_a_lt_b; + Commitment avm_alu_op_div_std; Commitment avm_alu_op_eq; Commitment avm_alu_op_eq_diff_inv; Commitment avm_alu_op_lt; @@ -2083,6 +2383,11 @@ class AvmFlavor { Commitment avm_alu_p_sub_a_lo; Commitment avm_alu_p_sub_b_hi; Commitment avm_alu_p_sub_b_lo; + Commitment avm_alu_partial_prod_hi; + Commitment avm_alu_partial_prod_lo; + Commitment avm_alu_quotient_hi; + Commitment avm_alu_quotient_lo; + Commitment avm_alu_remainder; Commitment avm_alu_res_hi; Commitment avm_alu_res_lo; Commitment avm_alu_rng_chk_lookup_selector; @@ -2260,6 +2565,14 @@ class AvmFlavor { Commitment lookup_u16_12; Commitment lookup_u16_13; Commitment lookup_u16_14; + Commitment lookup_div_u16_0; + Commitment lookup_div_u16_1; + Commitment lookup_div_u16_2; + Commitment lookup_div_u16_3; + Commitment lookup_div_u16_4; + Commitment lookup_div_u16_5; + Commitment lookup_div_u16_6; + Commitment lookup_div_u16_7; Commitment lookup_byte_lengths_counts; Commitment 
lookup_byte_operations_counts; Commitment incl_main_tag_err_counts; @@ -2285,6 +2598,14 @@ class AvmFlavor { Commitment lookup_u16_12_counts; Commitment lookup_u16_13_counts; Commitment lookup_u16_14_counts; + Commitment lookup_div_u16_0_counts; + Commitment lookup_div_u16_1_counts; + Commitment lookup_div_u16_2_counts; + Commitment lookup_div_u16_3_counts; + Commitment lookup_div_u16_4_counts; + Commitment lookup_div_u16_5_counts; + Commitment lookup_div_u16_6_counts; + Commitment lookup_div_u16_7_counts; std::vector> sumcheck_univariates; std::array sumcheck_evaluations; @@ -2314,6 +2635,17 @@ class AvmFlavor { avm_alu_clk = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_cmp_rng_ctr = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_cmp_sel = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_rng_chk_selector = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r0 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r1 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r2 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r3 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r4 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r5 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r6 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_div_u16_r7 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_divisor_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_divisor_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_ff_tag = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_ia = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_ib = 
deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -2323,6 +2655,8 @@ class AvmFlavor { avm_alu_op_cast = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_op_cast_prev = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_op_div = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_op_div_a_lt_b = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_op_div_std = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_op_eq = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_op_eq_diff_inv = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_op_lt = deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -2338,6 +2672,11 @@ class AvmFlavor { avm_alu_p_sub_a_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_p_sub_b_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_p_sub_b_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_partial_prod_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_partial_prod_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_quotient_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_quotient_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + avm_alu_remainder = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_res_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_res_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); avm_alu_rng_chk_lookup_selector = deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -2516,6 +2855,14 @@ class AvmFlavor { lookup_u16_12 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_u16_13 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_u16_14 = 
deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_0 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_1 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_2 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_3 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_4 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_5 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_6 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_7 = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_byte_lengths_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_byte_operations_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); incl_main_tag_err_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -2541,6 +2888,14 @@ class AvmFlavor { lookup_u16_12_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_u16_13_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); lookup_u16_14_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_0_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_1_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_2_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_3_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_4_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_5_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_6_counts = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + lookup_div_u16_7_counts = 
deserialize_from_buffer(Transcript::proof_data, num_frs_read); for (size_t i = 0; i < log_n; ++i) { sumcheck_univariates.emplace_back( @@ -2574,6 +2929,17 @@ class AvmFlavor { serialize_to_buffer(avm_alu_clk, Transcript::proof_data); serialize_to_buffer(avm_alu_cmp_rng_ctr, Transcript::proof_data); serialize_to_buffer(avm_alu_cmp_sel, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_rng_chk_selector, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r0, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r1, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r2, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r3, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r4, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r5, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r6, Transcript::proof_data); + serialize_to_buffer(avm_alu_div_u16_r7, Transcript::proof_data); + serialize_to_buffer(avm_alu_divisor_hi, Transcript::proof_data); + serialize_to_buffer(avm_alu_divisor_lo, Transcript::proof_data); serialize_to_buffer(avm_alu_ff_tag, Transcript::proof_data); serialize_to_buffer(avm_alu_ia, Transcript::proof_data); serialize_to_buffer(avm_alu_ib, Transcript::proof_data); @@ -2583,6 +2949,8 @@ class AvmFlavor { serialize_to_buffer(avm_alu_op_cast, Transcript::proof_data); serialize_to_buffer(avm_alu_op_cast_prev, Transcript::proof_data); serialize_to_buffer(avm_alu_op_div, Transcript::proof_data); + serialize_to_buffer(avm_alu_op_div_a_lt_b, Transcript::proof_data); + serialize_to_buffer(avm_alu_op_div_std, Transcript::proof_data); serialize_to_buffer(avm_alu_op_eq, Transcript::proof_data); serialize_to_buffer(avm_alu_op_eq_diff_inv, Transcript::proof_data); serialize_to_buffer(avm_alu_op_lt, Transcript::proof_data); @@ -2598,6 +2966,11 @@ class AvmFlavor { serialize_to_buffer(avm_alu_p_sub_a_lo, Transcript::proof_data); serialize_to_buffer(avm_alu_p_sub_b_hi, 
Transcript::proof_data); serialize_to_buffer(avm_alu_p_sub_b_lo, Transcript::proof_data); + serialize_to_buffer(avm_alu_partial_prod_hi, Transcript::proof_data); + serialize_to_buffer(avm_alu_partial_prod_lo, Transcript::proof_data); + serialize_to_buffer(avm_alu_quotient_hi, Transcript::proof_data); + serialize_to_buffer(avm_alu_quotient_lo, Transcript::proof_data); + serialize_to_buffer(avm_alu_remainder, Transcript::proof_data); serialize_to_buffer(avm_alu_res_hi, Transcript::proof_data); serialize_to_buffer(avm_alu_res_lo, Transcript::proof_data); serialize_to_buffer(avm_alu_rng_chk_lookup_selector, Transcript::proof_data); @@ -2775,6 +3148,14 @@ class AvmFlavor { serialize_to_buffer(lookup_u16_12, Transcript::proof_data); serialize_to_buffer(lookup_u16_13, Transcript::proof_data); serialize_to_buffer(lookup_u16_14, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_0, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_1, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_2, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_3, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_4, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_5, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_6, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_7, Transcript::proof_data); serialize_to_buffer(lookup_byte_lengths_counts, Transcript::proof_data); serialize_to_buffer(lookup_byte_operations_counts, Transcript::proof_data); serialize_to_buffer(incl_main_tag_err_counts, Transcript::proof_data); @@ -2800,6 +3181,14 @@ class AvmFlavor { serialize_to_buffer(lookup_u16_12_counts, Transcript::proof_data); serialize_to_buffer(lookup_u16_13_counts, Transcript::proof_data); serialize_to_buffer(lookup_u16_14_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_0_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_1_counts, Transcript::proof_data); + 
serialize_to_buffer(lookup_div_u16_2_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_3_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_4_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_5_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_6_counts, Transcript::proof_data); + serialize_to_buffer(lookup_div_u16_7_counts, Transcript::proof_data); for (size_t i = 0; i < log_n; ++i) { serialize_to_buffer(sumcheck_univariates[i], Transcript::proof_data); diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp index 064a1e7e3ac..feb378a1326 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp @@ -69,6 +69,17 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.avm_alu_clk = commitment_key->commit(key->avm_alu_clk); witness_commitments.avm_alu_cmp_rng_ctr = commitment_key->commit(key->avm_alu_cmp_rng_ctr); witness_commitments.avm_alu_cmp_sel = commitment_key->commit(key->avm_alu_cmp_sel); + witness_commitments.avm_alu_div_rng_chk_selector = commitment_key->commit(key->avm_alu_div_rng_chk_selector); + witness_commitments.avm_alu_div_u16_r0 = commitment_key->commit(key->avm_alu_div_u16_r0); + witness_commitments.avm_alu_div_u16_r1 = commitment_key->commit(key->avm_alu_div_u16_r1); + witness_commitments.avm_alu_div_u16_r2 = commitment_key->commit(key->avm_alu_div_u16_r2); + witness_commitments.avm_alu_div_u16_r3 = commitment_key->commit(key->avm_alu_div_u16_r3); + witness_commitments.avm_alu_div_u16_r4 = commitment_key->commit(key->avm_alu_div_u16_r4); + witness_commitments.avm_alu_div_u16_r5 = commitment_key->commit(key->avm_alu_div_u16_r5); + witness_commitments.avm_alu_div_u16_r6 = commitment_key->commit(key->avm_alu_div_u16_r6); + witness_commitments.avm_alu_div_u16_r7 = 
commitment_key->commit(key->avm_alu_div_u16_r7); + witness_commitments.avm_alu_divisor_hi = commitment_key->commit(key->avm_alu_divisor_hi); + witness_commitments.avm_alu_divisor_lo = commitment_key->commit(key->avm_alu_divisor_lo); witness_commitments.avm_alu_ff_tag = commitment_key->commit(key->avm_alu_ff_tag); witness_commitments.avm_alu_ia = commitment_key->commit(key->avm_alu_ia); witness_commitments.avm_alu_ib = commitment_key->commit(key->avm_alu_ib); @@ -78,6 +89,8 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.avm_alu_op_cast = commitment_key->commit(key->avm_alu_op_cast); witness_commitments.avm_alu_op_cast_prev = commitment_key->commit(key->avm_alu_op_cast_prev); witness_commitments.avm_alu_op_div = commitment_key->commit(key->avm_alu_op_div); + witness_commitments.avm_alu_op_div_a_lt_b = commitment_key->commit(key->avm_alu_op_div_a_lt_b); + witness_commitments.avm_alu_op_div_std = commitment_key->commit(key->avm_alu_op_div_std); witness_commitments.avm_alu_op_eq = commitment_key->commit(key->avm_alu_op_eq); witness_commitments.avm_alu_op_eq_diff_inv = commitment_key->commit(key->avm_alu_op_eq_diff_inv); witness_commitments.avm_alu_op_lt = commitment_key->commit(key->avm_alu_op_lt); @@ -93,6 +106,11 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.avm_alu_p_sub_a_lo = commitment_key->commit(key->avm_alu_p_sub_a_lo); witness_commitments.avm_alu_p_sub_b_hi = commitment_key->commit(key->avm_alu_p_sub_b_hi); witness_commitments.avm_alu_p_sub_b_lo = commitment_key->commit(key->avm_alu_p_sub_b_lo); + witness_commitments.avm_alu_partial_prod_hi = commitment_key->commit(key->avm_alu_partial_prod_hi); + witness_commitments.avm_alu_partial_prod_lo = commitment_key->commit(key->avm_alu_partial_prod_lo); + witness_commitments.avm_alu_quotient_hi = commitment_key->commit(key->avm_alu_quotient_hi); + witness_commitments.avm_alu_quotient_lo = commitment_key->commit(key->avm_alu_quotient_lo); + 
witness_commitments.avm_alu_remainder = commitment_key->commit(key->avm_alu_remainder); witness_commitments.avm_alu_res_hi = commitment_key->commit(key->avm_alu_res_hi); witness_commitments.avm_alu_res_lo = commitment_key->commit(key->avm_alu_res_lo); witness_commitments.avm_alu_rng_chk_lookup_selector = commitment_key->commit(key->avm_alu_rng_chk_lookup_selector); @@ -261,6 +279,14 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.lookup_u16_12_counts = commitment_key->commit(key->lookup_u16_12_counts); witness_commitments.lookup_u16_13_counts = commitment_key->commit(key->lookup_u16_13_counts); witness_commitments.lookup_u16_14_counts = commitment_key->commit(key->lookup_u16_14_counts); + witness_commitments.lookup_div_u16_0_counts = commitment_key->commit(key->lookup_div_u16_0_counts); + witness_commitments.lookup_div_u16_1_counts = commitment_key->commit(key->lookup_div_u16_1_counts); + witness_commitments.lookup_div_u16_2_counts = commitment_key->commit(key->lookup_div_u16_2_counts); + witness_commitments.lookup_div_u16_3_counts = commitment_key->commit(key->lookup_div_u16_3_counts); + witness_commitments.lookup_div_u16_4_counts = commitment_key->commit(key->lookup_div_u16_4_counts); + witness_commitments.lookup_div_u16_5_counts = commitment_key->commit(key->lookup_div_u16_5_counts); + witness_commitments.lookup_div_u16_6_counts = commitment_key->commit(key->lookup_div_u16_6_counts); + witness_commitments.lookup_div_u16_7_counts = commitment_key->commit(key->lookup_div_u16_7_counts); // Send all commitments to the verifier transcript->send_to_verifier(commitment_labels.avm_alu_a_hi, witness_commitments.avm_alu_a_hi); @@ -273,6 +299,18 @@ void AvmProver::execute_wire_commitments_round() transcript->send_to_verifier(commitment_labels.avm_alu_clk, witness_commitments.avm_alu_clk); transcript->send_to_verifier(commitment_labels.avm_alu_cmp_rng_ctr, witness_commitments.avm_alu_cmp_rng_ctr); 
transcript->send_to_verifier(commitment_labels.avm_alu_cmp_sel, witness_commitments.avm_alu_cmp_sel); + transcript->send_to_verifier(commitment_labels.avm_alu_div_rng_chk_selector, + witness_commitments.avm_alu_div_rng_chk_selector); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r0, witness_commitments.avm_alu_div_u16_r0); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r1, witness_commitments.avm_alu_div_u16_r1); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r2, witness_commitments.avm_alu_div_u16_r2); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r3, witness_commitments.avm_alu_div_u16_r3); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r4, witness_commitments.avm_alu_div_u16_r4); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r5, witness_commitments.avm_alu_div_u16_r5); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r6, witness_commitments.avm_alu_div_u16_r6); + transcript->send_to_verifier(commitment_labels.avm_alu_div_u16_r7, witness_commitments.avm_alu_div_u16_r7); + transcript->send_to_verifier(commitment_labels.avm_alu_divisor_hi, witness_commitments.avm_alu_divisor_hi); + transcript->send_to_verifier(commitment_labels.avm_alu_divisor_lo, witness_commitments.avm_alu_divisor_lo); transcript->send_to_verifier(commitment_labels.avm_alu_ff_tag, witness_commitments.avm_alu_ff_tag); transcript->send_to_verifier(commitment_labels.avm_alu_ia, witness_commitments.avm_alu_ia); transcript->send_to_verifier(commitment_labels.avm_alu_ib, witness_commitments.avm_alu_ib); @@ -282,6 +320,8 @@ void AvmProver::execute_wire_commitments_round() transcript->send_to_verifier(commitment_labels.avm_alu_op_cast, witness_commitments.avm_alu_op_cast); transcript->send_to_verifier(commitment_labels.avm_alu_op_cast_prev, witness_commitments.avm_alu_op_cast_prev); transcript->send_to_verifier(commitment_labels.avm_alu_op_div, 
witness_commitments.avm_alu_op_div); + transcript->send_to_verifier(commitment_labels.avm_alu_op_div_a_lt_b, witness_commitments.avm_alu_op_div_a_lt_b); + transcript->send_to_verifier(commitment_labels.avm_alu_op_div_std, witness_commitments.avm_alu_op_div_std); transcript->send_to_verifier(commitment_labels.avm_alu_op_eq, witness_commitments.avm_alu_op_eq); transcript->send_to_verifier(commitment_labels.avm_alu_op_eq_diff_inv, witness_commitments.avm_alu_op_eq_diff_inv); transcript->send_to_verifier(commitment_labels.avm_alu_op_lt, witness_commitments.avm_alu_op_lt); @@ -297,6 +337,13 @@ void AvmProver::execute_wire_commitments_round() transcript->send_to_verifier(commitment_labels.avm_alu_p_sub_a_lo, witness_commitments.avm_alu_p_sub_a_lo); transcript->send_to_verifier(commitment_labels.avm_alu_p_sub_b_hi, witness_commitments.avm_alu_p_sub_b_hi); transcript->send_to_verifier(commitment_labels.avm_alu_p_sub_b_lo, witness_commitments.avm_alu_p_sub_b_lo); + transcript->send_to_verifier(commitment_labels.avm_alu_partial_prod_hi, + witness_commitments.avm_alu_partial_prod_hi); + transcript->send_to_verifier(commitment_labels.avm_alu_partial_prod_lo, + witness_commitments.avm_alu_partial_prod_lo); + transcript->send_to_verifier(commitment_labels.avm_alu_quotient_hi, witness_commitments.avm_alu_quotient_hi); + transcript->send_to_verifier(commitment_labels.avm_alu_quotient_lo, witness_commitments.avm_alu_quotient_lo); + transcript->send_to_verifier(commitment_labels.avm_alu_remainder, witness_commitments.avm_alu_remainder); transcript->send_to_verifier(commitment_labels.avm_alu_res_hi, witness_commitments.avm_alu_res_hi); transcript->send_to_verifier(commitment_labels.avm_alu_res_lo, witness_commitments.avm_alu_res_lo); transcript->send_to_verifier(commitment_labels.avm_alu_rng_chk_lookup_selector, @@ -484,6 +531,22 @@ void AvmProver::execute_wire_commitments_round() transcript->send_to_verifier(commitment_labels.lookup_u16_12_counts, 
witness_commitments.lookup_u16_12_counts); transcript->send_to_verifier(commitment_labels.lookup_u16_13_counts, witness_commitments.lookup_u16_13_counts); transcript->send_to_verifier(commitment_labels.lookup_u16_14_counts, witness_commitments.lookup_u16_14_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_0_counts, + witness_commitments.lookup_div_u16_0_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_1_counts, + witness_commitments.lookup_div_u16_1_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_2_counts, + witness_commitments.lookup_div_u16_2_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_3_counts, + witness_commitments.lookup_div_u16_3_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_4_counts, + witness_commitments.lookup_div_u16_4_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_5_counts, + witness_commitments.lookup_div_u16_5_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_6_counts, + witness_commitments.lookup_div_u16_6_counts); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_7_counts, + witness_commitments.lookup_div_u16_7_counts); } void AvmProver::execute_log_derivative_inverse_round() @@ -531,6 +594,14 @@ void AvmProver::execute_log_derivative_inverse_round() witness_commitments.lookup_u16_12 = commitment_key->commit(key->lookup_u16_12); witness_commitments.lookup_u16_13 = commitment_key->commit(key->lookup_u16_13); witness_commitments.lookup_u16_14 = commitment_key->commit(key->lookup_u16_14); + witness_commitments.lookup_div_u16_0 = commitment_key->commit(key->lookup_div_u16_0); + witness_commitments.lookup_div_u16_1 = commitment_key->commit(key->lookup_div_u16_1); + witness_commitments.lookup_div_u16_2 = commitment_key->commit(key->lookup_div_u16_2); + witness_commitments.lookup_div_u16_3 = commitment_key->commit(key->lookup_div_u16_3); + 
witness_commitments.lookup_div_u16_4 = commitment_key->commit(key->lookup_div_u16_4); + witness_commitments.lookup_div_u16_5 = commitment_key->commit(key->lookup_div_u16_5); + witness_commitments.lookup_div_u16_6 = commitment_key->commit(key->lookup_div_u16_6); + witness_commitments.lookup_div_u16_7 = commitment_key->commit(key->lookup_div_u16_7); // Send all commitments to the verifier transcript->send_to_verifier(commitment_labels.perm_main_alu, witness_commitments.perm_main_alu); @@ -568,6 +639,14 @@ void AvmProver::execute_log_derivative_inverse_round() transcript->send_to_verifier(commitment_labels.lookup_u16_12, witness_commitments.lookup_u16_12); transcript->send_to_verifier(commitment_labels.lookup_u16_13, witness_commitments.lookup_u16_13); transcript->send_to_verifier(commitment_labels.lookup_u16_14, witness_commitments.lookup_u16_14); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_0, witness_commitments.lookup_div_u16_0); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_1, witness_commitments.lookup_div_u16_1); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_2, witness_commitments.lookup_div_u16_2); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_3, witness_commitments.lookup_div_u16_3); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_4, witness_commitments.lookup_div_u16_4); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_5, witness_commitments.lookup_div_u16_5); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_6, witness_commitments.lookup_div_u16_6); + transcript->send_to_verifier(commitment_labels.lookup_div_u16_7, witness_commitments.lookup_div_u16_7); } /** diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp index 89f357cc400..ecce0af1b4d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp +++ 
b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp @@ -64,6 +64,28 @@ bool AvmVerifier::verify_proof(const HonkProof& proof) transcript->template receive_from_prover(commitment_labels.avm_alu_cmp_rng_ctr); commitments.avm_alu_cmp_sel = transcript->template receive_from_prover(commitment_labels.avm_alu_cmp_sel); + commitments.avm_alu_div_rng_chk_selector = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_rng_chk_selector); + commitments.avm_alu_div_u16_r0 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r0); + commitments.avm_alu_div_u16_r1 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r1); + commitments.avm_alu_div_u16_r2 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r2); + commitments.avm_alu_div_u16_r3 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r3); + commitments.avm_alu_div_u16_r4 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r4); + commitments.avm_alu_div_u16_r5 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r5); + commitments.avm_alu_div_u16_r6 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r6); + commitments.avm_alu_div_u16_r7 = + transcript->template receive_from_prover(commitment_labels.avm_alu_div_u16_r7); + commitments.avm_alu_divisor_hi = + transcript->template receive_from_prover(commitment_labels.avm_alu_divisor_hi); + commitments.avm_alu_divisor_lo = + transcript->template receive_from_prover(commitment_labels.avm_alu_divisor_lo); commitments.avm_alu_ff_tag = transcript->template receive_from_prover(commitment_labels.avm_alu_ff_tag); commitments.avm_alu_ia = transcript->template receive_from_prover(commitment_labels.avm_alu_ia); commitments.avm_alu_ib = transcript->template receive_from_prover(commitment_labels.avm_alu_ib); @@ -75,6 +97,10 @@ bool AvmVerifier::verify_proof(const 
HonkProof& proof) commitments.avm_alu_op_cast_prev = transcript->template receive_from_prover(commitment_labels.avm_alu_op_cast_prev); commitments.avm_alu_op_div = transcript->template receive_from_prover(commitment_labels.avm_alu_op_div); + commitments.avm_alu_op_div_a_lt_b = + transcript->template receive_from_prover(commitment_labels.avm_alu_op_div_a_lt_b); + commitments.avm_alu_op_div_std = + transcript->template receive_from_prover(commitment_labels.avm_alu_op_div_std); commitments.avm_alu_op_eq = transcript->template receive_from_prover(commitment_labels.avm_alu_op_eq); commitments.avm_alu_op_eq_diff_inv = transcript->template receive_from_prover(commitment_labels.avm_alu_op_eq_diff_inv); @@ -97,6 +123,16 @@ bool AvmVerifier::verify_proof(const HonkProof& proof) transcript->template receive_from_prover(commitment_labels.avm_alu_p_sub_b_hi); commitments.avm_alu_p_sub_b_lo = transcript->template receive_from_prover(commitment_labels.avm_alu_p_sub_b_lo); + commitments.avm_alu_partial_prod_hi = + transcript->template receive_from_prover(commitment_labels.avm_alu_partial_prod_hi); + commitments.avm_alu_partial_prod_lo = + transcript->template receive_from_prover(commitment_labels.avm_alu_partial_prod_lo); + commitments.avm_alu_quotient_hi = + transcript->template receive_from_prover(commitment_labels.avm_alu_quotient_hi); + commitments.avm_alu_quotient_lo = + transcript->template receive_from_prover(commitment_labels.avm_alu_quotient_lo); + commitments.avm_alu_remainder = + transcript->template receive_from_prover(commitment_labels.avm_alu_remainder); commitments.avm_alu_res_hi = transcript->template receive_from_prover(commitment_labels.avm_alu_res_hi); commitments.avm_alu_res_lo = transcript->template receive_from_prover(commitment_labels.avm_alu_res_lo); commitments.avm_alu_rng_chk_lookup_selector = @@ -389,6 +425,22 @@ bool AvmVerifier::verify_proof(const HonkProof& proof) transcript->template receive_from_prover(commitment_labels.lookup_u16_13_counts); 
commitments.lookup_u16_14_counts = transcript->template receive_from_prover(commitment_labels.lookup_u16_14_counts); + commitments.lookup_div_u16_0_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_0_counts); + commitments.lookup_div_u16_1_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_1_counts); + commitments.lookup_div_u16_2_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_2_counts); + commitments.lookup_div_u16_3_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_3_counts); + commitments.lookup_div_u16_4_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_4_counts); + commitments.lookup_div_u16_5_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_5_counts); + commitments.lookup_div_u16_6_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_6_counts); + commitments.lookup_div_u16_7_counts = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_7_counts); auto [beta, gamm] = transcript->template get_challenges("beta", "gamma"); relation_parameters.beta = beta; @@ -444,6 +496,22 @@ bool AvmVerifier::verify_proof(const HonkProof& proof) commitments.lookup_u16_12 = transcript->template receive_from_prover(commitment_labels.lookup_u16_12); commitments.lookup_u16_13 = transcript->template receive_from_prover(commitment_labels.lookup_u16_13); commitments.lookup_u16_14 = transcript->template receive_from_prover(commitment_labels.lookup_u16_14); + commitments.lookup_div_u16_0 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_0); + commitments.lookup_div_u16_1 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_1); + commitments.lookup_div_u16_2 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_2); + commitments.lookup_div_u16_3 = + 
transcript->template receive_from_prover(commitment_labels.lookup_div_u16_3); + commitments.lookup_div_u16_4 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_4); + commitments.lookup_div_u16_5 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_5); + commitments.lookup_div_u16_6 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_6); + commitments.lookup_div_u16_7 = + transcript->template receive_from_prover(commitment_labels.lookup_div_u16_7); // Execute Sumcheck Verifier const size_t log_circuit_size = numeric::get_msb(circuit_size); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp index 30601dd613e..c0754b31d4c 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_arithmetic.test.cpp @@ -1,6 +1,7 @@ #include "avm_common.test.hpp" #include "barretenberg/numeric/uint128/uint128.hpp" #include "barretenberg/vm/avm_trace/avm_common.hpp" +#include "barretenberg/vm/tests/helpers.test.hpp" #include namespace tests_avm { @@ -167,6 +168,35 @@ size_t common_validate_eq(std::vector const& trace, return static_cast(alu_row - trace.begin()); } +size_t common_validate_div(std::vector const& trace, + FF const& a, + FF const& b, + FF const& c, + FF const& addr_a, + FF const& addr_b, + FF const& addr_c, + avm_trace::AvmMemoryTag const tag) +{ + // Find the first row enabling the division selector + auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.avm_main_sel_op_div == FF(1); }); + + // Find the corresponding Alu trace row + auto clk = row->avm_main_clk; + auto alu_row = std::ranges::find_if(trace.begin(), trace.end(), [clk](Row r) { return r.avm_alu_clk == clk; }); + + // Check that both rows were found + EXPECT_TRUE(row != trace.end()); + EXPECT_TRUE(alu_row != trace.end()); + + 
common_validate_arithmetic_op(*row, *alu_row, a, b, c, addr_a, addr_b, addr_c, tag); + EXPECT_EQ(row->avm_main_w_in_tag, FF(static_cast(tag))); + + // Check that division selector is set. + EXPECT_EQ(alu_row->avm_alu_op_div, FF(1)); + + return static_cast(alu_row - trace.begin()); +} + // Generate a trace with an EQ opcode operation. std::vector gen_trace_eq(uint128_t const& a, uint128_t const& b, @@ -282,6 +312,7 @@ class AvmArithmeticTestsU16 : public AvmArithmeticTests {}; class AvmArithmeticTestsU32 : public AvmArithmeticTests {}; class AvmArithmeticTestsU64 : public AvmArithmeticTests {}; class AvmArithmeticTestsU128 : public AvmArithmeticTests {}; +class AvmArithmeticTestsDiv : public AvmArithmeticTests, public testing::WithParamInterface {}; class AvmArithmeticNegativeTestsFF : public AvmArithmeticTests {}; class AvmArithmeticNegativeTestsU8 : public AvmArithmeticTests {}; @@ -290,6 +321,18 @@ class AvmArithmeticNegativeTestsU32 : public AvmArithmeticTests {}; class AvmArithmeticNegativeTestsU64 : public AvmArithmeticTests {}; class AvmArithmeticNegativeTestsU128 : public AvmArithmeticTests {}; +std::vector uint_mem_tags{ + { AvmMemoryTag::U8, AvmMemoryTag::U16, AvmMemoryTag::U32, AvmMemoryTag::U64, AvmMemoryTag::U128 } +}; +std::vector> positive_op_div_test_values = { { + { FF(10), FF(5), FF(2) }, + { FF(5323), FF(5323), FF(1) }, + { FF(13793), FF(10590617LLU), FF(0) }, + { FF(0x7bff744e3cdf79LLU), FF(0x14ccccccccb6LLU), FF(1526) }, + { uint256_t::from_uint128((uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }), + uint256_t::from_uint128(uint128_t{ 0xb900000000000001 }), + uint256_t::from_uint128(uint128_t{ 0x162c4ad3b97863a1 }) }, +} }; /****************************************************************************** * * POSITIVE TESTS @@ -334,7 +377,7 @@ TEST_F(AvmArithmeticTestsFF, addition) EXPECT_EQ(alu_row.avm_alu_cf, FF(0)); EXPECT_EQ(alu_row.avm_alu_u8_r0, FF(0)); - validate_trace(std::move(trace)); + 
validate_trace(std::move(trace), true); } // Test on basic subtraction over finite field type. @@ -549,6 +592,51 @@ TEST_F(AvmArithmeticTestsFF, nonEquality) validate_trace(std::move(trace)); } +TEST_P(AvmArithmeticTestsDiv, division) +{ + const auto [operands, mem_tag] = GetParam(); + const auto [a, b, output] = operands; + auto trace_builder = avm_trace::AvmTraceBuilder(); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); + trace_builder.op_div(0, 0, 1, 2, mem_tag); + trace_builder.return_op(0, 0, 0); + auto trace = trace_builder.finalize(); + + common_validate_div(trace, a, b, output, 0, 1, 2, mem_tag); + // auto alu_row = trace.at(alu_row_index); + + validate_trace(std::move(trace)); +} +INSTANTIATE_TEST_SUITE_P(AvmArithmeticTestsDiv, + AvmArithmeticTestsDiv, + testing::ValuesIn(gen_three_op_params(positive_op_div_test_values, uint_mem_tags))); + +// Test on division by zero over U128. +// We check that the operator error flag is raised. +TEST_F(AvmArithmeticTests, DivisionByZeroError) +{ + auto trace_builder = avm_trace::AvmTraceBuilder(); + trace_builder.op_set(0, 100, 0, AvmMemoryTag::U128); + trace_builder.op_set(0, 0, 1, AvmMemoryTag::U128); + trace_builder.op_div(0, 0, 1, 2, AvmMemoryTag::U128); + trace_builder.halt(); + auto trace = trace_builder.finalize(); + + // Find the first row enabling the div selector + auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.avm_main_sel_op_div == FF(1); }); + + // Check that the correct result is stored at the expected memory location. 
+ EXPECT_TRUE(row != trace.end()); + EXPECT_EQ(row->avm_main_ic, FF(0)); + EXPECT_EQ(row->avm_main_mem_idx_c, FF(2)); + EXPECT_EQ(row->avm_main_mem_op_c, FF(1)); + EXPECT_EQ(row->avm_main_rwc, FF(1)); + EXPECT_EQ(row->avm_main_op_err, FF(1)); + + validate_trace(std::move(trace)); +} + /****************************************************************************** * Positive Tests - U8 ******************************************************************************/ diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp index 256501f41ea..bbe1ef3e5b0 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp @@ -372,7 +372,6 @@ class AvmBitwiseTests : public ::testing::Test { * ******************************************************************************/ -using ThreeOpParamRow = std::tuple, AvmMemoryTag>; using TwoOpParamRow = std::tuple, AvmMemoryTag>; std::vector mem_tags{ { AvmMemoryTag::U8, AvmMemoryTag::U16, AvmMemoryTag::U32, AvmMemoryTag::U64, AvmMemoryTag::U128 } @@ -397,59 +396,51 @@ std::vector gen_two_op_params(std::vector> positive_op_and_test_values = { - { { 1, 1, 1 }, - { 5323, 321, 65 }, - { 13793, 10590617LLU, 4481 }, - { 0x7bff744e3cdf79LLU, 0x14ccccccccb6LLU, 0x14444c0ccc30LLU }, - { (uint128_t{ 0xb900000000000001 } << 64), - (uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }, - (uint128_t{ 0x1000000000000000 } << 64) } } +std::vector positive_op_and_test_values = { + { { FF(1), FF(1), FF(1) }, + { FF(5323), FF(321), FF(65) }, + { FF(13793), FF(10590617LLU), FF(4481) }, + { FF(0x7bff744e3cdf79LLU), FF(0x14ccccccccb6LLU), FF(0x14444c0ccc30LLU) }, + { uint256_t::from_uint128(uint128_t{ 0xb900000000000001 } << 64), + uint256_t::from_uint128((uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }), + 
uint256_t::from_uint128(uint128_t{ 0x1000000000000000 } << 64) } } }; -std::vector> positive_op_or_test_values = { - { { 1, 1, 1 }, - { 5323, 321, 0x15cb }, - { 13793, 10590617LLU, 0xa1bdf9 }, - { 0x7bff744e3cdf79LLU, 0x14ccccccccb6LLU, 0x7bfffccefcdfffLLU }, - { (uint128_t{ 0xb900000000000000 } << 64), - (uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }, - (uint128_t{ 0xb906021301080000 } << 64) + uint128_t{ 0x0001080876844827 } } } +std::vector> positive_op_or_test_values = { + { { FF(1), FF(1), FF(1) }, + { FF(5323), FF(321), FF(0x15cb) }, + { FF(13793), FF(10590617LLU), FF(0xa1bdf9) }, + { FF(0x7bff744e3cdf79LLU), FF(0x14ccccccccb6LLU), FF(0x7bfffccefcdfffLLU) }, + { uint256_t::from_uint128(uint128_t{ 0xb900000000000000 } << 64), + uint256_t::from_uint128(uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }, + uint256_t::from_uint128(uint128_t{ 0xb906021301080000 } << 64) + uint128_t{ 0x0001080876844827 } } } }; -std::vector> positive_op_xor_test_values = { - { { 1, 1, 0 }, - { 5323, 321, 0x158a }, - { 13793, 10590617LLU, 0xa1ac78 }, - { 0x7bff744e3cdf79LLU, 0x14ccccccccb6LLU, 0x7bebb882f013cf }, - { (uint128_t{ 0xb900000000000001 } << 64), - (uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }, - (uint128_t{ 0xa906021301080001 } << 64) + uint128_t{ 0x0001080876844827 } } } +std::vector> positive_op_xor_test_values = { + { { FF(1), FF(1), FF(0) }, + { FF(5323), FF(321), FF(0x158a) }, + { FF(13793), FF(10590617LLU), FF(0xa1ac78) }, + { FF(0x7bff744e3cdf79LLU), FF(0x14ccccccccb6LLU), uint256_t::from_uint128(0x7bebb882f013cf) }, + { uint256_t::from_uint128(uint128_t{ 0xb900000000000001 } << 64), + uint256_t::from_uint128((uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }), + uint256_t::from_uint128((uint128_t{ 0xa906021301080001 } << 64) + uint128_t{ 0x0001080876844827 }) } } }; -std::vector> positive_op_shr_test_values = { - { { 20, 
3, 2 }, - { 5323, 255, 0 }, - { 36148, 13, 4 }, - { 0x7bff744e3cdf79LLU, 64, 0 }, - { (uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }, 123, 2 } } +std::vector> positive_op_shr_test_values = { + { { FF(20), FF(3), FF(2) }, + { FF(5323), FF(255), FF(0) }, + { FF(36148), FF(13), FF(4) }, + { FF(0x7bff744e3cdf79LLU), FF(64), FF(0) }, + { uint256_t::from_uint128((uint128_t{ 0x1006021301080000 } << 64) + uint128_t{ 0x000000000000001080876844827 }), + FF(123), + FF(2) } } }; -std::vector> positive_op_shl_test_values = { - { { 20, 8, 0 }, - { 5323, 10, 11264 }, - { 13793, 255, 0 }, - { 239, 50, 269090077735387136 }, - { 9, 127, (uint128_t{ 0x4800000000000000LLU } << 68) } } +std::vector> positive_op_shl_test_values = { + { { FF(20), FF(8), FF(0) }, + { FF(5323), FF(10), FF(11264) }, + { FF(13793), FF(255), FF(0) }, + { FF(239), FF(50), uint256_t::from_uint128(269090077735387136) }, + { FF(9), FF(127), uint256_t::from_uint128(uint128_t{ 0x4800000000000000LLU } << 68) } } }; -std::vector gen_three_op_params(std::vector> operands, - std::vector mem_tags) -{ - std::vector params; - for (size_t i = 0; i < 5; i++) { - params.emplace_back(operands[i], mem_tags[i]); - } - return params; -} - class AvmBitwiseTestsNot : public AvmBitwiseTests, public testing::WithParamInterface {}; class AvmBitwiseTestsAnd : public AvmBitwiseTests, public testing::WithParamInterface {}; class AvmBitwiseTestsOr : public AvmBitwiseTests, public testing::WithParamInterface {}; @@ -490,16 +481,13 @@ TEST_P(AvmBitwiseTestsAnd, AllAndTest) { const auto [operands, mem_tag] = GetParam(); const auto [a, b, output] = operands; - trace_builder.op_set(0, a, 0, mem_tag); - trace_builder.op_set(0, b, 1, mem_tag); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); trace_builder.op_and(0, 0, 1, 2, mem_tag); trace_builder.return_op(0, 2, 1); auto trace = trace_builder.finalize(); - FF ff_a = FF(uint256_t::from_uint128(a)); - 
FF ff_b = FF(uint256_t::from_uint128(b)); - FF ff_output = FF(uint256_t::from_uint128(output)); - common_validate_bit_op(trace, 0, ff_a, ff_b, ff_output, FF(0), FF(1), FF(2), mem_tag); + common_validate_bit_op(trace, 0, a, b, output, FF(0), FF(1), FF(2), mem_tag); validate_trace(std::move(trace), true); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseTests, @@ -510,17 +498,13 @@ TEST_P(AvmBitwiseTestsOr, AllOrTest) { const auto [operands, mem_tag] = GetParam(); const auto [a, b, output] = operands; - trace_builder.op_set(0, a, 0, mem_tag); - trace_builder.op_set(0, b, 1, mem_tag); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); trace_builder.op_or(0, 0, 1, 2, mem_tag); trace_builder.return_op(0, 2, 1); auto trace = trace_builder.finalize(); - FF ff_a = FF(uint256_t::from_uint128(a)); - FF ff_b = FF(uint256_t::from_uint128(b)); - FF ff_output = FF(uint256_t::from_uint128(output)); - - common_validate_bit_op(trace, 1, ff_a, ff_b, ff_output, FF(0), FF(1), FF(2), mem_tag); + common_validate_bit_op(trace, 1, a, b, output, FF(0), FF(1), FF(2), mem_tag); validate_trace(std::move(trace)); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseTests, @@ -531,17 +515,13 @@ TEST_P(AvmBitwiseTestsXor, AllXorTest) { const auto [operands, mem_tag] = GetParam(); const auto [a, b, output] = operands; - trace_builder.op_set(0, a, 0, mem_tag); - trace_builder.op_set(0, b, 1, mem_tag); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); trace_builder.op_xor(0, 0, 1, 2, mem_tag); trace_builder.return_op(0, 2, 1); auto trace = trace_builder.finalize(); - FF ff_a = FF(uint256_t::from_uint128(a)); - FF ff_b = FF(uint256_t::from_uint128(b)); - FF ff_output = FF(uint256_t::from_uint128(output)); - - common_validate_bit_op(trace, 2, ff_a, ff_b, ff_output, FF(0), FF(1), FF(2), mem_tag); + common_validate_bit_op(trace, 2, a, b, output, FF(0), FF(1), FF(2), mem_tag); validate_trace(std::move(trace)); } @@ 
-553,20 +533,12 @@ TEST_P(AvmBitwiseTestsShr, AllShrTest) { const auto [operands, mem_tag] = GetParam(); const auto [a, b, output] = operands; - trace_builder.op_set(0, a, 0, mem_tag); - trace_builder.op_set(0, b, 1, mem_tag); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); trace_builder.op_shr(0, 0, 1, 2, mem_tag); trace_builder.return_op(0, 2, 1); auto trace = trace_builder.finalize(); - common_validate_shift_op(trace, - uint256_t::from_uint128(a), - uint256_t::from_uint128(b), - uint256_t::from_uint128(output), - FF(0), - FF(1), - FF(2), - mem_tag, - true); + common_validate_shift_op(trace, a, b, output, FF(0), FF(1), FF(2), mem_tag, true); validate_trace(std::move(trace)); } @@ -578,21 +550,13 @@ TEST_P(AvmBitwiseTestsShl, AllShlTest) { const auto [operands, mem_tag] = GetParam(); const auto [a, b, output] = operands; - trace_builder.op_set(0, a, 0, mem_tag); - trace_builder.op_set(0, b, 1, mem_tag); + trace_builder.op_set(0, uint128_t(a), 0, mem_tag); + trace_builder.op_set(0, uint128_t(b), 1, mem_tag); trace_builder.op_shl(0, 0, 1, 2, mem_tag); trace_builder.return_op(0, 2, 1); auto trace = trace_builder.finalize(); - common_validate_shift_op(trace, - uint256_t::from_uint128(a), - uint256_t::from_uint128(b), - uint256_t::from_uint128(output), - FF(0), - FF(1), - FF(2), - mem_tag, - false); + common_validate_shift_op(trace, a, b, output, FF(0), FF(1), FF(2), mem_tag, false); validate_trace(std::move(trace)); } @@ -660,9 +624,8 @@ TEST_P(AvmBitwiseNegativeTestsAnd, AllNegativeTests) trace_builder.op_and(0, 0, 1, 2, mem_tag); trace_builder.halt(); auto trace = trace_builder.finalize(); - FF ff_output = FF(uint256_t::from_uint128(output)); std::function&& select_row = [](Row r) { return r.avm_main_sel_op_and == FF(1); }; - trace = gen_mutated_trace_bit(trace, std::move(select_row), ff_output, failure_mode); + trace = gen_mutated_trace_bit(trace, std::move(select_row), output, failure_mode); 
EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(trace)), failure_string); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseNegativeTests, @@ -681,9 +644,8 @@ TEST_P(AvmBitwiseNegativeTestsOr, AllNegativeTests) trace_builder.op_or(0, 0, 1, 2, mem_tag); trace_builder.halt(); auto trace = trace_builder.finalize(); - FF ff_output = FF(uint256_t::from_uint128(output)); std::function&& select_row = [](Row r) { return r.avm_main_sel_op_or == FF(1); }; - trace = gen_mutated_trace_bit(trace, std::move(select_row), ff_output, failure_mode); + trace = gen_mutated_trace_bit(trace, std::move(select_row), output, failure_mode); EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(trace)), failure_string); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseNegativeTests, @@ -701,9 +663,8 @@ TEST_P(AvmBitwiseNegativeTestsXor, AllNegativeTests) trace_builder.op_xor(0, 0, 1, 2, mem_tag); trace_builder.halt(); auto trace = trace_builder.finalize(); - FF ff_output = FF(uint256_t::from_uint128(output)); std::function&& select_row = [](Row r) { return r.avm_main_sel_op_xor == FF(1); }; - trace = gen_mutated_trace_bit(trace, std::move(select_row), ff_output, failure_mode); + trace = gen_mutated_trace_bit(trace, std::move(select_row), output, failure_mode); EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(trace)), failure_string) } INSTANTIATE_TEST_SUITE_P(AvmBitwiseNegativeTests, @@ -723,8 +684,7 @@ TEST_P(AvmBitwiseNegativeTestsShr, AllNegativeTests) auto trace = trace_builder.finalize(); std::function&& select_row = [](Row r) { return r.avm_main_sel_op_shr == FF(1); }; - auto [mutated_trace, str] = gen_mutated_trace_shift( - std::move(trace), std::move(select_row), FF(uint256_t::from_uint128(output)), failure, true); + auto [mutated_trace, str] = gen_mutated_trace_shift(std::move(trace), std::move(select_row), output, failure, true); EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(mutated_trace)), str); } 
INSTANTIATE_TEST_SUITE_P(AvmBitwiseNegativeTests, @@ -744,8 +704,8 @@ TEST_P(AvmBitwiseNegativeTestsShl, AllNegativeTests) auto trace = trace_builder.finalize(); std::function&& select_row = [](Row r) { return r.avm_main_sel_op_shl == FF(1); }; - auto [mutated_trace, str] = gen_mutated_trace_shift( - std::move(trace), std::move(select_row), FF(uint256_t::from_uint128(output)), failure, false); + auto [mutated_trace, str] = + gen_mutated_trace_shift(std::move(trace), std::move(select_row), output, failure, false); EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(mutated_trace)), str); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseNegativeTests, diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp index 16ea72cbcaf..26eaf202fe7 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp @@ -57,8 +57,6 @@ void common_validate_cmp(Row const& row, EXPECT_EQ(alu_row.avm_alu_ic, c); } } // namespace -using ThreeOpParam = std::array; -using ThreeOpParamRow = std::tuple; std::vector positive_op_lt_test_values = { { { FF(1), FF(1), FF(0) }, { FF(5323), FF(321), FF(0) }, { FF(13793), FF(10590617LLU), FF(1) }, @@ -77,15 +75,6 @@ std::vector positive_op_lte_test_values = { FF(1) } } }; -std::vector gen_three_op_params(std::vector operands, - std::vector mem_tag_arr) -{ - std::vector params; - for (size_t i = 0; i < 5; i++) { - params.emplace_back(operands[i], mem_tag_arr[i]); - } - return params; -} std::vector mem_tag_arr{ { AvmMemoryTag::U8, AvmMemoryTag::U16, AvmMemoryTag::U32, AvmMemoryTag::U64, AvmMemoryTag::U128 } }; diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp index 290b15585a0..b0dc065027d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp +++ 
b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp @@ -1,7 +1,18 @@ +#include "barretenberg/vm/tests/helpers.test.hpp" #include "avm_common.test.hpp" #include "barretenberg/vm/generated/avm_flavor.hpp" namespace tests_avm { + +std::vector gen_three_op_params(std::vector operands, + std::vector mem_tags) +{ + std::vector params; + for (size_t i = 0; i < 5; i++) { + params.emplace_back(operands[i], mem_tags[i]); + } + return params; +} /** * @brief Helper routine checking the circuit constraints without proving * @@ -25,6 +36,7 @@ void validate_trace(std::vector&& trace, bool with_proof) EXPECT_TRUE(circuit_builder.check_circuit()); if (with_proof) { + info("With proof"); auto composer = AvmComposer(); auto prover = composer.create_prover(circuit_builder); auto proof = prover.construct_proof(); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp index fd1f862404d..5df14f93cd7 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp @@ -15,6 +15,8 @@ namespace tests_avm { using Flavor = bb::AvmFlavor; using FF = Flavor::FF; using Row = bb::AvmFullRow; +using ThreeOpParam = std::array; +using ThreeOpParamRow = std::tuple; // To toggle all relevant unit tests with proving, set the env variable "AVM_TESTS_ENABLE_PROVING". 
static const bool ENABLE_PROVING = std::getenv("AVM_TESTS_ENABLE_PROVING") != nullptr; @@ -30,5 +32,7 @@ void mutate_ic_in_trace(std::vector& trace, bool alu = false); void clear_range_check_counters(std::vector& trace, uint256_t previous_value); void update_slice_registers(Row& row, uint256_t a); +std::vector gen_three_op_params(std::vector> operands, + std::vector mem_tags); } // namespace tests_avm From cc59981a8f69375c4ca92999a12a955e0d385ada Mon Sep 17 00:00:00 2001 From: Facundo Date: Thu, 9 May 2024 13:05:23 +0100 Subject: [PATCH 19/43] fix(avm-simulator): always set revertReason when reverting (#6297) Part of the current setup seems to assume that a simulation reverts if and only if there's a revertReason. This is why some e2e tests were failing to see the revert (and throw an exception) when the revert message was empty. Example ```ts /** * Makes a processed tx out of source tx. * @param tx - Source tx. * @param kernelOutput - Output of the kernel circuit simulation for this tx. * @param proof - Proof of the kernel circuit for this tx. */ export function makeProcessedTx( tx: Tx, kernelOutput: KernelCircuitPublicInputs, proof: Proof, publicKernelRequests: PublicKernelRequest[], revertReason?: SimulationError, gasUsed: ProcessedTx['gasUsed'] = {}, ): ProcessedTx { return { hash: tx.getTxHash(), data: kernelOutput, proof, encryptedLogs: revertReason ? EncryptedTxL2Logs.empty() : tx.encryptedLogs, unencryptedLogs: revertReason ? UnencryptedTxL2Logs.empty() : tx.unencryptedLogs, isEmpty: false, revertReason, publicKernelRequests, gasUsed, }; } ``` cc @just-mitch because I see his name in some parts of the code. 
--- .../end-to-end/src/e2e_avm_simulator.test.ts | 7 +++--- .../simulator/src/avm/avm_machine_state.ts | 22 ++++++++++++------- .../simulator/src/avm/avm_simulator.test.ts | 2 +- .../src/public/abstract_phase_manager.ts | 10 ++++++++- yarn-project/simulator/src/public/executor.ts | 4 ---- 5 files changed, 28 insertions(+), 17 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts index 3acebe956a0..56cca9370f4 100644 --- a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts +++ b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts @@ -121,7 +121,7 @@ describe('e2e_avm_simulator', () => { }); }); - describe('ACVM interoperability', () => { + describe.skip('ACVM interoperability', () => { let avmContract: AvmAcvmInteropTestContract; beforeEach(async () => { @@ -136,7 +136,7 @@ describe('e2e_avm_simulator', () => { expect(await avmContract.methods.call_avm_from_acvm().simulate()).toEqual(123456n); }); - it.skip('Can call ACVM function from AVM', async () => { + it('Can call ACVM function from AVM', async () => { expect(await avmContract.methods.call_acvm_from_avm().simulate()).toEqual(123456n); }); @@ -146,7 +146,7 @@ describe('e2e_avm_simulator', () => { await avmContract.methods.assert_unsiloed_nullifier_acvm(nullifier).send().wait(); }); - it.skip('AVM nested call to ACVM sees settled nullifiers', async () => { + it('AVM nested call to ACVM sees settled nullifiers', async () => { const nullifier = new Fr(123456); await avmContract.methods.new_nullifier(nullifier).send().wait(); await avmContract.methods @@ -155,6 +155,7 @@ describe('e2e_avm_simulator', () => { .wait(); }); + // TODO: Enable (or delete) authwit tests once the AVM is fully functional. 
describe.skip('Authwit', () => { it('Works if authwit provided', async () => { const recipient = AztecAddress.random(); diff --git a/yarn-project/simulator/src/avm/avm_machine_state.ts b/yarn-project/simulator/src/avm/avm_machine_state.ts index ca4b5e72056..0af30ddefb3 100644 --- a/yarn-project/simulator/src/avm/avm_machine_state.ts +++ b/yarn-project/simulator/src/avm/avm_machine_state.ts @@ -136,14 +136,20 @@ export class AvmMachineState { throw new Error('Execution results are not ready! Execution is ongoing.'); } let revertReason = undefined; - if (this.reverted && this.output.length > 0) { - try { - // We remove the first element which is the 'error selector'. - const revertOutput = this.output.slice(1); - // Try to interpret the output as a text string. - revertReason = new Error('Assertion failed: ' + String.fromCharCode(...revertOutput.map(fr => fr.toNumber()))); - } catch (e) { - revertReason = new Error(''); + if (this.reverted) { + if (this.output.length === 0) { + revertReason = new Error('Assertion failed.'); + } else { + try { + // We remove the first element which is the 'error selector'. + const revertOutput = this.output.slice(1); + // Try to interpret the output as a text string. 
+ revertReason = new Error( + 'Assertion failed: ' + String.fromCharCode(...revertOutput.map(fr => fr.toNumber())), + ); + } catch (e) { + revertReason = new Error('Assertion failed: '); + } } } return new AvmContractCallResults(this.reverted, this.output, revertReason); diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index 346e2861a8f..a18b4c05e43 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -115,7 +115,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const bytecode = getAvmTestContractBytecode('u128_from_integer_overflow'); const results = await new AvmSimulator(initContext()).executeBytecode(bytecode); expect(results.reverted).toBe(true); - expect(results.revertReason?.message).toEqual(undefined); + expect(results.revertReason?.message).toEqual('Assertion failed.'); // Note: compiler intrinsic messages (like below) are not known to the AVM //expect(results.revertReason?.message).toEqual("Assertion failed: call to assert_max_bit_size 'self.__assert_max_bit_size(bit_size)'"); }); diff --git a/yarn-project/simulator/src/public/abstract_phase_manager.ts b/yarn-project/simulator/src/public/abstract_phase_manager.ts index 06e89e93ef9..28d5b40ba9c 100644 --- a/yarn-project/simulator/src/public/abstract_phase_manager.ts +++ b/yarn-project/simulator/src/public/abstract_phase_manager.ts @@ -283,10 +283,18 @@ export abstract class AbstractPhaseManager { ) : current; + // Sanity check for a current upstream assumption. + // Consumers of the result seem to expect "reverted <=> revertReason !== undefined". 
+ const functionSelector = result.execution.functionData.selector.toString(); + if (result.reverted && !result.revertReason) { + throw new Error( + `Simulation of ${result.execution.contractAddress.toString()}:${functionSelector} reverted with no reason.`, + ); + } + // Accumulate gas used in this execution gasUsed = gasUsed.add(Gas.from(result.startGasLeft).sub(Gas.from(result.endGasLeft))); - const functionSelector = result.execution.functionData.selector.toString(); if (result.reverted && !PhaseIsRevertible[this.phase]) { this.log.debug( `Simulation error on ${result.execution.contractAddress.toString()}:${functionSelector} with reason: ${ diff --git a/yarn-project/simulator/src/public/executor.ts b/yarn-project/simulator/src/public/executor.ts index 89951608404..2576bdd29da 100644 --- a/yarn-project/simulator/src/public/executor.ts +++ b/yarn-project/simulator/src/public/executor.ts @@ -168,10 +168,6 @@ async function executePublicFunctionAcvm( })(); if (reverted) { - if (!revertReason) { - throw new Error('Reverted but no revert reason'); - } - return { execution, returnValues: [], From f0a1c89a064c1e170db4751be46874f089dd1385 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 9 May 2024 14:29:45 +0100 Subject: [PATCH 20/43] chore: remove `bb info` command (#6276) This command is no longer used and so we can remove it. 
--- barretenberg/cpp/src/barretenberg/bb/main.cpp | 36 ------------------- 1 file changed, 36 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 820cc522ad7..674e4e67e92 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -508,37 +508,6 @@ void vk_as_fields(const std::string& vk_path, const std::string& output_path) } } -/** - * @brief Returns ACVM related backend information - * - * Communication: - * - stdout: The json string is written to stdout - * - Filesystem: The json string is written to the path specified - * - * @param output_path Path to write the information to - */ -void acvm_info(const std::string& output_path) -{ - - const char* jsonData = R"({ - "language": { - "name" : "PLONK-CSAT", - "width" : 4 - } - })"; - - size_t length = strlen(jsonData); - std::vector data(jsonData, jsonData + length); - - if (output_path == "-") { - writeRawBytesToStdout(data); - vinfo("info written to stdout"); - } else { - write_file(output_path, data); - vinfo("info written to: ", output_path); - } -} - /** * @brief Writes an avm proof and corresponding (incomplete) verification key to files. * @@ -797,11 +766,6 @@ int main(int argc, char* argv[]) writeStringToStdout(BB_VERSION); return 0; } - if (command == "info") { - std::string output_path = get_option(args, "-o", "info.json"); - acvm_info(output_path); - return 0; - } if (command == "prove_and_verify") { return proveAndVerify(bytecode_path, witness_path) ? 0 : 1; } From 95b499bead8b05afcb4cac8c7a12832ce7c7bfcd Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 9 May 2024 14:32:01 +0100 Subject: [PATCH 21/43] chore: skip formatting informattable comments (#6288) I've flipped the config to just skip formatting any comments which would be lost if formatted (really not sure why that was turned on as the default behaviour). 
--- noir-projects/Earthfile | 5 ++- .../private-kernel-init-simulated/src/main.nr | 2 +- .../src/main.nr | 2 +- .../src/private_kernel_tail.nr | 36 ++++++++++--------- .../src/private_kernel_tail_to_public.nr | 5 +-- .../src/main.nr | 2 +- .../src/main.nr | 2 +- .../public-kernel-app-logic/src/main.nr | 2 +- .../public-kernel-setup-simulated/src/main.nr | 2 +- .../crates/public-kernel-setup/src/main.nr | 2 +- .../public-kernel-tail-simulated/src/main.nr | 2 +- .../crates/public-kernel-tail/src/main.nr | 2 +- .../src/main.nr | 2 +- .../base_or_merge_rollup_public_inputs.nr | 6 ++-- .../src/abis/constant_rollup_data.nr | 2 +- ...ic_kernel_circuit_public_inputs_builder.nr | 3 +- .../rollup_validation_requests.nr | 5 ++- .../validation_requests.nr | 5 +-- .../noir-repo/tooling/nargo_fmt/src/config.rs | 2 +- 19 files changed, 47 insertions(+), 42 deletions(-) diff --git a/noir-projects/Earthfile b/noir-projects/Earthfile index 907f009151d..108f36f3715 100644 --- a/noir-projects/Earthfile +++ b/noir-projects/Earthfile @@ -33,9 +33,8 @@ test: format: FROM +build - # TODO: https://github.com/noir-lang/noir/issues/4980 - # WORKDIR /usr/src/noir-projects/noir-protocol-circuits - # RUN nargo fmt --check + WORKDIR /usr/src/noir-projects/noir-protocol-circuits + RUN nargo fmt --check WORKDIR /usr/src/noir-projects/noir-contracts RUN nargo fmt --check diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-init-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-init-simulated/src/main.nr index 2d0470155e5..a59e08872e9 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-init-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-init-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::private_kernel_lib::PrivateKernelInitCircuitPrivateInputs; use dep::types::PrivateKernelCircuitPublicInputs; -unconstrained fn main(input: PrivateKernelInitCircuitPrivateInputs) -> pub 
PrivateKernelCircuitPublicInputs { +unconstrained fn main(input: PrivateKernelInitCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { input.native_private_kernel_circuit_initial() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-inner-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-inner-simulated/src/main.nr index f3494a35038..0f58903b973 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-inner-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-inner-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::private_kernel_lib::PrivateKernelInnerCircuitPrivateInputs; use dep::types::PrivateKernelCircuitPublicInputs; -unconstrained fn main(input: PrivateKernelInnerCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { +unconstrained fn main(input: PrivateKernelInnerCircuitPrivateInputs) -> pub PrivateKernelCircuitPublicInputs { input.native_private_kernel_circuit_inner() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr index 51d6efd7a07..598dfe018f7 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr @@ -1,14 +1,15 @@ use crate::kernel_circuit_public_inputs_composer::KernelCircuitPublicInputsComposer; use dep::reset_kernel_lib::{NoteHashReadRequestHints, NullifierReadRequestHints, PrivateValidationRequestProcessor}; use dep::types::{ - abis::{ - private_kernel_data::{PrivateKernelData, verify_previous_kernel_proof}, kernel_circuit_public_inputs::KernelCircuitPublicInputs, - note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::SideEffect - }, - constants::{ - MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, 
MAX_NOTE_HASH_READ_REQUESTS_PER_TX, - MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX - }, + abis::{ + private_kernel_data::{PrivateKernelData, verify_previous_kernel_proof}, + kernel_circuit_public_inputs::KernelCircuitPublicInputs, note_hash::ScopedNoteHash, + nullifier::ScopedNullifier, side_effect::SideEffect +}, + constants::{ + MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, + MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX +}, grumpkin_private_key::GrumpkinPrivateKey, utils::arrays::array_length }; @@ -94,16 +95,19 @@ mod tests { use dep::types::constants::{ MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, - MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, - DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE + MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX, DA_BYTES_PER_FIELD, DA_GAS_PER_BYTE }; use dep::types::{ abis::{ kernel_circuit_public_inputs::KernelCircuitPublicInputs, max_block_number::MaxBlockNumber, - note_hash::{NoteHash, ScopedNoteHash}, nullifier::{Nullifier, ScopedNullifier}, side_effect::SideEffect, gas::Gas + note_hash::{NoteHash, ScopedNoteHash}, nullifier::{Nullifier, ScopedNullifier}, + side_effect::SideEffect, gas::Gas }, grumpkin_private_key::GrumpkinPrivateKey, - hash::{compute_note_hash_nonce, compute_unique_siloed_note_hash, sha256_to_field, silo_note_hash, silo_nullifier}, + hash::{ + compute_note_hash_nonce, compute_unique_siloed_note_hash, sha256_to_field, silo_note_hash, + silo_nullifier + }, tests::{fixture_builder::FixtureBuilder, sort::sort_get_sorted_hints}, utils::{arrays::{array_eq, array_length}}, traits::{Empty, is_empty, is_empty_array} }; @@ -135,10 +139,7 @@ mod tests { // A helper function that uses the first nullifer in the previous kernel to 
compute the unique siloed // note_hashes for the given note_hashes. - pub fn compute_output_note_hashes( - self, - note_hashes: [ScopedNoteHash; N] - ) -> [Field; N] { + pub fn compute_output_note_hashes(self, note_hashes: [ScopedNoteHash; N]) -> [Field; N] { let first_nullifier = self.previous_kernel.new_nullifiers.get_unchecked(0); let mut unique_siloed_note_hashes = [0; N]; for i in 0..N { @@ -308,6 +309,7 @@ mod tests { public_inputs.end.unencrypted_log_preimages_length, unencrypted_log_preimages_length + prev_unencrypted_log_preimages_length ); + // noir-fmt:ignore let hash_bytes: [u8; MAX_ENCRYPTED_LOGS_PER_TX * 32] = prev_encrypted_logs_hash .to_be_bytes(32) .append(&[0; MAX_ENCRYPTED_LOGS_PER_TX * 32 - 32]) @@ -315,6 +317,7 @@ mod tests { let expected_encrypted_logs_hash = sha256_to_field(hash_bytes); assert_eq(public_inputs.end.encrypted_logs_hash, expected_encrypted_logs_hash); + // noir-fmt:ignore let hash_bytes: [u8; MAX_UNENCRYPTED_LOGS_PER_TX * 32] = prev_unencrypted_logs_hash .to_be_bytes(32) .append(unencrypted_logs_hash.to_be_bytes(32)) @@ -584,7 +587,6 @@ mod tests { builder.previous_kernel.tx_context.gas_settings.teardown_gas_limits = Gas::new(300, 300); let public_inputs = builder.execute(); - let expected_gas_consumed = Gas::new(300, 300) // teardown gas + Gas::tx_overhead() // tx overhead + Gas::new(DA_GAS_PER_BYTE * DA_BYTES_PER_FIELD * 1, 0); // tx nullifier diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr index 7b7e17eba88..9dd2319a041 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr @@ -2,8 +2,9 @@ use crate::kernel_circuit_public_inputs_composer::KernelCircuitPublicInputsCompo use 
dep::reset_kernel_lib::{NoteHashReadRequestHints, NullifierReadRequestHints, PrivateValidationRequestProcessor}; use dep::types::{ abis::{ - private_kernel_data::{PrivateKernelData, verify_previous_kernel_proof}, kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, - note_hash::ScopedNoteHash, nullifier::ScopedNullifier, side_effect::SideEffect + private_kernel_data::{PrivateKernelData, verify_previous_kernel_proof}, + kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, note_hash::ScopedNoteHash, + nullifier::ScopedNullifier, side_effect::SideEffect }, constants::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public-simulated/src/main.nr index 6c20fcfdeb0..3683ecbd8cc 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-tail-to-public-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::private_kernel_lib::PrivateKernelTailToPublicCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -unconstrained fn main(input: PrivateKernelTailToPublicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +unconstrained fn main(input: PrivateKernelTailToPublicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.execute() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic-simulated/src/main.nr index eaf2169e3a1..8bcc9f1643f 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic-simulated/src/main.nr @@ -1,6 +1,6 @@ use 
dep::public_kernel_lib::PublicKernelAppLogicCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -unconstrained fn main(input: PublicKernelAppLogicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +unconstrained fn main(input: PublicKernelAppLogicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.public_kernel_app_logic() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic/src/main.nr index fc4185f03b3..1126e42d576 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-app-logic/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelAppLogicCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -fn main(input: PublicKernelAppLogicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +fn main(input: PublicKernelAppLogicCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.public_kernel_app_logic() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-setup-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-setup-simulated/src/main.nr index 35f53631a04..be09565d0ac 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-setup-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-setup-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelSetupCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -unconstrained fn main(input: PublicKernelSetupCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +unconstrained fn main(input: PublicKernelSetupCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.public_kernel_setup() } diff --git 
a/noir-projects/noir-protocol-circuits/crates/public-kernel-setup/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-setup/src/main.nr index da84636684b..f9b31176fa0 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-setup/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-setup/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelSetupCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -fn main(input: PublicKernelSetupCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +fn main(input: PublicKernelSetupCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.public_kernel_setup() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-tail-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-tail-simulated/src/main.nr index bd928276f4c..0a9f18ffd54 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-tail-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-tail-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelTailCircuitPrivateInputs; use dep::types::KernelCircuitPublicInputs; -unconstrained fn main(input: PublicKernelTailCircuitPrivateInputs) -> pub KernelCircuitPublicInputs { +unconstrained fn main(input: PublicKernelTailCircuitPrivateInputs) -> pub KernelCircuitPublicInputs { input.public_kernel_tail() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-tail/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-tail/src/main.nr index 8b6ba443c87..3227791a09a 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-tail/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-tail/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelTailCircuitPrivateInputs; use dep::types::KernelCircuitPublicInputs; -fn main(input: 
PublicKernelTailCircuitPrivateInputs) -> pub KernelCircuitPublicInputs { +fn main(input: PublicKernelTailCircuitPrivateInputs) -> pub KernelCircuitPublicInputs { input.public_kernel_tail() } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-teardown-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-teardown-simulated/src/main.nr index 55e9d441348..78cb6040500 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-teardown-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-teardown-simulated/src/main.nr @@ -1,6 +1,6 @@ use dep::public_kernel_lib::PublicKernelTeardownCircuitPrivateInputs; use dep::types::PublicKernelCircuitPublicInputs; -unconstrained fn main(input: PublicKernelTeardownCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { +unconstrained fn main(input: PublicKernelTeardownCircuitPrivateInputs) -> pub PublicKernelCircuitPublicInputs { input.public_kernel_teardown() } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr index 5e1b9b33dc6..90406f9e18e 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr @@ -1,8 +1,6 @@ use dep::types::{ abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot, - partial_state_reference::PartialStateReference, - mocked::AggregationObject, - traits::Empty + partial_state_reference::PartialStateReference, mocked::AggregationObject, traits::Empty }; use crate::abis::constant_rollup_data::ConstantRollupData; @@ -44,4 +42,4 @@ impl Empty for BaseOrMergeRollupPublicInputs { out_hash : 0, } } -} \ No newline at end of file +} diff --git 
a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/constant_rollup_data.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/constant_rollup_data.nr index b688397a7d9..824860f74b1 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/constant_rollup_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/abis/constant_rollup_data.nr @@ -40,4 +40,4 @@ impl Empty for ConstantRollupData { global_variables: GlobalVariables::empty(), } } -} \ No newline at end of file +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr index 41f92bd5f22..70169e44548 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr @@ -4,7 +4,8 @@ use crate::{ combined_constant_data::CombinedConstantData, kernel_circuit_public_inputs::{public_kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs}, validation_requests::ValidationRequestsBuilder, call_request::CallRequest -}, traits::Empty +}, + traits::Empty }; struct PublicKernelCircuitPublicInputsBuilder { diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr index 1840668e1b3..d1761a1a859 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/rollup_validation_requests.nr @@ -1,4 
+1,7 @@ -use crate::{abis::max_block_number::MaxBlockNumber, traits::{Empty, Serialize}, constants::ROLLUP_VALIDATION_REQUESTS_LENGTH}; +use crate::{ + abis::max_block_number::MaxBlockNumber, traits::{Empty, Serialize}, + constants::ROLLUP_VALIDATION_REQUESTS_LENGTH +}; // These are validation requests that cannot be fulfilled in the current context (private or public), and must be // instead forwarded to the rollup for it to take care of them. diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr index 8d56adb7ea6..d8e34e36311 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/validation_requests/validation_requests.nr @@ -9,7 +9,8 @@ use crate::{ MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_READ_REQUESTS_PER_TX, MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, VALIDATION_REQUESTS_LENGTH -}, traits::Serialize +}, + traits::Serialize }; // TODO - Use specific structs for private and public: PrivateValidationRequests vs PublicValidationRequests @@ -52,4 +53,4 @@ impl Serialize for ValidationRequests { fields.storage } -} \ No newline at end of file +} diff --git a/noir/noir-repo/tooling/nargo_fmt/src/config.rs b/noir/noir-repo/tooling/nargo_fmt/src/config.rs index 2bb5d97c0af..5e38dc7d8b0 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/config.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/config.rs @@ -45,7 +45,7 @@ config! 
{ max_width: usize, 100, "Maximum width of each line"; tab_spaces: usize, 4, "Number of spaces per tab"; remove_nested_parens: bool, true, "Remove nested parens"; - error_on_lost_comment: bool, true, "Error if unable to get comments"; + error_on_lost_comment: bool, false, "Error if unable to get comments"; short_array_element_width_threshold: usize, 10, "Width threshold for an array element to be considered short"; array_width: usize, 100, "Maximum width of an array literal before falling back to vertical formatting"; fn_call_width: usize, 60, "Maximum width of the args of a function call before falling back to vertical formatting"; From ac27376b9a0cdf0624a02d36c64ec25886b44b4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Thu, 9 May 2024 15:33:27 +0200 Subject: [PATCH 22/43] feat: move to_radix to a blackbox (#6294) This PR moves to_radix to a Brillig-specific blackbox. The AVM won't easily support field integer division, and the only usecase for field integer division in regular noir code is to radix / to bits. We extract to radix to a bb func so it can be directly integrated as a gadget in the avm. 
--- .../dsl/acir_format/serde/acir.hpp | 74 +++++++++++++++++-- .../noir-repo/acvm-repo/acir/codegen/acir.cpp | 56 +++++++++++++- .../acvm-repo/brillig/src/black_box.rs | 5 ++ .../acvm-repo/brillig_vm/src/black_box.rs | 21 ++++++ .../src/brillig/brillig_gen/brillig_block.rs | 38 +++++++--- .../brillig/brillig_ir/codegen_intrinsic.rs | 62 +++++++--------- .../src/brillig/brillig_ir/debug_show.rs | 9 +++ noir/noir-repo/noir_stdlib/src/field/bn254.nr | 57 ++++++++------ 8 files changed, 249 insertions(+), 73 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp index 9fb0e2b3a35..683e4c62407 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp @@ -686,7 +686,6 @@ struct BlackBoxOp { Program::HeapVector inputs; Program::HeapArray iv; Program::HeapArray key; - Program::MemoryAddress length; Program::HeapVector outputs; friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); @@ -896,6 +895,16 @@ struct BlackBoxOp { static Sha256Compression bincodeDeserialize(std::vector); }; + struct ToRadix { + Program::MemoryAddress input; + uint32_t radix; + Program::HeapArray output; + + friend bool operator==(const ToRadix&, const ToRadix&); + std::vector bincodeSerialize() const; + static ToRadix bincodeDeserialize(std::vector); + }; + std::variant + Sha256Compression, + ToRadix> value; friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); @@ -3939,9 +3949,6 @@ inline bool operator==(const BlackBoxOp::AES128Encrypt& lhs, const BlackBoxOp::A if (!(lhs.key == rhs.key)) { return false; } - if (!(lhs.length == rhs.length)) { - return false; - } if (!(lhs.outputs == rhs.outputs)) { return false; } @@ -5141,6 +5148,63 @@ Program::BlackBoxOp::Sha256Compression serde::Deserializable BlackBoxOp::ToRadix::bincodeSerialize() const +{ + auto serializer = 
serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); +} + +inline BlackBoxOp::ToRadix BlackBoxOp::ToRadix::bincodeDeserialize(std::vector input) +{ + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw_or_abort("Some input bytes were not read"); + } + return value; +} + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlackBoxOp::ToRadix& obj, + Serializer& serializer) +{ + serde::Serializable::serialize(obj.input, serializer); + serde::Serializable::serialize(obj.radix, serializer); + serde::Serializable::serialize(obj.output, serializer); +} + +template <> +template +Program::BlackBoxOp::ToRadix serde::Deserializable::deserialize( + Deserializer& deserializer) +{ + Program::BlackBoxOp::ToRadix obj; + obj.input = serde::Deserializable::deserialize(deserializer); + obj.radix = serde::Deserializable::deserialize(deserializer); + obj.output = serde::Deserializable::deserialize(deserializer); + return obj; +} + +namespace Program { + inline bool operator==(const BlockId& lhs, const BlockId& rhs) { if (!(lhs.value == rhs.value)) { diff --git a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp index 5afcd68e987..222a7da6399 100644 --- a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp +++ b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp @@ -870,7 +870,17 @@ namespace Program { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant value; + struct ToRadix { + Program::MemoryAddress input; + uint32_t radix; + Program::HeapArray output; + + friend bool operator==(const ToRadix&, const ToRadix&); + std::vector bincodeSerialize() const; + static ToRadix bincodeDeserialize(std::vector); + }; + + std::variant value; friend bool operator==(const BlackBoxOp&, 
const BlackBoxOp&); std::vector bincodeSerialize() const; @@ -4293,6 +4303,50 @@ Program::BlackBoxOp::Sha256Compression serde::Deserializable BlackBoxOp::ToRadix::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxOp::ToRadix BlackBoxOp::ToRadix::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlackBoxOp::ToRadix &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.input, serializer); + serde::Serializable::serialize(obj.radix, serializer); + serde::Serializable::serialize(obj.output, serializer); +} + +template <> +template +Program::BlackBoxOp::ToRadix serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlackBoxOp::ToRadix obj; + obj.input = serde::Deserializable::deserialize(deserializer); + obj.radix = serde::Deserializable::deserialize(deserializer); + obj.output = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Program { inline bool operator==(const BlockId &lhs, const BlockId &rhs) { diff --git a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs index 15abc19ed90..9a66b428dc3 100644 --- a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs @@ -126,4 +126,9 @@ pub enum BlackBoxOp { hash_values: HeapVector, output: HeapArray, }, + ToRadix { + input: MemoryAddress, + radix: u32, + output: HeapArray, + }, } diff --git a/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs 
b/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs index c999b5bf330..d6ecd25f454 100644 --- a/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs @@ -5,6 +5,7 @@ use acvm_blackbox_solver::{ aes128_encrypt, blake2s, blake3, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, keccakf1600, sha256, sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError, }; +use num_bigint::BigUint; use crate::memory::MemoryValue; use crate::Memory; @@ -295,6 +296,25 @@ pub(crate) fn evaluate_black_box( memory.write_slice(memory.read_ref(output.pointer), &state); Ok(()) } + BlackBoxOp::ToRadix { input, radix, output } => { + let input: FieldElement = + memory.read(*input).try_into().expect("ToRadix input not a field"); + + let mut input = BigUint::from_bytes_be(&input.to_be_bytes()); + let radix = BigUint::from(*radix); + + let mut limbs: Vec = Vec::with_capacity(output.size); + + for _ in 0..output.size { + let limb = &input % &radix; + limbs.push(FieldElement::from_be_bytes_reduce(&limb.to_bytes_be()).into()); + input /= &radix; + } + + memory.write_slice(memory.read_ref(output.pointer), &limbs); + + Ok(()) + } } } @@ -321,6 +341,7 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { BlackBoxOp::BigIntToLeBytes { .. } => BlackBoxFunc::BigIntToLeBytes, BlackBoxOp::Poseidon2Permutation { .. } => BlackBoxFunc::Poseidon2Permutation, BlackBoxOp::Sha256Compression { .. } => BlackBoxFunc::Sha256Compression, + BlackBoxOp::ToRadix { .. 
} => unreachable!("ToRadix is not an ACIR BlackBoxFunc"), } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index f660c8e0b7a..6a4f9f5cc0e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -488,8 +488,22 @@ impl<'block> BrilligBlock<'block> { } Value::Intrinsic(Intrinsic::ToRadix(endianness)) => { let source = self.convert_ssa_single_addr_value(arguments[0], dfg); - let radix = self.convert_ssa_single_addr_value(arguments[1], dfg); - let limb_count = self.convert_ssa_single_addr_value(arguments[2], dfg); + + let radix: u32 = dfg + .get_numeric_constant(arguments[1]) + .expect("Radix should be known") + .try_to_u64() + .expect("Radix should fit in u64") + .try_into() + .expect("Radix should be u32"); + + let limb_count: usize = dfg + .get_numeric_constant(arguments[2]) + .expect("Limb count should be known") + .try_to_u64() + .expect("Limb count should fit in u64") + .try_into() + .expect("Limb count should fit in usize"); let results = dfg.instruction_results(instruction_id); @@ -511,7 +525,8 @@ impl<'block> BrilligBlock<'block> { .extract_vector(); // Update the user-facing slice length - self.brillig_context.cast_instruction(target_len, limb_count); + self.brillig_context + .usize_const_instruction(target_len.address, limb_count.into()); self.brillig_context.codegen_to_radix( source, @@ -524,7 +539,13 @@ impl<'block> BrilligBlock<'block> { } Value::Intrinsic(Intrinsic::ToBits(endianness)) => { let source = self.convert_ssa_single_addr_value(arguments[0], dfg); - let limb_count = self.convert_ssa_single_addr_value(arguments[1], dfg); + let limb_count: usize = dfg + .get_numeric_constant(arguments[1]) + .expect("Limb count should be known") + .try_to_u64() + .expect("Limb count should fit in u64") + 
.try_into() + .expect("Limb count should fit in usize"); let results = dfg.instruction_results(instruction_id); @@ -549,21 +570,18 @@ impl<'block> BrilligBlock<'block> { BrilligVariable::SingleAddr(..) => unreachable!("ICE: ToBits on non-array"), }; - let radix = self.brillig_context.make_constant_instruction(2_usize.into(), 32); - // Update the user-facing slice length - self.brillig_context.cast_instruction(target_len, limb_count); + self.brillig_context + .usize_const_instruction(target_len.address, limb_count.into()); self.brillig_context.codegen_to_radix( source, target_vector, - radix, + 2, limb_count, matches!(endianness, Endian::Big), 1, ); - - self.brillig_context.deallocate_single_addr(radix); } _ => { unreachable!("unsupported function call type {:?}", dfg[*func]) diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs index ab756217bcd..58166554e1d 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs @@ -1,6 +1,7 @@ -use acvm::FieldElement; - -use crate::brillig::brillig_ir::BrilligBinaryOp; +use acvm::{ + acir::brillig::{BlackBoxOp, HeapArray}, + FieldElement, +}; use super::{ brillig_variable::{BrilligVector, SingleAddrVariable}, @@ -36,57 +37,46 @@ impl BrilligContext { &mut self, source_field: SingleAddrVariable, target_vector: BrilligVector, - radix: SingleAddrVariable, - limb_count: SingleAddrVariable, + radix: u32, + limb_count: usize, big_endian: bool, limb_bit_size: u32, ) { assert!(source_field.bit_size == FieldElement::max_num_bits()); - assert!(radix.bit_size == 32); - assert!(limb_count.bit_size == 32); - let radix_as_field = - SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); - self.cast_instruction(radix_as_field, radix); - 
self.cast_instruction(SingleAddrVariable::new_usize(target_vector.size), limb_count); + self.usize_const_instruction(target_vector.size, limb_count.into()); self.usize_const_instruction(target_vector.rc, 1_usize.into()); self.codegen_allocate_array(target_vector.pointer, target_vector.size); - let shifted_field = - SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); - self.mov_instruction(shifted_field.address, source_field.address); + self.black_box_op_instruction(BlackBoxOp::ToRadix { + input: source_field.address, + radix, + output: HeapArray { pointer: target_vector.pointer, size: limb_count }, + }); let limb_field = SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); let limb_casted = SingleAddrVariable::new(self.allocate_register(), limb_bit_size); - self.codegen_loop(target_vector.size, |ctx, iterator_register| { - // Compute the modulus - ctx.binary_instruction( - shifted_field, - radix_as_field, - limb_field, - BrilligBinaryOp::Modulo, - ); - // Cast it - ctx.cast_instruction(limb_casted, limb_field); - // Write it - ctx.codegen_array_set(target_vector.pointer, iterator_register, limb_casted.address); - // Integer div the field - ctx.binary_instruction( - shifted_field, - radix_as_field, - shifted_field, - BrilligBinaryOp::UnsignedDiv, - ); - }); + if limb_bit_size != FieldElement::max_num_bits() { + self.codegen_loop(target_vector.size, |ctx, iterator_register| { + // Read the limb + ctx.codegen_array_get(target_vector.pointer, iterator_register, limb_field.address); + // Cast it + ctx.cast_instruction(limb_casted, limb_field); + // Write it + ctx.codegen_array_set( + target_vector.pointer, + iterator_register, + limb_casted.address, + ); + }); + } // Deallocate our temporary registers - self.deallocate_single_addr(shifted_field); self.deallocate_single_addr(limb_field); self.deallocate_single_addr(limb_casted); - self.deallocate_single_addr(radix_as_field); if big_endian { 
self.codegen_reverse_vector_in_place(target_vector); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 667ccf6ddbe..f02f6059e7c 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -451,6 +451,15 @@ impl DebugShow { output ); } + BlackBoxOp::ToRadix { input, radix, output } => { + debug_println!( + self.enable_debug_trace, + " TO_RADIX {} {} -> {}", + input, + radix, + output + ); + } } } diff --git a/noir/noir-repo/noir_stdlib/src/field/bn254.nr b/noir/noir-repo/noir_stdlib/src/field/bn254.nr index d70310be391..2e82d9e7c23 100644 --- a/noir/noir-repo/noir_stdlib/src/field/bn254.nr +++ b/noir/noir-repo/noir_stdlib/src/field/bn254.nr @@ -25,7 +25,7 @@ unconstrained fn decompose_unsafe(x: Field) -> (Field, Field) { fn assert_gt_limbs(a: (Field, Field), b: (Field, Field)) { let (alo, ahi) = a; let (blo, bhi) = b; - let borrow = lte_unsafe(alo, blo, 16); + let borrow = lte_unsafe_16(alo, blo); let rlo = alo - blo - 1 + (borrow as Field) * TWO_POW_128; let rhi = ahi - bhi - (borrow as Field); @@ -51,9 +51,9 @@ pub fn decompose(x: Field) -> (Field, Field) { (xlo, xhi) } -unconstrained fn lt_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { - let x_bytes = x.__to_le_radix(256, num_bytes); - let y_bytes = y.__to_le_radix(256, num_bytes); +fn lt_unsafe_internal(x: Field, y: Field, num_bytes: u32) -> bool { + let x_bytes = x.to_le_radix(256, num_bytes); + let y_bytes = y.to_le_radix(256, num_bytes); let mut x_is_lt = false; let mut done = false; for i in 0..num_bytes { @@ -70,8 +70,20 @@ unconstrained fn lt_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { x_is_lt } -unconstrained fn lte_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { - lt_unsafe(x, y, num_bytes) | (x == y) +fn lte_unsafe_internal(x: Field, y: Field, 
num_bytes: u32) -> bool { + if x == y { + true + } else { + lt_unsafe_internal(x, y, num_bytes) + } +} + +unconstrained fn lt_unsafe_32(x: Field, y: Field) -> bool { + lt_unsafe_internal(x, y, 32) +} + +unconstrained fn lte_unsafe_16(x: Field, y: Field) -> bool { + lte_unsafe_internal(x, y, 16) } pub fn assert_gt(a: Field, b: Field) { @@ -90,7 +102,7 @@ pub fn assert_lt(a: Field, b: Field) { pub fn gt(a: Field, b: Field) -> bool { if a == b { false - } else if lt_unsafe(a, b, 32) { + } else if lt_unsafe_32(a, b) { assert_gt(b, a); false } else { @@ -105,7 +117,10 @@ pub fn lt(a: Field, b: Field) -> bool { mod tests { // TODO: Allow imports from "super" - use crate::field::bn254::{decompose_unsafe, decompose, lt_unsafe, assert_gt, gt, lt, TWO_POW_128, lte_unsafe, PLO, PHI}; + use crate::field::bn254::{ + decompose_unsafe, decompose, lt_unsafe_internal, assert_gt, gt, lt, TWO_POW_128, + lte_unsafe_internal, PLO, PHI + }; #[test] fn check_decompose_unsafe() { @@ -123,23 +138,23 @@ mod tests { #[test] fn check_lt_unsafe() { - assert(lt_unsafe(0, 1, 16)); - assert(lt_unsafe(0, 0x100, 16)); - assert(lt_unsafe(0x100, TWO_POW_128 - 1, 16)); - assert(!lt_unsafe(0, TWO_POW_128, 16)); + assert(lt_unsafe_internal(0, 1, 16)); + assert(lt_unsafe_internal(0, 0x100, 16)); + assert(lt_unsafe_internal(0x100, TWO_POW_128 - 1, 16)); + assert(!lt_unsafe_internal(0, TWO_POW_128, 16)); } #[test] fn check_lte_unsafe() { - assert(lte_unsafe(0, 1, 16)); - assert(lte_unsafe(0, 0x100, 16)); - assert(lte_unsafe(0x100, TWO_POW_128 - 1, 16)); - assert(!lte_unsafe(0, TWO_POW_128, 16)); - - assert(lte_unsafe(0, 0, 16)); - assert(lte_unsafe(0x100, 0x100, 16)); - assert(lte_unsafe(TWO_POW_128 - 1, TWO_POW_128 - 1, 16)); - assert(lte_unsafe(TWO_POW_128, TWO_POW_128, 16)); + assert(lte_unsafe_internal(0, 1, 16)); + assert(lte_unsafe_internal(0, 0x100, 16)); + assert(lte_unsafe_internal(0x100, TWO_POW_128 - 1, 16)); + assert(!lte_unsafe_internal(0, TWO_POW_128, 16)); + + assert(lte_unsafe_internal(0, 
0, 16)); + assert(lte_unsafe_internal(0x100, 0x100, 16)); + assert(lte_unsafe_internal(TWO_POW_128 - 1, TWO_POW_128 - 1, 16)); + assert(lte_unsafe_internal(TWO_POW_128, TWO_POW_128, 16)); } #[test] From 26525764396ccfb2176e47a1016d194244b374f9 Mon Sep 17 00:00:00 2001 From: ludamad Date: Thu, 9 May 2024 09:55:22 -0400 Subject: [PATCH 23/43] fix(ci): bench list (#6282) --- .github/workflows/ci.yml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d696c0d41bf..7a6fcbe4485 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,11 +37,10 @@ jobs: runs-on: ${{ inputs.username || github.actor }}-x86 outputs: e2e_list: ${{ steps.e2e_list.outputs.list }} + bench_list: ${{ steps.bench_list.outputs.list }} steps: - - { - uses: actions/checkout@v4, - with: { ref: "${{ github.event.pull_request.head.sha }}" }, - } + - uses: actions/checkout@v4 + with: { ref: "${{ github.event.pull_request.head.sha }}" } - uses: ./.github/ci-setup-action with: dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" From bd2ccf0bd58f66bed0846617ac2a737f4a619262 Mon Sep 17 00:00:00 2001 From: Facundo Date: Thu, 9 May 2024 15:11:40 +0100 Subject: [PATCH 24/43] fix(avm-context): enqueueing of public from private (#6299) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes ``` e2e_token_contract burn › private › burn less than balance Simulation error: Packed values for hash 0x237f08330472d6db6fdd49901b949f2d7fbdbdc3062ef5339753f8c6bd784d15 not found in cache ``` Also fix calculation of unencrypted log length since after fixing the packing I was getting "No unencrypted logs are allowed for static calls". 
--- .../aztec-nr/aztec/src/context/interface.nr | 69 +++++++++++++++---- .../contracts/avm_test_contract/src/main.nr | 6 ++ .../end-to-end/src/e2e_avm_simulator.test.ts | 6 ++ .../simulator/src/avm/journal/journal.ts | 3 +- 4 files changed, 69 insertions(+), 15 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/context/interface.nr b/noir-projects/aztec-nr/aztec/src/context/interface.nr index 7f72656252b..0ceb66a05a8 100644 --- a/noir-projects/aztec-nr/aztec/src/context/interface.nr +++ b/noir-projects/aztec-nr/aztec/src/context/interface.nr @@ -1,6 +1,6 @@ use dep::protocol_types::{abis::function_selector::FunctionSelector, address::{AztecAddress, EthAddress}, traits::Deserialize}; -use crate::hash::hash_args; +use crate::oracle::arguments; use crate::context::private_context::PrivateContext; use crate::context::public_context::PublicContext; use crate::context::avm_context::AvmContext; @@ -118,7 +118,6 @@ struct PublicCallInterface { } impl PublicCallInterface { - pub fn call(self, context: &mut PublicContext) -> T where T: Deserialize { let returns = context.call_public_function_with_packed_args( self.target_contract, @@ -232,18 +231,39 @@ impl AvmCallInterface { } pub fn enqueue(self, context: &mut PrivateContext) { - let args_hash = hash_args(self.args); - context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, false, false) + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. 
+ let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ false, + /*delegate=*/ false + ) } pub fn static_enqueue(self, context: &mut PrivateContext) { - let args_hash = hash_args(self.args); - context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, true, false) + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. + let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ true, + /*delegate=*/ false + ) } pub fn delegate_enqueue(self, context: &mut PrivateContext) { - let args_hash = hash_args(self.args); - context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, false, true) + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. + let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ false, + /*delegate=*/ true + ) } } @@ -276,17 +296,38 @@ impl AvmVoidCallInterface { } pub fn enqueue(self, context: &mut PrivateContext) { - let args_hash = hash_args(self.args); - context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, false, false) + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. 
+ let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ false, + /*delegate=*/ false + ) } pub fn static_enqueue(self, context: &mut PrivateContext) { - let args_hash = hash_args(self.args); - context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, true, false) + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. + let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ true, + /*delegate=*/ false + ) } pub fn delegate_enqueue(self, context: &mut PrivateContext) { - let args_hash = hash_args(self.args); - context.call_public_function_with_packed_args(self.target_contract, self.selector, args_hash, false, true) + // This packing is only here because PrivateContext's call_public* functions do not accept a slice for the args. 
+ let args_hash = arguments::pack_arguments(self.args); + context.call_public_function_with_packed_args( + self.target_contract, + self.selector, + args_hash, + /*static=*/ false, + /*delegate=*/ true + ) } } diff --git a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr index 94d70614a13..e71861ffbef 100644 --- a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr @@ -153,6 +153,12 @@ contract AvmTest { U128::from_integer(should_overflow) } + #[aztec(private)] + fn enqueue_public_from_private() { + AvmTest::at(context.this_address()).set_opcode_u8().static_enqueue(&mut context); + AvmTest::at(context.this_address()).set_read_storage_single(5).enqueue(&mut context); + } + /************************************************************************ * Hashing functions ************************************************************************/ diff --git a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts index 56cca9370f4..4869cc90162 100644 --- a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts +++ b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts @@ -41,6 +41,12 @@ describe('e2e_avm_simulator', () => { }); }); + describe('From private', () => { + it('Should enqueue a public function correctly', async () => { + await avmContract.methods.enqueue_public_from_private().simulate(); + }); + }); + describe('Gas metering', () => { it('Tracks L2 gas usage on simulation', async () => { const request = await avmContract.methods.add_args_return(20n, 30n).create(); diff --git a/yarn-project/simulator/src/avm/journal/journal.ts b/yarn-project/simulator/src/avm/journal/journal.ts index 7bea5f1c42a..c43418d1e6d 100644 --- a/yarn-project/simulator/src/avm/journal/journal.ts +++ b/yarn-project/simulator/src/avm/journal/journal.ts @@ 
-119,7 +119,8 @@ export class AvmPersistableStateManager { contractStorageUpdateRequests: [], unencryptedLogsHashes: [], unencryptedLogs: [], - unencryptedLogPreimagesLength: new Fr(0), + // The length starts at 4 because it will always include the size. + unencryptedLogPreimagesLength: new Fr(4), allUnencryptedLogs: [], nestedExecutions: [], }; From 67fedf1a4a93aed9c1ee1e14a21f4b098dde995e Mon Sep 17 00:00:00 2001 From: Charlie Lye Date: Thu, 9 May 2024 15:41:43 +0100 Subject: [PATCH 25/43] feat: build-images as earthly. (#6194) * Converts our build images from Dockerfile to Earthfile. This means we now need to reference the registry image from the devcontainer. Also now means you need earthly to build the build images. * Enables docker-in-docker within our devcontainer. * ARM ci now uses earthly wrapper to pick up the env vars. * Running devcontainer more than once outside vscode will reuse existing container. * If on master we enable the pushing of inline cache. --- .devcontainer/dev/devcontainer.json | 20 +- .devcontainer/dev/docker-in-docker/NOTES.md | 16 + .devcontainer/dev/docker-in-docker/README.md | 53 ++ .../devcontainer-feature.json | 70 ++ .devcontainer/dev/docker-in-docker/install.sh | 624 ++++++++++++++++++ .github/workflows/ci-arm.yml | 4 +- CODEOWNERS | 3 +- build-images/Dockerfile | 351 ---------- build-images/Earthfile | 410 +++++++++++- build-images/Makefile | 58 -- build-images/README.md | 31 +- build-images/entrypoint.sh | 14 +- build-images/install-docker.sh | 624 ++++++++++++++++++ build-images/run.sh | 44 +- scripts/earthly-ci | 6 + 15 files changed, 1870 insertions(+), 458 deletions(-) create mode 100644 .devcontainer/dev/docker-in-docker/NOTES.md create mode 100644 .devcontainer/dev/docker-in-docker/README.md create mode 100644 .devcontainer/dev/docker-in-docker/devcontainer-feature.json create mode 100755 .devcontainer/dev/docker-in-docker/install.sh delete mode 100644 build-images/Dockerfile delete mode 100755 build-images/Makefile 
create mode 100755 build-images/install-docker.sh diff --git a/.devcontainer/dev/devcontainer.json b/.devcontainer/dev/devcontainer.json index 792ffdbc010..e5fb68ec02e 100644 --- a/.devcontainer/dev/devcontainer.json +++ b/.devcontainer/dev/devcontainer.json @@ -1,22 +1,10 @@ { "name": "Development", - "build": { - "dockerfile": "../../build-images/Dockerfile", - "context": "../../build-images", - "target": "devbox" + "image": "aztecprotocol/devbox:1.0", + "features": { + // Use custom fork with noble added to list of supported distros. + "./docker-in-docker": {} }, "containerUser": "aztec-dev", - // ubuntu:noble is currently not supported. - // Can possibly workaround cherry-picking from here: - // https://github.com/devcontainers/features/blob/main/src/docker-in-docker/install.sh - // - // "image": "aztecprotocol/codespace", - // "features": { - // "docker-in-docker": { - // "version": "latest", - // "moby": true, - // "dockerDashComposeVersion": "v1" - // } - // }, "mounts": ["source=devbox-home,target=/home/aztec-dev,type=volume"] } diff --git a/.devcontainer/dev/docker-in-docker/NOTES.md b/.devcontainer/dev/docker-in-docker/NOTES.md new file mode 100644 index 00000000000..b8156f8b69f --- /dev/null +++ b/.devcontainer/dev/docker-in-docker/NOTES.md @@ -0,0 +1,16 @@ +## Limitations + +This docker-in-docker Dev Container Feature is roughly based on the [official docker-in-docker wrapper script](https://github.com/moby/moby/blob/master/hack/dind) that is part of the [Moby project](https://mobyproject.org/). With this in mind: +* As the name implies, the Feature is expected to work when the host is running Docker (or the OSS Moby container engine it is built on). It may be possible to get running in other container engines, but it has not been tested with them. +* The host and the container must be running on the same chip architecture. 
You will not be able to use it with an emulated x86 image with Docker Desktop on an Apple Silicon Mac, like in this example: + ``` + FROM --platform=linux/amd64 mcr.microsoft.com/devcontainers/typescript-node:16 + ``` + See [Issue #219](https://github.com/devcontainers/features/issues/219) for more details. + + +## OS Support + +This Feature should work on recent versions of Debian/Ubuntu-based distributions with the `apt` package manager installed. + +`bash` is required to execute the `install.sh` script. diff --git a/.devcontainer/dev/docker-in-docker/README.md b/.devcontainer/dev/docker-in-docker/README.md new file mode 100644 index 00000000000..29e3105c60b --- /dev/null +++ b/.devcontainer/dev/docker-in-docker/README.md @@ -0,0 +1,53 @@ +# Docker (Docker-in-Docker) (docker-in-docker) + +**FORKED HERE TO SUPPORT NOBLE** + +Create child containers _inside_ a container, independent from the host's docker instance. Installs Docker extension in the container along with needed CLIs. + +## Example Usage + +```json +"features": { + "ghcr.io/devcontainers/features/docker-in-docker:2": {} +} +``` + +## Options + +| Options Id | Description | Type | Default Value | +| -------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------- | ------------- | +| version | Select or enter a Docker/Moby Engine version. (Availability can vary by OS version.) 
| string | latest | +| moby | Install OSS Moby build instead of Docker CE | boolean | true | +| mobyBuildxVersion | Install a specific version of moby-buildx when using Moby | string | latest | +| dockerDashComposeVersion | Default version of Docker Compose (latest, v2 or none) | string | latest | +| azureDnsAutoDetection | Allow automatically setting the dockerd DNS server when the installation script detects it is running in Azure | boolean | true | +| dockerDefaultAddressPool | Define default address pools for Docker networks. e.g. base=192.168.0.0/16,size=24 | string | - | +| installDockerBuildx | Install Docker Buildx | boolean | true | +| installDockerComposeSwitch | Install Compose Switch (provided docker compose is available) which is a replacement to the Compose V1 docker-compose (python) executable. It translates the command line into Compose V2 docker compose then runs the latter. | boolean | true | + +## Customizations + +### VS Code Extensions + +- `ms-azuretools.vscode-docker` + +## Limitations + +This docker-in-docker Dev Container Feature is roughly based on the [official docker-in-docker wrapper script](https://github.com/moby/moby/blob/master/hack/dind) that is part of the [Moby project](https://mobyproject.org/). With this in mind: + +- As the name implies, the Feature is expected to work when the host is running Docker (or the OSS Moby container engine it is built on). It may be possible to get running in other container engines, but it has not been tested with them. +- The host and the container must be running on the same chip architecture. You will not be able to use it with an emulated x86 image with Docker Desktop on an Apple Silicon Mac, like in this example: + ``` + FROM --platform=linux/amd64 mcr.microsoft.com/devcontainers/typescript-node:16 + ``` + See [Issue #219](https://github.com/devcontainers/features/issues/219) for more details. 
+ +## OS Support + +This Feature should work on recent versions of Debian/Ubuntu-based distributions with the `apt` package manager installed. + +`bash` is required to execute the `install.sh` script. + +--- + +_Note: This file was auto-generated from the [devcontainer-feature.json](https://github.com/devcontainers/features/blob/main/src/docker-in-docker/devcontainer-feature.json). Add additional notes to a `NOTES.md`._ diff --git a/.devcontainer/dev/docker-in-docker/devcontainer-feature.json b/.devcontainer/dev/docker-in-docker/devcontainer-feature.json new file mode 100644 index 00000000000..7b8b472245b --- /dev/null +++ b/.devcontainer/dev/docker-in-docker/devcontainer-feature.json @@ -0,0 +1,70 @@ +{ + "id": "docker-in-docker", + "version": "2.10.2", + "name": "Docker (Docker-in-Docker)", + "documentationURL": "https://github.com/devcontainers/features/tree/main/src/docker-in-docker", + "description": "Create child containers *inside* a container, independent from the host's docker instance. Installs Docker extension in the container along with needed CLIs.", + "options": { + "version": { + "type": "string", + "proposals": ["latest", "none", "20.10"], + "default": "latest", + "description": "Select or enter a Docker/Moby Engine version. 
(Availability can vary by OS version.)" + }, + "moby": { + "type": "boolean", + "default": true, + "description": "Install OSS Moby build instead of Docker CE" + }, + "mobyBuildxVersion": { + "type": "string", + "default": "latest", + "description": "Install a specific version of moby-buildx when using Moby" + }, + "dockerDashComposeVersion": { + "type": "string", + "enum": ["none", "latest", "v2"], + "default": "latest", + "description": "Default version of Docker Compose (latest, v2 or none)" + }, + "azureDnsAutoDetection": { + "type": "boolean", + "default": true, + "description": "Allow automatically setting the dockerd DNS server when the installation script detects it is running in Azure" + }, + "dockerDefaultAddressPool": { + "type": "string", + "default": "", + "proposals": [], + "description": "Define default address pools for Docker networks. e.g. base=192.168.0.0/16,size=24" + }, + "installDockerBuildx": { + "type": "boolean", + "default": true, + "description": "Install Docker Buildx" + }, + "installDockerComposeSwitch": { + "type": "boolean", + "default": true, + "description": "Install Compose Switch (provided docker compose is available) which is a replacement to the Compose V1 docker-compose (python) executable. It translates the command line into Compose V2 docker compose then runs the latter." 
+ } + }, + "entrypoint": "/usr/local/share/docker-init.sh", + "privileged": true, + "containerEnv": { + "DOCKER_BUILDKIT": "1" + }, + "customizations": { + "vscode": { + "extensions": ["ms-azuretools.vscode-docker"] + } + }, + "mounts": [ + { + "source": "dind-var-lib-docker-${devcontainerId}", + "target": "/var/lib/docker", + "type": "volume" + } + ], + "installsAfter": ["ghcr.io/devcontainers/features/common-utils"] +} diff --git a/.devcontainer/dev/docker-in-docker/install.sh b/.devcontainer/dev/docker-in-docker/install.sh new file mode 100755 index 00000000000..4a433a02220 --- /dev/null +++ b/.devcontainer/dev/docker-in-docker/install.sh @@ -0,0 +1,624 @@ +#!/usr/bin/env bash +#------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. +#------------------------------------------------------------------------------------------------------------- +# +# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker-in-docker.md +# Maintainer: The Dev Container spec maintainers + + +DOCKER_VERSION="${VERSION:-"latest"}" # The Docker/Moby Engine + CLI should match in version +USE_MOBY="${MOBY:-"true"}" +MOBY_BUILDX_VERSION="${MOBYBUILDXVERSION:-"latest"}" +DOCKER_DASH_COMPOSE_VERSION="${DOCKERDASHCOMPOSEVERSION:-"latest"}" #latest, v2 or none +AZURE_DNS_AUTO_DETECTION="${AZUREDNSAUTODETECTION:-"true"}" +DOCKER_DEFAULT_ADDRESS_POOL="${DOCKERDEFAULTADDRESSPOOL:-""}" +USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}" +INSTALL_DOCKER_BUILDX="${INSTALLDOCKERBUILDX:-"true"}" +INSTALL_DOCKER_COMPOSE_SWITCH="${INSTALLDOCKERCOMPOSESWITCH:-"true"}" +MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc" +DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal jammy noble" 
+DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal hirsute impish jammy noble" + +# Default: Exit on any failure. +set -e + +# Clean up +rm -rf /var/lib/apt/lists/* + +# Setup STDERR. +err() { + echo "(!) $*" >&2 +} + +if [ "$(id -u)" -ne 0 ]; then + err 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' + exit 1 +fi + +################### +# Helper Functions +# See: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/shared/utils.sh +################### + +# Determine the appropriate non-root user +if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then + USERNAME="" + POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") + for CURRENT_USER in "${POSSIBLE_USERS[@]}"; do + if id -u ${CURRENT_USER} > /dev/null 2>&1; then + USERNAME=${CURRENT_USER} + break + fi + done + if [ "${USERNAME}" = "" ]; then + USERNAME=root + fi +elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then + USERNAME=root +fi + +apt_get_update() +{ + if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then + echo "Running apt-get update..." + apt-get update -y + fi +} + +# Checks if packages are installed and installs them if not +check_packages() { + if ! dpkg -s "$@" > /dev/null 2>&1; then + apt_get_update + apt-get -y install --no-install-recommends "$@" + fi +} + +# Figure out correct version of a three part version number is not passed +find_version_from_git_tags() { + local variable_name=$1 + local requested_version=${!variable_name} + if [ "${requested_version}" = "none" ]; then return; fi + local repository=$2 + local prefix=${3:-"tags/v"} + local separator=${4:-"."} + local last_part_optional=${5:-"false"} + if [ "$(echo "${requested_version}" | grep -o "." 
| wc -l)" != "2" ]; then + local escaped_separator=${separator//./\\.} + local last_part + if [ "${last_part_optional}" = "true" ]; then + last_part="(${escaped_separator}[0-9]+)?" + else + last_part="${escaped_separator}[0-9]+" + fi + local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$" + local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)" + if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then + declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)" + else + set +e + declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")" + set -e + fi + fi + if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then + err "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2 + exit 1 + fi + echo "${variable_name}=${!variable_name}" +} + +# Use semver logic to decrement a version number then look for the closest match +find_prev_version_from_git_tags() { + local variable_name=$1 + local current_version=${!variable_name} + local repository=$2 + # Normally a "v" is used before the version number, but support alternate cases + local prefix=${3:-"tags/v"} + # Some repositories use "_" instead of "." for version number part separation, support that + local separator=${4:-"."} + # Some tools release versions that omit the last digit (e.g. go) + local last_part_optional=${5:-"false"} + # Some repositories may have tags that include a suffix (e.g. actions/node-versions) + local version_suffix_regex=$6 + # Try one break fix version number less if we get a failure. Use "set +e" since "set -e" can cause failures in valid scenarios. 
+ set +e + major="$(echo "${current_version}" | grep -oE '^[0-9]+' || echo '')" + minor="$(echo "${current_version}" | grep -oP '^[0-9]+\.\K[0-9]+' || echo '')" + breakfix="$(echo "${current_version}" | grep -oP '^[0-9]+\.[0-9]+\.\K[0-9]+' 2>/dev/null || echo '')" + + if [ "${minor}" = "0" ] && [ "${breakfix}" = "0" ]; then + ((major=major-1)) + declare -g ${variable_name}="${major}" + # Look for latest version from previous major release + find_version_from_git_tags "${variable_name}" "${repository}" "${prefix}" "${separator}" "${last_part_optional}" + # Handle situations like Go's odd version pattern where "0" releases omit the last part + elif [ "${breakfix}" = "" ] || [ "${breakfix}" = "0" ]; then + ((minor=minor-1)) + declare -g ${variable_name}="${major}.${minor}" + # Look for latest version from previous minor release + find_version_from_git_tags "${variable_name}" "${repository}" "${prefix}" "${separator}" "${last_part_optional}" + else + ((breakfix=breakfix-1)) + if [ "${breakfix}" = "0" ] && [ "${last_part_optional}" = "true" ]; then + declare -g ${variable_name}="${major}.${minor}" + else + declare -g ${variable_name}="${major}.${minor}.${breakfix}" + fi + fi + set -e +} + +# Function to fetch the version released prior to the latest version +get_previous_version() { + local url=$1 + local repo_url=$2 + local variable_name=$3 + prev_version=${!variable_name} + + output=$(curl -s "$repo_url"); + message=$(echo "$output" | jq -r '.message') + + if [[ $message == "API rate limit exceeded"* ]]; then + echo -e "\nAn attempt to find latest version using GitHub Api Failed... \nReason: ${message}" + echo -e "\nAttempting to find latest version using GitHub tags." + find_prev_version_from_git_tags prev_version "$url" "tags/v" + declare -g ${variable_name}="${prev_version}" + else + echo -e "\nAttempting to find latest version using GitHub Api." 
+ version=$(echo "$output" | jq -r '.tag_name') + declare -g ${variable_name}="${version#v}" + fi + echo "${variable_name}=${!variable_name}" +} + +get_github_api_repo_url() { + local url=$1 + echo "${url/https:\/\/github.com/https:\/\/api.github.com\/repos}/releases/latest" +} + +########################################### +# Start docker-in-docker installation +########################################### + +# Ensure apt is in non-interactive to avoid prompts +export DEBIAN_FRONTEND=noninteractive + + +# Source /etc/os-release to get OS info +. /etc/os-release +# Fetch host/container arch. +architecture="$(dpkg --print-architecture)" + +# Check if distro is supported +if [ "${USE_MOBY}" = "true" ]; then + if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then + err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution" + err "Support distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" + exit 1 + fi + echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}'" +else + if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then + err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, please choose a compatible OS distribution" + err "Support distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" + exit 1 + fi + echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}'" +fi + +# Install dependencies +check_packages apt-transport-https curl ca-certificates pigz iptables gnupg2 dirmngr wget jq +if ! 
type git > /dev/null 2>&1; then + check_packages git +fi + +# Swap to legacy iptables for compatibility +if type iptables-legacy > /dev/null 2>&1; then + update-alternatives --set iptables /usr/sbin/iptables-legacy + update-alternatives --set ip6tables /usr/sbin/ip6tables-legacy +fi + + + +# Set up the necessary apt repos (either Microsoft's or Docker's) +if [ "${USE_MOBY}" = "true" ]; then + + # Name of open source engine/cli + engine_package_name="moby-engine" + cli_package_name="moby-cli" + + # Import key safely and import Microsoft apt repo + curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg + echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list +else + # Name of licensed engine/cli + engine_package_name="docker-ce" + cli_package_name="docker-ce-cli" + + # Import key safely and import Docker apt repo + curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg + echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list +fi + +# Refresh apt lists +apt-get update + +# Soft version matching +if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then + # Empty, meaning grab whatever "latest" is in apt repo + engine_version_suffix="" + cli_version_suffix="" +else + # Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...) 
+ docker_version_dot_escaped="${DOCKER_VERSION//./\\.}" + docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}" + # Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/ + docker_version_regex="^(.+:)?${docker_version_dot_plus_escaped}([\\.\\+ ~:-]|$)" + set +e # Don't exit if finding version fails - will handle gracefully + cli_version_suffix="=$(apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")" + engine_version_suffix="=$(apt-cache madison ${engine_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")" + set -e + if [ -z "${engine_version_suffix}" ] || [ "${engine_version_suffix}" = "=" ] || [ -z "${cli_version_suffix}" ] || [ "${cli_version_suffix}" = "=" ] ; then + err "No full or partial Docker / Moby version match found for \"${DOCKER_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). 
Available versions:" + apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+' + exit 1 + fi + echo "engine_version_suffix ${engine_version_suffix}" + echo "cli_version_suffix ${cli_version_suffix}" +fi + +# Version matching for moby-buildx +if [ "${USE_MOBY}" = "true" ]; then + if [ "${MOBY_BUILDX_VERSION}" = "latest" ]; then + # Empty, meaning grab whatever "latest" is in apt repo + buildx_version_suffix="" + else + buildx_version_dot_escaped="${MOBY_BUILDX_VERSION//./\\.}" + buildx_version_dot_plus_escaped="${buildx_version_dot_escaped//+/\\+}" + buildx_version_regex="^(.+:)?${buildx_version_dot_plus_escaped}([\\.\\+ ~:-]|$)" + set +e + buildx_version_suffix="=$(apt-cache madison moby-buildx | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${buildx_version_regex}")" + set -e + if [ -z "${buildx_version_suffix}" ] || [ "${buildx_version_suffix}" = "=" ]; then + err "No full or partial moby-buildx version match found for \"${MOBY_BUILDX_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). Available versions:" + apt-cache madison moby-buildx | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+' + exit 1 + fi + echo "buildx_version_suffix ${buildx_version_suffix}" + fi +fi + +# Install Docker / Moby CLI if not already installed +if type docker > /dev/null 2>&1 && type dockerd > /dev/null 2>&1; then + echo "Docker / Moby CLI and Engine already installed." +else + if [ "${USE_MOBY}" = "true" ]; then + # Install engine + set +e # Handle error gracefully + apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx${buildx_version_suffix} moby-engine${engine_version_suffix} + exit_code=$? + set -e + + if [ ${exit_code} -ne 0 ]; then + err "Packages for moby not available in OS ${ID} ${VERSION_CODENAME} (${architecture}). To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS version (eg: 'ubuntu-20.04')." 
+ exit 1 + fi + + # Install compose + apt-get -y install --no-install-recommends moby-compose || err "Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping." + else + apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix} docker-ce${engine_version_suffix} + # Install compose + apt-get -y install --no-install-recommends docker-compose-plugin || echo "(*) Package docker-compose-plugin (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping." + fi +fi + +echo "Finished installing docker / moby!" + +docker_home="/usr/libexec/docker" +cli_plugins_dir="${docker_home}/cli-plugins" + +# fallback for docker-compose +fallback_compose(){ + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for docker-compose v${compose_version}..." + get_previous_version "${url}" "${repo_url}" compose_version + echo -e "\nAttempting to install v${compose_version}" + curl -fsSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}" -o ${docker_compose_path} +} + +# If 'docker-compose' command is to be included +if [ "${DOCKER_DASH_COMPOSE_VERSION}" != "none" ]; then + case "${architecture}" in + amd64) target_compose_arch=x86_64 ;; + arm64) target_compose_arch=aarch64 ;; + *) + echo "(!) Docker in docker does not support machine architecture '$architecture'. Please use an x86-64 or ARM64 machine." + exit 1 + esac + + docker_compose_path="/usr/local/bin/docker-compose" + if [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then + err "The final Compose V1 release, version 1.29.2, was May 10, 2021. These packages haven't received any security updates since then. Use at your own risk." + INSTALL_DOCKER_COMPOSE_SWITCH="false" + + if [ "${target_compose_arch}" = "x86_64" ]; then + echo "(*) Installing docker compose v1..." 
+ curl -fsSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-Linux-x86_64" -o ${docker_compose_path} + chmod +x ${docker_compose_path} + + # Download the SHA256 checksum + DOCKER_COMPOSE_SHA256="$(curl -sSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-Linux-x86_64.sha256" | awk '{print $1}')" + echo "${DOCKER_COMPOSE_SHA256} ${docker_compose_path}" > docker-compose.sha256sum + sha256sum -c docker-compose.sha256sum --ignore-missing + elif [ "${VERSION_CODENAME}" = "bookworm" ]; then + err "Docker compose v1 is unavailable for 'bookworm' on Arm64. Kindly switch to use v2" + exit 1 + else + # Use pip to get a version that runs on this architecture + check_packages python3-minimal python3-pip libffi-dev python3-venv + echo "(*) Installing docker compose v1 via pip..." + export PYTHONUSERBASE=/usr/local + pip3 install --disable-pip-version-check --no-cache-dir --user "Cython<3.0" pyyaml wheel docker-compose --no-build-isolation + fi + else + compose_version=${DOCKER_DASH_COMPOSE_VERSION#v} + docker_compose_url="https://github.com/docker/compose" + find_version_from_git_tags compose_version "$docker_compose_url" "tags/v" + echo "(*) Installing docker-compose ${compose_version}..." 
+ curl -fsSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}" -o ${docker_compose_path} || { + if [[ $DOCKER_DASH_COMPOSE_VERSION == "latest" ]]; then + fallback_compose "$docker_compose_url" + else + echo -e "Error: Failed to install docker-compose v${compose_version}" + fi + } + + chmod +x ${docker_compose_path} + + # Download the SHA256 checksum + DOCKER_COMPOSE_SHA256="$(curl -sSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}.sha256" | awk '{print $1}')" + echo "${DOCKER_COMPOSE_SHA256} ${docker_compose_path}" > docker-compose.sha256sum + sha256sum -c docker-compose.sha256sum --ignore-missing + + mkdir -p ${cli_plugins_dir} + cp ${docker_compose_path} ${cli_plugins_dir} + fi +fi + +# fallback method for compose-switch +fallback_compose-switch() { + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for compose-switch v${compose_switch_version}..." + get_previous_version "$url" "$repo_url" compose_switch_version + echo -e "\nAttempting to install v${compose_switch_version}" + curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch +} + +# Install docker-compose switch if not already installed - https://github.com/docker/compose-switch#manual-installation +if [ "${INSTALL_DOCKER_COMPOSE_SWITCH}" = "true" ] && ! type compose-switch > /dev/null 2>&1; then + if type docker-compose > /dev/null 2>&1; then + echo "(*) Installing compose-switch..." 
+ current_compose_path="$(which docker-compose)" + target_compose_path="$(dirname "${current_compose_path}")/docker-compose-v1" + compose_switch_version="latest" + compose_switch_url="https://github.com/docker/compose-switch" + find_version_from_git_tags compose_switch_version "$compose_switch_url" + curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch || fallback_compose-switch "$compose_switch_url" + chmod +x /usr/local/bin/compose-switch + # TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11 + # Setup v1 CLI as alternative in addition to compose-switch (which maps to v2) + mv "${current_compose_path}" "${target_compose_path}" + update-alternatives --install ${docker_compose_path} docker-compose /usr/local/bin/compose-switch 99 + update-alternatives --install ${docker_compose_path} docker-compose "${target_compose_path}" 1 + else + err "Skipping installation of compose-switch as docker compose is unavailable..." + fi +fi + +# If init file already exists, exit +if [ -f "/usr/local/share/docker-init.sh" ]; then + echo "/usr/local/share/docker-init.sh already exists, so exiting." + # Clean up + rm -rf /var/lib/apt/lists/* + exit 0 +fi +echo "docker-init doesn't exist, adding..." + +if ! cat /etc/group | grep -e "^docker:" > /dev/null 2>&1; then + groupadd -r docker +fi + +usermod -aG docker ${USERNAME} + +# fallback for docker/buildx +fallback_buildx() { + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for docker buildx v${buildx_version}..." 
+ get_previous_version "$url" "$repo_url" buildx_version + buildx_file_name="buildx-v${buildx_version}.linux-${architecture}" + echo -e "\nAttempting to install v${buildx_version}" + wget https://github.com/docker/buildx/releases/download/v${buildx_version}/${buildx_file_name} +} + +if [ "${INSTALL_DOCKER_BUILDX}" = "true" ]; then + buildx_version="latest" + docker_buildx_url="https://github.com/docker/buildx" + find_version_from_git_tags buildx_version "$docker_buildx_url" "refs/tags/v" + echo "(*) Installing buildx ${buildx_version}..." + buildx_file_name="buildx-v${buildx_version}.linux-${architecture}" + + cd /tmp + wget https://github.com/docker/buildx/releases/download/v${buildx_version}/${buildx_file_name} || fallback_buildx "$docker_buildx_url" + + docker_home="/usr/libexec/docker" + cli_plugins_dir="${docker_home}/cli-plugins" + + mkdir -p ${cli_plugins_dir} + mv ${buildx_file_name} ${cli_plugins_dir}/docker-buildx + chmod +x ${cli_plugins_dir}/docker-buildx + + chown -R "${USERNAME}:docker" "${docker_home}" + chmod -R g+r+w "${docker_home}" + find "${docker_home}" -type d -print0 | xargs -n 1 -0 chmod g+s +fi + +tee /usr/local/share/docker-init.sh > /dev/null \ +<< EOF +#!/bin/sh +#------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+#------------------------------------------------------------------------------------------------------------- + +set -e + +AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} +DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL} +EOF + +tee -a /usr/local/share/docker-init.sh > /dev/null \ +<< 'EOF' +dockerd_start="AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL} $(cat << 'INNEREOF' + # explicitly remove dockerd and containerd PID file to ensure that it can start properly if it was stopped uncleanly + find /run /var/run -iname 'docker*.pid' -delete || : + find /run /var/run -iname 'container*.pid' -delete || : + + # -- Start: dind wrapper script -- + # Maintained: https://github.com/moby/moby/blob/master/hack/dind + + export container=docker + + if [ -d /sys/kernel/security ] && ! mountpoint -q /sys/kernel/security; then + mount -t securityfs none /sys/kernel/security || { + echo >&2 'Could not mount /sys/kernel/security.' + echo >&2 'AppArmor detection and --privileged mode might break.' + } + fi + + # Mount /tmp (conditionally) + if ! mountpoint -q /tmp; then + mount -t tmpfs none /tmp + fi + + set_cgroup_nesting() + { + # cgroup v2: enable nesting + if [ -f /sys/fs/cgroup/cgroup.controllers ]; then + # move the processes from the root group to the /init group, + # otherwise writing subtree_control fails with EBUSY. + # An error during moving non-existent process (i.e., "cat") is ignored. + mkdir -p /sys/fs/cgroup/init + xargs -rn1 < /sys/fs/cgroup/cgroup.procs > /sys/fs/cgroup/init/cgroup.procs || : + # enable controllers + sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \ + > /sys/fs/cgroup/cgroup.subtree_control + fi + } + + # Set cgroup nesting, retrying if necessary + retry_cgroup_nesting=0 + + until [ "${retry_cgroup_nesting}" -eq "5" ]; + do + set +e + set_cgroup_nesting + + if [ $? -ne 0 ]; then + echo "(*) cgroup v2: Failed to enable nesting, retrying..." 
+ else + break + fi + + retry_cgroup_nesting=`expr $retry_cgroup_nesting + 1` + set -e + done + + # -- End: dind wrapper script -- + + # Handle DNS + set +e + cat /etc/resolv.conf | grep -i 'internal.cloudapp.net' > /dev/null 2>&1 + if [ $? -eq 0 ] && [ "${AZURE_DNS_AUTO_DETECTION}" = "true" ] + then + echo "Setting dockerd Azure DNS." + CUSTOMDNS="--dns 168.63.129.16" + else + echo "Not setting dockerd DNS manually." + CUSTOMDNS="" + fi + set -e + + if [ -z "$DOCKER_DEFAULT_ADDRESS_POOL" ] + then + DEFAULT_ADDRESS_POOL="" + else + DEFAULT_ADDRESS_POOL="--default-address-pool $DOCKER_DEFAULT_ADDRESS_POOL" + fi + + # Start docker/moby engine + ( dockerd $CUSTOMDNS $DEFAULT_ADDRESS_POOL > /tmp/dockerd.log 2>&1 ) & +INNEREOF +)" + +sudo_if() { + COMMAND="$*" + + if [ "$(id -u)" -ne 0 ]; then + sudo $COMMAND + else + $COMMAND + fi +} + +retry_docker_start_count=0 +docker_ok="false" + +until [ "${docker_ok}" = "true" ] || [ "${retry_docker_start_count}" -eq "5" ]; +do + # Start using sudo if not invoked as root + if [ "$(id -u)" -ne 0 ]; then + sudo /bin/sh -c "${dockerd_start}" + else + eval "${dockerd_start}" + fi + + retry_count=0 + until [ "${docker_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; + do + sleep 1s + set +e + docker info > /dev/null 2>&1 && docker_ok="true" + set -e + + retry_count=`expr $retry_count + 1` + done + + if [ "${docker_ok}" != "true" ] && [ "${retry_docker_start_count}" != "4" ]; then + echo "(*) Failed to start docker, retrying..." + set +e + sudo_if pkill dockerd + sudo_if pkill containerd + set -e + fi + + retry_docker_start_count=`expr $retry_docker_start_count + 1` +done + +# Execute whatever commands were passed in (if any). This allows us +# to set this script to ENTRYPOINT while still executing the default CMD. +exec "$@" +EOF + +chmod +x /usr/local/share/docker-init.sh +chown ${USERNAME}:root /usr/local/share/docker-init.sh + +# Clean up +rm -rf /var/lib/apt/lists/* + +echo 'docker-in-docker-debian script has completed!' 
diff --git a/.github/workflows/ci-arm.yml b/.github/workflows/ci-arm.yml index 3317870ec5f..c750b142724 100644 --- a/.github/workflows/ci-arm.yml +++ b/.github/workflows/ci-arm.yml @@ -44,7 +44,7 @@ jobs: # prepare images locally, tagged by commit hash - name: "Build E2E Image" timeout-minutes: 40 - run: earthly ./yarn-project+export-e2e-test-images + run: earthly-ci ./yarn-project+export-e2e-test-images # all the end-to-end integration tests for aztec e2e: @@ -62,7 +62,7 @@ jobs: - name: Test working-directory: ./yarn-project/end-to-end/ timeout-minutes: 15 - run: earthly -P --no-output +uniswap-trade-on-l1-from-l2 + run: earthly-ci -P --no-output +uniswap-trade-on-l1-from-l2 notify: needs: [e2e] diff --git a/CODEOWNERS b/CODEOWNERS index cdd57834a49..37be432af89 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1,5 +1,4 @@ -/build-system/ @charlielye -/build_manifest.yml @charlielye +/build-images/ @charlielye # Notify the AVM team of any changes to public oracle. /yarn-project/simulator/src/public/public_execution_context.ts @Maddiaa0 @fcarreiro @dbanks12 diff --git a/build-images/Dockerfile b/build-images/Dockerfile deleted file mode 100644 index 893c93fe4c0..00000000000 --- a/build-images/Dockerfile +++ /dev/null @@ -1,351 +0,0 @@ -######################################################################################################################## -# Build wasi-sdk. 
-FROM ubuntu:noble AS wasi-sdk-build -RUN apt update && apt install -y \ - clang \ - cmake \ - ninja-build \ - git \ - cargo -RUN git clone --depth 1 --recursive --branch wasi-sdk-22 \ - https://github.com/WebAssembly/wasi-sdk.git -RUN mkdir -p /wasi-sdk/build/install/opt/wasi-sdk -WORKDIR /wasi-sdk -ENV MAKEFLAGS="-j$(nproc)" -RUN make build/llvm.BUILT -RUN make build/wasi-libc.BUILT -RUN make build/compiler-rt.BUILT -RUN make build/libcxx.BUILT -RUN make build/config.BUILT -RUN make build/version.BUILT -RUN mv build/install/opt/wasi-sdk /opt/wasi-sdk -FROM ubuntu:noble AS wasi-sdk -COPY --from=wasi-sdk-build /opt/wasi-sdk /opt/wasi-sdk - -######################################################################################################################## -# Build osxcross. -FROM ubuntu:noble AS osxcross-build -RUN export DEBIAN_FRONTEND="noninteractive" \ - && apt-get update \ - && apt-get install --no-install-recommends -y \ - bash \ - binutils-multiarch-dev \ - build-essential \ - ca-certificates \ - clang \ - git \ - libbz2-dev \ - libmpc-dev \ - libmpfr-dev \ - libgmp-dev \ - liblzma-dev \ - libpsi3-dev \ - libssl-dev \ - libxml2-dev \ - libz-dev \ - lzma-dev \ - make \ - patch \ - python3 \ - uuid-dev \ - wget \ - xz-utils \ - zlib1g-dev \ - cmake \ - curl \ - && apt-get -y autoremove \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -WORKDIR /usr/src/osxcross -ARG OSX_CROSS_COMMIT="ff8d100f3f026b4ffbe4ce96d8aac4ce06f1278b" -RUN git clone https://github.com/tpoechtrager/osxcross.git . 
&& git reset --hard $OSX_CROSS_COMMIT -ARG OSX_SDK="MacOSX14.0.sdk" -ARG OSX_SDK_URL="https://github.com/joseluisq/macosx-sdks/releases/download/14.0/${OSX_SDK}.tar.xz" -RUN curl -sSL "$OSX_SDK_URL" -o "./tarballs/$OSX_SDK.tar.xz" \ - && OSX_VERSION_MIN=14.0 UNATTENDED=1 ENABLE_COMPILER_RT_INSTALL=1 TARGET_DIR=/opt/osxcross ./build.sh \ - && rm -rf ./tarballs/$OSX_SDK.tar.xz /opt/osxcross/SDK/$OSX_SDK -FROM scratch AS osxcross -COPY --from=osxcross-build /opt/osxcross /opt/osxcross - -######################################################################################################################## -# Build foundry. -FROM ubuntu:noble AS foundry-build -RUN apt update && apt install -y git cargo -ARG TAG -RUN ulimit -n 65535 && \ - git clone --depth 1 --branch nightly-$TAG \ - https://github.com/foundry-rs/foundry.git && \ - cd foundry && cargo build --profile local && \ - mkdir -p /opt/foundry/bin && \ - for t in forge cast anvil chisel; do \ - mv ./target/local/$t /opt/foundry/bin/$t; \ - strip /opt/foundry/bin/$t; \ - done -FROM ubuntu:noble AS foundry -COPY --from=foundry-build /opt/foundry /opt/foundry -ENV PATH="/opt/foundry/bin:$PATH" - -######################################################################################################################## -# This image contains *just* what's needed to perform a full build of the aztec project. -# It acts as the base image for all CI builds, and we build on it to produce a developer box. -FROM ubuntu:noble as build -RUN apt update && \ - apt install -y \ - # Utils - curl \ - git \ - curl \ - wget \ - jq \ - gawk \ - unzip \ - netcat-openbsd \ - parallel \ - # C++ (clang=18, which we will move to. 16 is for current build.) - build-essential \ - cmake \ - ninja-build \ - clang \ - clang-16 \ - clang-format-16 \ - libc++-dev \ - libomp-dev \ - doxygen \ - # Node (18.19.1) - nodejs \ - npm \ - # Python (clang bindings for wasm bindgen.) 
- python3 \ - python3-clang \ - && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -# Install wasi-sdk. -COPY --from=aztecprotocol/wasi-sdk:22.0 /opt/wasi-sdk /opt/wasi-sdk - -# Install osxcross. Requires developer to mount SDK from their mac host. -COPY --from=aztecprotocol/osxcross:14.0 /opt/osxcross /opt/osxcross -ENV PATH="/opt/osxcross/bin:$PATH" -ENV LD_LIBRARY_PATH="/opt/osxcross/lib:$LD_LIBRARY_PATH" - -# Install foundry. -COPY --from=aztecprotocol/foundry:de33b6af53005037b463318d2628b5cfcaf39916 /opt/foundry /opt/foundry -ENV PATH="/opt/foundry/bin:$PATH" - -# Install rust and cross-compilers. Noir specifically uses 1.74.1. -# We add everyone write ownership so downstream boxes can write. -ENV RUSTUP_HOME=/opt/rust/rustup \ - CARGO_HOME=/opt/rust/cargo \ - PATH="/opt/rust/cargo/bin:$PATH" -RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.74.1 && \ - rustup target add wasm32-unknown-unknown wasm32-wasi aarch64-apple-darwin && \ - chmod -R a+w /opt/rust - -# Install yq -RUN curl -sL https://github.com/mikefarah/yq/releases/download/v4.42.1/yq_linux_$(dpkg --print-architecture) \ - -o /usr/local/bin/yq && chmod +x /usr/local/bin/yq - -# Install yarn -RUN npm install --global yarn - -# Install solhint -RUN npm install --global solhint - -######################################################################################################################## -# We want to produce downstream images: codespace, devbox and sysbox. This image is the base image for each. -# It contains a suite of tools that developers might use to develop aztec. -FROM build as basebox -RUN yes | unminimize - -# Install stuff devs need. 
-RUN apt update && \ - apt install -y \ - zsh \ - fzf \ - libfuse2 \ - iproute2 \ - iputils-ping \ - telnet \ - lsb-release \ - tmux \ - vim \ - software-properties-common \ - gnupg \ - htop \ - cgroup-tools \ - neovim \ - sudo \ - clangd-16 \ - man \ - python3-blessed \ - && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -# Install earthly. -RUN wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-$(dpkg --print-architecture) -O /usr/local/bin/earthly && \ - chmod +x /usr/local/bin/earthly - -# Install gh (github cli). -RUN mkdir -p -m 755 /etc/apt/keyrings && wget -qO- https://cli.github.com/packages/githubcli-archive-keyring.gpg > /etc/apt/keyrings/githubcli-archive-keyring.gpg \ - && chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \ - && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ - && apt update \ - && apt install gh -y - -# Install gt (graphite). -RUN npm install -g @withgraphite/graphite-cli@stable - -# Install aws cli. -RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-$(uname -m).zip" -o "awscliv2.zip" && \ - unzip awscliv2.zip && \ - ./aws/install --bin-dir /usr/local/bin --install-dir /usr/local/aws-cli --update && \ - rm -rf aws awscliv2.zip - -# Install terraform. -RUN curl -fsSL https://releases.hashicorp.com/terraform/1.7.5/terraform_1.7.5_linux_$(dpkg --print-architecture).zip -o terraform.zip \ - && unzip terraform.zip -d /usr/local/bin \ - && chmod +x /usr/local/bin/terraform \ - && rm terraform.zip - -# fzf seems to not install this file for some reason. -COPY ./key-bindings.zsh /usr/share/doc/fzf/examples/key-bindings.zsh - -# Sets LANG explicitly. Ensures tmux shows unicode symbols. -# Sets RUSTUP_HOME. -# Adds foundry and cargo bin dirs to PATH. 
-COPY environment /etc/environment - -# Cargo home and bin path should be set within users home dir at login. -RUN echo 'export CARGO_HOME="$HOME/.cargo"' >> /etc/zsh/zshenv -RUN echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> /etc/zsh/zshenv - -# sudo group can sudo without password. -RUN echo '%sudo ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers - -######################################################################################################################## -# This devbox container can be used to provide a full development environment. -# -# It can be used as a dev container: -# - Configuration in .devcontainer/devcontainer.json. -# - To run locally install "Dev Containers" plugin in vscode. -# - To run in GitHub codespaces, visit the repo in github, press '.', and open the terminal. -# -# It can be used independently: -# - The user should use the ./run.sh script to launch. -# - A persistent volume will be mounted to /home/aztec-dev. -# - It provides docker via the hosts docker instance, mounted at /var/lib/docker.sock. -# - It uses an entrypoint script at runtime to perform uid/gid alignment with the host and drop into user account. -FROM basebox as devbox - -# Install docker client. Will use mounted host docker socket. -RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --batch --yes --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg \ - && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" \ - | tee /etc/apt/sources.list.d/docker.list > /dev/null \ - && apt-get update && apt-get install -y docker-ce-cli -ADD https://raw.githubusercontent.com/docker/docker-ce/master/components/cli/contrib/completion/bash/docker /etc/bash_completion.d/docker.sh - -RUN apt install -y gosu -ENV TERM=xterm-256color -# Detect if the host machine is Mac, if so set an env var, and disable prompts vcs info for performance. 
-RUN <> /etc/zsh/zshrc -EOF -# Create the user we'll run as and become the user. -RUN useradd --shell /bin/zsh -G sudo -m aztec-dev -USER aztec-dev -WORKDIR /home/aztec-dev -# Add dotfiles. -COPY --chown=aztec-dev:aztec-dev home . -# The .npmrc config is set to install global bins here, update PATH. -ENV PATH=/home/aztec-dev/.npm-global/bin:$PATH -# Need to ensure correct permissions, under some conditions these would otherwise be created by root. -RUN mkdir .vscode-server .npm-global .ssh -# Switch back to root. Gives option for root runtime adjustments before becoming aztec-dev. -USER root -# Use as entrypoint when running in an environment that requires uid/gid alignment (e.g. vanilla linux docker). -COPY ./entrypoint.sh /entrypoint.sh -ENTRYPOINT ["/entrypoint.sh"] -CMD ["/bin/zsh"] - -######################################################################################################################## -# This sysbox container can be used to provide a full development environment. -# It's more advanced than devbox in that it uses nestybox's sysbox container runtime to provide more of a vm experience. -# It's used primarily by internal aztec developers who have sysboxes running on a powerful underlying mainframe. -# It provides better isolation and security guarantees than a plain devbox. 
-FROM basebox AS sysbox - -###################### START OF STOCK NESTYBOX SYSTEMD CONTAINER ############################### -# -# Systemd installation -# -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - systemd \ - systemd-sysv \ - libsystemd0 \ - ca-certificates \ - dbus \ - iptables \ - iproute2 \ - kmod \ - locales \ - sudo \ - udev && \ - \ - # Prevents journald from reading kernel messages from /dev/kmsg - echo "ReadKMsg=no" >> /etc/systemd/journald.conf && \ - \ - # Housekeeping - apt-get clean -y && \ - rm -rf \ - /var/cache/debconf/* \ - /var/lib/apt/lists/* \ - /var/log/* \ - /tmp/* \ - /var/tmp/* \ - /usr/share/local/* && \ - \ - # Create default 'ubuntu/ubuntu' user - echo "ubuntu:ubuntu" | chpasswd && adduser ubuntu sudo - -# Disable systemd services/units that are unnecessary within a container. -RUN systemctl mask systemd-udevd.service \ - systemd-udevd-kernel.socket \ - systemd-udevd-control.socket \ - systemd-modules-load.service \ - sys-kernel-config.mount \ - sys-kernel-debug.mount \ - sys-kernel-tracing.mount \ - e2scrub_reap.service - -# Make use of stopsignal (instead of sigterm) to stop systemd containers. -STOPSIGNAL SIGRTMIN+3 - -# Set systemd as entrypoint. -ENTRYPOINT [ "/sbin/init", "--log-level=err" ] - -###################### END OF STOCK NESTYBOX SYSTEMD CONTAINER ############################### - -# Install docker. -RUN curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh \ - # Add user "ubuntu" to the Docker group - && usermod -a -G docker ubuntu -ADD https://raw.githubusercontent.com/docker/docker-ce/master/components/cli/contrib/completion/bash/docker /etc/bash_completion.d/docker.sh - -# Install sshd. 
-RUN apt install --no-install-recommends -y openssh-server \ - && rm -rf /var/lib/apt/lists/* \ - && mkdir /home/ubuntu/.ssh \ - && chown ubuntu:ubuntu /home/ubuntu/.ssh \ - && echo 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDagCvr/+CA1jmFaJf+e9+Kw6iwfhvaKOpfbGEl5zLgB+rum5L4Kga6Jow1gLQeMnAHfqc2IgpsU4t04c8PYApAt8AWNDL+KxMiFytfjKfJ2DZJA73CYkFnkfnMtU+ki+JG9dAHd6m7ShtCSzE5n6EDO2yWCVWQfqE3dcnpwrymSWkJYrbxzeOixiNZ4f1nD9ddvFvTWGB4l+et5SWgeIaYgJYDqTI2teRt9ytJiDGrCWXs9olHsCZOL6TEJPUQmNekwBkjMAZ4TmbBMjwbUlIxOpW2UxzlONcNn7IlRcGQg0Gdbkpo/zOlCNXsvacvnphDk5vKKaQj+aQiG916LU5P charlie@aztecprotocol.com' >> /home/ubuntu/.ssh/authorized_keys \ - && echo 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDb5OVc+9S9nXx3/34F7eLVXjoPgQ3YHSdlfhTb8WflAGmpKJTLxtAYngtDBvhKofH5HrjPPkBWxOHP9KOTo0jxUQSr0suMpggLLOHuIrCszJKXIVi7whnQ4p2RHyzyS2ANwmpxWZmYxfgamzYst9JIvQYJgAPjTFweKBsG/Lc03knJ/qgz9BHqDSZHweMTnhv1dJNhZRKy1Lxyl/CjXKF374i8qbzVWJMeDgLEH6C84vCeaH89KMmM9J0+T31uEqxzIhZxNmRz9v+x6cQAVJtGi9OIveGT9qUQwKXZsk6/zorzxV+NiIvTWHxIn9epX/FUjgUmb/jFvpbEjDkbIngj adomurad@localhost.localdomain' >> /home/ubuntu/.ssh/authorized_keys \ - && echo 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFKlUeOh9DyAL85NJ10LE+nyfi8oYm+CwxQ9JMaB6H+t root@mainframe' >> /home/ubuntu/.ssh/authorized_keys \ - && chown ubuntu:ubuntu /home/ubuntu/.ssh/authorized_keys - -# Install google authenticator for setting up 2fa. 
-RUN apt update && apt install -y libpam-google-authenticator - -EXPOSE 22 diff --git a/build-images/Earthfile b/build-images/Earthfile index f546ee73556..7bdc6934d84 100644 --- a/build-images/Earthfile +++ b/build-images/Earthfile @@ -1,4 +1,412 @@ VERSION 0.8 +base-build: + FROM ubuntu:noble + RUN export DEBIAN_FRONTEND="noninteractive" \ + && apt update && apt install --no-install-recommends -y \ + build-essential \ + ca-certificates \ + bash \ + clang \ + cmake \ + make \ + ninja-build \ + git \ + cargo \ + curl \ + python3 \ + wget \ + && apt-get -y autoremove \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + ARG TARGETARCH + SAVE IMAGE --push aztecprotocol/cache:base-build-1.0-$TARGETARCH + +######################################################################################################################## +# Build wasi-sdk. +wasi-sdk: + FROM +base-build + RUN git clone --depth 1 --recursive --branch wasi-sdk-22 https://github.com/WebAssembly/wasi-sdk.git \ + && mkdir -p /wasi-sdk/build/install/opt/wasi-sdk \ + && cd /wasi-sdk \ + && export MAKEFLAGS="-j$(nproc)" \ + && make build/llvm.BUILT \ + && make build/wasi-libc.BUILT \ + && make build/compiler-rt.BUILT \ + && make build/libcxx.BUILT \ + && make build/config.BUILT \ + && make build/version.BUILT \ + && mv build/install/opt/wasi-sdk /opt/wasi-sdk \ + && cd / && rm -rf /wasi-sdk + SAVE ARTIFACT /opt/wasi-sdk /opt/wasi-sdk + ARG TARGETARCH + SAVE IMAGE --push aztecprotocol/cache:wasi-sdk-22.0-$TARGETARCH + +######################################################################################################################## +# Build osxcross. 
+osxcross: + FROM +base-build + RUN apt update && apt-get install --no-install-recommends -y \ + binutils-multiarch-dev \ + libbz2-dev \ + libmpc-dev \ + libmpfr-dev \ + libgmp-dev \ + liblzma-dev \ + libpsi3-dev \ + libssl-dev \ + libxml2-dev \ + libz-dev \ + lzma-dev \ + patch \ + uuid-dev \ + xz-utils \ + zlib1g-dev \ + && apt-get -y autoremove \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + RUN git clone --depth=1 https://github.com/tpoechtrager/osxcross.git \ + && cd /osxcross \ + && git reset --hard ff8d100f3f026b4ffbe4ce96d8aac4ce06f1278b \ + && export OSX_SDK="MacOSX14.0.sdk" \ + && export OSX_SDK_URL="https://github.com/joseluisq/macosx-sdks/releases/download/14.0/${OSX_SDK}.tar.xz" \ + && curl -sSL "$OSX_SDK_URL" -o "./tarballs/$OSX_SDK.tar.xz" \ + && OSX_VERSION_MIN=14.0 UNATTENDED=1 ENABLE_COMPILER_RT_INSTALL=1 TARGET_DIR=/opt/osxcross ./build.sh \ + && rm -rf /osxcross /opt/osxcross/SDK/$OSX_SDK + SAVE ARTIFACT /opt/osxcross /opt/osxcross + ARG TARGETARCH + SAVE IMAGE --push aztecprotocol/cache:osxcross-14.0-$TARGETARCH + +######################################################################################################################## +# Build foundry. 
+foundry-build: + LET FOUNDRY_TAG = de33b6af53005037b463318d2628b5cfcaf39916 + FROM +base-build + RUN ulimit -n 65535 \ + && git clone --depth 1 --branch nightly-$FOUNDRY_TAG https://github.com/foundry-rs/foundry.git \ + && cd foundry \ + && cargo build --profile local \ + && mkdir -p /opt/foundry/bin \ + && for t in forge cast anvil chisel; do \ + mv ./target/local/$t /opt/foundry/bin/$t; \ + strip /opt/foundry/bin/$t; \ + done \ + && rm -rf /foundry + SAVE ARTIFACT /opt/foundry /opt/foundry + ARG TARGETARCH + SAVE IMAGE --push aztecprotocol/cache:foundry-build-$FOUNDRY_TAG-$TARGETARCH + +foundry: + BUILD +foundry-build + LET FOUNDRY_TAG = de33b6af53005037b463318d2628b5cfcaf39916 + ARG TARGETARCH + FROM ubuntu:noble + COPY +foundry-build/opt/foundry /opt/foundry + ENV PATH="/opt/foundry/bin:$PATH" + SAVE IMAGE --push aztecprotocol/foundry:$FOUNDRY_TAG-$TARGETARCH + +######################################################################################################################## +# This image contains *just* what's needed to perform a full build of the aztec project. +# It acts as the base image for all CI builds, and we build on it to produce a developer box. build: - FROM aztecprotocol/build:1.0 \ No newline at end of file + BUILD +wasi-sdk + BUILD +osxcross + BUILD +foundry + FROM +base-build + RUN apt update && \ + apt install -y \ + # Utils + curl \ + git \ + curl \ + wget \ + jq \ + gawk \ + unzip \ + netcat-openbsd \ + parallel \ + # C++ (clang=18, which we will move to. 16 is for current build.) + build-essential \ + cmake \ + ninja-build \ + clang \ + clang-16 \ + clang-format-16 \ + libc++-dev \ + libomp-dev \ + doxygen \ + # Node (18.19.1) + nodejs \ + npm \ + # Python (clang bindings for wasm bindgen.) + python3 \ + python3-clang \ + && apt-get -y autoremove \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + + # Install wasi-sdk. + COPY +wasi-sdk/opt/wasi-sdk /opt/wasi-sdk + + # Install osxcross. 
Requires developer to mount SDK from their mac host. + COPY +osxcross/opt/osxcross /opt/osxcross + ENV PATH="/opt/osxcross/bin:$PATH" + ENV LD_LIBRARY_PATH="/opt/osxcross/lib:$LD_LIBRARY_PATH" + + # Install foundry. + COPY +foundry-build/opt/foundry /opt/foundry + ENV PATH="/opt/foundry/bin:$PATH" + + # Install rust and cross-compilers. Noir specifically uses 1.74.1. + # We remove base-build's rust first. + # We give everyone write ownership so downstream boxes can write. + ENV RUSTUP_HOME=/opt/rust/rustup + ENV CARGO_HOME=/opt/rust/cargo + ENV PATH="/opt/rust/cargo/bin:$PATH" + RUN apt remove -y cargo rustc + RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.74.1 && \ + rustup target add wasm32-unknown-unknown wasm32-wasi aarch64-apple-darwin && \ + chmod -R a+w /opt/rust + + # Install yarn + RUN npm install --global yarn + + # Install solhint + RUN npm install --global solhint + + # Install aws cli. + RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-$(uname -m).zip" -o "awscliv2.zip" && \ + unzip awscliv2.zip && \ + ./aws/install --bin-dir /usr/local/bin --install-dir /usr/local/aws-cli --update && \ + rm -rf aws awscliv2.zip + + # Install terraform. + RUN curl -fsSL https://releases.hashicorp.com/terraform/1.7.5/terraform_1.7.5_linux_$(dpkg --print-architecture).zip -o terraform.zip \ + && unzip terraform.zip -d /usr/local/bin \ + && chmod +x /usr/local/bin/terraform \ + && rm terraform.zip + + ARG TARGETARCH + SAVE IMAGE --push aztecprotocol/build:1.0-$TARGETARCH + +######################################################################################################################## +# We want to produce downstream images: devbox and sysbox. This image is the base image for each. +# It contains a suite of tools that developers might use to develop aztec. +basebox: + BUILD +build + FROM +build + RUN yes | unminimize + + # Install stuff devs need. 
+ RUN apt update && \ + apt install -y \ + zsh \ + fzf \ + libfuse2 \ + iproute2 \ + iputils-ping \ + telnet \ + lsb-release \ + tmux \ + vim \ + software-properties-common \ + gnupg \ + htop \ + cgroup-tools \ + neovim \ + sudo \ + clangd-16 \ + man \ + python3-blessed \ + && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + + # Install earthly. + RUN wget https://github.com/earthly/earthly/releases/latest/download/earthly-linux-$(dpkg --print-architecture) -O /usr/local/bin/earthly && \ + chmod +x /usr/local/bin/earthly + + # Install gh (github cli). + RUN mkdir -p -m 755 /etc/apt/keyrings && wget -qO- https://cli.github.com/packages/githubcli-archive-keyring.gpg > /etc/apt/keyrings/githubcli-archive-keyring.gpg \ + && chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \ + && echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null \ + && apt update \ + && apt install gh -y + + # Install gt (graphite). + RUN npm install -g @withgraphite/graphite-cli@stable + + # fzf seems to not install this file for some reason. + COPY ./key-bindings.zsh /usr/share/doc/fzf/examples/key-bindings.zsh + + # Sets LANG explicitly. Ensures tmux shows unicode symbols. + ENV LANG=C.UTF-8 + # Ensure we get color terminal. + ENV TERM=xterm-256color + + # Cargo home and bin path should be set within users home dir at login. + RUN echo 'export CARGO_HOME="$HOME/.cargo"' >> /etc/zsh/zshenv + RUN echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> /etc/zsh/zshenv + + # sudo group can sudo without password. 
+ RUN echo '%sudo ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers + + ARG TARGETARCH + SAVE IMAGE --push aztecprotocol/cache:basebox-1.0-$TARGETARCH + +######################################################################################################################## +# This devbox container can be used to provide a full development environment. +# +# It can be used as a dev container: +# - Configuration in .devcontainer/devcontainer.json. +# - To run locally install "Dev Containers" plugin in vscode. +# - To run in GitHub codespaces, visit the repo in github, press '.', and open the terminal. +# +# It can be used independently: +# - The user should use the ./run.sh script to launch. +# - A persistent volume will be mounted to /home/aztec-dev. +# - It provides docker via the hosts docker instance, mounted at /var/lib/docker.sock. +# - It uses an entrypoint script at runtime to perform uid/gid alignment with the host and drop into user account. +devbox: + BUILD +basebox + FROM +basebox + + # Install gosu so we can start as root, adjust uid/gid, and then use gosu to become aztec-dev. + RUN apt install -y gosu + + # Detect if the host machine is Mac, if so set an env var, and disable prompts vcs info for performance. + RUN echo ' \ + if mount | grep -q /host_mark/Users; then \ + export HOST_OSTYPE=darwin; \ + export PROMPT_LEAN_VCS=0; \ + fi \ + ' >> /etc/zsh/zshrc + + # Create the user we'll run as (remove ubuntu first). + RUN userdel -r ubuntu && useradd --shell /bin/zsh -G sudo -m aztec-dev + WORKDIR /home/aztec-dev + + # Add dotfiles. + COPY --chown=aztec-dev:aztec-dev home . + + # The .npmrc config is set to install global bins here, update PATH. + ENV PATH=/home/aztec-dev/.npm-global/bin:$PATH + + # Need to ensure correct permissions, under some conditions these would otherwise be created by root. 
+ RUN mkdir .vscode-server .npm-global .ssh && chown aztec-dev:aztec-dev .* + + # Install docker using docker-in-docker dev-container feature install script, modified to permit noble. + COPY install-docker.sh /install-docker.sh + RUN /install-docker.sh && rm /install-docker.sh + + # Use as entrypoint when running in an environment that requires uid/gid alignment (e.g. vanilla linux docker). + COPY ./entrypoint.sh /entrypoint.sh + ENTRYPOINT ["/entrypoint.sh"] + CMD ["/bin/zsh"] + + ARG TARGETARCH + SAVE IMAGE --push aztecprotocol/devbox:1.0-$TARGETARCH + # Save it without the arch tag as this is what's referenced in devcontainer.json + SAVE IMAGE aztecprotocol/devbox:1.0 + +devbox-manifest: + LET VERSION = 1.0 + ARG TARGETARCH + WAIT + BUILD +devbox + END + LOCALLY + RUN docker push aztecprotocol/devbox:$VERSION-$TARGETARCH + RUN docker manifest rm aztecprotocol/devbox:$VERSION || true + RUN docker manifest create aztecprotocol/devbox:$VERSION \ + --amend aztecprotocol/devbox:$VERSION-amd64 \ + --amend aztecprotocol/devbox:$VERSION-arm64 + RUN docker manifest push aztecprotocol/devbox:$VERSION + +######################################################################################################################## +# This sysbox container can be used to provide a full development environment. +# It's more advanced than devbox in that it uses nestybox's sysbox container runtime to provide more of a vm experience. +# It's used primarily by internal aztec developers who have sysboxes running on a powerful underlying mainframe. +# It provides better isolation and security guarantees than a plain devbox. 
+sysbox: + FROM +basebox + + ###################### START OF STOCK NESTYBOX SYSTEMD CONTAINER ############################### + # + # Systemd installation + # + RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + systemd \ + systemd-sysv \ + libsystemd0 \ + ca-certificates \ + dbus \ + iptables \ + iproute2 \ + kmod \ + locales \ + sudo \ + udev && \ + \ + # Prevents journald from reading kernel messages from /dev/kmsg + echo "ReadKMsg=no" >> /etc/systemd/journald.conf && \ + \ + # Housekeeping + apt-get clean -y && \ + rm -rf \ + /var/cache/debconf/* \ + /var/lib/apt/lists/* \ + /var/log/* \ + /tmp/* \ + /var/tmp/* \ + /usr/share/local/* && \ + \ + # Create default 'ubuntu/ubuntu' user + echo "ubuntu:ubuntu" | chpasswd && adduser ubuntu sudo + + # Disable systemd services/units that are unnecessary within a container. + RUN systemctl mask systemd-udevd.service \ + systemd-udevd-kernel.socket \ + systemd-udevd-control.socket \ + systemd-modules-load.service \ + sys-kernel-config.mount \ + sys-kernel-debug.mount \ + sys-kernel-tracing.mount \ + e2scrub_reap.service + + # Make use of stopsignal (instead of sigterm) to stop systemd containers. + STOPSIGNAL SIGRTMIN+3 + + # Set systemd as entrypoint. + ENTRYPOINT [ "/sbin/init", "--log-level=err" ] + + ###################### END OF STOCK NESTYBOX SYSTEMD CONTAINER ############################### + + # Install docker. + RUN curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh \ + # Add user "ubuntu" to the Docker group + && usermod -a -G docker ubuntu + ADD https://raw.githubusercontent.com/docker/docker-ce/master/components/cli/contrib/completion/bash/docker /etc/bash_completion.d/docker.sh + + # Install sshd. 
+ RUN apt install --no-install-recommends -y openssh-server \ + && rm -rf /var/lib/apt/lists/* \ + && mkdir /home/ubuntu/.ssh \ + && chown ubuntu:ubuntu /home/ubuntu/.ssh \ + && echo 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDagCvr/+CA1jmFaJf+e9+Kw6iwfhvaKOpfbGEl5zLgB+rum5L4Kga6Jow1gLQeMnAHfqc2IgpsU4t04c8PYApAt8AWNDL+KxMiFytfjKfJ2DZJA73CYkFnkfnMtU+ki+JG9dAHd6m7ShtCSzE5n6EDO2yWCVWQfqE3dcnpwrymSWkJYrbxzeOixiNZ4f1nD9ddvFvTWGB4l+et5SWgeIaYgJYDqTI2teRt9ytJiDGrCWXs9olHsCZOL6TEJPUQmNekwBkjMAZ4TmbBMjwbUlIxOpW2UxzlONcNn7IlRcGQg0Gdbkpo/zOlCNXsvacvnphDk5vKKaQj+aQiG916LU5P charlie@aztecprotocol.com' >> /home/ubuntu/.ssh/authorized_keys \ + && echo 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDb5OVc+9S9nXx3/34F7eLVXjoPgQ3YHSdlfhTb8WflAGmpKJTLxtAYngtDBvhKofH5HrjPPkBWxOHP9KOTo0jxUQSr0suMpggLLOHuIrCszJKXIVi7whnQ4p2RHyzyS2ANwmpxWZmYxfgamzYst9JIvQYJgAPjTFweKBsG/Lc03knJ/qgz9BHqDSZHweMTnhv1dJNhZRKy1Lxyl/CjXKF374i8qbzVWJMeDgLEH6C84vCeaH89KMmM9J0+T31uEqxzIhZxNmRz9v+x6cQAVJtGi9OIveGT9qUQwKXZsk6/zorzxV+NiIvTWHxIn9epX/FUjgUmb/jFvpbEjDkbIngj adomurad@localhost.localdomain' >> /home/ubuntu/.ssh/authorized_keys \ + && echo 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFKlUeOh9DyAL85NJ10LE+nyfi8oYm+CwxQ9JMaB6H+t root@mainframe' >> /home/ubuntu/.ssh/authorized_keys \ + && chown ubuntu:ubuntu /home/ubuntu/.ssh/authorized_keys + + # Install google authenticator for setting up 2fa. + RUN apt update && apt install -y libpam-google-authenticator + + # We login to sysbox via ssh. This loses env vars set with ENV, so add them here. + # Sets RUSTUP_HOME. + # Adds foundry and cargo bin dirs to PATH. 
+ COPY environment /etc/environment + + EXPOSE 22 + + ARG TARGETARCH + SAVE IMAGE aztecprotocol/sysbox:1.0-$TARGETARCH + SAVE IMAGE aztecprotocol/sysbox:1.0 \ No newline at end of file diff --git a/build-images/Makefile b/build-images/Makefile deleted file mode 100755 index 24934a9fa0a..00000000000 --- a/build-images/Makefile +++ /dev/null @@ -1,58 +0,0 @@ -ARCH := $(shell uname -m | sed 's/aarch64/arm64/') -FOUNDRY_TAG := de33b6af53005037b463318d2628b5cfcaf39916 -VERSION := 1.0 - -wasi-sdk: - docker build -t aztecprotocol/wasi-sdk:$(ARCH)-22.0 --target wasi-sdk --push . - docker manifest create aztecprotocol/wasi-sdk:22.0 \ - --amend aztecprotocol/wasi-sdk:x86_64-22.0 \ - --amend aztecprotocol/wasi-sdk:arm64-22.0 - docker manifest push aztecprotocol/wasi-sdk:22.0 - -foundry: - docker build -t aztecprotocol/foundry:$(ARCH)-$(FOUNDRY_TAG) --build-arg TAG=$(FOUNDRY_TAG) --target foundry --push . - docker tag aztecprotocol/foundry:$(ARCH)-$(FOUNDRY_TAG) aztecprotocol/foundry:$(FOUNDRY_TAG) - docker manifest rm aztecprotocol/foundry:$(FOUNDRY_TAG) - docker manifest create aztecprotocol/foundry:$(FOUNDRY_TAG) \ - --amend aztecprotocol/foundry:x86_64-$(FOUNDRY_TAG) \ - --amend aztecprotocol/foundry:arm64-$(FOUNDRY_TAG) - docker manifest push aztecprotocol/foundry:$(FOUNDRY_TAG) - -osxcross: - docker build -t aztecprotocol/osxcross:$(ARCH)-14.0 --target osxcross --push . - docker manifest rm aztecprotocol/osxcross:14.0 - docker manifest create aztecprotocol/osxcross:14.0 \ - --amend aztecprotocol/osxcross:x86_64-14.0 \ - --amend aztecprotocol/osxcross:arm64-14.0 - docker manifest push aztecprotocol/osxcross:14.0 - -build: - docker build -t aztecprotocol/build:$(ARCH)-$(VERSION) --target build . 
- docker tag aztecprotocol/build:$(ARCH)-$(VERSION) aztecprotocol/build - -build-push: build - docker push aztecprotocol/build:$(ARCH)-$(VERSION) - docker manifest rm aztecprotocol/build:$(VERSION) - docker manifest create aztecprotocol/build:$(VERSION) \ - --amend aztecprotocol/build:x86_64-$(VERSION) \ - --amend aztecprotocol/build:arm64-$(VERSION) - docker manifest push aztecprotocol/build:$(VERSION) - -devbox: - docker build -t aztecprotocol/devbox:$(ARCH)-$(VERSION) --target devbox . - docker tag aztecprotocol/devbox:$(ARCH)-$(VERSION) aztecprotocol/devbox - -devbox-push: devbox - docker push aztecprotocol/devbox:$(ARCH)-$(VERSION) - docker manifest rm aztecprotocol/devbox:$(VERSION) - docker manifest create aztecprotocol/devbox:$(VERSION) \ - --amend aztecprotocol/devbox:x86_64-$(VERSION) \ - --amend aztecprotocol/devbox:arm64-$(VERSION) - docker manifest push aztecprotocol/devbox:$(VERSION) - -sysbox: - docker build -t aztecprotocol/sysbox --target sysbox . - -all: build devbox sysbox - -.PHONY: all build devbox sysbox diff --git a/build-images/README.md b/build-images/README.md index 2ff02e1393f..d2824ba4564 100644 --- a/build-images/README.md +++ b/build-images/README.md @@ -2,10 +2,6 @@ To ensure a consistent environment for developers, and ease of getting started, we provide a development container. -## Install Docker - -If you don't already have docker installed, follow this guide: https://docs.docker.com/engine/install - ## Visual Studio Code If you use vscode, the simplest thing to do is install the "Dev Containers" plugin, and open the repo. @@ -25,3 +21,30 @@ Your repo will be mounted at `/workspaces/aztec-packages`, and your home directo This is also compatible with GitHub codespaces. Visit the repo at `http://github.com/aztecprotocol/aztec-packages`. Press `.`, and open a terminal window. You will be prompted to create a new machine. You can then continue to work within the browser, or reopen the codespace in your local vscode. 
+
+## Building the build image
+
+If for some reason you want to build the images such as devbox yourself, follow these steps:
+
+### Install Docker
+
+If you don't already have docker installed, follow this guide: https://docs.docker.com/engine/install
+
+### Install earthly
+
+We use earthly to build things, follow this guide: https://earthly.dev/get-earthly
+
+### Build The Dev Container
+
+If you want to build entirely from scratch, you can do:
+
+```
+$ earthly +devbox
+```
+
+This will take significant time and compute however, as it builds several toolchains from the ground up.
+If you have a reasonable internet connection, leveraging the cache to avoid building may be preferable.
+
+```
+$ earthly --use-inline-cache +devbox
+```
\ No newline at end of file
diff --git a/build-images/entrypoint.sh b/build-images/entrypoint.sh
index d6f36b79dd0..52b676dad3a 100755
--- a/build-images/entrypoint.sh
+++ b/build-images/entrypoint.sh
@@ -5,18 +5,6 @@ set -e
 [ -n "$LOCAL_GROUP_ID" ] && groupmod -g $LOCAL_GROUP_ID aztec-dev
 [ -n "$LOCAL_USER_ID" ] && usermod -u $LOCAL_USER_ID aztec-dev &> /dev/null
 
-# Find the group id of the docker socket, add aztec-dev to that group, or create the group and add aztec-dev.
-if [ -S /var/run/docker.sock ]; then
-    SOCKET_GID=$(stat -c %g /var/run/docker.sock)
-    EXISTING_GROUP=$(getent group $SOCKET_GID | cut -d: -f1)
-    if [ -z "$EXISTING_GROUP" ]; then
-        # No existing group with that gid, so create one called 'docker' and add the user to it.
-        groupadd -g $SOCKET_GID docker
-        usermod -aG docker aztec-dev
-    else
-        # A group with the desired gid already exists, add the user to it.
- usermod -aG $EXISTING_GROUP aztec-dev - fi -fi +/usr/local/share/docker-init.sh &> /dev/null exec /usr/sbin/gosu aztec-dev "$@" \ No newline at end of file diff --git a/build-images/install-docker.sh b/build-images/install-docker.sh new file mode 100755 index 00000000000..4a433a02220 --- /dev/null +++ b/build-images/install-docker.sh @@ -0,0 +1,624 @@ +#!/usr/bin/env bash +#------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. +#------------------------------------------------------------------------------------------------------------- +# +# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker-in-docker.md +# Maintainer: The Dev Container spec maintainers + + +DOCKER_VERSION="${VERSION:-"latest"}" # The Docker/Moby Engine + CLI should match in version +USE_MOBY="${MOBY:-"true"}" +MOBY_BUILDX_VERSION="${MOBYBUILDXVERSION:-"latest"}" +DOCKER_DASH_COMPOSE_VERSION="${DOCKERDASHCOMPOSEVERSION:-"latest"}" #latest, v2 or none +AZURE_DNS_AUTO_DETECTION="${AZUREDNSAUTODETECTION:-"true"}" +DOCKER_DEFAULT_ADDRESS_POOL="${DOCKERDEFAULTADDRESSPOOL:-""}" +USERNAME="${USERNAME:-"${_REMOTE_USER:-"automatic"}"}" +INSTALL_DOCKER_BUILDX="${INSTALLDOCKERBUILDX:-"true"}" +INSTALL_DOCKER_COMPOSE_SWITCH="${INSTALLDOCKERCOMPOSESWITCH:-"true"}" +MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc" +DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal jammy noble" +DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="bookworm buster bullseye bionic focal hirsute impish jammy noble" + +# Default: Exit on any failure. +set -e + +# Clean up +rm -rf /var/lib/apt/lists/* + +# Setup STDERR. +err() { + echo "(!) $*" >&2 +} + +if [ "$(id -u)" -ne 0 ]; then + err 'Script must be run as root. 
Use sudo, su, or add "USER root" to your Dockerfile before running this script.' + exit 1 +fi + +################### +# Helper Functions +# See: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/shared/utils.sh +################### + +# Determine the appropriate non-root user +if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then + USERNAME="" + POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") + for CURRENT_USER in "${POSSIBLE_USERS[@]}"; do + if id -u ${CURRENT_USER} > /dev/null 2>&1; then + USERNAME=${CURRENT_USER} + break + fi + done + if [ "${USERNAME}" = "" ]; then + USERNAME=root + fi +elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then + USERNAME=root +fi + +apt_get_update() +{ + if [ "$(find /var/lib/apt/lists/* | wc -l)" = "0" ]; then + echo "Running apt-get update..." + apt-get update -y + fi +} + +# Checks if packages are installed and installs them if not +check_packages() { + if ! dpkg -s "$@" > /dev/null 2>&1; then + apt_get_update + apt-get -y install --no-install-recommends "$@" + fi +} + +# Figure out correct version of a three part version number is not passed +find_version_from_git_tags() { + local variable_name=$1 + local requested_version=${!variable_name} + if [ "${requested_version}" = "none" ]; then return; fi + local repository=$2 + local prefix=${3:-"tags/v"} + local separator=${4:-"."} + local last_part_optional=${5:-"false"} + if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then + local escaped_separator=${separator//./\\.} + local last_part + if [ "${last_part_optional}" = "true" ]; then + last_part="(${escaped_separator}[0-9]+)?" + else + last_part="${escaped_separator}[0-9]+" + fi + local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$" + local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." 
| sort -rV)" + if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then + declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)" + else + set +e + declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")" + set -e + fi + fi + if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then + err "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2 + exit 1 + fi + echo "${variable_name}=${!variable_name}" +} + +# Use semver logic to decrement a version number then look for the closest match +find_prev_version_from_git_tags() { + local variable_name=$1 + local current_version=${!variable_name} + local repository=$2 + # Normally a "v" is used before the version number, but support alternate cases + local prefix=${3:-"tags/v"} + # Some repositories use "_" instead of "." for version number part separation, support that + local separator=${4:-"."} + # Some tools release versions that omit the last digit (e.g. go) + local last_part_optional=${5:-"false"} + # Some repositories may have tags that include a suffix (e.g. actions/node-versions) + local version_suffix_regex=$6 + # Try one break fix version number less if we get a failure. Use "set +e" since "set -e" can cause failures in valid scenarios. 
+ set +e + major="$(echo "${current_version}" | grep -oE '^[0-9]+' || echo '')" + minor="$(echo "${current_version}" | grep -oP '^[0-9]+\.\K[0-9]+' || echo '')" + breakfix="$(echo "${current_version}" | grep -oP '^[0-9]+\.[0-9]+\.\K[0-9]+' 2>/dev/null || echo '')" + + if [ "${minor}" = "0" ] && [ "${breakfix}" = "0" ]; then + ((major=major-1)) + declare -g ${variable_name}="${major}" + # Look for latest version from previous major release + find_version_from_git_tags "${variable_name}" "${repository}" "${prefix}" "${separator}" "${last_part_optional}" + # Handle situations like Go's odd version pattern where "0" releases omit the last part + elif [ "${breakfix}" = "" ] || [ "${breakfix}" = "0" ]; then + ((minor=minor-1)) + declare -g ${variable_name}="${major}.${minor}" + # Look for latest version from previous minor release + find_version_from_git_tags "${variable_name}" "${repository}" "${prefix}" "${separator}" "${last_part_optional}" + else + ((breakfix=breakfix-1)) + if [ "${breakfix}" = "0" ] && [ "${last_part_optional}" = "true" ]; then + declare -g ${variable_name}="${major}.${minor}" + else + declare -g ${variable_name}="${major}.${minor}.${breakfix}" + fi + fi + set -e +} + +# Function to fetch the version released prior to the latest version +get_previous_version() { + local url=$1 + local repo_url=$2 + local variable_name=$3 + prev_version=${!variable_name} + + output=$(curl -s "$repo_url"); + message=$(echo "$output" | jq -r '.message') + + if [[ $message == "API rate limit exceeded"* ]]; then + echo -e "\nAn attempt to find latest version using GitHub Api Failed... \nReason: ${message}" + echo -e "\nAttempting to find latest version using GitHub tags." + find_prev_version_from_git_tags prev_version "$url" "tags/v" + declare -g ${variable_name}="${prev_version}" + else + echo -e "\nAttempting to find latest version using GitHub Api." 
+ version=$(echo "$output" | jq -r '.tag_name') + declare -g ${variable_name}="${version#v}" + fi + echo "${variable_name}=${!variable_name}" +} + +get_github_api_repo_url() { + local url=$1 + echo "${url/https:\/\/github.com/https:\/\/api.github.com\/repos}/releases/latest" +} + +########################################### +# Start docker-in-docker installation +########################################### + +# Ensure apt is in non-interactive to avoid prompts +export DEBIAN_FRONTEND=noninteractive + + +# Source /etc/os-release to get OS info +. /etc/os-release +# Fetch host/container arch. +architecture="$(dpkg --print-architecture)" + +# Check if distro is supported +if [ "${USE_MOBY}" = "true" ]; then + if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then + err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution" + err "Support distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" + exit 1 + fi + echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}'" +else + if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then + err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, please choose a compatible OS distribution" + err "Support distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" + exit 1 + fi + echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}'" +fi + +# Install dependencies +check_packages apt-transport-https curl ca-certificates pigz iptables gnupg2 dirmngr wget jq +if ! 
type git > /dev/null 2>&1; then + check_packages git +fi + +# Swap to legacy iptables for compatibility +if type iptables-legacy > /dev/null 2>&1; then + update-alternatives --set iptables /usr/sbin/iptables-legacy + update-alternatives --set ip6tables /usr/sbin/ip6tables-legacy +fi + + + +# Set up the necessary apt repos (either Microsoft's or Docker's) +if [ "${USE_MOBY}" = "true" ]; then + + # Name of open source engine/cli + engine_package_name="moby-engine" + cli_package_name="moby-cli" + + # Import key safely and import Microsoft apt repo + curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg + echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list +else + # Name of licensed engine/cli + engine_package_name="docker-ce" + cli_package_name="docker-ce-cli" + + # Import key safely and import Docker apt repo + curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg + echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list +fi + +# Refresh apt lists +apt-get update + +# Soft version matching +if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then + # Empty, meaning grab whatever "latest" is in apt repo + engine_version_suffix="" + cli_version_suffix="" +else + # Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...) 
+ docker_version_dot_escaped="${DOCKER_VERSION//./\\.}" + docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}" + # Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/ + docker_version_regex="^(.+:)?${docker_version_dot_plus_escaped}([\\.\\+ ~:-]|$)" + set +e # Don't exit if finding version fails - will handle gracefully + cli_version_suffix="=$(apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")" + engine_version_suffix="=$(apt-cache madison ${engine_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")" + set -e + if [ -z "${engine_version_suffix}" ] || [ "${engine_version_suffix}" = "=" ] || [ -z "${cli_version_suffix}" ] || [ "${cli_version_suffix}" = "=" ] ; then + err "No full or partial Docker / Moby version match found for \"${DOCKER_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). 
Available versions:" + apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+' + exit 1 + fi + echo "engine_version_suffix ${engine_version_suffix}" + echo "cli_version_suffix ${cli_version_suffix}" +fi + +# Version matching for moby-buildx +if [ "${USE_MOBY}" = "true" ]; then + if [ "${MOBY_BUILDX_VERSION}" = "latest" ]; then + # Empty, meaning grab whatever "latest" is in apt repo + buildx_version_suffix="" + else + buildx_version_dot_escaped="${MOBY_BUILDX_VERSION//./\\.}" + buildx_version_dot_plus_escaped="${buildx_version_dot_escaped//+/\\+}" + buildx_version_regex="^(.+:)?${buildx_version_dot_plus_escaped}([\\.\\+ ~:-]|$)" + set +e + buildx_version_suffix="=$(apt-cache madison moby-buildx | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${buildx_version_regex}")" + set -e + if [ -z "${buildx_version_suffix}" ] || [ "${buildx_version_suffix}" = "=" ]; then + err "No full or partial moby-buildx version match found for \"${MOBY_BUILDX_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). Available versions:" + apt-cache madison moby-buildx | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+' + exit 1 + fi + echo "buildx_version_suffix ${buildx_version_suffix}" + fi +fi + +# Install Docker / Moby CLI if not already installed +if type docker > /dev/null 2>&1 && type dockerd > /dev/null 2>&1; then + echo "Docker / Moby CLI and Engine already installed." +else + if [ "${USE_MOBY}" = "true" ]; then + # Install engine + set +e # Handle error gracefully + apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx${buildx_version_suffix} moby-engine${engine_version_suffix} + exit_code=$? + set -e + + if [ ${exit_code} -ne 0 ]; then + err "Packages for moby not available in OS ${ID} ${VERSION_CODENAME} (${architecture}). To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS version (eg: 'ubuntu-20.04')." 
+ exit 1 + fi + + # Install compose + apt-get -y install --no-install-recommends moby-compose || err "Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping." + else + apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix} docker-ce${engine_version_suffix} + # Install compose + apt-get -y install --no-install-recommends docker-compose-plugin || echo "(*) Package docker-compose-plugin (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping." + fi +fi + +echo "Finished installing docker / moby!" + +docker_home="/usr/libexec/docker" +cli_plugins_dir="${docker_home}/cli-plugins" + +# fallback for docker-compose +fallback_compose(){ + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for docker-compose v${compose_version}..." + get_previous_version "${url}" "${repo_url}" compose_version + echo -e "\nAttempting to install v${compose_version}" + curl -fsSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}" -o ${docker_compose_path} +} + +# If 'docker-compose' command is to be included +if [ "${DOCKER_DASH_COMPOSE_VERSION}" != "none" ]; then + case "${architecture}" in + amd64) target_compose_arch=x86_64 ;; + arm64) target_compose_arch=aarch64 ;; + *) + echo "(!) Docker in docker does not support machine architecture '$architecture'. Please use an x86-64 or ARM64 machine." + exit 1 + esac + + docker_compose_path="/usr/local/bin/docker-compose" + if [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then + err "The final Compose V1 release, version 1.29.2, was May 10, 2021. These packages haven't received any security updates since then. Use at your own risk." + INSTALL_DOCKER_COMPOSE_SWITCH="false" + + if [ "${target_compose_arch}" = "x86_64" ]; then + echo "(*) Installing docker compose v1..." 
+ curl -fsSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-Linux-x86_64" -o ${docker_compose_path} + chmod +x ${docker_compose_path} + + # Download the SHA256 checksum + DOCKER_COMPOSE_SHA256="$(curl -sSL "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-Linux-x86_64.sha256" | awk '{print $1}')" + echo "${DOCKER_COMPOSE_SHA256} ${docker_compose_path}" > docker-compose.sha256sum + sha256sum -c docker-compose.sha256sum --ignore-missing + elif [ "${VERSION_CODENAME}" = "bookworm" ]; then + err "Docker compose v1 is unavailable for 'bookworm' on Arm64. Kindly switch to use v2" + exit 1 + else + # Use pip to get a version that runs on this architecture + check_packages python3-minimal python3-pip libffi-dev python3-venv + echo "(*) Installing docker compose v1 via pip..." + export PYTHONUSERBASE=/usr/local + pip3 install --disable-pip-version-check --no-cache-dir --user "Cython<3.0" pyyaml wheel docker-compose --no-build-isolation + fi + else + compose_version=${DOCKER_DASH_COMPOSE_VERSION#v} + docker_compose_url="https://github.com/docker/compose" + find_version_from_git_tags compose_version "$docker_compose_url" "tags/v" + echo "(*) Installing docker-compose ${compose_version}..." 
+ curl -fsSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}" -o ${docker_compose_path} || { + if [[ $DOCKER_DASH_COMPOSE_VERSION == "latest" ]]; then + fallback_compose "$docker_compose_url" + else + echo -e "Error: Failed to install docker-compose v${compose_version}" + fi + } + + chmod +x ${docker_compose_path} + + # Download the SHA256 checksum + DOCKER_COMPOSE_SHA256="$(curl -sSL "https://github.com/docker/compose/releases/download/v${compose_version}/docker-compose-linux-${target_compose_arch}.sha256" | awk '{print $1}')" + echo "${DOCKER_COMPOSE_SHA256} ${docker_compose_path}" > docker-compose.sha256sum + sha256sum -c docker-compose.sha256sum --ignore-missing + + mkdir -p ${cli_plugins_dir} + cp ${docker_compose_path} ${cli_plugins_dir} + fi +fi + +# fallback method for compose-switch +fallback_compose-switch() { + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for compose-switch v${compose_switch_version}..." + get_previous_version "$url" "$repo_url" compose_switch_version + echo -e "\nAttempting to install v${compose_switch_version}" + curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch +} + +# Install docker-compose switch if not already installed - https://github.com/docker/compose-switch#manual-installation +if [ "${INSTALL_DOCKER_COMPOSE_SWITCH}" = "true" ] && ! type compose-switch > /dev/null 2>&1; then + if type docker-compose > /dev/null 2>&1; then + echo "(*) Installing compose-switch..." 
+ current_compose_path="$(which docker-compose)" + target_compose_path="$(dirname "${current_compose_path}")/docker-compose-v1" + compose_switch_version="latest" + compose_switch_url="https://github.com/docker/compose-switch" + find_version_from_git_tags compose_switch_version "$compose_switch_url" + curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch || fallback_compose-switch "$compose_switch_url" + chmod +x /usr/local/bin/compose-switch + # TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11 + # Setup v1 CLI as alternative in addition to compose-switch (which maps to v2) + mv "${current_compose_path}" "${target_compose_path}" + update-alternatives --install ${docker_compose_path} docker-compose /usr/local/bin/compose-switch 99 + update-alternatives --install ${docker_compose_path} docker-compose "${target_compose_path}" 1 + else + err "Skipping installation of compose-switch as docker compose is unavailable..." + fi +fi + +# If init file already exists, exit +if [ -f "/usr/local/share/docker-init.sh" ]; then + echo "/usr/local/share/docker-init.sh already exists, so exiting." + # Clean up + rm -rf /var/lib/apt/lists/* + exit 0 +fi +echo "docker-init doesn't exist, adding..." + +if ! cat /etc/group | grep -e "^docker:" > /dev/null 2>&1; then + groupadd -r docker +fi + +usermod -aG docker ${USERNAME} + +# fallback for docker/buildx +fallback_buildx() { + local url=$1 + local repo_url=$(get_github_api_repo_url "$url") + echo -e "\n(!) Failed to fetch the latest artifacts for docker buildx v${buildx_version}..." 
+ get_previous_version "$url" "$repo_url" buildx_version + buildx_file_name="buildx-v${buildx_version}.linux-${architecture}" + echo -e "\nAttempting to install v${buildx_version}" + wget https://github.com/docker/buildx/releases/download/v${buildx_version}/${buildx_file_name} +} + +if [ "${INSTALL_DOCKER_BUILDX}" = "true" ]; then + buildx_version="latest" + docker_buildx_url="https://github.com/docker/buildx" + find_version_from_git_tags buildx_version "$docker_buildx_url" "refs/tags/v" + echo "(*) Installing buildx ${buildx_version}..." + buildx_file_name="buildx-v${buildx_version}.linux-${architecture}" + + cd /tmp + wget https://github.com/docker/buildx/releases/download/v${buildx_version}/${buildx_file_name} || fallback_buildx "$docker_buildx_url" + + docker_home="/usr/libexec/docker" + cli_plugins_dir="${docker_home}/cli-plugins" + + mkdir -p ${cli_plugins_dir} + mv ${buildx_file_name} ${cli_plugins_dir}/docker-buildx + chmod +x ${cli_plugins_dir}/docker-buildx + + chown -R "${USERNAME}:docker" "${docker_home}" + chmod -R g+r+w "${docker_home}" + find "${docker_home}" -type d -print0 | xargs -n 1 -0 chmod g+s +fi + +tee /usr/local/share/docker-init.sh > /dev/null \ +<< EOF +#!/bin/sh +#------------------------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 
+#------------------------------------------------------------------------------------------------------------- + +set -e + +AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} +DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL} +EOF + +tee -a /usr/local/share/docker-init.sh > /dev/null \ +<< 'EOF' +dockerd_start="AZURE_DNS_AUTO_DETECTION=${AZURE_DNS_AUTO_DETECTION} DOCKER_DEFAULT_ADDRESS_POOL=${DOCKER_DEFAULT_ADDRESS_POOL} $(cat << 'INNEREOF' + # explicitly remove dockerd and containerd PID file to ensure that it can start properly if it was stopped uncleanly + find /run /var/run -iname 'docker*.pid' -delete || : + find /run /var/run -iname 'container*.pid' -delete || : + + # -- Start: dind wrapper script -- + # Maintained: https://github.com/moby/moby/blob/master/hack/dind + + export container=docker + + if [ -d /sys/kernel/security ] && ! mountpoint -q /sys/kernel/security; then + mount -t securityfs none /sys/kernel/security || { + echo >&2 'Could not mount /sys/kernel/security.' + echo >&2 'AppArmor detection and --privileged mode might break.' + } + fi + + # Mount /tmp (conditionally) + if ! mountpoint -q /tmp; then + mount -t tmpfs none /tmp + fi + + set_cgroup_nesting() + { + # cgroup v2: enable nesting + if [ -f /sys/fs/cgroup/cgroup.controllers ]; then + # move the processes from the root group to the /init group, + # otherwise writing subtree_control fails with EBUSY. + # An error during moving non-existent process (i.e., "cat") is ignored. + mkdir -p /sys/fs/cgroup/init + xargs -rn1 < /sys/fs/cgroup/cgroup.procs > /sys/fs/cgroup/init/cgroup.procs || : + # enable controllers + sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \ + > /sys/fs/cgroup/cgroup.subtree_control + fi + } + + # Set cgroup nesting, retrying if necessary + retry_cgroup_nesting=0 + + until [ "${retry_cgroup_nesting}" -eq "5" ]; + do + set +e + set_cgroup_nesting + + if [ $? -ne 0 ]; then + echo "(*) cgroup v2: Failed to enable nesting, retrying..." 
+ else + break + fi + + retry_cgroup_nesting=`expr $retry_cgroup_nesting + 1` + set -e + done + + # -- End: dind wrapper script -- + + # Handle DNS + set +e + cat /etc/resolv.conf | grep -i 'internal.cloudapp.net' > /dev/null 2>&1 + if [ $? -eq 0 ] && [ "${AZURE_DNS_AUTO_DETECTION}" = "true" ] + then + echo "Setting dockerd Azure DNS." + CUSTOMDNS="--dns 168.63.129.16" + else + echo "Not setting dockerd DNS manually." + CUSTOMDNS="" + fi + set -e + + if [ -z "$DOCKER_DEFAULT_ADDRESS_POOL" ] + then + DEFAULT_ADDRESS_POOL="" + else + DEFAULT_ADDRESS_POOL="--default-address-pool $DOCKER_DEFAULT_ADDRESS_POOL" + fi + + # Start docker/moby engine + ( dockerd $CUSTOMDNS $DEFAULT_ADDRESS_POOL > /tmp/dockerd.log 2>&1 ) & +INNEREOF +)" + +sudo_if() { + COMMAND="$*" + + if [ "$(id -u)" -ne 0 ]; then + sudo $COMMAND + else + $COMMAND + fi +} + +retry_docker_start_count=0 +docker_ok="false" + +until [ "${docker_ok}" = "true" ] || [ "${retry_docker_start_count}" -eq "5" ]; +do + # Start using sudo if not invoked as root + if [ "$(id -u)" -ne 0 ]; then + sudo /bin/sh -c "${dockerd_start}" + else + eval "${dockerd_start}" + fi + + retry_count=0 + until [ "${docker_ok}" = "true" ] || [ "${retry_count}" -eq "5" ]; + do + sleep 1s + set +e + docker info > /dev/null 2>&1 && docker_ok="true" + set -e + + retry_count=`expr $retry_count + 1` + done + + if [ "${docker_ok}" != "true" ] && [ "${retry_docker_start_count}" != "4" ]; then + echo "(*) Failed to start docker, retrying..." + set +e + sudo_if pkill dockerd + sudo_if pkill containerd + set -e + fi + + retry_docker_start_count=`expr $retry_docker_start_count + 1` +done + +# Execute whatever commands were passed in (if any). This allows us +# to set this script to ENTRYPOINT while still executing the default CMD. +exec "$@" +EOF + +chmod +x /usr/local/share/docker-init.sh +chown ${USERNAME}:root /usr/local/share/docker-init.sh + +# Clean up +rm -rf /var/lib/apt/lists/* + +echo 'docker-in-docker-debian script has completed!' 
diff --git a/build-images/run.sh b/build-images/run.sh index 97132414f76..2e54ba1ae90 100755 --- a/build-images/run.sh +++ b/build-images/run.sh @@ -2,6 +2,18 @@ set -eu cd $(dirname $0) +hostname=$(hostname) + +# Define next hostname based on this hostname for nesting. devbox, devbox1, etc. +if [[ $hostname == "devbox" ]]; then + hostname="devbox1" +elif [[ $hostname =~ ^devbox([0-9]+)$ ]]; then + num_suffix="${BASH_REMATCH[1]}" + new_num=$((num_suffix + 1)) + hostname="devbox$new_num" +else + hostname="devbox" +fi # On linux we need to perform uid/gid alignment to ensure files modified on the host have the correct owner. # The entrypoint.sh script picks up these environment variables and adjusts the aztec-dev user accordingly. @@ -10,14 +22,24 @@ if [[ "$OSTYPE" == "linux"* ]]; then ID_ARGS="-e LOCAL_USER_ID=$(id -u) -e LOCAL_GROUP_ID=$(id -g)" fi -docker run \ - -ti --rm \ - --hostname devbox \ - -e SSH_CONNECTION=' ' \ - ${ID_ARGS:-} \ - -w/workspaces/aztec-packages \ - -v$PWD/..:/workspaces/aztec-packages \ - -vdevbox-home:/home/aztec-dev \ - -v$HOME/.ssh/id_rsa:/home/aztec-dev/.ssh/id_rsa:ro \ - -v/var/run/docker.sock:/var/run/docker.sock \ - aztecprotocol/devbox +if docker ps -a --format '{{.Names}}' | grep -q '^aztec-devbox$'; then + # Container already exists. Exec into a new shell. + docker exec -ti --user aztec-dev aztec-devbox /bin/zsh +else + # We override the docker config dir to ensure we don't conflict with vscodes dev-container. + # They share the same home dir, but vscode will add some credentials config that it needs to its docker config. 
+ docker run \ + -ti --rm \ + --name aztec-devbox \ + --hostname $hostname \ + -e SSH_CONNECTION=' ' \ + -e DOCKER_CONFIG=/home/aztec-dev/.docker-devbox \ + ${ID_ARGS:-} \ + -w/workspaces/aztec-packages \ + -v$PWD/..:/workspaces/aztec-packages \ + -vdevbox-home:/home/aztec-dev \ + -vdevbox-var-lib-docker:/var/lib/docker \ + -v$HOME/.ssh/id_rsa:/home/aztec-dev/.ssh/id_rsa:ro \ + --privileged \ + aztecprotocol/devbox:1.0 +fi \ No newline at end of file diff --git a/scripts/earthly-ci b/scripts/earthly-ci index e424c0a4201..fe625d87015 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -25,6 +25,12 @@ INCONSISTENT_GRAPH_STATE_COUNT=0 # Counter for 'inconsistent graph state' error MAX_ATTEMPTS=3 ATTEMPT_COUNT=0 +export EARTHLY_USE_INLINE_CACHE=true +if [ "$GITHUB_REF_NAME" == "master" ]; then + export EARTHLY_SAVE_INLINE_CACHE=true + export EARTHLY_PUSH=true +fi + # Handle earthly commands and retries while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do if earthly $@ 2>&1 | tee $OUTPUT_FILE >&2 ; then From 044d0fef3bbecf673c579bd63d2640dc81b35ba3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Thu, 9 May 2024 17:42:45 +0200 Subject: [PATCH 26/43] fix: temporarily revert to_radix blackbox (#6304) This reverts commit ac27376b9a0cdf0624a02d36c64ec25886b44b4a. 
--- .../dsl/acir_format/serde/acir.hpp | 74 ++----------------- .../noir-repo/acvm-repo/acir/codegen/acir.cpp | 56 +------------- .../acvm-repo/brillig/src/black_box.rs | 5 -- .../acvm-repo/brillig_vm/src/black_box.rs | 21 ------ .../src/brillig/brillig_gen/brillig_block.rs | 38 +++------- .../brillig/brillig_ir/codegen_intrinsic.rs | 62 +++++++++------- .../src/brillig/brillig_ir/debug_show.rs | 9 --- noir/noir-repo/noir_stdlib/src/field/bn254.nr | 57 ++++++-------- 8 files changed, 73 insertions(+), 249 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp index 683e4c62407..9fb0e2b3a35 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp @@ -686,6 +686,7 @@ struct BlackBoxOp { Program::HeapVector inputs; Program::HeapArray iv; Program::HeapArray key; + Program::MemoryAddress length; Program::HeapVector outputs; friend bool operator==(const AES128Encrypt&, const AES128Encrypt&); @@ -895,16 +896,6 @@ struct BlackBoxOp { static Sha256Compression bincodeDeserialize(std::vector); }; - struct ToRadix { - Program::MemoryAddress input; - uint32_t radix; - Program::HeapArray output; - - friend bool operator==(const ToRadix&, const ToRadix&); - std::vector bincodeSerialize() const; - static ToRadix bincodeDeserialize(std::vector); - }; - std::variant + Sha256Compression> value; friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); @@ -3949,6 +3939,9 @@ inline bool operator==(const BlackBoxOp::AES128Encrypt& lhs, const BlackBoxOp::A if (!(lhs.key == rhs.key)) { return false; } + if (!(lhs.length == rhs.length)) { + return false; + } if (!(lhs.outputs == rhs.outputs)) { return false; } @@ -5148,63 +5141,6 @@ Program::BlackBoxOp::Sha256Compression serde::Deserializable BlackBoxOp::ToRadix::bincodeSerialize() const -{ - auto serializer = serde::BincodeSerializer(); - 
serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); -} - -inline BlackBoxOp::ToRadix BlackBoxOp::ToRadix::bincodeDeserialize(std::vector input) -{ - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw_or_abort("Some input bytes were not read"); - } - return value; -} - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::BlackBoxOp::ToRadix& obj, - Serializer& serializer) -{ - serde::Serializable::serialize(obj.input, serializer); - serde::Serializable::serialize(obj.radix, serializer); - serde::Serializable::serialize(obj.output, serializer); -} - -template <> -template -Program::BlackBoxOp::ToRadix serde::Deserializable::deserialize( - Deserializer& deserializer) -{ - Program::BlackBoxOp::ToRadix obj; - obj.input = serde::Deserializable::deserialize(deserializer); - obj.radix = serde::Deserializable::deserialize(deserializer); - obj.output = serde::Deserializable::deserialize(deserializer); - return obj; -} - -namespace Program { - inline bool operator==(const BlockId& lhs, const BlockId& rhs) { if (!(lhs.value == rhs.value)) { diff --git a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp index 222a7da6399..5afcd68e987 100644 --- a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp +++ b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp @@ -870,17 +870,7 @@ namespace Program { static Sha256Compression bincodeDeserialize(std::vector); }; - struct ToRadix { - Program::MemoryAddress input; - uint32_t radix; - Program::HeapArray output; - - friend bool operator==(const ToRadix&, const ToRadix&); - std::vector bincodeSerialize() const; - static ToRadix bincodeDeserialize(std::vector); - }; - - std::variant value; + std::variant value; friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); 
std::vector bincodeSerialize() const; @@ -4303,50 +4293,6 @@ Program::BlackBoxOp::Sha256Compression serde::Deserializable BlackBoxOp::ToRadix::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline BlackBoxOp::ToRadix BlackBoxOp::ToRadix::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::BlackBoxOp::ToRadix &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.input, serializer); - serde::Serializable::serialize(obj.radix, serializer); - serde::Serializable::serialize(obj.output, serializer); -} - -template <> -template -Program::BlackBoxOp::ToRadix serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BlackBoxOp::ToRadix obj; - obj.input = serde::Deserializable::deserialize(deserializer); - obj.radix = serde::Deserializable::deserialize(deserializer); - obj.output = serde::Deserializable::deserialize(deserializer); - return obj; -} - namespace Program { inline bool operator==(const BlockId &lhs, const BlockId &rhs) { diff --git a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs index 9a66b428dc3..15abc19ed90 100644 --- a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs @@ -126,9 +126,4 @@ pub enum BlackBoxOp { hash_values: HeapVector, output: HeapArray, }, - ToRadix { - input: MemoryAddress, - radix: u32, - output: HeapArray, - }, } diff --git a/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs 
b/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs index d6ecd25f454..c999b5bf330 100644 --- a/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs @@ -5,7 +5,6 @@ use acvm_blackbox_solver::{ aes128_encrypt, blake2s, blake3, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, keccakf1600, sha256, sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError, }; -use num_bigint::BigUint; use crate::memory::MemoryValue; use crate::Memory; @@ -296,25 +295,6 @@ pub(crate) fn evaluate_black_box( memory.write_slice(memory.read_ref(output.pointer), &state); Ok(()) } - BlackBoxOp::ToRadix { input, radix, output } => { - let input: FieldElement = - memory.read(*input).try_into().expect("ToRadix input not a field"); - - let mut input = BigUint::from_bytes_be(&input.to_be_bytes()); - let radix = BigUint::from(*radix); - - let mut limbs: Vec = Vec::with_capacity(output.size); - - for _ in 0..output.size { - let limb = &input % &radix; - limbs.push(FieldElement::from_be_bytes_reduce(&limb.to_bytes_be()).into()); - input /= &radix; - } - - memory.write_slice(memory.read_ref(output.pointer), &limbs); - - Ok(()) - } } } @@ -341,7 +321,6 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { BlackBoxOp::BigIntToLeBytes { .. } => BlackBoxFunc::BigIntToLeBytes, BlackBoxOp::Poseidon2Permutation { .. } => BlackBoxFunc::Poseidon2Permutation, BlackBoxOp::Sha256Compression { .. } => BlackBoxFunc::Sha256Compression, - BlackBoxOp::ToRadix { .. 
} => unreachable!("ToRadix is not an ACIR BlackBoxFunc"), } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 6a4f9f5cc0e..f660c8e0b7a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -488,22 +488,8 @@ impl<'block> BrilligBlock<'block> { } Value::Intrinsic(Intrinsic::ToRadix(endianness)) => { let source = self.convert_ssa_single_addr_value(arguments[0], dfg); - - let radix: u32 = dfg - .get_numeric_constant(arguments[1]) - .expect("Radix should be known") - .try_to_u64() - .expect("Radix should fit in u64") - .try_into() - .expect("Radix should be u32"); - - let limb_count: usize = dfg - .get_numeric_constant(arguments[2]) - .expect("Limb count should be known") - .try_to_u64() - .expect("Limb count should fit in u64") - .try_into() - .expect("Limb count should fit in usize"); + let radix = self.convert_ssa_single_addr_value(arguments[1], dfg); + let limb_count = self.convert_ssa_single_addr_value(arguments[2], dfg); let results = dfg.instruction_results(instruction_id); @@ -525,8 +511,7 @@ impl<'block> BrilligBlock<'block> { .extract_vector(); // Update the user-facing slice length - self.brillig_context - .usize_const_instruction(target_len.address, limb_count.into()); + self.brillig_context.cast_instruction(target_len, limb_count); self.brillig_context.codegen_to_radix( source, @@ -539,13 +524,7 @@ impl<'block> BrilligBlock<'block> { } Value::Intrinsic(Intrinsic::ToBits(endianness)) => { let source = self.convert_ssa_single_addr_value(arguments[0], dfg); - let limb_count: usize = dfg - .get_numeric_constant(arguments[1]) - .expect("Limb count should be known") - .try_to_u64() - .expect("Limb count should fit in u64") - .try_into() - .expect("Limb count should fit in usize"); + let limb_count = 
self.convert_ssa_single_addr_value(arguments[1], dfg); let results = dfg.instruction_results(instruction_id); @@ -570,18 +549,21 @@ impl<'block> BrilligBlock<'block> { BrilligVariable::SingleAddr(..) => unreachable!("ICE: ToBits on non-array"), }; + let radix = self.brillig_context.make_constant_instruction(2_usize.into(), 32); + // Update the user-facing slice length - self.brillig_context - .usize_const_instruction(target_len.address, limb_count.into()); + self.brillig_context.cast_instruction(target_len, limb_count); self.brillig_context.codegen_to_radix( source, target_vector, - 2, + radix, limb_count, matches!(endianness, Endian::Big), 1, ); + + self.brillig_context.deallocate_single_addr(radix); } _ => { unreachable!("unsupported function call type {:?}", dfg[*func]) diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs index 58166554e1d..ab756217bcd 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs @@ -1,7 +1,6 @@ -use acvm::{ - acir::brillig::{BlackBoxOp, HeapArray}, - FieldElement, -}; +use acvm::FieldElement; + +use crate::brillig::brillig_ir::BrilligBinaryOp; use super::{ brillig_variable::{BrilligVector, SingleAddrVariable}, @@ -37,46 +36,57 @@ impl BrilligContext { &mut self, source_field: SingleAddrVariable, target_vector: BrilligVector, - radix: u32, - limb_count: usize, + radix: SingleAddrVariable, + limb_count: SingleAddrVariable, big_endian: bool, limb_bit_size: u32, ) { assert!(source_field.bit_size == FieldElement::max_num_bits()); + assert!(radix.bit_size == 32); + assert!(limb_count.bit_size == 32); + let radix_as_field = + SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); + self.cast_instruction(radix_as_field, radix); - 
self.usize_const_instruction(target_vector.size, limb_count.into()); + self.cast_instruction(SingleAddrVariable::new_usize(target_vector.size), limb_count); self.usize_const_instruction(target_vector.rc, 1_usize.into()); self.codegen_allocate_array(target_vector.pointer, target_vector.size); - self.black_box_op_instruction(BlackBoxOp::ToRadix { - input: source_field.address, - radix, - output: HeapArray { pointer: target_vector.pointer, size: limb_count }, - }); + let shifted_field = + SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); + self.mov_instruction(shifted_field.address, source_field.address); let limb_field = SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); let limb_casted = SingleAddrVariable::new(self.allocate_register(), limb_bit_size); - if limb_bit_size != FieldElement::max_num_bits() { - self.codegen_loop(target_vector.size, |ctx, iterator_register| { - // Read the limb - ctx.codegen_array_get(target_vector.pointer, iterator_register, limb_field.address); - // Cast it - ctx.cast_instruction(limb_casted, limb_field); - // Write it - ctx.codegen_array_set( - target_vector.pointer, - iterator_register, - limb_casted.address, - ); - }); - } + self.codegen_loop(target_vector.size, |ctx, iterator_register| { + // Compute the modulus + ctx.binary_instruction( + shifted_field, + radix_as_field, + limb_field, + BrilligBinaryOp::Modulo, + ); + // Cast it + ctx.cast_instruction(limb_casted, limb_field); + // Write it + ctx.codegen_array_set(target_vector.pointer, iterator_register, limb_casted.address); + // Integer div the field + ctx.binary_instruction( + shifted_field, + radix_as_field, + shifted_field, + BrilligBinaryOp::UnsignedDiv, + ); + }); // Deallocate our temporary registers + self.deallocate_single_addr(shifted_field); self.deallocate_single_addr(limb_field); self.deallocate_single_addr(limb_casted); + self.deallocate_single_addr(radix_as_field); if big_endian { 
self.codegen_reverse_vector_in_place(target_vector); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index f02f6059e7c..667ccf6ddbe 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -451,15 +451,6 @@ impl DebugShow { output ); } - BlackBoxOp::ToRadix { input, radix, output } => { - debug_println!( - self.enable_debug_trace, - " TO_RADIX {} {} -> {}", - input, - radix, - output - ); - } } } diff --git a/noir/noir-repo/noir_stdlib/src/field/bn254.nr b/noir/noir-repo/noir_stdlib/src/field/bn254.nr index 2e82d9e7c23..d70310be391 100644 --- a/noir/noir-repo/noir_stdlib/src/field/bn254.nr +++ b/noir/noir-repo/noir_stdlib/src/field/bn254.nr @@ -25,7 +25,7 @@ unconstrained fn decompose_unsafe(x: Field) -> (Field, Field) { fn assert_gt_limbs(a: (Field, Field), b: (Field, Field)) { let (alo, ahi) = a; let (blo, bhi) = b; - let borrow = lte_unsafe_16(alo, blo); + let borrow = lte_unsafe(alo, blo, 16); let rlo = alo - blo - 1 + (borrow as Field) * TWO_POW_128; let rhi = ahi - bhi - (borrow as Field); @@ -51,9 +51,9 @@ pub fn decompose(x: Field) -> (Field, Field) { (xlo, xhi) } -fn lt_unsafe_internal(x: Field, y: Field, num_bytes: u32) -> bool { - let x_bytes = x.to_le_radix(256, num_bytes); - let y_bytes = y.to_le_radix(256, num_bytes); +unconstrained fn lt_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { + let x_bytes = x.__to_le_radix(256, num_bytes); + let y_bytes = y.__to_le_radix(256, num_bytes); let mut x_is_lt = false; let mut done = false; for i in 0..num_bytes { @@ -70,20 +70,8 @@ fn lt_unsafe_internal(x: Field, y: Field, num_bytes: u32) -> bool { x_is_lt } -fn lte_unsafe_internal(x: Field, y: Field, num_bytes: u32) -> bool { - if x == y { - true - } else { - lt_unsafe_internal(x, y, num_bytes) - } -} - -unconstrained fn 
lt_unsafe_32(x: Field, y: Field) -> bool { - lt_unsafe_internal(x, y, 32) -} - -unconstrained fn lte_unsafe_16(x: Field, y: Field) -> bool { - lte_unsafe_internal(x, y, 16) +unconstrained fn lte_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { + lt_unsafe(x, y, num_bytes) | (x == y) } pub fn assert_gt(a: Field, b: Field) { @@ -102,7 +90,7 @@ pub fn assert_lt(a: Field, b: Field) { pub fn gt(a: Field, b: Field) -> bool { if a == b { false - } else if lt_unsafe_32(a, b) { + } else if lt_unsafe(a, b, 32) { assert_gt(b, a); false } else { @@ -117,10 +105,7 @@ pub fn lt(a: Field, b: Field) -> bool { mod tests { // TODO: Allow imports from "super" - use crate::field::bn254::{ - decompose_unsafe, decompose, lt_unsafe_internal, assert_gt, gt, lt, TWO_POW_128, - lte_unsafe_internal, PLO, PHI - }; + use crate::field::bn254::{decompose_unsafe, decompose, lt_unsafe, assert_gt, gt, lt, TWO_POW_128, lte_unsafe, PLO, PHI}; #[test] fn check_decompose_unsafe() { @@ -138,23 +123,23 @@ mod tests { #[test] fn check_lt_unsafe() { - assert(lt_unsafe_internal(0, 1, 16)); - assert(lt_unsafe_internal(0, 0x100, 16)); - assert(lt_unsafe_internal(0x100, TWO_POW_128 - 1, 16)); - assert(!lt_unsafe_internal(0, TWO_POW_128, 16)); + assert(lt_unsafe(0, 1, 16)); + assert(lt_unsafe(0, 0x100, 16)); + assert(lt_unsafe(0x100, TWO_POW_128 - 1, 16)); + assert(!lt_unsafe(0, TWO_POW_128, 16)); } #[test] fn check_lte_unsafe() { - assert(lte_unsafe_internal(0, 1, 16)); - assert(lte_unsafe_internal(0, 0x100, 16)); - assert(lte_unsafe_internal(0x100, TWO_POW_128 - 1, 16)); - assert(!lte_unsafe_internal(0, TWO_POW_128, 16)); - - assert(lte_unsafe_internal(0, 0, 16)); - assert(lte_unsafe_internal(0x100, 0x100, 16)); - assert(lte_unsafe_internal(TWO_POW_128 - 1, TWO_POW_128 - 1, 16)); - assert(lte_unsafe_internal(TWO_POW_128, TWO_POW_128, 16)); + assert(lte_unsafe(0, 1, 16)); + assert(lte_unsafe(0, 0x100, 16)); + assert(lte_unsafe(0x100, TWO_POW_128 - 1, 16)); + assert(!lte_unsafe(0, TWO_POW_128, 16)); + + 
assert(lte_unsafe(0, 0, 16)); + assert(lte_unsafe(0x100, 0x100, 16)); + assert(lte_unsafe(TWO_POW_128 - 1, TWO_POW_128 - 1, 16)); + assert(lte_unsafe(TWO_POW_128, TWO_POW_128, 16)); } #[test] From 0238254b85f79ad6281d878028ecb0d135112cf8 Mon Sep 17 00:00:00 2001 From: Leila Wang Date: Thu, 9 May 2024 17:17:12 +0100 Subject: [PATCH 27/43] feat: silo note hashes with nonces first (#6273) Changing siloed note hash from: `hash(nonce, hash(contract_address, inner_note_hash))` to `hash(contract_address, hash(nonce, inner_note_hash))` --- .../docs/learn/concepts/storage/trees/main.md | 16 ++-- .../protocol-specs/state/note-hash-tree.md | 8 +- .../aztec-nr/aztec/src/note/utils.nr | 48 ++++++----- .../kernel_circuit_public_inputs_composer.nr | 11 +-- .../src/private_kernel_tail.nr | 13 ++- .../src/private_kernel_tail_to_public.nr | 12 +-- .../crates/types/src/hash.nr | 44 +++++----- .../src/hash/__snapshots__/hash.test.ts.snap | 6 +- .../circuits.js/src/hash/hash.test.ts | 20 ++--- yarn-project/circuits.js/src/hash/hash.ts | 28 +++---- .../src/note_processor/note_processor.test.ts | 4 +- .../src/note_processor/produce_note_dao.ts | 17 ++-- .../pxe/src/pxe_service/pxe_service.ts | 49 ++++++----- .../src/client/client_execution_context.ts | 8 +- .../src/client/private_execution.test.ts | 6 +- .../simulator/src/client/simulator.test.ts | 12 +-- .../simulator/src/client/simulator.ts | 82 +------------------ 17 files changed, 159 insertions(+), 225 deletions(-) diff --git a/docs/docs/learn/concepts/storage/trees/main.md b/docs/docs/learn/concepts/storage/trees/main.md index 826aa7364a2..d59d1911a43 100644 --- a/docs/docs/learn/concepts/storage/trees/main.md +++ b/docs/docs/learn/concepts/storage/trees/main.md @@ -79,20 +79,20 @@ note_hash: Field = pedersen::compress( The Private Kernel circuit will modify this `note_hash` further, before it is inserted into the tree. 
It will: -- Silo the commitment, to prevent cross-contamination of this contract's state variables with other contracts' state variables: - `siloed_note_hash: Field = hash(contract_address, note_hash);` +- Ensure uniqueness of the note hash, by hashing it with a nonce + `unique_note_hash: Field = hash(nonce, note_hash);`, where `nonce: Field = hash(new_nullifiers[0], index)`, where `new_nullifiers[0]` is a the first nullifier emitted in a transaction and `index` is the position of the new note hash in all new note hashes inserted by the transaction to the note hash tree. :::info - **Siloing** refers to a process of hashing a hash with some other domain specific information (e.g. contract address). - This siloing ensures that all hashes are appropriately domain-separated. + First nullifier of a transaction is always ensured to be non-zero because it is always set by the protocol and it represents a transaction hash. + For this reason hashing the transaction hash with the index of the note hash in the transaction is sufficient to ensure uniqueness of the note hash. ::: -- Ensure uniqueness of the commitment, by hashing it with a nonce - `unique_siloed_note_hash: Field = hash(nonce, siloed_note_hash);`, where `nonce: Field = hash(new_nullifiers[0], index)`, where `new_nullifiers[0]` is a the first nullifier emitted in a transaction and `index` is the position of the new note hash in all new note hashes inserted by the transaction to the note hash tree. +- Silo the note hash, to prevent cross-contamination of this contract's state variables with other contracts' state variables: + `siloed_note_hash: Field = hash(contract_address, unique_note_hash);` :::info - First nullifier of a transaction is always ensured to be non-zero because it is always set by the protocol and it represents a transaction hash. - For this reason hashing the transaction hash with the index of the note hash in the transaction is sufficient to ensure uniqueness of the note hash. 
+ **Siloing** refers to a process of hashing a hash with some other domain specific information (e.g. contract address). + This siloing ensures that all hashes are appropriately domain-separated. ::: The tree is append-only for a few of reasons: diff --git a/docs/docs/protocol-specs/state/note-hash-tree.md b/docs/docs/protocol-specs/state/note-hash-tree.md index 788b02363ae..174328ae143 100644 --- a/docs/docs/protocol-specs/state/note-hash-tree.md +++ b/docs/docs/protocol-specs/state/note-hash-tree.md @@ -6,16 +6,16 @@ Note commitments , which are subsequently [siloed](./tree-implementations.md#siloing-leaves) by contract address by the Kernel circuit. Siloing the commitment ensures that a malicious contract cannot create notes for (that is, modify the state of) another contract. -The Kernel circuit also guarantees uniqueness of commitments by further hashing them with a nonce, derived from the transaction identifier and the index of the commitment within the transaction's array of newly-created note hashes. Uniqueness means that a note with the same contents can be emitted more than once, and each instance can be independently nullified. Without uniqueness, two notes with the same content would yield the same commitment and nullifier, so nullifying one of them would render the second one as nullified as well. +The Kernel circuit also guarantees uniqueness of commitments by hashing them with a nonce, derived from the transaction identifier and the index of the commitment within the transaction's array of newly-created note hashes. Uniqueness means that a note with the same contents can be emitted more than once, and each instance can be independently nullified. Without uniqueness, two notes with the same content would yield the same commitment and nullifier, so nullifying one of them would render the second one as nullified as well. 
The pseudocode for siloing and making a commitment unique is the following, where each `hash` operation is a Pedersen hash with a unique generator index, indicated by the constant in all caps. ``` -fn compute_unique_siloed_note_hash(commitment, contract, transaction): - let siloed_note_hash = hash([contract, commitment], SILOED_NOTE_HASH) +fn compute_siloed_note_hash(commitment, contract, transaction): let index = index_of(commitment, transaction.commitments) let nonce = hash([transaction.tx_hash, index], NOTE_HASH_NONCE) - return hash([nonce, siloed_note_hash], UNIQUE_NOTE_HASH) + let unique_note_hash = hash([nonce, commitment], UNIQUE_NOTE_HASH); + return hash([contract, unique_note_hash], SILOED_NOTE_HASH) ``` The unique siloed commitment of a note is included in the [transaction `data`](../transactions/tx-object.md), and then inserted into the Note Hash tree by the sequencer as the transaction is included in a block. diff --git a/noir-projects/aztec-nr/aztec/src/note/utils.nr b/noir-projects/aztec-nr/aztec/src/note/utils.nr index c5c06b46bcb..444923c3fbb 100644 --- a/noir-projects/aztec-nr/aztec/src/note/utils.nr +++ b/noir-projects/aztec-nr/aztec/src/note/utils.nr @@ -9,13 +9,13 @@ use dep::protocol_types::{ hash::pedersen_hash, utils::arr_copy_slice }; -fn compute_siloed_hash(contract_address: AztecAddress, inner_note_hash: Field) -> Field { - let inputs = [contract_address.to_field(), inner_note_hash]; +fn compute_siloed_hash(contract_address: AztecAddress, unique_note_hash: Field) -> Field { + let inputs = [contract_address.to_field(), unique_note_hash]; pedersen_hash(inputs, GENERATOR_INDEX__SILOED_NOTE_HASH) } -fn compute_unique_hash(nonce: Field, siloed_note_hash: Field) -> Field { - let inputs = [nonce, siloed_note_hash]; +fn compute_unique_hash(nonce: Field, inner_note_hash: Field) -> Field { + let inputs = [nonce, inner_note_hash]; pedersen_hash(inputs, GENERATOR_INDEX__UNIQUE_NOTE_HASH) } @@ -29,20 +29,27 @@ fn compute_inner_note_hash(note: Note) -> 
Field where Note: NoteInterfa ) } -fn compute_siloed_note_hash(note_with_header: Note) -> Field where Note: NoteInterface { +fn compute_unique_note_hash(note_with_header: Note) -> Field where Note: NoteInterface { let header = note_with_header.get_header(); let inner_note_hash = compute_inner_note_hash(note_with_header); - compute_siloed_hash(header.contract_address, inner_note_hash) + compute_unique_hash(header.nonce, inner_note_hash) } -fn compute_unique_siloed_note_hash(note_with_header: Note) -> Field where Note: NoteInterface { +fn compute_siloed_note_hash(note_with_header: Note) -> Field where Note: NoteInterface { let header = note_with_header.get_header(); - let siloed_note_hash = compute_siloed_note_hash(note_with_header); + let unique_note_hash = if (header.nonce == 0) { + // If nonce is zero, that means we are reading a public note. + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) + // Remove this once notes added from public also include nonces. + compute_inner_note_hash(note_with_header) + } else { + compute_unique_note_hash(note_with_header) + }; - compute_unique_hash(header.nonce, siloed_note_hash) + compute_siloed_hash(header.contract_address, unique_note_hash) } pub fn compute_siloed_nullifier( @@ -70,15 +77,12 @@ pub fn compute_note_hash_for_consumption(note: Note) -> Field where Not if (header.is_transient) { // If a note is transient, we just read the inner_note_hash (kernel will silo by contract address). compute_inner_note_hash(note) - } else if (header.nonce == 0) { - // If not transient and nonce is zero, that means we are reading a public note. - compute_siloed_note_hash(note) } else { - // When nonce is nonzero, that means we are reading a settled note (from tree) created in a - // previous TX. So we need the unique_siloed_note_hash which has already been hashed with - // contract address and then nonce. 
This hash will match the existing leaf in the note hash + // If a note is not transient, that means we are reading a settled note (from tree) created in a + // previous TX. So we need the siloed_note_hash which has already been hashed with + // nonce and then contract address. This hash will match the existing leaf in the note hash // tree, so the kernel can just perform a membership check directly on this hash/leaf. - compute_unique_siloed_note_hash(note) + compute_siloed_note_hash(note) // IMPORTANT NOTE ON REDUNDANT SILOING BY CONTRACT ADDRESS: The note hash computed above is // "siloed" by contract address. When a note hash is computed solely for the purpose of // nullification, it is not strictly necessary to silo the note hash before computing @@ -102,12 +106,18 @@ pub fn compute_note_hash_and_nullifier( let inner_note_hash = compute_inner_note_hash(note); - let siloed_note_hash = compute_siloed_hash(note_header.contract_address, inner_note_hash); + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) + // Should always be calling compute_unique_hash() once notes added from public also include nonces. 
+ let unique_note_hash = if note_header.nonce != 0 { + compute_unique_hash(note_header.nonce, inner_note_hash) + } else { + inner_note_hash + }; - let unique_siloed_note_hash = compute_unique_hash(note_header.nonce, siloed_note_hash); + let siloed_note_hash = compute_siloed_hash(note_header.contract_address, unique_note_hash); let inner_nullifier = note.compute_nullifier_without_context(); // docs:start:compute_note_hash_and_nullifier_returns - [inner_note_hash, siloed_note_hash, unique_siloed_note_hash, inner_nullifier] + [inner_note_hash, unique_note_hash, siloed_note_hash, inner_nullifier] // docs:end:compute_note_hash_and_nullifier_returns } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr index 18115cc2ea6..05e4af96eae 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr @@ -9,10 +9,7 @@ use dep::types::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX }, - hash::{ - compute_l2_to_l1_hash, compute_note_hash_nonce, compute_unique_siloed_note_hash, silo_note_hash, - silo_nullifier -}, + hash::{compute_l2_to_l1_hash, compute_note_hash_nonce, compute_unique_note_hash, silo_note_hash, silo_nullifier}, utils::arrays::{array_length, array_to_bounded_vec, assert_sorted_array} }; @@ -137,10 +134,10 @@ impl KernelCircuitPublicInputsComposer { for i in 0..MAX_NEW_NOTE_HASHES_PER_TX { let note_hash = note_hashes[i]; if note_hash.value() != 0 { - let siloed = silo_note_hash(note_hash.contract_address, note_hash.value()); let nonce = compute_note_hash_nonce(first_nullifier, i); - let unique_note_hash = compute_unique_siloed_note_hash(nonce, siloed); - 
self.public_inputs.end.new_note_hashes.storage[i].note_hash.value = unique_note_hash; + let unique_note_hash = compute_unique_note_hash(nonce, note_hash.value()); + let siloed = silo_note_hash(note_hash.contract_address, unique_note_hash); + self.public_inputs.end.new_note_hashes.storage[i].note_hash.value = siloed; } } } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr index 598dfe018f7..4d52011707f 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr @@ -104,10 +104,7 @@ mod tests { side_effect::SideEffect, gas::Gas }, grumpkin_private_key::GrumpkinPrivateKey, - hash::{ - compute_note_hash_nonce, compute_unique_siloed_note_hash, sha256_to_field, silo_note_hash, - silo_nullifier - }, + hash::{compute_note_hash_nonce, compute_unique_note_hash, sha256_to_field, silo_note_hash, silo_nullifier}, tests::{fixture_builder::FixtureBuilder, sort::sort_get_sorted_hints}, utils::{arrays::{array_eq, array_length}}, traits::{Empty, is_empty, is_empty_array} }; @@ -141,16 +138,16 @@ mod tests { // note_hashes for the given note_hashes. 
pub fn compute_output_note_hashes(self, note_hashes: [ScopedNoteHash; N]) -> [Field; N] { let first_nullifier = self.previous_kernel.new_nullifiers.get_unchecked(0); - let mut unique_siloed_note_hashes = [0; N]; + let mut output = [0; N]; for i in 0..N { let note_hash = note_hashes[i]; if note_hash.value() != 0 { - let siloed = silo_note_hash(note_hash.contract_address, note_hash.value()); let nonce = compute_note_hash_nonce(first_nullifier.value(), i); - unique_siloed_note_hashes[i] = compute_unique_siloed_note_hash(nonce, siloed); + let unique_note_hash = compute_unique_note_hash(nonce, note_hash.value()); + output[i] = silo_note_hash(note_hash.contract_address, unique_note_hash); } } - unique_siloed_note_hashes + output } pub fn compute_output_nullifiers(_self: Self, nullifiers: [ScopedNullifier; N]) -> [Field; N] { diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr index 9dd2319a041..ec2e8637cdd 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr @@ -107,7 +107,7 @@ mod tests { side_effect::SideEffect }, grumpkin_private_key::GrumpkinPrivateKey, - hash::{compute_note_hash_nonce, compute_unique_siloed_note_hash, silo_note_hash, silo_nullifier}, + hash::{compute_note_hash_nonce, compute_unique_note_hash, silo_note_hash, silo_nullifier}, tests::{fixture_builder::FixtureBuilder, sort::sort_get_sorted_hints}, utils::{arrays::{array_eq, array_length}}, traits::is_empty_array }; @@ -141,19 +141,19 @@ mod tests { // note_hashes for the given note_hashes. 
pub fn compute_output_note_hashes(self, note_hashes: [ScopedNoteHash; N]) -> [NoteHash; N] { let first_nullifier = self.previous_kernel.new_nullifiers.get_unchecked(0).value(); - let mut unique_siloed_note_hashes = [NoteHash::empty(); N]; + let mut output = [NoteHash::empty(); N]; for i in 0..N { let note_hash = note_hashes[i]; if note_hash.value() != 0 { - let siloed = silo_note_hash(note_hash.contract_address, note_hash.value()); let nonce = compute_note_hash_nonce(first_nullifier, i); - unique_siloed_note_hashes[i] = NoteHash { - value: compute_unique_siloed_note_hash(nonce, siloed), + let unique_note_hash = compute_unique_note_hash(nonce, note_hash.value()); + output[i] = NoteHash { + value: silo_note_hash(note_hash.contract_address, unique_note_hash), counter: 0, // Counter is cleared so it's not exposed to the public. }; } } - unique_siloed_note_hashes + output } pub fn compute_output_nullifiers( diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr index b6efc83586f..6c3678b6bb3 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr @@ -34,11 +34,31 @@ pub fn private_functions_root_from_siblings( root_from_sibling_path(function_leaf, function_leaf_index, function_leaf_sibling_path) } -pub fn silo_note_hash(address: AztecAddress, inner_commitment: Field) -> Field { +pub fn compute_note_hash_nonce(first_nullifier: Field, note_hash_index: u64) -> Field { + pedersen_hash( + [ + first_nullifier, + note_hash_index as Field + ], + GENERATOR_INDEX__NOTE_HASH_NONCE + ) +} + +pub fn compute_unique_note_hash(nonce: Field, note_hash: Field) -> Field { + pedersen_hash( + [ + nonce, + note_hash + ], + GENERATOR_INDEX__UNIQUE_NOTE_HASH + ) +} + +pub fn silo_note_hash(address: AztecAddress, unique_note_hash: Field) -> Field { pedersen_hash( [ address.to_field(), - inner_commitment + unique_note_hash ], 
GENERATOR_INDEX__SILOED_NOTE_HASH ) @@ -143,26 +163,6 @@ pub fn compute_tx_logs_hash(logs: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX]) -> Fi hash } -pub fn compute_note_hash_nonce(first_nullifier: Field, commitment_index: u64) -> Field { - pedersen_hash( - [ - first_nullifier, - commitment_index as Field - ], - GENERATOR_INDEX__NOTE_HASH_NONCE - ) -} - -pub fn compute_unique_siloed_note_hash(nonce: Field, siloed_note_hash: Field) -> Field { - pedersen_hash( - [ - nonce, - siloed_note_hash - ], - GENERATOR_INDEX__UNIQUE_NOTE_HASH - ) -} - pub fn pedersen_hash(inputs: [Field; N], hash_index: u32) -> Field { dep::std::hash::pedersen_hash_with_separator(inputs, hash_index) } diff --git a/yarn-project/circuits.js/src/hash/__snapshots__/hash.test.ts.snap b/yarn-project/circuits.js/src/hash/__snapshots__/hash.test.ts.snap index a58b637a959..8aa78d9dc8a 100644 --- a/yarn-project/circuits.js/src/hash/__snapshots__/hash.test.ts.snap +++ b/yarn-project/circuits.js/src/hash/__snapshots__/hash.test.ts.snap @@ -4,17 +4,17 @@ exports[`hash Var args hash matches noir 1`] = `Fr<0x05a1023fef839ac88731f49ae98 exports[`hash compute secret message hash 1`] = `Fr<0x0dc06f2167e2cd19adf738d1f38469d7f8bff1e26b029816e8230bcd6ab6332e>`; -exports[`hash computes commitment nonce 1`] = `Fr<0x10ebab01bc813263ef92ed71b9c781ad3ef58019b66a8f71304d2f72d7defe4d>`; +exports[`hash computes note hash nonce 1`] = `Fr<0x10ebab01bc813263ef92ed71b9c781ad3ef58019b66a8f71304d2f72d7defe4d>`; exports[`hash computes public data tree leaf slot 1`] = `Fr<0x14114ab3dbdd0a1ccc5c4fe68dd576f3c6cd79708770e06ab4086398cdd828f4>`; exports[`hash computes public data tree value 1`] = `Fr<0x0000000000000000000000000000000000000000000000000000000000000003>`; -exports[`hash computes siloed commitment 1`] = `Fr<0x100e57c07ab6db86f4ae43f5a7d4355c57c5a1e2523746e0fb16ac29f0dc3bbb>`; +exports[`hash computes siloed note hash 1`] = `Fr<0x100e57c07ab6db86f4ae43f5a7d4355c57c5a1e2523746e0fb16ac29f0dc3bbb>`; exports[`hash computes siloed 
nullifier 1`] = `Fr<0x1743145fde103eaa88af576e0562e61d85eba590fddf01d19550e4f024709373>`; -exports[`hash computes unique commitment 1`] = `Fr<0x1cbdcecec4fe92f6638eb6a8dade96ca358ecba4954cf597c363199fae3d47e8>`; +exports[`hash computes unique note hash 1`] = `Fr<0x1cbdcecec4fe92f6638eb6a8dade96ca358ecba4954cf597c363199fae3d47e8>`; exports[`hash hashes empty function args 1`] = `Fr<0x0000000000000000000000000000000000000000000000000000000000000000>`; diff --git a/yarn-project/circuits.js/src/hash/hash.test.ts b/yarn-project/circuits.js/src/hash/hash.test.ts index 9c148367c66..bccb381cecd 100644 --- a/yarn-project/circuits.js/src/hash/hash.test.ts +++ b/yarn-project/circuits.js/src/hash/hash.test.ts @@ -4,7 +4,7 @@ import { setupCustomSnapshotSerializers } from '@aztec/foundation/testing'; import { AztecAddress, Fr } from '../index.js'; import { makeAztecAddress } from '../tests/factories.js'; import { - computeCommitmentNonce, + computeNoteHashNonce, computePublicDataTreeLeafSlot, computePublicDataTreeValue, computeSecretHash, @@ -17,24 +17,24 @@ import { describe('hash', () => { setupCustomSnapshotSerializers(expect); - it('computes commitment nonce', () => { + it('computes note hash nonce', () => { const nullifierZero = new Fr(123n); - const commitmentIndex = 456; - const res = computeCommitmentNonce(nullifierZero, commitmentIndex); + const noteHashIndex = 456; + const res = computeNoteHashNonce(nullifierZero, noteHashIndex); expect(res).toMatchSnapshot(); }); - it('computes unique commitment', () => { + it('computes unique note hash', () => { const nonce = new Fr(123n); - const innerCommitment = new Fr(456); - const res = computeUniqueNoteHash(nonce, innerCommitment); + const innerNoteHash = new Fr(456); + const res = computeUniqueNoteHash(nonce, innerNoteHash); expect(res).toMatchSnapshot(); }); - it('computes siloed commitment', () => { + it('computes siloed note hash', () => { const contractAddress = new AztecAddress(new Fr(123n).toBuffer()); - const 
uniqueCommitment = new Fr(456); - const res = siloNoteHash(contractAddress, uniqueCommitment); + const uniqueNoteHash = new Fr(456); + const res = siloNoteHash(contractAddress, uniqueNoteHash); expect(res).toMatchSnapshot(); }); diff --git a/yarn-project/circuits.js/src/hash/hash.ts b/yarn-project/circuits.js/src/hash/hash.ts index 6127573c8c9..6f8621d5bcc 100644 --- a/yarn-project/circuits.js/src/hash/hash.ts +++ b/yarn-project/circuits.js/src/hash/hash.ts @@ -32,24 +32,24 @@ export function hashVK(vkBuf: Buffer) { } /** - * Computes a commitment nonce, which will be used to create a unique commitment. + * Computes a note hash nonce, which will be used to create a unique note hash. * @param nullifierZero - The first nullifier in the tx. - * @param commitmentIndex - The index of the commitment. - * @returns A commitment nonce. + * @param noteHashIndex - The index of the note hash. + * @returns A note hash nonce. */ -export function computeCommitmentNonce(nullifierZero: Fr, commitmentIndex: number): Fr { - return pedersenHash([nullifierZero, commitmentIndex], GeneratorIndex.NOTE_HASH_NONCE); +export function computeNoteHashNonce(nullifierZero: Fr, noteHashIndex: number): Fr { + return pedersenHash([nullifierZero, noteHashIndex], GeneratorIndex.NOTE_HASH_NONCE); } /** - * Computes a siloed commitment, given the contract address and the commitment itself. - * A siloed commitment effectively namespaces a commitment to a specific contract. + * Computes a siloed note hash, given the contract address and the note hash itself. + * A siloed note hash effectively namespaces a note hash to a specific contract. * @param contract - The contract address - * @param innerNoteHash - The commitment to silo. - * @returns A siloed commitment. + * @param innerNoteHash - The note hash to silo. + * @returns A siloed note hash. 
*/ -export function siloNoteHash(contract: AztecAddress, innerNoteHash: Fr): Fr { - return pedersenHash([contract, innerNoteHash], GeneratorIndex.SILOED_NOTE_HASH); +export function siloNoteHash(contract: AztecAddress, uniqueNoteHash: Fr): Fr { + return pedersenHash([contract, uniqueNoteHash], GeneratorIndex.SILOED_NOTE_HASH); } /** @@ -75,11 +75,11 @@ export function computeInnerNoteHash(storageSlot: Fr, noteHash: Fr): Fr { * Computes a unique note hash. * @dev Includes a nonce which contains data that guarantees the resulting note hash will be unique. * @param nonce - The contract address. - * @param siloedNoteHash - An siloed note hash. + * @param innerNoteHash - An inner note hash. * @returns A unique note hash. */ -export function computeUniqueNoteHash(nonce: Fr, siloedNoteHash: Fr): Fr { - return pedersenHash([nonce, siloedNoteHash], GeneratorIndex.UNIQUE_NOTE_HASH); +export function computeUniqueNoteHash(nonce: Fr, innerNoteHash: Fr): Fr { + return pedersenHash([nonce, innerNoteHash], GeneratorIndex.UNIQUE_NOTE_HASH); } /** diff --git a/yarn-project/pxe/src/note_processor/note_processor.test.ts b/yarn-project/pxe/src/note_processor/note_processor.test.ts index f334c25162e..840df093bf7 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.test.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.test.ts @@ -146,8 +146,8 @@ describe('Note Processor', () => { simulator.computeNoteHashAndNullifier.mockImplementation((...args) => Promise.resolve({ innerNoteHash: Fr.random(), - siloedNoteHash: Fr.random(), - uniqueSiloedNoteHash: pedersenHash(args[4].items), // args[4] is note + uniqueNoteHash: Fr.random(), + siloedNoteHash: pedersenHash(args[4].items), // args[4] is note innerNullifier: Fr.random(), }), ); diff --git a/yarn-project/pxe/src/note_processor/produce_note_dao.ts b/yarn-project/pxe/src/note_processor/produce_note_dao.ts index 02b8526be07..f22d17f63eb 100644 --- a/yarn-project/pxe/src/note_processor/produce_note_dao.ts +++ 
b/yarn-project/pxe/src/note_processor/produce_note_dao.ts @@ -1,6 +1,6 @@ import { type L1NotePayload, type TxHash } from '@aztec/circuit-types'; import { Fr, type PublicKey } from '@aztec/circuits.js'; -import { computeCommitmentNonce, siloNullifier } from '@aztec/circuits.js/hash'; +import { computeNoteHashNonce, siloNullifier } from '@aztec/circuits.js/hash'; import { type AcirSimulator } from '@aztec/simulator'; import { NoteDao } from '../database/note_dao.js'; @@ -78,7 +78,6 @@ async function findNoteIndexAndNullifier( let nonce: Fr | undefined; let innerNoteHash: Fr | undefined; let siloedNoteHash: Fr | undefined; - let uniqueSiloedNoteHash: Fr | undefined; let innerNullifier: Fr | undefined; const firstNullifier = Fr.fromBuffer(txHash.toBuffer()); @@ -92,10 +91,16 @@ async function findNoteIndexAndNullifier( break; } - const expectedNonce = computeCommitmentNonce(firstNullifier, commitmentIndex); - ({ innerNoteHash, siloedNoteHash, uniqueSiloedNoteHash, innerNullifier } = - await simulator.computeNoteHashAndNullifier(contractAddress, expectedNonce, storageSlot, noteTypeId, note)); - if (commitment.equals(uniqueSiloedNoteHash)) { + const expectedNonce = computeNoteHashNonce(firstNullifier, commitmentIndex); + ({ innerNoteHash, siloedNoteHash, innerNullifier } = await simulator.computeNoteHashAndNullifier( + contractAddress, + expectedNonce, + storageSlot, + noteTypeId, + note, + )); + + if (commitment.equals(siloedNoteHash)) { nonce = expectedNonce; break; } diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 02ff03a95ee..9b9fcbcbf2f 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -34,7 +34,7 @@ import { computeContractClassId, getContractClassFromArtifact, } from '@aztec/circuits.js'; -import { computeCommitmentNonce, siloNullifier } from '@aztec/circuits.js/hash'; +import { computeNoteHashNonce, siloNullifier } from 
'@aztec/circuits.js/hash'; import { type ContractArtifact, type DecodedReturn, FunctionSelector, encodeArguments } from '@aztec/foundation/abi'; import { arrayNonEmptyLength, padArrayEnd } from '@aztec/foundation/collection'; import { Fr, type Point } from '@aztec/foundation/fields'; @@ -329,19 +329,15 @@ export class PXEService implements PXE { } for (const nonce of nonces) { - const { innerNoteHash, siloedNoteHash, uniqueSiloedNoteHash, innerNullifier } = - await this.simulator.computeNoteHashAndNullifier( - note.contractAddress, - nonce, - note.storageSlot, - note.noteTypeId, - note.note, - ); + const { innerNoteHash, siloedNoteHash, innerNullifier } = await this.simulator.computeNoteHashAndNullifier( + note.contractAddress, + nonce, + note.storageSlot, + note.noteTypeId, + note.note, + ); - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) - // This can always be `uniqueSiloedNoteHash` once notes added from public also include nonces. - const noteHashToLookUp = nonce.isZero() ? siloedNoteHash : uniqueSiloedNoteHash; - const index = await this.node.findLeafIndex('latest', MerkleTreeId.NOTE_HASH_TREE, noteHashToLookUp); + const index = await this.node.findLeafIndex('latest', MerkleTreeId.NOTE_HASH_TREE, siloedNoteHash); if (index === undefined) { throw new Error('Note does not exist.'); } @@ -383,6 +379,23 @@ export class PXEService implements PXE { } const nonces: Fr[] = []; + + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) + // Remove this once notes added from public also include nonces. 
+ { + const publicNoteNonce = Fr.ZERO; + const { siloedNoteHash } = await this.simulator.computeNoteHashAndNullifier( + note.contractAddress, + publicNoteNonce, + note.storageSlot, + note.noteTypeId, + note.note, + ); + if (tx.noteHashes.some(hash => hash.equals(siloedNoteHash))) { + nonces.push(publicNoteNonce); + } + } + const firstNullifier = tx.nullifiers[0]; const hashes = tx.noteHashes; for (let i = 0; i < hashes.length; ++i) { @@ -391,21 +404,15 @@ export class PXEService implements PXE { break; } - const nonce = computeCommitmentNonce(firstNullifier, i); - const { siloedNoteHash, uniqueSiloedNoteHash } = await this.simulator.computeNoteHashAndNullifier( + const nonce = computeNoteHashNonce(firstNullifier, i); + const { siloedNoteHash } = await this.simulator.computeNoteHashAndNullifier( note.contractAddress, nonce, note.storageSlot, note.noteTypeId, note.note, ); - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) - // Remove this once notes added from public also include nonces. if (hash.equals(siloedNoteHash)) { - nonces.push(Fr.ZERO); - break; - } - if (hash.equals(uniqueSiloedNoteHash)) { nonces.push(nonce); } } diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts index ed90d7ce44c..ca18abe1c32 100644 --- a/yarn-project/simulator/src/client/client_execution_context.ts +++ b/yarn-project/simulator/src/client/client_execution_context.ts @@ -262,11 +262,11 @@ export class ClientExecutionContext extends ViewDataOracle { notes.forEach(n => { if (n.index !== undefined) { - const siloedNoteHash = siloNoteHash(n.contractAddress, n.innerNoteHash); - const uniqueSiloedNoteHash = computeUniqueNoteHash(n.nonce, siloedNoteHash); // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1386) - // Should always be uniqueSiloedNoteHash when publicly created notes include nonces. - const noteHashForReadRequest = n.nonce.isZero() ? 
siloedNoteHash : uniqueSiloedNoteHash; + // Should always call computeUniqueNoteHash when publicly created notes include nonces. + const uniqueNoteHash = n.nonce.isZero() ? n.innerNoteHash : computeUniqueNoteHash(n.nonce, n.innerNoteHash); + const siloedNoteHash = siloNoteHash(n.contractAddress, uniqueNoteHash); + const noteHashForReadRequest = siloedNoteHash; this.noteHashLeafIndexMap.set(noteHashForReadRequest.toBigInt(), n.index); } }); diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index 3b93537e10c..2226848258b 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -27,7 +27,7 @@ import { getContractInstanceFromDeployParams, getNonEmptyItems, } from '@aztec/circuits.js'; -import { computeCommitmentNonce, computeSecretHash, computeVarArgsHash } from '@aztec/circuits.js/hash'; +import { computeNoteHashNonce, computeSecretHash, computeVarArgsHash } from '@aztec/circuits.js/hash'; import { makeHeader } from '@aztec/circuits.js/testing'; import { type FunctionArtifact, FunctionSelector, encodeArguments, getFunctionArtifact } from '@aztec/foundation/abi'; import { asyncMap } from '@aztec/foundation/async-map'; @@ -270,7 +270,7 @@ describe('Private Execution test suite', () => { // array index at the output of the final kernel/ordering circuit are used to derive nonce via: // `hash(firstNullifier, noteHashIndex)` const noteHashIndex = randomInt(1); // mock index in TX's final newNoteHashes array - const nonce = computeCommitmentNonce(mockFirstNullifier, noteHashIndex); + const nonce = computeNoteHashNonce(mockFirstNullifier, noteHashIndex); const note = new Note([new Fr(amount), owner.toField(), Fr.random()]); const innerNoteHash = pedersenHash(note.items); return { @@ -428,7 +428,7 @@ describe('Private Execution test suite', () => { const readRequests = 
getNonEmptyItems(result.callStackItem.publicInputs.noteHashReadRequests).map(r => r.value); expect(readRequests).toHaveLength(consumedNotes.length); - expect(readRequests).toEqual(expect.arrayContaining(consumedNotes.map(n => n.uniqueSiloedNoteHash))); + expect(readRequests).toEqual(expect.arrayContaining(consumedNotes.map(n => n.siloedNoteHash))); }); it('should be able to destroy_and_create with dummy notes', async () => { diff --git a/yarn-project/simulator/src/client/simulator.test.ts b/yarn-project/simulator/src/client/simulator.test.ts index 24211b5f35a..0f34bc9cd89 100644 --- a/yarn-project/simulator/src/client/simulator.test.ts +++ b/yarn-project/simulator/src/client/simulator.test.ts @@ -66,20 +66,16 @@ describe('Simulator', () => { const note = createNote(); const tokenNoteHash = computeNoteContentHash(note.items); const innerNoteHash = computeInnerNoteHash(storageSlot, tokenNoteHash); - const siloedNoteHash = siloNoteHash(contractAddress, innerNoteHash); - const uniqueSiloedNoteHash = computeUniqueNoteHash(nonce, siloedNoteHash); - const innerNullifier = poseidon2Hash([ - uniqueSiloedNoteHash, - appNullifierSecretKey, - GeneratorIndex.NOTE_NULLIFIER, - ]); + const uniqueNoteHash = computeUniqueNoteHash(nonce, innerNoteHash); + const siloedNoteHash = siloNoteHash(contractAddress, uniqueNoteHash); + const innerNullifier = poseidon2Hash([siloedNoteHash, appNullifierSecretKey, GeneratorIndex.NOTE_NULLIFIER]); const result = await simulator.computeNoteHashAndNullifier(contractAddress, nonce, storageSlot, noteTypeId, note); expect(result).toEqual({ innerNoteHash, + uniqueNoteHash, siloedNoteHash, - uniqueSiloedNoteHash, innerNullifier, }); }); diff --git a/yarn-project/simulator/src/client/simulator.ts b/yarn-project/simulator/src/client/simulator.ts index 1fbb92ad03d..0eebf76e26a 100644 --- a/yarn-project/simulator/src/client/simulator.ts +++ b/yarn-project/simulator/src/client/simulator.ts @@ -200,7 +200,7 @@ export class AcirSimulator { args: 
encodeArguments(artifact, [contractAddress, nonce, storageSlot, noteTypeId, extendedNoteItems]), }; - const [innerNoteHash, siloedNoteHash, uniqueSiloedNoteHash, innerNullifier] = (await this.runUnconstrained( + const [innerNoteHash, uniqueNoteHash, siloedNoteHash, innerNullifier] = (await this.runUnconstrained( execRequest, artifact, contractAddress, @@ -208,8 +208,8 @@ export class AcirSimulator { return { innerNoteHash: new Fr(innerNoteHash), + uniqueNoteHash: new Fr(uniqueNoteHash), siloedNoteHash: new Fr(siloedNoteHash), - uniqueSiloedNoteHash: new Fr(uniqueSiloedNoteHash), innerNullifier: new Fr(innerNullifier), }; } @@ -232,82 +232,4 @@ export class AcirSimulator { ); return innerNoteHash; } - - /** - * Computes the unique note hash of a note. - * @param contractAddress - The address of the contract. - * @param nonce - The nonce of the note hash. - * @param storageSlot - The storage slot. - * @param noteTypeId - The note type identifier. - * @param note - The note. - * @returns The note hash. - */ - public async computeUniqueSiloedNoteHash( - contractAddress: AztecAddress, - nonce: Fr, - storageSlot: Fr, - noteTypeId: Fr, - note: Note, - ) { - const { uniqueSiloedNoteHash } = await this.computeNoteHashAndNullifier( - contractAddress, - nonce, - storageSlot, - noteTypeId, - note, - ); - return uniqueSiloedNoteHash; - } - - /** - * Computes the siloed note hash of a note. - * @param contractAddress - The address of the contract. - * @param nonce - The nonce of the note hash. - * @param storageSlot - The storage slot. - * @param noteTypeId - The note type identifier. - * @param note - The note. - * @returns The note hash. 
- */ - public async computeSiloedNoteHash( - contractAddress: AztecAddress, - nonce: Fr, - storageSlot: Fr, - noteTypeId: Fr, - note: Note, - ) { - const { siloedNoteHash } = await this.computeNoteHashAndNullifier( - contractAddress, - nonce, - storageSlot, - noteTypeId, - note, - ); - return siloedNoteHash; - } - - /** - * Computes the inner note hash of a note, which contains storage slot and the custom note hash. - * @param contractAddress - The address of the contract. - * @param nonce - The nonce of the unique note hash. - * @param storageSlot - The storage slot. - * @param noteTypeId - The note type identifier. - * @param note - The note. - * @returns The note hash. - */ - public async computeInnerNullifier( - contractAddress: AztecAddress, - nonce: Fr, - storageSlot: Fr, - noteTypeId: Fr, - note: Note, - ) { - const { innerNullifier } = await this.computeNoteHashAndNullifier( - contractAddress, - nonce, - storageSlot, - noteTypeId, - note, - ); - return innerNullifier; - } } From 27534aca901c74e2754e5c27d62ad686756e90d1 Mon Sep 17 00:00:00 2001 From: Facundo Date: Thu, 9 May 2024 17:55:06 +0100 Subject: [PATCH 28/43] chore(avm-context): implement Empty (#6303) Will be needed for https://github.com/AztecProtocol/aztec-packages/blob/master/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr#L132 . 
--- .../aztec-nr/aztec/src/context/avm_context.nr | 8 +++++++- .../aztec/src/context/inputs/avm_context_inputs.nr | 11 +++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/aztec/src/context/avm_context.nr b/noir-projects/aztec-nr/aztec/src/context/avm_context.nr index d7180bd8338..d87c5e92c9b 100644 --- a/noir-projects/aztec-nr/aztec/src/context/avm_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/avm_context.nr @@ -3,7 +3,7 @@ use dep::protocol_types::{ address::{AztecAddress, EthAddress}, constants::{L1_TO_L2_MESSAGE_LENGTH, NESTED_CALL_L2_GAS_BUFFER}, header::Header }; -use dep::protocol_types::traits::Serialize; +use dep::protocol_types::traits::{Deserialize, Serialize, Empty}; use dep::protocol_types::abis::function_selector::FunctionSelector; use dep::protocol_types::abis::public_circuit_public_inputs::PublicCircuitPublicInputs; use crate::context::inputs::avm_context_inputs::AvmContextInputs; @@ -191,6 +191,12 @@ impl ContextInterface for AvmContext { } } +impl Empty for AvmContext { + fn empty() -> Self { + AvmContext::new(AvmContextInputs::empty()) + } +} + // Helper functions fn gas_for_call(user_gas: GasOpts) -> [Field; 2] { [ diff --git a/noir-projects/aztec-nr/aztec/src/context/inputs/avm_context_inputs.nr b/noir-projects/aztec-nr/aztec/src/context/inputs/avm_context_inputs.nr index ffd16b268ac..0000b903f6d 100644 --- a/noir-projects/aztec-nr/aztec/src/context/inputs/avm_context_inputs.nr +++ b/noir-projects/aztec-nr/aztec/src/context/inputs/avm_context_inputs.nr @@ -1,4 +1,15 @@ +use dep::protocol_types::traits::Empty; + struct AvmContextInputs { selector: Field, args_hash: Field, } + +impl Empty for AvmContextInputs { + fn empty() -> Self { + AvmContextInputs { + selector: 0, + args_hash: 0, + } + } +} From 0c20f44f10b6436cafab690a9d6d5a888b37b4ee Mon Sep 17 00:00:00 2001 From: Facundo Date: Thu, 9 May 2024 17:55:21 +0100 Subject: [PATCH 29/43] chore(test-contracts): prepare e2e_token_contract+ 
error msgs for AVM migration (#6307) --- yarn-project/end-to-end/src/e2e_authwit.test.ts | 9 +++++---- .../src/e2e_blacklist_token_contract/burn.test.ts | 6 +++--- .../src/e2e_blacklist_token_contract/shielding.test.ts | 4 ++-- .../transfer_private.test.ts | 3 ++- .../transfer_public.test.ts | 4 ++-- .../e2e_blacklist_token_contract/unshielding.test.ts | 3 ++- .../end-to-end/src/e2e_token_contract/burn.test.ts | 6 +++--- .../src/e2e_token_contract/shielding.test.ts | 4 ++-- .../src/e2e_token_contract/transfer_private.test.ts | 7 ++++--- .../src/e2e_token_contract/transfer_public.test.ts | 10 +++++----- .../src/e2e_token_contract/unshielding.test.ts | 3 ++- yarn-project/end-to-end/src/fixtures/fixtures.ts | 2 ++ 12 files changed, 34 insertions(+), 27 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_authwit.test.ts b/yarn-project/end-to-end/src/e2e_authwit.test.ts index 42865d4793a..29e84a1b62d 100644 --- a/yarn-project/end-to-end/src/e2e_authwit.test.ts +++ b/yarn-project/end-to-end/src/e2e_authwit.test.ts @@ -3,6 +3,7 @@ import { SchnorrAccountContract } from '@aztec/noir-contracts.js'; import { jest } from '@jest/globals'; +import { DUPLICATE_NULLIFIER_ERROR } from './fixtures/fixtures.js'; import { publicDeployAccounts, setup } from './fixtures/utils.js'; const TIMEOUT = 90_000; @@ -86,7 +87,7 @@ describe('e2e_authwit_tests', () => { }); // The transaction should be dropped because of a cancelled authwit (duplicate nullifier) - await expect(txCancelledAuthwit.wait()).rejects.toThrow('Transaction '); + await expect(txCancelledAuthwit.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); it('invalid chain id', async () => { @@ -130,7 +131,7 @@ describe('e2e_authwit_tests', () => { }); // The transaction should be dropped because of the invalid chain id - await expect(txCancelledAuthwit.wait()).rejects.toThrow('Transaction '); + await expect(txCancelledAuthwit.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); it('invalid version', async () => { @@ 
-174,7 +175,7 @@ describe('e2e_authwit_tests', () => { }); // The transaction should be dropped because of the invalid version - await expect(txCancelledAuthwit.wait()).rejects.toThrow('Transaction '); + await expect(txCancelledAuthwit.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); }); }); @@ -234,7 +235,7 @@ describe('e2e_authwit_tests', () => { const c = await SchnorrAccountContract.at(wallets[0].getAddress(), wallets[0]); const txCancelledAuthwit = c.withWallet(wallets[1]).methods.spend_public_authwit(innerHash).send(); // The transaction should be dropped because of a cancelled authwit (duplicate nullifier) - await expect(txCancelledAuthwit.wait()).rejects.toThrow('Transaction '); + await expect(txCancelledAuthwit.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); }); }); diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts index cb521a0baef..05ca22f844f 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/burn.test.ts @@ -1,6 +1,6 @@ import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; -import { U128_UNDERFLOW_ERROR } from '../fixtures/index.js'; +import { DUPLICATE_NULLIFIER_ERROR, U128_UNDERFLOW_ERROR } from '../fixtures/index.js'; import { BlacklistTokenContractTest } from './blacklist_token_contract_test.js'; describe('e2e_blacklist_token_contract burn', () => { @@ -50,7 +50,7 @@ describe('e2e_blacklist_token_contract burn', () => { // Check that the message hash is no longer valid. Need to try to send since nullifiers are handled by sequencer. 
const txReplay = asset.withWallet(wallets[1]).methods.burn_public(wallets[0].getAddress(), amount, nonce).send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); describe('failure cases', () => { @@ -146,7 +146,7 @@ describe('e2e_blacklist_token_contract burn', () => { // Perform the transfer again, should fail const txReplay = asset.withWallet(wallets[1]).methods.burn(wallets[0].getAddress(), amount, nonce).send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); describe('failure cases', () => { diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/shielding.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/shielding.test.ts index 4bffbc3a7ef..d5dfbe462e9 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/shielding.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/shielding.test.ts @@ -1,6 +1,6 @@ import { Fr, computeSecretHash } from '@aztec/aztec.js'; -import { U128_UNDERFLOW_ERROR } from '../fixtures/index.js'; +import { DUPLICATE_NULLIFIER_ERROR, U128_UNDERFLOW_ERROR } from '../fixtures/index.js'; import { BlacklistTokenContractTest } from './blacklist_token_contract_test.js'; describe('e2e_blacklist_token_contract shield + redeem_shield', () => { @@ -67,7 +67,7 @@ describe('e2e_blacklist_token_contract shield + redeem_shield', () => { .withWallet(wallets[1]) .methods.shield(wallets[0].getAddress(), amount, secretHash, nonce) .send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); // Redeem it await t.addPendingShieldNoteToPXE(0, amount, secretHash, receipt.txHash); diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts 
b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts index ed78def1481..ffc06411d2e 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_private.test.ts @@ -1,5 +1,6 @@ import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; +import { DUPLICATE_NULLIFIER_ERROR } from '../fixtures/fixtures.js'; import { BlacklistTokenContractTest } from './blacklist_token_contract_test.js'; describe('e2e_blacklist_token_contract transfer private', () => { @@ -67,7 +68,7 @@ describe('e2e_blacklist_token_contract transfer private', () => { .withWallet(wallets[1]) .methods.transfer(wallets[0].getAddress(), wallets[1].getAddress(), amount, nonce) .send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); describe('failure cases', () => { diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_public.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_public.test.ts index 1459704e8aa..45996cf0207 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_public.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/transfer_public.test.ts @@ -1,6 +1,6 @@ import { Fr } from '@aztec/aztec.js'; -import { U128_UNDERFLOW_ERROR } from '../fixtures/index.js'; +import { DUPLICATE_NULLIFIER_ERROR, U128_UNDERFLOW_ERROR } from '../fixtures/index.js'; import { BlacklistTokenContractTest } from './blacklist_token_contract_test.js'; describe('e2e_blacklist_token_contract transfer public', () => { @@ -66,7 +66,7 @@ describe('e2e_blacklist_token_contract transfer public', () => { .withWallet(wallets[1]) .methods.transfer_public(wallets[0].getAddress(), wallets[1].getAddress(), amount, nonce) .send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await 
expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); describe('failure cases', () => { diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts index ba8f69e6f26..224a26b5f0f 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts @@ -1,5 +1,6 @@ import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; +import { DUPLICATE_NULLIFIER_ERROR } from '../fixtures/fixtures.js'; import { BlacklistTokenContractTest } from './blacklist_token_contract_test.js'; describe('e2e_blacklist_token_contract unshielding', () => { @@ -57,7 +58,7 @@ describe('e2e_blacklist_token_contract unshielding', () => { .withWallet(wallets[1]) .methods.unshield(wallets[0].getAddress(), wallets[1].getAddress(), amount, nonce) .send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); // @todo @LHerskind This error is weird? }); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts index ff7aed370b5..bfe3406329c 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/burn.test.ts @@ -1,6 +1,6 @@ import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; -import { U128_UNDERFLOW_ERROR } from '../fixtures/index.js'; +import { DUPLICATE_NULLIFIER_ERROR, U128_UNDERFLOW_ERROR } from '../fixtures/index.js'; import { TokenContractTest } from './token_contract_test.js'; describe('e2e_token_contract burn', () => { @@ -49,7 +49,7 @@ describe('e2e_token_contract burn', () => { // Check that the message hash is no longer valid. Need to try to send since nullifiers are handled by sequencer. 
const txReplay = asset.withWallet(wallets[1]).methods.burn_public(accounts[0].address, amount, nonce).send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); describe('failure cases', () => { @@ -139,7 +139,7 @@ describe('e2e_token_contract burn', () => { // Perform the transfer again, should fail const txReplay = asset.withWallet(wallets[1]).methods.burn(accounts[0].address, amount, nonce).send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); describe('failure cases', () => { diff --git a/yarn-project/end-to-end/src/e2e_token_contract/shielding.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/shielding.test.ts index b0cee961f35..93ab4e44870 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/shielding.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/shielding.test.ts @@ -1,6 +1,6 @@ import { Fr, computeSecretHash } from '@aztec/aztec.js'; -import { U128_UNDERFLOW_ERROR } from '../fixtures/fixtures.js'; +import { DUPLICATE_NULLIFIER_ERROR, U128_UNDERFLOW_ERROR } from '../fixtures/fixtures.js'; import { TokenContractTest } from './token_contract_test.js'; describe('e2e_token_contract shield + redeem shield', () => { @@ -60,7 +60,7 @@ describe('e2e_token_contract shield + redeem shield', () => { // Check that replaying the shield should fail! 
const txReplay = asset.withWallet(wallets[1]).methods.shield(accounts[0].address, amount, secretHash, nonce).send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); // Redeem it await t.addPendingShieldNoteToPXE(0, amount, secretHash, receipt.txHash); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts index 3251c7422a9..fb5394567dc 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts @@ -1,5 +1,6 @@ import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; +import { DUPLICATE_NULLIFIER_ERROR } from '../fixtures/fixtures.js'; import { TokenContractTest } from './token_contract_test.js'; describe('e2e_token_contract transfer private', () => { @@ -66,7 +67,7 @@ describe('e2e_token_contract transfer private', () => { .withWallet(wallets[1]) .methods.transfer(accounts[0].address, accounts[1].address, amount, nonce) .send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); describe('failure cases', () => { @@ -188,7 +189,7 @@ describe('e2e_token_contract transfer private', () => { .withWallet(wallets[1]) .methods.transfer(accounts[0].address, accounts[1].address, amount, nonce) .send(); - await expect(txCancelledAuthwit.wait()).rejects.toThrowError('Transaction '); + await expect(txCancelledAuthwit.wait()).rejects.toThrowError(DUPLICATE_NULLIFIER_ERROR); }); it('transfer on behalf of other, cancelled authwit, flow 2', async () => { @@ -212,7 +213,7 @@ describe('e2e_token_contract transfer private', () => { .withWallet(wallets[1]) .methods.transfer(accounts[0].address, accounts[1].address, amount, nonce) .send(); - await 
expect(txCancelledAuthwit.wait()).rejects.toThrow('Transaction '); + await expect(txCancelledAuthwit.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); it('transfer on behalf of other, invalid spend_private_authwit on "from"', async () => { diff --git a/yarn-project/end-to-end/src/e2e_token_contract/transfer_public.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/transfer_public.test.ts index 13430c1916a..cb352c57df2 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/transfer_public.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer_public.test.ts @@ -1,6 +1,6 @@ import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; -import { U128_UNDERFLOW_ERROR } from '../fixtures/fixtures.js'; +import { DUPLICATE_NULLIFIER_ERROR, U128_UNDERFLOW_ERROR } from '../fixtures/fixtures.js'; import { TokenContractTest } from './token_contract_test.js'; describe('e2e_token_contract transfer public', () => { @@ -65,7 +65,7 @@ describe('e2e_token_contract transfer public', () => { .withWallet(wallets[1]) .methods.transfer_public(accounts[0].address, accounts[1].address, amount, nonce) .send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); describe('failure cases', () => { @@ -194,7 +194,7 @@ describe('e2e_token_contract transfer public', () => { .withWallet(wallets[1]) .methods.transfer_public(accounts[0].address, accounts[1].address, amount, nonce) .send(); - await expect(txCancelledAuthwit.wait()).rejects.toThrowError('Transaction '); + await expect(txCancelledAuthwit.wait()).rejects.toThrowError(DUPLICATE_NULLIFIER_ERROR); }); it('transfer on behalf of other, cancelled authwit, flow 2', async () => { @@ -216,7 +216,7 @@ describe('e2e_token_contract transfer public', () => { .withWallet(wallets[1]) .methods.transfer_public(accounts[0].address, accounts[1].address, amount, nonce) .send(); - await 
expect(txCancelledAuthwit.wait()).rejects.toThrowError('Transaction '); + await expect(txCancelledAuthwit.wait()).rejects.toThrowError(DUPLICATE_NULLIFIER_ERROR); }); it('transfer on behalf of other, cancelled authwit, flow 3', async () => { @@ -244,7 +244,7 @@ describe('e2e_token_contract transfer public', () => { .withWallet(wallets[1]) .methods.transfer_public(accounts[0].address, accounts[1].address, amount, nonce) .send(); - await expect(txCancelledAuthwit.wait()).rejects.toThrow('Transaction '); + await expect(txCancelledAuthwit.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); it('transfer on behalf of other, invalid spend_public_authwit on "from"', async () => { diff --git a/yarn-project/end-to-end/src/e2e_token_contract/unshielding.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/unshielding.test.ts index 998b978e081..d52b3ce214e 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/unshielding.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/unshielding.test.ts @@ -1,5 +1,6 @@ import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; +import { DUPLICATE_NULLIFIER_ERROR } from '../fixtures/fixtures.js'; import { TokenContractTest } from './token_contract_test.js'; describe('e2e_token_contract unshielding', () => { @@ -56,7 +57,7 @@ describe('e2e_token_contract unshielding', () => { .withWallet(wallets[1]) .methods.unshield(accounts[0].address, accounts[1].address, amount, nonce) .send(); - await expect(txReplay.wait()).rejects.toThrow('Transaction '); + await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); describe('failure cases', () => { diff --git a/yarn-project/end-to-end/src/fixtures/fixtures.ts b/yarn-project/end-to-end/src/fixtures/fixtures.ts index c33f3929718..8e9284a1945 100644 --- a/yarn-project/end-to-end/src/fixtures/fixtures.ts +++ b/yarn-project/end-to-end/src/fixtures/fixtures.ts @@ -7,3 +7,5 @@ export const U128_UNDERFLOW_ERROR = "Assertion failed: attempt to subtract with 
export const U128_OVERFLOW_ERROR = "Assertion failed: attempt to add with overflow 'hi == high'"; export const BITSIZE_TOO_BIG_ERROR = "Assertion failed: call to assert_max_bit_size 'self.__assert_max_bit_size(bit_size)'"; +// TODO(https://github.com/AztecProtocol/aztec-packages/issues/5818): Make this a fixed error after transition. +export const DUPLICATE_NULLIFIER_ERROR = /Transaction .*|.*duplicate nullifier.*/; From c191a40bebf5910d4001f3fac61bb7235f805104 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Thu, 9 May 2024 14:06:49 -0300 Subject: [PATCH 30/43] feat!: shared mutable configurable delays (#6104) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #5493, follow up of #6085. This makes the delay in SharedMutable not be fixed and instead configurable by users throughout the lifetime of the contract. This is however more complicated than it sounds at first: because private proofs are being created relying on the public values being stable until a future point in time, it must not be possible to cause a shared value to change before some delay. Two scenarios are particularly tricky: - if the delay is reduced, then it is possible to schedule a value change with a shorter delay, violating the original delay's constraints. The solution to this is to make delay changes be scheduled actions themselves, so that the total delay (wait time for the new delay to come into effect plus the new reduced delay) equals the original delay. Note that increasing a delay can be done instantly. - if we schedule delay changes as per the above, then we must consider a scenario in which a delay reduction is scheduled in the near future. It may happen that waiting for the reduction to come into effect and then scheduling results in a shorter delay than if the scheduling were to happen immediately - this lower 'effective delay' is the value that must be used in private proofs.
## How I had originally considered creating a sort of wrapper state variable that held two SharedMutables, one for the value and one for the delay, or alternatively two ScheduledValueChanges, but ultimately I realized that a scheduled value change is significantly different from a scheduled delay change. Namely: - the notion of the 'current' delay is meaningless in private - we only care about the 'effective' delay - there's no use for the block horizon of a delay change - scheduling a delay change requires setting a delay depending on the current and new values, not an externally defined one Due to these differences, I introduced ScheduledDelayChange, which is essentially a variant of the value change, but with these considerations baked in. I think this is a reasonable way to do things, even if at first this may seem to introduce too many concepts. It also helps with the fact that there's so many values involved (pre, post and block of change for value and delays, as well as current, effective, historical values, etc.), and with language becoming weird - we need to describe the delay for scheduling a delay change, which will later affect the delays of scheduling value changes. With ScheduledDelayChange, extending the functionality of SharedMutable was relatively straightforward. The unit tests became a bit more complicated due to there being more scenarios, so I also used this as an opportunity to try to create slightly more complex Noir tests. I didn't go too crazy here, but they seem to be right at the point where we'd want to introduce something like a `Test` struct with custom impls for setup, common assertions, etc. ## Problems An uninitialized `SharedMutable` has both delay and value of 0. A zero delay transforms `SharedMutable` into `PublicMutable`: scheduled value changes become effective immediately, and it is not possible to read from private since `tx.max_block_number` would equal a historical block (i.e. an already mined one).
Delay initialization is therefore required, and this is typically fine: since the initial delay is 0 any change will be an increase, and therefore instant. The problem arises when we cannot have explicit initialization and instead wish to rely on defaults. This happens e.g. when we put a SharedMutable inside a `Map`: we can't initialize all entries for all keys, and we run into trouble. This is a pattern followed by `KeyRegistry` and `TokenBlacklist`: we have per-user configuration, and can't really ask users to initialize their state before interacting with the system. ## Solution? A possible solution would be to have a default value for the delay, and to store e.g. `Option` instead of plain integers and using `unwrap_or(DEFAULT)`. We could then make this a type parameter for SharedMutable, e.g. `registry: Map>`. This would make certain things more complicated, particularly the effective delay and delay change block of change computations, but it should all be containable within `ScheduledDelayChange`, which sounds just about right. ---- I'm keeping this as a draft so we can discuss the current approach and whether we think the above or an alternative solution would be reasonable to attempt. Note that this PR won't pass CI as some of the contracts won't build.
--------- Co-authored-by: Jan Beneš Co-authored-by: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> --- .../references/storage/shared_state.md | 6 +- docs/docs/misc/migration_notes.md | 6 + .../aztec/src/state_vars/shared_mutable.nr | 1 + .../shared_mutable/scheduled_delay_change.nr | 512 ++++++++++++++++++ .../shared_mutable/scheduled_value_change.nr | 201 ++++--- .../shared_mutable/shared_mutable.nr | 468 ++++++++++++---- .../shared_mutable_private_getter.nr | 61 ++- .../contracts/auth_contract/src/main.nr | 2 - 8 files changed, 1050 insertions(+), 207 deletions(-) create mode 100644 noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/scheduled_delay_change.nr diff --git a/docs/docs/developers/contracts/references/storage/shared_state.md b/docs/docs/developers/contracts/references/storage/shared_state.md index 2039eaa56b7..7490c503175 100644 --- a/docs/docs/developers/contracts/references/storage/shared_state.md +++ b/docs/docs/developers/contracts/references/storage/shared_state.md @@ -28,11 +28,9 @@ While shared state variables are much less leaky than the assertion in public ap The `max_block_number` transaction property will be set to a value close to the current block number plus the duration of the delay in blocks. The exact value depends on the historical block over which the private proof is constructed. For example, if the current block number is 100 and a shared state variable has a delay of 20 blocks, then transactions that read this value privately will set `max_block_number` to a value close to 120 (clients building proofs on older state will select a lower `max_block_number`). This implicitly leaks the duration of the delay. -Applications using similar delays will therefore be part of the same privacy set. 
It is expected for social coordination to result in small set of predetermined delays that developers choose from depending on their needs, as an example a viable set might be: 12 hours (for time-sensitive operations, such as emergency mechanisms), 5 days (for middle-of-the-road operations) and 2 weeks (for operations that require lengthy public scrutiny). +Applications using similar delays will therefore be part of the same privacy set. It is expected for social coordination to result in small set of predetermined delays that developers choose from depending on their needs, as an example a viable set might be: 12 hours (for time-sensitive operations, such as emergency mechanisms), 5 days (for middle-of-the-road operations) and 2 weeks (for operations that require lengthy public scrutiny). These delays can be changed during the contract lifetime as the application's needs evolve. -:::note -Shared state delays are currently hardcoded at compilation time and cannot be changed, but there are plans to make this a mutable value. -:::note +Additionally, users might choose to coordinate and constrain their transactions to set `max_block_number` to a value lower than would be strictly needed by the applications they interact with (if any!) using some common delay, and by doing so prevent privacy leakage. ### Choosing Epochs diff --git a/docs/docs/misc/migration_notes.md b/docs/docs/misc/migration_notes.md index c792470b0b5..902eae67a4d 100644 --- a/docs/docs/misc/migration_notes.md +++ b/docs/docs/misc/migration_notes.md @@ -6,6 +6,12 @@ keywords: [sandbox, cli, aztec, notes, migration, updating, upgrading] Aztec is in full-speed development. Literally every version breaks compatibility with the previous ones. This page attempts to target errors and difficulties you might encounter when upgrading, and how to resolve them. +## 0.39.0 + +### [Aztec.nr] Mutable delays in `SharedMutable` + +The type signature for `SharedMutable` changed from `SharedMutable` to `SharedMutable`. 
The behavior is the same as before, except the delay can now be changed after deployment by calling `schedule_delay_change`. + ## 0.38.0 ### [Aztec.nr] Emmiting encrypted logs diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr index 533639390d8..13b726cc2af 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr @@ -1,4 +1,5 @@ mod shared_mutable; +mod scheduled_delay_change; mod scheduled_value_change; mod shared_mutable_private_getter; diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/scheduled_delay_change.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/scheduled_delay_change.nr new file mode 100644 index 00000000000..55634984f33 --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/scheduled_delay_change.nr @@ -0,0 +1,512 @@ +use dep::protocol_types::traits::{Serialize, Deserialize, FromField, ToField}; +use dep::std::cmp::min; + +// This data structure is used by SharedMutable to store the minimum delay with which a ScheduledValueChange object can +// schedule a change. +// This delay is initially equal to INITIAL_DELAY, and can be safely mutated to any other value over time. This mutation +// is performed via `schedule_change` in order to satisfy ScheduledValueChange constraints: if e.g. we allowed for the +// delay to be decreased immediately then it'd be possible for the state variable to schedule a value change with a +// reduced delay, invalidating prior private reads. +struct ScheduledDelayChange { + // Both pre and post are stored in public storage, so by default they are zeroed. By wrapping them in an Option, + // they default to Option::none(), which we detect and replace with INITIAL_DELAY.
The end result is that a + // ScheduledDelayChange that has not been initialized has a delay equal to INITIAL_DELAY, which is the desired + // effect. Once initialized, the Option will never be none again. + pre: Option, + post: Option, + // Block at which `post` value is used instead of `pre` + block_of_change: u32, + // The _dummy variable forces INITIAL_DELAY to be interpreted as a numeric value. This is a workaround to + // https://github.com/noir-lang/noir/issues/4633. Remove once resolved. + _dummy: [Field; INITIAL_DELAY], +} + +impl ScheduledDelayChange { + pub fn new(pre: Option, post: Option, block_of_change: u32) -> Self { + Self { pre, post, block_of_change, _dummy: [0; INITIAL_DELAY] } + } + + /// Returns the current value of the delay stored in the data structure. + /// This function only returns a meaningful value when called in public with the current block number - for + /// historical private reads use `get_effective_minimum_delay_at` instead. + pub fn get_current(self, current_block_number: u32) -> u32 { + // The post value becomes the current one at the block of change, so any transaction that is included in the + // block of change will use the post value. + + if current_block_number < self.block_of_change { + self.pre.unwrap_or(INITIAL_DELAY) + } else { + self.post.unwrap_or(INITIAL_DELAY) + } + } + + /// Returns the scheduled change, i.e. the post-change delay and the block at which it will become the current + /// delay. Note that this block may be in the past if the change has already taken place. + /// Additionally, further changes might be later scheduled, potentially canceling the one returned by this function. + pub fn get_scheduled(self) -> (u32, u32) { + (self.post.unwrap_or(INITIAL_DELAY), self.block_of_change) + } + + /// Mutates the delay change by scheduling a change at the current block number. This function is only meaningful + /// when called in public with the current block number. 
+ /// The block at which the new delay will become effective is determined automatically: + /// - when increasing the delay, the change is effective immediately + /// - when reducing the delay, the change will take effect after a delay equal to the difference between old and + /// new delay. For example, if reducing from 3 days to 1 day, the reduction will be scheduled to happen after 2 + /// days. + pub fn schedule_change(&mut self, new: u32, current_block_number: u32) { + let current = self.get_current(current_block_number); + + // When changing the delay value we must ensure that it is not possible to produce a value change with a delay + // shorter than the current one. + let blocks_until_change = if new > current { + // Increasing the delay value can therefore be done immediately: this does not invalidate prior constraints + // about how quickly a value might be changed (indeed it strengthens them). + 0 + } else { + // Decreasing the delay requires waiting for the difference between current and new delay in order to ensure + // that overall the current delay is respected. + // + // current delay earliest value block of change + // block block of change if delay remained unchanged + // =======N=========================|================================X=================> + // ^ ^ ^ + // |-------------------------|--------------------------------| + // | blocks until change new delay | + // ------------------------------------------------------------ + // current delay + current - new + }; + + self.pre = Option::some(current); + self.post = Option::some(new); + self.block_of_change = current_block_number + blocks_until_change; + } + + /// Returns the minimum delay before a value might mutate due to a scheduled change, from the perspective of some + /// historical block number. It only returns a meaningful value when called in private with historical blocks.
This + /// function can be used alongside `ScheduledValueChange.get_block_horizon` to properly constrain the + /// `max_block_number` transaction property when reading mutable shared state. + /// This value typically equals the current delay at the block following the historical one (the earliest one in + /// which a value change could be scheduled), but it also considers scenarios in which a delay reduction is + /// scheduled to happen in the near future, resulting in a way to schedule a change with an overall delay lower than + /// the current one. + pub fn get_effective_minimum_delay_at(self, historical_block_number: u32) -> u32 { + if self.block_of_change <= historical_block_number { + // If no delay changes were scheduled, then the delay value at the historical block (post) is guaranteed to + // hold due to how further delay changes would be scheduled by `schedule_change`. + self.post.unwrap_or(INITIAL_DELAY) + } else { + // If a change is scheduled, then the effective delay might be lower than the current one (pre). At the + // block of change the current delay will be the scheduled one, with an overall delay from the historical + // block number equal to the number of blocks until the change plus the new delay. If this value is lower + // than the current delay, then that is the effective minimum delay. 
+ // + // historical + // block delay actual earliest value + // v block of change block of change + // =========NS=====================|=============================X===========Y=====> + // ^ ^ ^ ^ + // earliest block in | | | + // which to schedule change | | | + // | | | | + // |----------------------|------------------------------ | + // | blocks new delay | + // | until change | + // | | + // |----------------------------------------------------------------| + // current delay at the earliest block in + // which to scheduled value change + + let blocks_until_change = self.block_of_change - (historical_block_number + 1); + + min( + self.pre.unwrap_or(INITIAL_DELAY), + blocks_until_change + self.post.unwrap_or(INITIAL_DELAY) + ) + } + } +} + +impl Serialize<1> for ScheduledDelayChange { + fn serialize(self) -> [Field; 1] { + // We pack all three u32 values into a single U128, which is made up of two u64 limbs. + // Low limb: [ pre_inner: u32 | post_inner: u32 ] + // High limb: [ empty | pre_is_some: u8 | post_is_some: u8 | block_of_change: u32 ] + + let lo = ((self.pre.unwrap_unchecked() as u64) * (1 << 32)) + + (self.post.unwrap_unchecked() as u64); + + let hi = (self.pre.is_some() as u64) * (1 << 33) + + (self.post.is_some() as u64 * (1 << 32)) + + self.block_of_change as u64; + + let packed = U128::from_u64s_le(lo, hi); + + [packed.to_integer()] + } +} + +impl Deserialize<1> for ScheduledDelayChange { + fn deserialize(input: [Field; 1]) -> Self { + let packed = U128::from_integer(input[0]); + + // We use division and modulo to clear the bits that correspond to other values when unpacking. 
+ + let pre_is_some = ((packed.hi as u64) / (1 << 33)) as bool; + let pre_inner = ((packed.lo as u64) / (1 << 32)) as u32; + + let post_is_some = (((packed.hi as u64) / (1 << 32)) % (1 << 1)) as bool; + let post_inner = ((packed.lo as u64) % (1 << 32)) as u32; + + let block_of_change = ((packed.hi as u64) % (1 << 32)) as u32; + + Self { + pre: if pre_is_some { Option::some(pre_inner) } else { Option::none() }, + post: if post_is_some { Option::some(post_inner) } else { Option::none() }, + block_of_change, + _dummy: [0; INITIAL_DELAY], + } + } +} + +mod test { + use crate::state_vars::shared_mutable::scheduled_delay_change::ScheduledDelayChange; + + global TEST_INITIAL_DELAY = 13; + + fn assert_equal_after_conversion(original: ScheduledDelayChange) { + // We have to do explicit type annotations because Noir lacks turbofish support. + // TODO: improve syntax once https://github.com/noir-lang/noir/issues/4710 is implemented. + let converted: ScheduledDelayChange = ScheduledDelayChange::deserialize((original).serialize()); + + assert_eq(original.pre, converted.pre); + assert_eq(original.post, converted.post); + assert_eq(original.block_of_change, converted.block_of_change); + } + + #[test] + fn test_serde() { + let pre = 1; + let post = 2; + let block_of_change = 50; + + assert_equal_after_conversion(ScheduledDelayChange::new(Option::some(pre), Option::some(post), block_of_change)); + assert_equal_after_conversion(ScheduledDelayChange::new(Option::some(pre), Option::none(), block_of_change)); + assert_equal_after_conversion(ScheduledDelayChange::new(Option::none(), Option::some(post), block_of_change)); + assert_equal_after_conversion(ScheduledDelayChange::new(Option::none(), Option::none(), block_of_change)); + } + + #[test] + fn test_serde_large_values() { + let max_u32 = (1 << 32) - 1; + + let pre = max_u32 as u32; + let post = (max_u32 - 1) as u32; + let block_of_change = (max_u32 - 2) as u32; + + 
assert_equal_after_conversion(ScheduledDelayChange::new(Option::some(pre), Option::some(post), block_of_change)); + assert_equal_after_conversion(ScheduledDelayChange::new(Option::some(pre), Option::none(), block_of_change)); + assert_equal_after_conversion(ScheduledDelayChange::new(Option::none(), Option::some(post), block_of_change)); + assert_equal_after_conversion(ScheduledDelayChange::new(Option::none(), Option::none(), block_of_change)); + } + + fn get_non_initial_delay_change( + pre: u32, + post: u32, + block_of_change: u32 + ) -> ScheduledDelayChange { + ScheduledDelayChange::new(Option::some(pre), Option::some(post), block_of_change) + } + + fn get_initial_delay_change() -> ScheduledDelayChange { + ScheduledDelayChange::deserialize([0]) + } + + #[test] + fn test_get_current() { + let pre = 1; + let post = 2; + let block_of_change = 50; + + let delay_change = get_non_initial_delay_change(pre, post, block_of_change); + + assert_eq(delay_change.get_current(0), pre); + assert_eq(delay_change.get_current(block_of_change - 1), pre); + assert_eq(delay_change.get_current(block_of_change), post); + assert_eq(delay_change.get_current(block_of_change + 1), post); + } + + #[test] + fn test_get_current_initial() { + let delay_change = get_initial_delay_change(); + + assert_eq(delay_change.get_current(0), TEST_INITIAL_DELAY); + assert_eq(delay_change.get_current(1), TEST_INITIAL_DELAY); + } + + #[test] + fn test_get_scheduled() { + let pre = 1; + let post = 2; + let block_of_change = 50; + + let delay_change = get_non_initial_delay_change(pre, post, block_of_change); + + assert_eq(delay_change.get_scheduled(), (post, block_of_change)); + } + + #[test] + fn test_get_scheduled_initial() { + let delay_change = get_initial_delay_change(); + + assert_eq(delay_change.get_scheduled(), (TEST_INITIAL_DELAY, 0)); + } + + #[test] + fn test_schedule_change_to_shorter_delay_before_change() { + let pre = 15; + let post = 25; + let block_of_change = 500; + + let new = 10; + let 
current_block_number = block_of_change - 50;
+
+        let mut delay_change = get_non_initial_delay_change(pre, post, block_of_change);
+        delay_change.schedule_change(new, current_block_number);
+
+        // Because we re-schedule before the last scheduled change takes effect, the old `post` value is lost. The
+        // schedule time is determined by the difference between the current value (pre) and new delay.
+        assert_eq(delay_change.pre.unwrap(), pre);
+        assert_eq(delay_change.post.unwrap(), new);
+        assert_eq(delay_change.block_of_change, current_block_number + pre - new);
+    }
+
+    #[test]
+    fn test_schedule_change_to_shorter_delay_after_change() {
+        let pre = 15;
+        let post = 25;
+        let block_of_change = 500;
+
+        let new = 10;
+        let current_block_number = block_of_change + 50;
+
+        let mut delay_change = get_non_initial_delay_change(pre, post, block_of_change);
+        delay_change.schedule_change(new, current_block_number);
+
+        // The schedule time is determined by the difference between the current value (ex post, now pre) and new delay.
+        assert_eq(delay_change.pre.unwrap(), post);
+        assert_eq(delay_change.post.unwrap(), new);
+        assert_eq(delay_change.block_of_change, current_block_number + post - new);
+    }
+
+    #[test]
+    fn test_schedule_change_to_shorter_delay_from_initial() {
+        let new = TEST_INITIAL_DELAY - 1;
+        let current_block_number = 50;
+
+        let mut delay_change = get_initial_delay_change();
+        delay_change.schedule_change(new, current_block_number);
+
+        // Like in the after change scenario, the schedule time is determined by the difference between the current value
+        // (initial) and new delay.
+ assert_eq(delay_change.pre.unwrap(), TEST_INITIAL_DELAY); + assert_eq(delay_change.post.unwrap(), new); + assert_eq(delay_change.block_of_change, current_block_number + TEST_INITIAL_DELAY - new); + } + + #[test] + fn test_schedule_change_to_longer_delay_before_change() { + let pre = 15; + let post = 25; + let block_of_change = 500; + + let new = 40; + let current_block_number = block_of_change - 50; + + let mut delay_change = get_non_initial_delay_change(pre, post, block_of_change); + delay_change.schedule_change(new, current_block_number); + + // Because we re-schedule before the last scheduled change takes effect, the old `post` value is lost. The + // change is effective immediately because the new delay is longer than the current one. + assert_eq(delay_change.pre.unwrap(), pre); + assert_eq(delay_change.post.unwrap(), new); + assert_eq(delay_change.block_of_change, current_block_number); + assert_eq(delay_change.get_current(current_block_number), new); + } + + #[test] + fn test_schedule_change_to_longer_delay_after_change() { + let pre = 15; + let post = 25; + let block_of_change = 500; + + let new = 40; + let current_block_number = block_of_change + 50; + + let mut delay_change = get_non_initial_delay_change(pre, post, block_of_change); + delay_change.schedule_change(new, current_block_number); + + // Change is effective immediately because the new delay is longer than the current one. 
+ assert_eq(delay_change.pre.unwrap(), post); + assert_eq(delay_change.post.unwrap(), new); + assert_eq(delay_change.block_of_change, current_block_number); + assert_eq(delay_change.get_current(current_block_number), new); + } + + #[test] + fn test_schedule_change_to_longer_delay_from_initial() { + let new = TEST_INITIAL_DELAY + 1; + let current_block_number = 50; + + let mut delay_change = get_initial_delay_change(); + delay_change.schedule_change(new, current_block_number); + + // Like in the after change scenario, change is effective immediately because the new delay is longer than the + // current one. + assert_eq(delay_change.pre.unwrap(), TEST_INITIAL_DELAY); + assert_eq(delay_change.post.unwrap(), new); + assert_eq(delay_change.block_of_change, current_block_number); + assert_eq(delay_change.get_current(current_block_number), new); + } + + fn assert_effective_minimum_delay_invariants( + delay_change: &mut ScheduledDelayChange, + historical_block_number: u32, + effective_minimum_delay: u32 + ) { + // The effective minimum delays guarantees the earliest block in which a scheduled value change could be made + // effective. No action, even if executed immediately after the historical block, should result in a scheduled + // value change having a block of change lower than this. + let expected_earliest_value_change_block = historical_block_number + 1 + effective_minimum_delay; + + if delay_change.block_of_change > historical_block_number { + // If a delay change is already scheduled to happen in the future, we then must consider the scenario in + // which a value change is scheduled to occur right as the delay changes and becomes the current one. 
+ let delay_change_block = delay_change.block_of_change; + + let value_change_block = delay_change_block + delay_change.get_current(delay_change_block); + assert(expected_earliest_value_change_block <= value_change_block); + } + + // Another possibility would be to schedule a value change immediately after the historical block. + let change_schedule_block = historical_block_number + 1; + let value_change_block = change_schedule_block + delay_change.get_current(change_schedule_block); + assert(expected_earliest_value_change_block <= value_change_block); + + // Finally, a delay reduction could be scheduled immediately after the historical block. We reduce the delay to + // zero, which means that at the delay block of change there'll be no delay and a value change could be + // performed immediately then. + delay_change.schedule_change(0, historical_block_number + 1); + assert(expected_earliest_value_change_block <= delay_change.block_of_change); + } + + #[test] + fn test_get_effective_delay_at_before_change_in_far_future() { + let pre = 15; + let post = 25; + let block_of_change = 500; + + let historical_block_number = 200; + + let mut delay_change = get_non_initial_delay_change(pre, post, block_of_change); + + // The scheduled delay change is far into the future (further than the current delay is), so it doesn't affect + // the effective delay, which is simply the current one (pre). 
+ let effective_minimum_delay = delay_change.get_effective_minimum_delay_at(historical_block_number); + assert_eq(effective_minimum_delay, pre); + + assert_effective_minimum_delay_invariants( + &mut delay_change, + historical_block_number, + effective_minimum_delay + ); + } + + #[test] + fn test_get_effective_delay_at_before_change_to_long_delay() { + let pre = 15; + let post = 25; + let block_of_change = 500; + + let historical_block_number = 495; + + let mut delay_change = get_non_initial_delay_change(pre, post, block_of_change); + + // The scheduled delay change will be effective soon (it's fewer blocks away than the current delay), but due to + // it being larger than the current one it doesn't affect the effective delay, which is simply the current one + // (pre). + let effective_minimum_delay = delay_change.get_effective_minimum_delay_at(historical_block_number); + assert_eq(effective_minimum_delay, pre); + + assert_effective_minimum_delay_invariants( + &mut delay_change, + historical_block_number, + effective_minimum_delay + ); + } + + #[test] + fn test_get_effective_delay_at_before_near_change_to_short_delay() { + let pre = 15; + let post = 3; + let block_of_change = 500; + + let historical_block_number = 495; + + let mut delay_change = get_non_initial_delay_change(pre, post, block_of_change); + + // The scheduled delay change will be effective soon (it's fewer blocks away than the current delay), and it's + // changing to a value smaller than the current one. This means that at the block of change the delay will be + // reduced, and a delay change would be scheduled there with an overall delay lower than the current one. + // The effective delay therefore is the new delay plus the number of blocks that need to elapse until it becomes + // effective (i.e. until the block of change). 
+ let effective_minimum_delay = delay_change.get_effective_minimum_delay_at(historical_block_number); + assert_eq(effective_minimum_delay, post + block_of_change - (historical_block_number + 1)); + + assert_effective_minimum_delay_invariants( + &mut delay_change, + historical_block_number, + effective_minimum_delay + ); + } + + #[test] + fn test_get_effective_delay_at_after_change() { + let pre = 15; + let post = 25; + let block_of_change = 500; + + let historical_block_number = block_of_change + 50; + + let mut delay_change = get_non_initial_delay_change(pre, post, block_of_change); + + // No delay change is scheduled, so the effective delay is simply the current one (post). + let effective_minimum_delay = delay_change.get_effective_minimum_delay_at(historical_block_number); + assert_eq(effective_minimum_delay, post); + + assert_effective_minimum_delay_invariants( + &mut delay_change, + historical_block_number, + effective_minimum_delay + ); + } + + #[test] + fn test_get_effective_delay_at_initial() { + let mut delay_change = get_initial_delay_change(); + + let historical_block_number = 200; + + // Like in the after change scenario, no delay change is scheduled, so the effective delay is simply the current + // one (initial). 
+ let effective_minimum_delay = delay_change.get_effective_minimum_delay_at(historical_block_number); + assert_eq(effective_minimum_delay, TEST_INITIAL_DELAY); + + assert_effective_minimum_delay_invariants( + &mut delay_change, + historical_block_number, + effective_minimum_delay + ); + } +} diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/scheduled_value_change.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/scheduled_value_change.nr index 52aba6277ea..bfdbe356506 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/scheduled_value_change.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/scheduled_value_change.nr @@ -1,13 +1,15 @@ use dep::protocol_types::traits::{Serialize, Deserialize, FromField, ToField}; +use dep::std::cmp::min; -// This data structure is used by SharedMutable to represent a value that changes from `pre` to `post` at some block +// This data structure is used by SharedMutable to represent a value that changes from `pre` to `post` at some block // called the `block_of_change`. The value can only be made to change by scheduling a change event at some future block -// of change after some minimum delay measured in blocks has elapsed. This means that at any given block number we know -// both the current value and the smallest block number at which the value might change - this is called the +// of change after some minimum delay measured in blocks has elapsed. This means that at any given block number we know +// both the current value and the smallest block number at which the value might change - this is called the // 'block horizon'. struct ScheduledValueChange { pre: T, post: T, + // Block at which `post` value is used instead of `pre` block_of_change: u32, } @@ -16,11 +18,11 @@ impl ScheduledValueChange { Self { pre, post, block_of_change } } - /// Returns the value stored in the data structure at a given block. 
This function can be called both in public - /// (where `block_number` is simply the current block number, i.e. the number of the block in which the current - /// transaction will be included) and in private (where `block_number` is the historical block number that is used + /// Returns the value stored in the data structure at a given block. This function can be called both in public + /// (where `block_number` is simply the current block number, i.e. the number of the block in which the current + /// transaction will be included) and in private (where `block_number` is the historical block number that is used /// to construct the proof). - /// Reading in private is only safe if the transaction's `max_block_number` property is set to a value lower or + /// Reading in private is only safe if the transaction's `max_block_number` property is set to a value lower or /// equal to the block horizon (see `get_block_horizon()`). pub fn get_current_at(self, block_number: u32) -> T { // The post value becomes the current one at the block of change. This means different things in each realm: @@ -35,7 +37,7 @@ impl ScheduledValueChange { } } - /// Returns the scheduled change, i.e. the post-change value and the block at which it will become the current + /// Returns the scheduled change, i.e. the post-change value and the block at which it will become the current /// value. Note that this block may be in the past if the change has already taken place. /// Additionally, further changes might be later scheduled, potentially canceling the one returned by this function. pub fn get_scheduled(self) -> (T, u32) { @@ -43,15 +45,18 @@ impl ScheduledValueChange { } /// Returns the largest block number at which the value returned by `get_current_at` is known to remain the current - /// value. This value is only meaningful in private when constructing a proof at some `historical_block_number`, + /// value. 
This value is only meaningful in private when constructing a proof at some `historical_block_number`, /// since due to its asynchronous nature private execution cannot know about any later scheduled changes. - /// The caller of this function must know how quickly the value can change due to a scheduled change in the form of - /// `minimum_delay`. If the delay itself is immutable, then this is just its duration. + /// The caller of this function must know how quickly the value can change due to a scheduled change in the form of + /// `minimum_delay`. If the delay itself is immutable, then this is just its duration. If the delay is mutable + /// however, then this value is the 'effective minimum delay' (obtained by calling + /// `ScheduledDelayChange.get_effective_minimum_delay_at`), which equals the minimum number of blocks that need to + /// elapse from the next block until the value changes, regardless of further delay changes. /// The value returned by `get_current_at` in private when called with a historical block number is only safe to use /// if the transaction's `max_block_number` property is set to a value lower or equal to the block horizon computed /// using the same historical block number. pub fn get_block_horizon(self, historical_block_number: u32, minimum_delay: u32) -> u32 { - // The block horizon is the very last block in which the current value is known. Any block past the horizon + // The block horizon is the very last block in which the current value is known. Any block past the horizon // (i.e. with a block number larger than the block horizon) may have a different current value. Reading the // current value in private typically requires constraining the maximum valid block number to be equal to the // block horizon. @@ -61,10 +66,10 @@ impl ScheduledValueChange { // change is scheduled. 
This did not happen at the historical block number (or else it would not be // greater or equal to the block of change), and therefore could only happen after the historical block // number. The earliest would be the immediate next block, and so the smallest possible next block of change - // equals `historical_block_number + 1 + minimum_delay`. Our block horizon is simply the previous block to + // equals `historical_block_number + 1 + minimum_delay`. Our block horizon is simply the previous block to // that one. // - // block of historical + // block of historical // change block block horizon // =======|=============N===================H===========> // ^ ^ @@ -74,34 +79,34 @@ impl ScheduledValueChange { historical_block_number + minimum_delay } else { // If the block of change has not yet been mined however, then there are two possible scenarios. - // a) It could be so far into the future that the block horizon is actually determined by the minimum - // delay, because a new change could be scheduled and take place _before_ the currently scheduled one. - // This is similar to the scenario where the block of change is in the past: the time horizon is the + // a) It could be so far into the future that the block horizon is actually determined by the minimum + // delay, because a new change could be scheduled and take place _before_ the currently scheduled one. + // This is similar to the scenario where the block of change is in the past: the time horizon is the // block prior to the earliest one in which a new block of change might land. 
- // - // historical + // + // historical // block block horizon block of change // =====N=================================H=================|=========> // ^ ^ - // | | + // | | // ----------------------------------- // minimum delay // - // b) It could be fewer than `minimum_delay` blocks away from the historical block number, in which case - // the block of change would become the limiting factor for the time horizon, which would equal the - // block right before the block of change (since by definition the value changes at the block of + // b) It could be fewer than `minimum_delay` blocks away from the historical block number, in which case + // the block of change would become the limiting factor for the time horizon, which would equal the + // block right before the block of change (since by definition the value changes at the block of // change). // // historical block horizon // block block of change if not scheduled // =======N=============|===================H=================> // ^ ^ ^ - // | actual horizon | + // | actual horizon | // ----------------------------------- - // minimum delay - // + // minimum delay + // // Note that the current implementation does not allow the caller to set the block of change to an arbitrary - // value, and therefore scenario a) is not currently possible. However implementing #5501 would allow for + // value, and therefore scenario a) is not currently possible. However implementing #5501 would allow for // this to happen. // Because historical_block_number < self.block_of_change, then block_of_change > 0 and we can safely @@ -113,8 +118,8 @@ impl ScheduledValueChange { } } - /// Mutates a scheduled value change by scheduling a change at the current block number. This function is only - /// meaningful when called in public with the current block number. + /// Mutates the value by scheduling a change at the current block number. This function is only meaningful when + /// called in public with the current block number. 
pub fn schedule_change( &mut self, new_value: T, @@ -138,42 +143,45 @@ impl Serialize<3> for ScheduledValueChange { impl Deserialize<3> for ScheduledValueChange { fn deserialize(input: [Field; 3]) -> Self where T: FromField { - Self { - pre: FromField::from_field(input[0]), - post: FromField::from_field(input[1]), + Self { + pre: FromField::from_field(input[0]), + post: FromField::from_field(input[1]), block_of_change: FromField::from_field(input[2]), } } } -fn min(lhs: u32, rhs: u32) -> u32 { - if lhs < rhs { lhs } else { rhs } -} - -#[test] -fn test_min() { - assert(min(3, 5) == 3); - assert(min(5, 3) == 3); - assert(min(3, 3) == 3); -} - mod test { use crate::state_vars::shared_mutable::scheduled_value_change::ScheduledValueChange; global TEST_DELAY: u32 = 200; + #[test] + fn test_serde() { + let pre = 1; + let post = 2; + let block_of_change = 50; + + let original = ScheduledValueChange::new(pre, post, block_of_change); + let converted = ScheduledValueChange::deserialize((original).serialize()); + + assert_eq(original.pre, converted.pre); + assert_eq(original.post, converted.post); + assert_eq(original.block_of_change, converted.block_of_change); + } + #[test] fn test_get_current_at() { let pre = 1; let post = 2; let block_of_change = 50; - let value: ScheduledValueChange = ScheduledValueChange::new(pre, post, block_of_change); + let value_change: ScheduledValueChange = ScheduledValueChange::new(pre, post, block_of_change); - assert_eq(value.get_current_at(0), pre); - assert_eq(value.get_current_at(block_of_change - 1), pre); - assert_eq(value.get_current_at(block_of_change), post); - assert_eq(value.get_current_at(block_of_change + 1), post); + assert_eq(value_change.get_current_at(0), pre); + assert_eq(value_change.get_current_at(block_of_change - 1), pre); + assert_eq(value_change.get_current_at(block_of_change), post); + assert_eq(value_change.get_current_at(block_of_change + 1), post); } #[test] @@ -182,34 +190,34 @@ mod test { let post = 2; let 
block_of_change = 50; - let value: ScheduledValueChange = ScheduledValueChange::new(pre, post, block_of_change); + let value_change: ScheduledValueChange = ScheduledValueChange::new(pre, post, block_of_change); - assert_eq(value.get_scheduled(), (post, block_of_change)); + assert_eq(value_change.get_scheduled(), (post, block_of_change)); } fn assert_block_horizon_invariants( - value: &mut ScheduledValueChange, + value_change: &mut ScheduledValueChange, historical_block_number: u32, block_horizon: u32 ) { // The current value should not change at the block horizon (but it might later). - let current_at_historical = value.get_current_at(historical_block_number); - assert_eq(current_at_historical, value.get_current_at(block_horizon)); + let current_at_historical = value_change.get_current_at(historical_block_number); + assert_eq(current_at_historical, value_change.get_current_at(block_horizon)); // The earliest a new change could be scheduled in would be the immediate next block to the historical one. This // should result in the new block of change landing *after* the block horizon, and the current value still not // changing at the previously determined block_horizon. 
- let new = value.pre + value.post; // Make sure it's different to both pre and post - value.schedule_change( + let new = value_change.pre + value_change.post; // Make sure it's different to both pre and post + value_change.schedule_change( new, historical_block_number + 1, TEST_DELAY, historical_block_number + 1 + TEST_DELAY ); - assert(value.block_of_change > block_horizon); - assert_eq(current_at_historical, value.get_current_at(block_horizon)); + assert(value_change.block_of_change > block_horizon); + assert_eq(current_at_historical, value_change.get_current_at(block_horizon)); } #[test] @@ -217,12 +225,12 @@ mod test { let historical_block_number = 100; let block_of_change = 50; - let mut value: ScheduledValueChange = ScheduledValueChange::new(1, 2, block_of_change); + let mut value_change: ScheduledValueChange = ScheduledValueChange::new(1, 2, block_of_change); - let block_horizon = value.get_block_horizon(historical_block_number, TEST_DELAY); + let block_horizon = value_change.get_block_horizon(historical_block_number, TEST_DELAY); assert_eq(block_horizon, historical_block_number + TEST_DELAY); - assert_block_horizon_invariants(&mut value, historical_block_number, block_horizon); + assert_block_horizon_invariants(&mut value_change, historical_block_number, block_horizon); } #[test] @@ -230,12 +238,12 @@ mod test { let historical_block_number = 100; let block_of_change = 100; - let mut value: ScheduledValueChange = ScheduledValueChange::new(1, 2, block_of_change); + let mut value_change: ScheduledValueChange = ScheduledValueChange::new(1, 2, block_of_change); - let block_horizon = value.get_block_horizon(historical_block_number, TEST_DELAY); + let block_horizon = value_change.get_block_horizon(historical_block_number, TEST_DELAY); assert_eq(block_horizon, historical_block_number + TEST_DELAY); - assert_block_horizon_invariants(&mut value, historical_block_number, block_horizon); + assert_block_horizon_invariants(&mut value_change, historical_block_number, 
block_horizon); } #[test] @@ -243,15 +251,15 @@ mod test { let historical_block_number = 100; let block_of_change = 120; - let mut value: ScheduledValueChange = ScheduledValueChange::new(1, 2, block_of_change); + let mut value_change: ScheduledValueChange = ScheduledValueChange::new(1, 2, block_of_change); // Note that this is the only scenario in which the block of change informs the block horizon. // This may result in privacy leaks when interacting with applications that have a scheduled change // in the near future. - let block_horizon = value.get_block_horizon(historical_block_number, TEST_DELAY); + let block_horizon = value_change.get_block_horizon(historical_block_number, TEST_DELAY); assert_eq(block_horizon, block_of_change - 1); - assert_block_horizon_invariants(&mut value, historical_block_number, block_horizon); + assert_block_horizon_invariants(&mut value_change, historical_block_number, block_horizon); } #[test] @@ -259,25 +267,38 @@ mod test { let historical_block_number = 100; let block_of_change = 500; - let mut value: ScheduledValueChange = ScheduledValueChange::new(1, 2, block_of_change); + let mut value_change: ScheduledValueChange = ScheduledValueChange::new(1, 2, block_of_change); - let block_horizon = value.get_block_horizon(historical_block_number, TEST_DELAY); + let block_horizon = value_change.get_block_horizon(historical_block_number, TEST_DELAY); assert_eq(block_horizon, historical_block_number + TEST_DELAY); - assert_block_horizon_invariants(&mut value, historical_block_number, block_horizon); + assert_block_horizon_invariants(&mut value_change, historical_block_number, block_horizon); } #[test] - fn test_schedule_change_before_prior_change() { + fn test_get_block_horizon_n0_delay() { + let historical_block_number = 100; + let block_of_change = 50; + + let mut value_change: ScheduledValueChange = ScheduledValueChange::new(1, 2, block_of_change); + + let block_horizon = value_change.get_block_horizon(historical_block_number, 0); + // 
Since the block horizon equals the historical block number, it is not possible to read the current value in + // private since the transaction `max_block_number` property would equal an already mined block. + assert_eq(block_horizon, historical_block_number); + } + + #[test] + fn test_schedule_change_before_change() { let pre = 1; let post = 2; let block_of_change = 500; - let mut value: ScheduledValueChange = ScheduledValueChange::new(pre, post, block_of_change); + let mut value_change: ScheduledValueChange = ScheduledValueChange::new(pre, post, block_of_change); let new = 42; let current_block_number = block_of_change - 50; - value.schedule_change( + value_change.schedule_change( new, current_block_number, TEST_DELAY, @@ -285,30 +306,48 @@ mod test { ); // Because we re-schedule before the last scheduled change takes effect, the old `post` value is lost. - assert_eq(value.pre, pre); - assert_eq(value.post, new); - assert_eq(value.block_of_change, current_block_number + TEST_DELAY); + assert_eq(value_change.pre, pre); + assert_eq(value_change.post, new); + assert_eq(value_change.block_of_change, current_block_number + TEST_DELAY); } #[test] - fn test_schedule_change_after_prior_change() { + fn test_schedule_change_after_change() { let pre = 1; let post = 2; let block_of_change = 500; - let mut value: ScheduledValueChange = ScheduledValueChange::new(pre, post, block_of_change); + let mut value_change: ScheduledValueChange = ScheduledValueChange::new(pre, post, block_of_change); let new = 42; let current_block_number = block_of_change + 50; - value.schedule_change( + value_change.schedule_change( new, current_block_number, TEST_DELAY, current_block_number + TEST_DELAY ); - assert_eq(value.pre, post); - assert_eq(value.post, new); - assert_eq(value.block_of_change, current_block_number + TEST_DELAY); + assert_eq(value_change.pre, post); + assert_eq(value_change.post, new); + assert_eq(value_change.block_of_change, current_block_number + TEST_DELAY); + } + + #[test] + 
fn test_schedule_change_no_delay() { + let pre = 1; + let post = 2; + let block_of_change = 500; + + let mut value_change: ScheduledValueChange = ScheduledValueChange::new(pre, post, block_of_change); + + let new = 42; + let current_block_number = block_of_change + 50; + value_change.schedule_change(new, current_block_number, 0, current_block_number); + + assert_eq(value_change.pre, post); + assert_eq(value_change.post, new); + assert_eq(value_change.block_of_change, current_block_number); + assert_eq(value_change.get_current_at(current_block_number), new); } } diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr index 8ab974c7389..a36bda7e6cb 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr @@ -3,17 +3,24 @@ use dep::protocol_types::{hash::pedersen_hash, traits::FromField}; use crate::context::{PrivateContext, PublicContext, Context}; use crate::history::public_storage::public_storage_historical_read; use crate::public_storage; -use crate::state_vars::{storage::Storage, shared_mutable::scheduled_value_change::ScheduledValueChange}; +use crate::state_vars::{ + storage::Storage, + shared_mutable::{scheduled_value_change::ScheduledValueChange, scheduled_delay_change::ScheduledDelayChange} +}; -struct SharedMutable { +struct SharedMutable { context: Context, storage_slot: Field, - // The _dummy variable forces DELAY to be interpreted as a numberic value. This is a workaround to - // https://github.com/noir-lang/noir/issues/4633. Remove once resolved. - _dummy: [Field; DELAY], } -impl Storage for SharedMutable {} +// This will make the Aztec macros require that T implements the Serialize trait, and allocate N storage slots to +// this state variable. 
This is incorrect, since what we actually store is: +// - a ScheduledValueChange, which requires 1 + 2 * M storage slots, where M is the serialization length of T +// - a ScheduledDelayChange, which requires another storage slot +// +// TODO https://github.com/AztecProtocol/aztec-packages/issues/5736: change the storage allocation scheme so that we +// can actually use it here +impl Storage for SharedMutable {} // SharedMutable stores a value of type T that is: // - publicly known (i.e. unencrypted) @@ -24,79 +31,139 @@ impl Storage for SharedMutable {} // the value is not changed immediately but rather a value change is scheduled to happen in the future after some delay // measured in blocks. Reads in private are only valid as long as they are included in a block not too far into the // future, so that they can guarantee the value will not have possibly changed by then (because of the delay). -impl SharedMutable { +// The delay for changing a value is initially equal to INITIAL_DELAY, but can be changed by calling +// `schedule_delay_change`. +impl SharedMutable { pub fn new(context: Context, storage_slot: Field) -> Self { assert(storage_slot != 0, "Storage slot 0 not allowed. 
Storage slots must start from 1."); - Self { context, storage_slot, _dummy: [0; DELAY] } + Self { context, storage_slot } } pub fn schedule_value_change(self, new_value: T) { let context = self.context.public.unwrap(); - let mut scheduled_value_change: ScheduledValueChange = public_storage::read(self.get_derived_storage_slot()); + let mut value_change = self.read_value_change(); + let delay_change = self.read_delay_change(); let block_number = context.block_number() as u32; + let current_delay = delay_change.get_current(block_number); + // TODO: make this configurable // https://github.com/AztecProtocol/aztec-packages/issues/5501 - let block_of_change = block_number + DELAY; + let block_of_change = block_number + current_delay; + value_change.schedule_change(new_value, block_number, current_delay, block_of_change); + + self.write_value_change(value_change); + } + + pub fn schedule_delay_change(self, new_delay: u32) { + let context = self.context.public.unwrap(); + let mut delay_change = self.read_delay_change(); + + let block_number = context.block_number() as u32; - scheduled_value_change.schedule_change(new_value, block_number, DELAY, block_of_change); + delay_change.schedule_change(new_delay, block_number); - public_storage::write(self.get_derived_storage_slot(), scheduled_value_change); + self.write_delay_change(delay_change); } pub fn get_current_value_in_public(self) -> T { - let scheduled_value_change: ScheduledValueChange = public_storage::read(self.get_derived_storage_slot()); + let block_number = self.context.public.unwrap().block_number() as u32; + self.read_value_change().get_current_at(block_number) + } + pub fn get_current_delay_in_public(self) -> u32 { let block_number = self.context.public.unwrap().block_number() as u32; - scheduled_value_change.get_current_at(block_number) + self.read_delay_change().get_current(block_number) } pub fn get_scheduled_value_in_public(self) -> (T, u32) { - let scheduled_value_change: ScheduledValueChange = 
public_storage::read(self.get_derived_storage_slot()); - scheduled_value_change.get_scheduled() + self.read_value_change().get_scheduled() + } + + pub fn get_scheduled_delay_in_public(self) -> (u32, u32) { + self.read_delay_change().get_scheduled() } pub fn get_current_value_in_private(self) -> T where T: FromField { let mut context = self.context.private.unwrap(); - let (scheduled_value_change, historical_block_number) = self.historical_read_from_public_storage(*context); - let block_horizon = scheduled_value_change.get_block_horizon(historical_block_number, DELAY); + // When reading the current value in private we construct a historical state proof for the public value. + // However, since this value might change, we must constrain the maximum transaction block number as this proof + // will only be valid for however many blocks we can ensure the value will not change, which will depend on the + // current delay and any scheduled delay changes. + + let (value_change, delay_change, historical_block_number) = self.historical_read_from_public_storage(*context); + + // We use the effective minimum delay as opposed to the current delay at the historical block as this one also + // takes into consideration any scheduled delay changes. + // For example, consider a scenario in which at block 200 the current delay was 50. We may naively think that + // the earliest we could change the value would be at block 251 by scheduling immediately after the historical + // block, i.e. at block 201. But if there was a delay change scheduled for block 210 to reduce the delay to 20 + // blocks, then if a value change was scheduled at block 210 it would go into effect at block 230, which is + // earlier than what we'd expect if we only considered the current delay. 
+ let effective_minimum_delay = delay_change.get_effective_minimum_delay_at(historical_block_number); + let block_horizon = value_change.get_block_horizon(historical_block_number, effective_minimum_delay); // We prevent this transaction from being included in any block after the block horizon, ensuring that the // historical public value matches the current one, since it can only change after the horizon. context.set_tx_max_block_number(block_horizon); - scheduled_value_change.get_current_at(historical_block_number) + value_change.get_current_at(historical_block_number) } fn historical_read_from_public_storage( self, context: PrivateContext - ) -> (ScheduledValueChange, u32) where T: FromField { - let derived_slot = self.get_derived_storage_slot(); - + ) -> (ScheduledValueChange, ScheduledDelayChange, u32) where T: FromField { // Ideally the following would be simply public_storage::read_historical, but we can't implement that yet. - let mut raw_fields = [0; 3]; + let value_change_slot = self.get_value_change_storage_slot(); + let mut raw_value_change_fields = [0; 3]; for i in 0..3 { - raw_fields[i] = public_storage_historical_read( + raw_value_change_fields[i] = public_storage_historical_read( context, - derived_slot + i as Field, + value_change_slot + i as Field, context.this_address() ); } - let scheduled_value: ScheduledValueChange = ScheduledValueChange::deserialize(raw_fields); + // Ideally the following would be simply public_storage::read_historical, but we can't implement that yet. 
+ let delay_change_slot = self.get_delay_change_storage_slot(); + let raw_delay_change_fields = [public_storage_historical_read(context, delay_change_slot, context.this_address())]; + + let value_change = ScheduledValueChange::deserialize(raw_value_change_fields); + let delay_change = ScheduledDelayChange::deserialize(raw_delay_change_fields); + let historical_block_number = context.historical_header.global_variables.block_number as u32; - (scheduled_value, historical_block_number) + (value_change, delay_change, historical_block_number) + } + + fn read_value_change(self) -> ScheduledValueChange { + public_storage::read(self.get_value_change_storage_slot()) + } + + fn read_delay_change(self) -> ScheduledDelayChange { + public_storage::read(self.get_delay_change_storage_slot()) + } + + fn write_value_change(self, value_change: ScheduledValueChange) { + public_storage::write(self.get_value_change_storage_slot(), value_change); + } + + fn write_delay_change(self, delay_change: ScheduledDelayChange) { + public_storage::write(self.get_delay_change_storage_slot(), delay_change); } - fn get_derived_storage_slot(self) -> Field { - // Since we're actually storing three values (a ScheduledValueChange struct), we hash the storage slot to get a - // unique location in which we can safely store as much data as we need. This could be removed if we informed - // the slot allocator of how much space we need so that proper padding could be added. - // See https://github.com/AztecProtocol/aztec-packages/issues/5492 + // Since we can't rely on the native storage allocation scheme, we hash the storage slot to get a unique location in + // which we can safely store as much data as we need. 
+ // See https://github.com/AztecProtocol/aztec-packages/issues/5492 and + // https://github.com/AztecProtocol/aztec-packages/issues/5736 + fn get_value_change_storage_slot(self) -> Field { pedersen_hash([self.storage_slot, 0], 0) } + + fn get_delay_change_storage_slot(self) -> Field { + pedersen_hash([self.storage_slot, 1], 0) + } } mod test { @@ -104,7 +171,10 @@ mod test { use crate::{ context::{PublicContext, PrivateContext, Context}, - state_vars::shared_mutable::shared_mutable::SharedMutable, + state_vars::shared_mutable::{ + shared_mutable::SharedMutable, scheduled_value_change::ScheduledValueChange, + scheduled_delay_change::ScheduledDelayChange + }, oracle::get_public_data_witness::PublicDataWitness }; @@ -113,12 +183,22 @@ mod test { address::AztecAddress, public_data_tree_leaf_preimage::PublicDataTreeLeafPreimage }; - fn setup(private: bool) -> (SharedMutable, Field) { + global pre_value = 13; + global post_value = 42; + + global new_value = 57; + + global pre_delay = 20; + global post_delay = 15; + + global TEST_INITIAL_DELAY = 3; + + fn setup(private: bool) -> (SharedMutable, Field) { let block_number = 40; let context = create_context(block_number, private); let storage_slot = 57; - let state_var: SharedMutable = SharedMutable::new(context, storage_slot); + let state_var: SharedMutable = SharedMutable::new(context, storage_slot); (state_var, block_number) } @@ -135,129 +215,333 @@ mod test { } } - global TEST_DELAY = 20; + fn mock_value_change_read( + state_var: SharedMutable, + pre: Field, + post: Field, + block_of_change: Field + ) { + let value_change_slot = state_var.get_value_change_storage_slot(); + let fields = ScheduledValueChange::new(pre, post, block_of_change as u32).serialize(); + + let _ = OracleMock::mock("storageRead").with_params((value_change_slot, 3)).returns(fields).times(1); + } + + fn mock_delay_change_read( + state_var: SharedMutable, + pre: Field, + post: Field, + block_of_change: Field + ) { + let delay_change_slot = 
state_var.get_delay_change_storage_slot(); + let delay_change: ScheduledDelayChange = ScheduledDelayChange::new( + Option::some(pre as u32), + Option::some(post as u32), + block_of_change as u32 + ); + let fields = delay_change.serialize(); + + let _ = OracleMock::mock("storageRead").with_params((delay_change_slot, 1)).returns(fields).times(1); + } + + fn mock_delay_change_read_uninitialized(state_var: SharedMutable) { + let delay_change_slot = state_var.get_delay_change_storage_slot(); + let _ = OracleMock::mock("storageRead").with_params((delay_change_slot, 1)).returns([0]).times(1); + } + + // Useful since change and delay values are always the global pre/post ones, so we typically only care about their + // block of change. + fn mock_value_and_delay_read( + state_var: SharedMutable, + value_block_of_change: Field, + delay_block_of_change: Field + ) { + mock_value_change_read(state_var, pre_value, post_value, value_block_of_change); + mock_delay_change_read(state_var, pre_delay, post_delay, delay_block_of_change); + } + + fn mock_value_change_write() -> OracleMock { + OracleMock::mock("storageWrite").returns([0; 3]) + } + + fn mock_delay_change_write() -> OracleMock { + OracleMock::mock("storageWrite").returns([0; 1]) + } + + fn assert_value_change_write( + state_var: SharedMutable, + mock: OracleMock, + pre: Field, + post: Field, + block_of_change: Field + ) { + let fields = ScheduledValueChange::new(pre, post, block_of_change as u32).serialize(); + assert_eq(mock.get_last_params(), (state_var.get_value_change_storage_slot(), fields)); + } - global pre = 13; - global post = 42; + fn assert_delay_change_write( + state_var: SharedMutable, + mock: OracleMock, + pre: Field, + post: Field, + block_of_change: Field + ) { + let delay_change: ScheduledDelayChange = ScheduledDelayChange::new( + Option::some(pre as u32), + Option::some(post as u32), + block_of_change as u32 + ); + + let fields = delay_change.serialize(); + assert_eq(mock.get_last_params(), 
(state_var.get_delay_change_storage_slot(), fields)); + } #[test] - fn test_get_current_value_in_public_before_change() { + fn test_get_current_value_in_public() { let (state_var, block_number) = setup(false); - let slot = state_var.get_derived_storage_slot(); - // Change in the future, current value is pre - OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number + 1]); - assert_eq(state_var.get_current_value_in_public(), pre); + mock_value_change_read(state_var, pre_value, post_value, block_number + 1); + assert_eq(state_var.get_current_value_in_public(), pre_value); + + // Change in the current block, current value is post + mock_value_change_read(state_var, pre_value, post_value, block_number); + assert_eq(state_var.get_current_value_in_public(), post_value); + + // Change in the past, current value is post + mock_value_change_read(state_var, pre_value, post_value, block_number - 1); + assert_eq(state_var.get_current_value_in_public(), post_value); } #[test] - fn test_get_current_value_in_public_at_change() { + fn test_get_scheduled_value_in_public() { let (state_var, block_number) = setup(false); - let slot = state_var.get_derived_storage_slot(); + // Change in the future, scheduled is post (always is) + mock_value_change_read(state_var, pre_value, post_value, block_number + 1); + assert_eq(state_var.get_scheduled_value_in_public(), (post_value, (block_number + 1) as u32)); - // Change in the current block, current value is post - OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number]); - assert_eq(state_var.get_current_value_in_public(), post); + // Change in the current block, scheduled is post (always is) + mock_value_change_read(state_var, pre_value, post_value, block_number); + assert_eq(state_var.get_scheduled_value_in_public(), (post_value, block_number as u32)); + + // Change in the past, scheduled is post (always is) + mock_value_change_read(state_var, pre_value, post_value, 
block_number - 1); + assert_eq(state_var.get_scheduled_value_in_public(), (post_value, (block_number - 1) as u32)); } #[test] - fn test_get_current_value_in_public_after_change() { + fn test_get_current_delay_in_public() { let (state_var, block_number) = setup(false); - let slot = state_var.get_derived_storage_slot(); + // Uninitialized + mock_delay_change_read_uninitialized(state_var); + assert_eq(state_var.get_current_delay_in_public(), TEST_INITIAL_DELAY as u32); + + // Change in the future, current value is pre + mock_delay_change_read(state_var, pre_delay, post_delay, block_number + 1); + assert_eq(state_var.get_current_delay_in_public(), pre_delay as u32); + + // Change in the current block, current value is post + mock_delay_change_read(state_var, pre_delay, post_delay, block_number); + assert_eq(state_var.get_current_delay_in_public(), post_delay as u32); // Change in the past, current value is post - OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number - 1]); - assert_eq(state_var.get_current_value_in_public(), post); + mock_delay_change_read(state_var, pre_delay, post_delay, block_number - 1); + assert_eq(state_var.get_current_delay_in_public(), post_delay as u32); } #[test] - fn test_get_scheduled_value_in_public_before_change() { + fn test_get_scheduled_delay_in_public_before_change() { let (state_var, block_number) = setup(false); - let slot = state_var.get_derived_storage_slot(); + // Uninitialized + mock_delay_change_read_uninitialized(state_var); + assert_eq(state_var.get_scheduled_delay_in_public(), (TEST_INITIAL_DELAY as u32, 0)); // Change in the future, scheduled is post (always is) - OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number + 1]); - assert_eq(state_var.get_scheduled_value_in_public(), (post, (block_number + 1) as u32)); + mock_delay_change_read(state_var, pre_delay, post_delay, block_number + 1); + assert_eq(state_var.get_scheduled_delay_in_public(), (post_delay 
as u32, (block_number + 1) as u32)); + + // Change in the current block, scheduled is post (always is) + mock_delay_change_read(state_var, pre_delay, post_delay, block_number); + assert_eq(state_var.get_scheduled_delay_in_public(), (post_delay as u32, block_number as u32)); + + // Change in the past, scheduled is post (always is) + mock_delay_change_read(state_var, pre_delay, post_delay, block_number - 1); + assert_eq(state_var.get_scheduled_delay_in_public(), (post_delay as u32, (block_number - 1) as u32)); } #[test] - fn test_get_scheduled_value_in_public_at_change() { + fn test_schedule_value_change_no_delay() { let (state_var, block_number) = setup(false); - let slot = state_var.get_derived_storage_slot(); + // Last value change was in the past + mock_value_change_read(state_var, pre_value, post_value, 0); - // Change in the current block, scheduled is post (always is) - OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number]); - assert_eq(state_var.get_scheduled_value_in_public(), (post, block_number as u32)); + // Current delay is 0 + mock_delay_change_read(state_var, 0, 0, block_number); + + let write_mock = mock_value_change_write(); + + state_var.schedule_value_change(new_value); + + // The new value has a block of change equal to the current block, i.e. 
it is the current value + assert_value_change_write(state_var, write_mock, post_value, new_value, block_number); } #[test] - fn test_get_scheduled_value_in_public_after_change() { + fn test_schedule_value_change_before_change_no_scheduled_delay() { let (state_var, block_number) = setup(false); - let slot = state_var.get_derived_storage_slot(); + // Value change in the future, delay change in the past + mock_value_and_delay_read(state_var, block_number + 1, block_number - 1); + let write_mock = mock_value_change_write(); - // Change in the past, scheduled is post (always is) - OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number - 1]); - assert_eq(state_var.get_scheduled_value_in_public(), (post, (block_number - 1) as u32)); + state_var.schedule_value_change(new_value); + + // The new scheduled value change replaces the old one, post delay (current) is used + assert_value_change_write( + state_var, + write_mock, + pre_value, + new_value, + block_number + post_delay + ); } #[test] - fn test_schedule_value_change_before_change() { + fn test_schedule_value_change_before_change_scheduled_delay() { let (state_var, block_number) = setup(false); - let slot = state_var.get_derived_storage_slot(); + // Value change in the future, delay change in the future + mock_value_and_delay_read(state_var, block_number + 1, block_number + 1); - // Change in the future - OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number + 1]); + let write_mock = mock_value_change_write(); - let write_mock = OracleMock::mock("storageWrite").returns([0; 3]); // The oracle return value is actually unused - - let new_value = 42; state_var.schedule_value_change(new_value); - // The new scheduled change replaces the old one - assert_eq(write_mock.get_last_params(), (slot, [pre, new_value, block_number + TEST_DELAY])); + // The new scheduled value change replaces the old one, pre delay (current, not scheduled) is used + 
assert_value_change_write( + state_var, + write_mock, + pre_value, + new_value, + block_number + pre_delay + ); } #[test] - fn test_schedule_value_change_at_change() { + fn test_schedule_value_change_after_change_no_scheduled_delay() { let (state_var, block_number) = setup(false); - let slot = state_var.get_derived_storage_slot(); + // Value change in the past, delay change in the past + mock_value_and_delay_read(state_var, block_number - 1, block_number - 1); + let write_mock = mock_value_change_write(); + + state_var.schedule_value_change(new_value); + + // The previous post value becomes the pre value, post delay (current) is used + assert_value_change_write( + state_var, + write_mock, + post_value, + new_value, + block_number + post_delay + ); + } + + #[test] + fn test_schedule_value_change_after_change_scheduled_delay() { + let (state_var, block_number) = setup(false); - // Change in the current block - OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number]); + // Value change in the past, delay change in the future + mock_value_and_delay_read(state_var, block_number - 1, block_number + 1); - let write_mock = OracleMock::mock("storageWrite").returns([0; 3]); // The oracle return value is actually unused + let write_mock = mock_value_change_write(); - let new_value = 42; state_var.schedule_value_change(new_value); - // The previous 'post' value is the current one and becomes the 'pre' value - assert_eq(write_mock.get_last_params(), (slot, [post, new_value, block_number + TEST_DELAY])); + // The previous post value becomes the pre value, pre delay (current, not scheduled) is used + assert_value_change_write( + state_var, + write_mock, + post_value, + new_value, + block_number + pre_delay + ); } #[test] - fn test_schedule_value_change_after_change() { + fn test_schedule_delay_increase_before_change() { let (state_var, block_number) = setup(false); - let slot = state_var.get_derived_storage_slot(); + // Delay change in future, 
current delay is pre + mock_delay_change_read(state_var, pre_delay, post_delay, block_number + 1); + let write_mock = mock_delay_change_write(); - // Change in the past - OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number - 1]); + let new_delay = pre_delay + 1; + state_var.schedule_delay_change(new_delay as u32); - let write_mock = OracleMock::mock("storageWrite").returns([0; 3]); // The oracle return value is actually unused + // The previous scheduled change is lost, change is immediate (due to increase) + assert_delay_change_write(state_var, write_mock, pre_delay, new_delay, block_number); + } - let new_value = 42; - state_var.schedule_value_change(new_value); + #[test] + fn test_schedule_delay_reduction_before_change() { + let (state_var, block_number) = setup(false); + + // Delay change in future, current delay is pre + mock_delay_change_read(state_var, pre_delay, post_delay, block_number + 1); + let write_mock = mock_delay_change_write(); + + let new_delay = pre_delay - 1; + state_var.schedule_delay_change(new_delay as u32); + + // The previous scheduled change is lost, change delay equals difference (due to reduction) + assert_delay_change_write( + state_var, + write_mock, + pre_delay, + new_delay, + block_number + pre_delay - new_delay + ); + } + + #[test] + fn test_schedule_delay_increase_after_change() { + let (state_var, block_number) = setup(false); + + // Delay change in the past, current delay is post + mock_delay_change_read(state_var, pre_delay, post_delay, block_number - 1); + let write_mock = mock_delay_change_write(); + + let new_delay = post_delay + 1; + state_var.schedule_delay_change(new_delay as u32); + + // The current value becomes pre, change is immediate (due to increase) + assert_delay_change_write(state_var, write_mock, post_delay, new_delay, block_number); + } + + #[test] + fn test_schedule_delay_reduction_after_change() { + let (state_var, block_number) = setup(false); - // The previous 'post' 
value is the current one and becomes the 'pre' value - assert_eq(write_mock.get_last_params(), (slot, [post, new_value, block_number + TEST_DELAY])); + // Delay change in the past, current delay is post + mock_delay_change_read(state_var, pre_delay, post_delay, block_number - 1); + let write_mock = mock_delay_change_write(); + + let new_delay = post_delay - 1; + state_var.schedule_delay_change(new_delay as u32); + + // The current value becomes pre, change delay equals difference (due to reduction) + assert_delay_change_write( + state_var, + write_mock, + post_delay, + new_delay, + block_number + post_delay - new_delay + ); } #[test] diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable_private_getter.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable_private_getter.nr index d4e9ddb6bd2..7da8f1524fc 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable_private_getter.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable_private_getter.nr @@ -3,22 +3,25 @@ use dep::protocol_types::{hash::pedersen_hash, traits::FromField, address::Aztec use crate::context::{PrivateContext, Context}; use crate::history::public_storage::public_storage_historical_read; use crate::public_storage; -use crate::state_vars::{storage::Storage, shared_mutable::scheduled_value_change::ScheduledValueChange}; +use crate::state_vars::{ + storage::Storage, + shared_mutable::{scheduled_delay_change::ScheduledDelayChange, scheduled_value_change::ScheduledValueChange} +}; -struct SharedMutablePrivateGetter { +struct SharedMutablePrivateGetter { context: PrivateContext, // The contract address of the contract we want to read from other_contract_address: AztecAddress, // The storage slot where the SharedMutable is stored on the other contract storage_slot: Field, - // The _dummy variable forces DELAY to be interpreted as a numberic value. 
This is a workaround to + // The _dummy variable forces INITIAL_DELAY to be interpreted as a numberic value. This is a workaround to // https://github.com/noir-lang/noir/issues/4633. Remove once resolved. - _dummy: [Field; DELAY], + _dummy: [Field; INITIAL_DELAY], } // We have this as a view-only interface to reading Shared Mutables in other contracts. // Currently the Shared Mutable does not support this. We can adapt SharedMutable at a later date -impl SharedMutablePrivateGetter { +impl SharedMutablePrivateGetter { pub fn new( context: PrivateContext, other_contract_address: AztecAddress, @@ -26,48 +29,50 @@ impl SharedMutablePrivateGetter { ) -> Self { assert(storage_slot != 0, "Storage slot 0 not allowed. Storage slots must start from 1."); assert(other_contract_address.to_field() != 0, "Other contract address cannot be 0"); - Self { context, other_contract_address, storage_slot, _dummy: [0; DELAY] } + Self { context, other_contract_address, storage_slot, _dummy: [0; INITIAL_DELAY] } } pub fn get_current_value_in_private(self) -> T where T: FromField { let mut context = self.context; - let (scheduled_value_change, historical_block_number) = self.historical_read_from_public_storage(context); - let block_horizon = scheduled_value_change.get_block_horizon(historical_block_number, DELAY); + let (value_change, delay_change, historical_block_number) = self.historical_read_from_public_storage(context); + let effective_minimum_delay = delay_change.get_effective_minimum_delay_at(historical_block_number); + let block_horizon = value_change.get_block_horizon(historical_block_number, effective_minimum_delay); - // We prevent this transaction from being included in any block after the block horizon, ensuring that the - // historical public value matches the current one, since it can only change after the horizon. 
context.set_tx_max_block_number(block_horizon); - scheduled_value_change.get_current_at(historical_block_number) + value_change.get_current_at(historical_block_number) } fn historical_read_from_public_storage( self, context: PrivateContext - ) -> (ScheduledValueChange, u32) where T: FromField { - let derived_slot = self.get_derived_storage_slot(); - - // Ideally the following would be simply public_storage::read_historical, but we can't implement that yet. - let mut raw_fields = [0; 3]; + ) -> (ScheduledValueChange, ScheduledDelayChange, u32) where T: FromField { + let value_change_slot = self.get_value_change_storage_slot(); + let mut raw_value_change_fields = [0; 3]; for i in 0..3 { - raw_fields[i] = public_storage_historical_read( - context, - derived_slot + i as Field, - self.other_contract_address - ); + raw_value_change_fields[i] = public_storage_historical_read( + context, + value_change_slot + i as Field, + self.other_contract_address + ); } - let scheduled_value: ScheduledValueChange = ScheduledValueChange::deserialize(raw_fields); + let delay_change_slot = self.get_delay_change_storage_slot(); + let raw_delay_change_fields = [public_storage_historical_read(context, delay_change_slot, context.this_address())]; + + let value_change = ScheduledValueChange::deserialize(raw_value_change_fields); + let delay_change = ScheduledDelayChange::deserialize(raw_delay_change_fields); + let historical_block_number = context.historical_header.global_variables.block_number as u32; - (scheduled_value, historical_block_number) + (value_change, delay_change, historical_block_number) } - fn get_derived_storage_slot(self) -> Field { - // Since we're actually storing three values (a ScheduledValueChange struct), we hash the storage slot to get a - // unique location in which we can safely store as much data as we need. This could be removed if we informed - // the slot allocator of how much space we need so that proper padding could be added. 
- // See https://github.com/AztecProtocol/aztec-packages/issues/5492 + fn get_value_change_storage_slot(self) -> Field { pedersen_hash([self.storage_slot, 0], 0) } + + fn get_delay_change_storage_slot(self) -> Field { + pedersen_hash([self.storage_slot, 1], 0) + } } diff --git a/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr b/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr index 0de4f7c2093..deb5e34315e 100644 --- a/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/auth_contract/src/main.nr @@ -21,8 +21,6 @@ contract Auth { fn constructor(admin: AztecAddress) { assert(!admin.is_zero(), "invalid admin"); storage.admin.initialize(admin); - // Note that we don't initialize authorized with any value: because storage defaults to 0 it'll have a 'post' - // value of 0 and block of change 0, meaning it is effectively autoinitialized at the zero address. } // docs:start:shared_mutable_schedule From dba835d1a1c6214cf4a4c2a62e4bcee49bf83e10 Mon Sep 17 00:00:00 2001 From: ludamad Date: Thu, 9 May 2024 13:44:04 -0400 Subject: [PATCH 31/43] chore(ci): use on-demand runners (#6311) until a better solution with retrying evicted spot [skip ci] --- .github/workflows/ci.yml | 2 +- .github/workflows/start-spot.yml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7a6fcbe4485..a0b41c1a6a6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: # Use instance types with low interruption rate in us-east-2 https://aws.amazon.com/ec2/spot/instance-advisor/ ec2_instance_type: r6in.32xlarge r6a.32xlarge i4i.32xlarge r7iz.32xlarge ec2_ami_id: ami-04d8422a9ba4de80f - ec2_spot_instance_strategy: BestEffort + ec2_spot_instance_strategy: None ec2_instance_ttl: 40 # refreshed by jobs secrets: inherit diff --git a/.github/workflows/start-spot.yml b/.github/workflows/start-spot.yml index 
eb13f205cb4..dbb5ab0626d 100644 --- a/.github/workflows/start-spot.yml +++ b/.github/workflows/start-spot.yml @@ -21,6 +21,7 @@ jobs: # Use instance types with low interruption rate in us-east-2 https://aws.amazon.com/ec2/spot/instance-advisor/ ec2_instance_type: r6in.32xlarge r6a.32xlarge i4i.32xlarge r7iz.32xlarge ec2_ami_id: ami-04d8422a9ba4de80f + ec2_spot_instance_strategy: None ec2_instance_ttl: 40 # refreshed by jobs secrets: inherit From 4c9bfb040c667da1e5ebff06ed55864a8a7094ed Mon Sep 17 00:00:00 2001 From: ludamad Date: Thu, 9 May 2024 14:53:42 -0400 Subject: [PATCH 32/43] chore(ci): revert inline cache push for now (#6318) [ci skip] --- scripts/earthly-ci | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/scripts/earthly-ci b/scripts/earthly-ci index fe625d87015..7810489502a 100755 --- a/scripts/earthly-ci +++ b/scripts/earthly-ci @@ -26,10 +26,11 @@ MAX_ATTEMPTS=3 ATTEMPT_COUNT=0 export EARTHLY_USE_INLINE_CACHE=true -if [ "$GITHUB_REF_NAME" == "master" ]; then - export EARTHLY_SAVE_INLINE_CACHE=true - export EARTHLY_PUSH=true -fi +# TODO(AD) to be investigated +#if [ "$GITHUB_REF_NAME" == "master" ]; then +# export EARTHLY_SAVE_INLINE_CACHE=true +# export EARTHLY_PUSH=true +#fi # Handle earthly commands and retries while [ $ATTEMPT_COUNT -lt $MAX_ATTEMPTS ]; do From 553078c5a21159b5c4db0fd5d76a5dae41d94e6a Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Thu, 9 May 2024 14:19:34 -0600 Subject: [PATCH 33/43] feat: process designated teardown function call (#6244) ### Deviations from [the spec](https://docs.aztec.network/protocol-specs/gas-and-fees/kernel-tracking): I needed to create a new stack for processing the teardown calls, instead of storing a single call. I.e. ```diff class PublicKernelCircuitPublicInputs { // ... 
other fields --- +CallRequest public_teardown_call_request +++ +CallRequest[MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX] public_teardown_call_stack } ``` This is because a teardown function can call a nested function, and, similar to the current design for public/private calls, we need a way to keep track of our execution stack. Further, in order to pass in the CallRequest to the private kernel circuits, I needed to add a new parameter to the PrivateCallData. ### Overview We designate a function to be run for teardown as: ``` context.set_public_teardown_function( context.this_address(), FunctionSelector::from_signature("pay_fee_with_shielded_rebate(Field,(Field),Field)"), [amount, asset.to_field(), secret_hash] ); ``` As I note in a comment, I created #6277 for getting back to something like: ``` FPC::at(context.this_address()).pay_fee_with_shielded_rebate(amount, asset, secret_hash).set_public_teardown_function(&mut context) ``` This sets `publicTeardownFunctionCall: PublicCallRequest` in the encapsulating `ClientExecutionContext`, which defaults to `PublicCallRequest.empty()`. When private simulation is finished, we collect an array of all the public teardown functions that were set during the simulation. We assert that the length of that array is 0 or 1. When proving, we convert the `publicTeardownFunctionCall` to a `CallRequest` if it is not empty, otherwise we use `CallRequest.empty()`. This is specified in the `PrivateCallData` which is passed to the private kernel circuit. In the private kernel circuits, we assert that if the `public_teardown_function_hash` is not zero on the `PrivateCircuitPublicInputs`, then it matches the hash of the `publicTeardownFunctionCall` in the `PrivateCallData`. Further, we assert that if the teardown call request in the `PrivateCallData` is not empty, then the teardown call request from the previous kernel *is* empty. In the private kernel tail, we assert that the public teardown call request is empty. 
In private kernel tail to public, we initialize the teardown call stack to have the single element corresponding to the call request if it is not empty, and initialize it to an empty array otherwise. Since teardown now has its own stack, we update the logic for how to know when we are in the different phases to simply look at each of their stacks: - setup uses end_non_revertible.public_call_stack - app logic uses end.public_call_stack - teardown uses public_teardown_call_stack ### Note: This does not change the fact that teardown is still non-revertible. That is covered by #5924 --- docs/docs/misc/glossary/call_types.md | 2 +- .../gas-and-fees/kernel-tracking.md | 3 +- .../aztec/src/context/private_context.nr | 1 - .../app_subscription_contract/src/main.nr | 8 +- .../contracts/fpc_contract/src/main.nr | 16 ++- .../contracts/lending_contract/src/main.nr | 2 + .../contracts/test_contract/src/main.nr | 9 ++ .../crates/private-kernel-lib/src/common.nr | 23 ++-- .../kernel_circuit_public_inputs_composer.nr | 5 + ...e_kernel_circuit_public_inputs_composer.nr | 20 +++- .../src/private_kernel_init.nr | 3 +- .../src/private_kernel_inner.nr | 3 +- .../src/private_kernel_tail.nr | 12 ++- .../src/private_kernel_tail_to_public.nr | 19 +++- .../crates/public-kernel-lib/src/common.nr | 47 +++++--- .../src/public_kernel_app_logic.nr | 1 + .../src/public_kernel_setup.nr | 18 +--- .../src/public_kernel_teardown.nr | 13 +-- ...te_kernel_circuit_public_inputs_builder.nr | 9 +- .../public_kernel_circuit_public_inputs.nr | 18 ++-- ...ic_kernel_circuit_public_inputs_builder.nr | 8 +- .../abis/private_kernel/private_call_data.nr | 1 + .../crates/types/src/tests/fixture_builder.nr | 8 +- .../src/tests/private_call_data_builder.nr | 3 + yarn-project/Earthfile | 7 +- yarn-project/circuit-types/src/mocks.ts | 5 +- .../src/structs/kernel/private_call_data.ts | 6 ++ ...ivate_kernel_tail_circuit_public_inputs.ts | 18 ++-- .../public_kernel_circuit_public_inputs.ts | 18 ++-- 
.../src/structs/public_call_request.ts | 11 ++ .../circuits.js/src/tests/factories.ts | 6 +- .../src/type_conversion.ts | 7 +- .../prover-client/src/mocks/test_context.ts | 6 +- .../prover/bb_prover_public_kernel.test.ts | 5 +- .../src/kernel_prover/kernel_prover.test.ts | 2 + .../pxe/src/kernel_prover/kernel_prover.ts | 6 ++ .../pxe/src/pxe_service/pxe_service.ts | 5 +- yarn-project/sequencer-client/src/config.ts | 6 ++ .../src/sequencer/sequencer.ts | 7 +- .../src/tx_validator/gas_validator.test.ts | 2 +- .../src/tx_validator/gas_validator.ts | 49 +-------- .../src/tx_validator/phases_validator.test.ts | 102 ++++-------------- .../src/tx_validator/phases_validator.ts | 25 ++--- .../src/tx_validator/tx_validator_factory.ts | 8 +- .../simulator/src/acvm/oracle/oracle.ts | 19 ++++ .../simulator/src/acvm/oracle/typed_oracle.ts | 11 ++ .../src/client/client_execution_context.ts | 94 ++++++++++++++-- .../src/client/execution_result.test.ts | 3 +- .../simulator/src/client/execution_result.ts | 23 +++- .../src/client/private_execution.test.ts | 11 ++ .../simulator/src/client/private_execution.ts | 2 + .../src/public/abstract_phase_manager.ts | 26 ++--- .../src/public/public_processor.test.ts | 50 +++++---- 53 files changed, 479 insertions(+), 313 deletions(-) diff --git a/docs/docs/misc/glossary/call_types.md b/docs/docs/misc/glossary/call_types.md index 3de6d61d834..b572426c85c 100644 --- a/docs/docs/misc/glossary/call_types.md +++ b/docs/docs/misc/glossary/call_types.md @@ -112,7 +112,7 @@ Since public execution can only be performed by the sequencer, public functions Since the public call is made asynchronously, any return values or side effects are not available during private execution. If the public function fails once executed, the entire transaction is reverted inncluding state changes caused by the private part, such as new notes or nullifiers. Note that this does result in gas being spent, like in the case of the EVM. 
-#include_code enqueue_public /noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr rust +#include_code enqueue_public /noir-projects/noir-contracts/contracts/lending_contract/src/main.nr rust It is also possible to create public functions that can _only_ be invoked by privately enqueing a call from the same contract, which can very useful to update public state after private exection (e.g. update a token's supply after privately minting). This is achieved by annotating functions with `#[aztec(internal)]`. diff --git a/docs/docs/protocol-specs/gas-and-fees/kernel-tracking.md b/docs/docs/protocol-specs/gas-and-fees/kernel-tracking.md index b80bc71f5b2..0ed89caf481 100644 --- a/docs/docs/protocol-specs/gas-and-fees/kernel-tracking.md +++ b/docs/docs/protocol-specs/gas-and-fees/kernel-tracking.md @@ -26,6 +26,7 @@ PrivateContextInputs --> TxContext class PrivateCallData { +PrivateCallStackItem call_stack_item + +CallRequest public_teardown_call_request } PrivateCallData --> PrivateCallStackItem @@ -295,7 +296,7 @@ class PublicKernelCircuitPublicInputs { +PublicAccumulatedData end +CombinedConstantData constants +AztecAddress fee_payer - +CallRequest public_teardown_call_request + +CallRequest[MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX] public_teardown_call_stack +u8 revert_code } PublicKernelCircuitPublicInputs --> PublicAccumulatedData diff --git a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr index 9d7010e3107..f6fe853c123 100644 --- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr @@ -573,7 +573,6 @@ impl PrivateContext { assert(function_selector.eq(item.function_data.selector)); assert_eq(item.public_inputs.call_context.side_effect_counter, self.side_effect_counter); - // We increment the sideffect counter by one, to account for the call itself being a side effect. 
assert(args_hash == item.public_inputs.args_hash); diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr index 2bf04c8628c..e0532007937 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr @@ -48,10 +48,12 @@ contract AppSubscription { note.remaining_txs -= 1; storage.subscriptions.at(user_address).replace(&mut note, true); - // docs:start:enqueue_public let gas_limit = storage.gas_token_limit_per_tx.read_private(); - GasToken::at(storage.gas_token_address.read_private()).pay_fee(gas_limit).enqueue(&mut context); - // docs:end:enqueue_public + context.set_public_teardown_function( + storage.gas_token_address.read_private(), + FunctionSelector::from_signature("pay_fee(Field)"), + [gas_limit] + ); context.end_setup(); diff --git a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr index c877e8c7ff0..878dd0c84a3 100644 --- a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr @@ -22,13 +22,25 @@ contract FPC { fn fee_entrypoint_private(amount: Field, asset: AztecAddress, secret_hash: Field, nonce: Field) { assert(asset == storage.other_asset.read_private()); Token::at(asset).unshield(context.msg_sender(), context.this_address(), amount, nonce).call(&mut context); - FPC::at(context.this_address()).pay_fee_with_shielded_rebate(amount, asset, secret_hash).enqueue(&mut context); + // Would like to get back to + // FPC::at(context.this_address()).pay_fee_with_shielded_rebate(amount, asset, secret_hash).set_public_teardown_function(&mut context); + context.set_public_teardown_function( + context.this_address(), + 
FunctionSelector::from_signature("pay_fee_with_shielded_rebate(Field,(Field),Field)"), + [amount, asset.to_field(), secret_hash] + ); } #[aztec(private)] fn fee_entrypoint_public(amount: Field, asset: AztecAddress, nonce: Field) { FPC::at(context.this_address()).prepare_fee(context.msg_sender(), amount, asset, nonce).enqueue(&mut context); - FPC::at(context.this_address()).pay_fee(context.msg_sender(), amount, asset).enqueue(&mut context); + // TODO(#6277) for improving interface: + // FPC::at(context.this_address()).pay_fee(context.msg_sender(), amount, asset).set_public_teardown_function(&mut context); + context.set_public_teardown_function( + context.this_address(), + FunctionSelector::from_signature("pay_fee((Field),Field,(Field))"), + [context.msg_sender().to_field(), amount, asset.to_field()] + ); } #[aztec(public)] diff --git a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr index 909f0417849..c640a523829 100644 --- a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr @@ -104,11 +104,13 @@ contract Lending { ) { let on_behalf_of = compute_identifier(secret, on_behalf_of, context.msg_sender().to_field()); let _res = Token::at(collateral_asset).unshield(from, context.this_address(), amount, nonce).call(&mut context); + // docs:start:enqueue_public Lending::at(context.this_address())._deposit( AztecAddress::from_field(on_behalf_of), amount, collateral_asset ).enqueue(&mut context); + // docs:end:enqueue_public } #[aztec(public)] diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index 97210ff7b09..aba898225ff 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -191,6 +191,15 @@ contract Test { 
args.hash() } + #[aztec(private)] + fn test_setting_teardown() { + context.set_public_teardown_function( + context.this_address(), + FunctionSelector::from_signature("dummy_public_call()"), + [] + ); + } + // Purely exists for testing #[aztec(public)] fn create_l2_to_l1_message_public(amount: Field, secret_hash: Field, portal_address: EthAddress) { diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr index 940e0230db2..8f828e9a6ca 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/common.nr @@ -64,16 +64,20 @@ fn is_valid_caller(request_from_stack: CallRequest, fn_being_verified: PrivateCa & (request_from_stack.caller_context.is_empty() | valid_caller_context) } +fn validate_call_request(request: CallRequest, hash: Field, private_call: PrivateCallData) { + if hash != 0 { + assert_eq(request.hash, hash, "call stack hash does not match call request hash"); + assert(is_valid_caller(request, private_call), "invalid caller"); + } else { + assert(is_empty(request), "call requests length does not match the expected length"); + } +} + fn validate_call_requests(call_requests: [CallRequest; N], hashes: [Field; N], private_call: PrivateCallData) { for i in 0..N { let hash = hashes[i]; let request = call_requests[i]; - if hash != 0 { - assert_eq(request.hash, hash, "call stack hash does not match call request hash"); - assert(is_valid_caller(request, private_call), "invalid caller"); - } else { - assert(is_empty(request), "call requests length does not match the expected length"); - } + validate_call_request(request, hash, private_call); } } @@ -100,6 +104,13 @@ pub fn validate_private_call_data(private_call: PrivateCallData) { private_call_public_inputs.public_call_stack_hashes, private_call ); + + // Teardown call + validate_call_request( + 
private_call.public_teardown_call_request, + private_call_public_inputs.public_teardown_function_hash, + private_call + ); } fn contract_logic(private_call: PrivateCallData) { diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr index 05e4af96eae..f1b37f57318 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr @@ -98,6 +98,7 @@ impl KernelCircuitPublicInputsComposer { let _ = self.compose(); self.propagate_sorted_public_call_requests(); + self.propagate_public_teardown_call_request(); *self } @@ -223,6 +224,10 @@ impl KernelCircuitPublicInputsComposer { self.public_inputs.end.public_call_stack = array_to_bounded_vec(accumulated_data.public_call_stack); } + fn propagate_public_teardown_call_request(&mut self) { + self.public_inputs.public_teardown_call_request = self.previous_kernel.public_inputs.public_teardown_call_request; + } + fn squash_transient_data(&mut self) { verify_squashed_transient_note_hashes_and_nullifiers( self.public_inputs.end.new_note_hashes.storage, diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr index dda3224b07d..ef4e6008eb9 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_circuit_public_inputs_composer.nr @@ -19,6 +19,7 @@ struct DataSource { note_hash_nullifier_counters: [u32; MAX_NEW_NOTE_HASHES_PER_CALL], 
private_call_requests: [CallRequest; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL], public_call_requests: [CallRequest; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], + public_teardown_call_request: CallRequest, } struct PrivateKernelCircuitPublicInputsComposer { @@ -70,6 +71,8 @@ impl PrivateKernelCircuitPublicInputsComposer { let _call_request = public_inputs.end.private_call_stack.pop(); public_inputs.end.public_call_stack = array_to_bounded_vec(start.public_call_stack); + public_inputs.public_teardown_call_request = previous_kernel_public_inputs.public_teardown_call_request; + PrivateKernelCircuitPublicInputsComposer { public_inputs } } @@ -78,7 +81,8 @@ impl PrivateKernelCircuitPublicInputsComposer { private_call_public_inputs: PrivateCircuitPublicInputs, note_hash_nullifier_counters: [u32; MAX_NEW_NOTE_HASHES_PER_CALL], private_call_requests: [CallRequest; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL], - public_call_requests: [CallRequest; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL] + public_call_requests: [CallRequest; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], + public_teardown_call_request: CallRequest ) -> Self { let storage_contract_address = private_call_public_inputs.call_context.storage_contract_address; let source = DataSource { @@ -86,7 +90,8 @@ impl PrivateKernelCircuitPublicInputsComposer { private_call_public_inputs, note_hash_nullifier_counters, private_call_requests, - public_call_requests + public_call_requests, + public_teardown_call_request }; self.propagate_max_block_number(source); @@ -99,6 +104,7 @@ impl PrivateKernelCircuitPublicInputsComposer { self.propagate_logs(source); self.propagate_private_call_requests(source); self.propagate_public_call_requests(source); + self.propagate_public_teardown_call_request(source); *self } @@ -204,4 +210,14 @@ impl PrivateKernelCircuitPublicInputsComposer { } } } + + fn propagate_public_teardown_call_request(&mut self, source: DataSource) { + let call_request = source.public_teardown_call_request; + if !is_empty(call_request) { 
+ assert( + self.public_inputs.public_teardown_call_request.is_empty(), "Public teardown call request already set" + ); + self.public_inputs.public_teardown_call_request = call_request; + } + } } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr index bb4a7db587c..4c3872bac03 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr @@ -81,7 +81,8 @@ impl PrivateKernelInitCircuitPrivateInputs { private_call_public_inputs, self.hints.note_hash_nullifier_counters, self.private_call.private_call_stack, - self.private_call.public_call_stack + self.private_call.public_call_stack, + self.private_call.public_teardown_call_request ).finish() } } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr index 61a06ea345b..07eabb0f6e0 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_inner.nr @@ -52,7 +52,8 @@ impl PrivateKernelInnerCircuitPrivateInputs { private_call_public_inputs, self.hints.note_hash_nullifier_counters, self.private_call.private_call_stack, - self.private_call.public_call_stack + self.private_call.public_call_stack, + self.private_call.public_teardown_call_request ).finish() } } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr index 4d52011707f..98b6b9c08fa 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr @@ -10,7 +10,7 @@ use dep::types::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX }, - grumpkin_private_key::GrumpkinPrivateKey, utils::arrays::array_length + grumpkin_private_key::GrumpkinPrivateKey, utils::arrays::array_length, traits::is_empty }; // Can just be KernelCircuitPublicInputs. @@ -47,6 +47,9 @@ impl PrivateKernelTailCircuitPrivateInputs { assert_eq( array_length(previous_public_inputs.end.public_call_stack), 0, "Public call stack must be empty when executing the tail circuit" ); + assert( + is_empty(previous_public_inputs.public_teardown_call_request) == true, "Public teardown call request must be empty when executing the tail circuit" + ); // verify/aggregate the previous kernel verify_previous_kernel_proof(self.previous_kernel); @@ -571,6 +574,13 @@ mod tests { builder.failed(); } + #[test(should_fail_with="Public teardown call request must be empty when executing the tail circuit")] + unconstrained fn non_empty_public_teardown_call_request_should_fail() { + let mut builder = PrivateKernelTailInputsBuilder::new(); + builder.previous_kernel.set_public_teardown_call_request(1, false); + builder.failed(); + } + #[test(should_fail_with="The 0th nullifier in the accumulated nullifier array is zero")] unconstrained fn zero_0th_nullifier_fails() { let mut builder = PrivateKernelTailInputsBuilder::new(); diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr index ec2e8637cdd..42309b81435 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr @@ -10,7 +10,7 @@ use dep::types::{ MAX_NEW_NOTE_HASHES_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_NOTE_HASH_READ_REQUESTS_PER_TX, MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX, MAX_ENCRYPTED_LOGS_PER_TX, MAX_UNENCRYPTED_LOGS_PER_TX }, - grumpkin_private_key::GrumpkinPrivateKey, utils::arrays::array_length + grumpkin_private_key::GrumpkinPrivateKey, utils::arrays::array_length, traits::is_empty }; // Can just be PublicKernelCircuitPublicInputs. @@ -44,8 +44,10 @@ struct PrivateKernelTailToPublicCircuitPrivateInputs { impl PrivateKernelTailToPublicCircuitPrivateInputs { pub fn execute(self) -> PublicKernelCircuitPublicInputs { let previous_public_inputs = self.previous_kernel.public_inputs; + let mut total_public_calls = array_length(previous_public_inputs.end.public_call_stack); + total_public_calls += if is_empty(self.previous_kernel.public_inputs.public_teardown_call_request) {0} else {1}; assert( - array_length(previous_public_inputs.end.public_call_stack) != 0, "Public call stack must not be empty when exporting public kernel data from the tail circuit" + total_public_calls != 0, "Must have public calls when exporting public kernel data from the tail circuit" ); // verify/aggregate the previous kernel @@ -465,13 +467,22 @@ mod tests { builder.failed(); } - #[test(should_fail_with="Public call stack must not be empty when exporting public kernel data from the tail circuit")] - unconstrained fn empty_public_call_stack_should_fail() { + #[test(should_fail_with="Must have public calls when exporting public kernel data from the tail circuit")] + unconstrained fn no_public_calls_should_fail() { let mut builder = PrivateKernelTailToPublicInputsBuilder::new(); builder.previous_kernel.public_call_stack = BoundedVec::new(); builder.failed(); } + #[test] + unconstrained fn 
can_run_with_only_teardown() { + let mut builder = PrivateKernelTailToPublicInputsBuilder::new(); + builder.previous_kernel.public_call_stack = BoundedVec::new(); + builder.previous_kernel.set_public_teardown_call_request(1, false); + + builder.succeeded(); + } + #[test(should_fail_with="The 0th nullifier in the accumulated nullifier array is zero")] unconstrained fn zero_0th_nullifier_fails() { let mut builder = PrivateKernelTailToPublicInputsBuilder::new(); diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr index 83d5770a806..83dfd47ef6d 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr @@ -132,6 +132,8 @@ pub fn initialize_end_values( let start_non_revertible = previous_kernel.public_inputs.end_non_revertible; circuit_outputs.end_non_revertible.public_call_stack = array_to_bounded_vec(start_non_revertible.public_call_stack); + circuit_outputs.public_teardown_call_stack = array_to_bounded_vec(previous_kernel.public_inputs.public_teardown_call_stack); + let start = previous_kernel.public_inputs.validation_requests; circuit_outputs.validation_requests.max_block_number = previous_kernel.public_inputs.validation_requests.for_rollup.max_block_number; circuit_outputs.validation_requests.nullifier_read_requests = array_to_bounded_vec(start.nullifier_read_requests); @@ -175,6 +177,34 @@ fn is_valid_caller(request: CallRequest, public_call: PublicCallData) -> bool { & (request.caller_context.is_empty() | valid_caller_context) } +pub fn update_end_non_revertible_call_stack( + public_call: PublicCallData, + circuit_outputs: &mut PublicKernelCircuitPublicInputsBuilder +) { + let requests = validate_public_call_stack(public_call); + circuit_outputs.end_non_revertible.public_call_stack.extend_from_bounded_vec(requests); +} + +pub fn 
update_end_call_stack( + public_call: PublicCallData, + circuit_outputs: &mut PublicKernelCircuitPublicInputsBuilder +) { + let requests = validate_public_call_stack(public_call); + circuit_outputs.end.public_call_stack.extend_from_bounded_vec(requests); +} + +pub fn update_teardown_call_stack(public_call: PublicCallData, circuit_outputs: &mut PublicKernelCircuitPublicInputsBuilder) { + let requests = validate_public_call_stack(public_call); + circuit_outputs.public_teardown_call_stack.extend_from_bounded_vec(requests); +} + +fn validate_public_call_stack(public_call: PublicCallData) -> BoundedVec { + let public_call_requests = array_to_bounded_vec(public_call.public_call_stack); + let hashes = public_call.call_stack_item.public_inputs.public_call_stack_hashes; + validate_call_requests(public_call_requests, hashes, public_call); + public_call_requests +} + fn validate_call_requests( call_requests: BoundedVec, hashes: [Field; N], @@ -275,17 +305,11 @@ pub fn update_public_end_non_revertible_values( public_call: PublicCallData, circuit_outputs: &mut PublicKernelCircuitPublicInputsBuilder ) { - // Updates the circuit outputs with new state changes, call stack etc + // Updates the circuit outputs with new state changes // If this call is a static call, certain operations are disallowed, such as creating new state. perform_static_call_checks(public_call); - // Update public call stack. 
- let public_call_requests = array_to_bounded_vec(public_call.public_call_stack); - let hashes = public_call.call_stack_item.public_inputs.public_call_stack_hashes; - validate_call_requests(public_call_requests, hashes, public_call); - circuit_outputs.end_non_revertible.public_call_stack.extend_from_bounded_vec(public_call_requests); - propagate_new_nullifiers_non_revertible(public_call, circuit_outputs); propagate_new_note_hashes_non_revertible(public_call, circuit_outputs); propagate_new_l2_to_l1_messages_non_revertible(public_call, circuit_outputs); @@ -294,20 +318,13 @@ pub fn update_public_end_non_revertible_values( } pub fn update_public_end_values(public_call: PublicCallData, circuit_outputs: &mut PublicKernelCircuitPublicInputsBuilder) { - // Updates the circuit outputs with new state changes, call stack etc + // Updates the circuit outputs with new state changes // If this call is a static call, certain operations are disallowed, such as creating new state. perform_static_call_checks(public_call); - // Update public call stack. 
- let public_call_requests = array_to_bounded_vec(public_call.public_call_stack); - let hashes = public_call.call_stack_item.public_inputs.public_call_stack_hashes; - validate_call_requests(public_call_requests, hashes, public_call); - circuit_outputs.end.public_call_stack.extend_from_bounded_vec(public_call_requests); - propagate_new_nullifiers(public_call, circuit_outputs); propagate_new_note_hashes(public_call, circuit_outputs); - propagate_new_l2_to_l1_messages(public_call, circuit_outputs); propagate_new_unencrypted_logs(public_call, circuit_outputs); propagate_valid_public_data_update_requests(public_call, circuit_outputs); diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr index 6fd4e359211..9eb7eb4a126 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr @@ -47,6 +47,7 @@ impl PublicKernelAppLogicCircuitPrivateInputs { // Pops the item from the call stack and validates it against the current execution. 
let call_request = public_inputs.end.public_call_stack.pop(); common::validate_call_against_request(self.public_call, call_request); + common::update_end_call_stack(self.public_call, &mut public_inputs); common::update_public_end_values(self.public_call, &mut public_inputs); } else { let mut remaining_calls = array_to_bounded_vec(self.previous_kernel.public_inputs.end.public_call_stack); diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr index 248c89c0b7a..bd90b42fac0 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr @@ -54,6 +54,7 @@ impl PublicKernelSetupCircuitPrivateInputs { common::update_validation_requests(self.public_call, &mut public_inputs); + common::update_end_non_revertible_call_stack(self.public_call, &mut public_inputs); common::update_public_end_non_revertible_values(self.public_call, &mut public_inputs); public_inputs.finish() @@ -406,23 +407,6 @@ mod tests { // let _ = kernel.public_kernel_setup(); // } - #[test(should_fail_with="Cannot run unnecessary setup circuit")] - fn unnecessary_public_kernel_setup_with_teardown_should_fail() { - let mut builder = PublicKernelSetupCircuitPrivateInputsBuilder::new(); - - // in this case, we only push a single call, which is interpreted as the teardown call - let teardown_call = builder.public_call.finish(); - let teardown_call_hash = teardown_call.call_stack_item.hash(); - let teardown_is_delegate_call = teardown_call.call_stack_item.public_inputs.call_context.is_delegate_call; - builder.previous_kernel.push_public_call_request(teardown_call_hash, teardown_is_delegate_call); - let previous_kernel = builder.previous_kernel.to_public_kernel_data(false); - - // Run the kernel on the setup call - let kernel = 
PublicKernelSetupCircuitPrivateInputs { previous_kernel, public_call: teardown_call }; - - let _ = kernel.public_kernel_setup(); - } - #[test(should_fail_with="No contract storage update requests are allowed for static calls")] fn previous_private_kernel_fails_if_contract_storage_update_requests_on_static_call() { let mut builder = PublicKernelSetupCircuitPrivateInputsBuilder::new(); diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_teardown.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_teardown.nr index 56f0f3ad7ba..981d431d4a7 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_teardown.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_teardown.nr @@ -14,8 +14,8 @@ impl PublicKernelTeardownCircuitPrivateInputs { // Currently the nested calls will be pushed to the public call stack and need_setup will return true. // This should not be the case when nested calls are handled in avm. // But we should also consider merging this and the setup circuit and have one circuit that deals with non-revertibles. - // let needs_setup = self.previous_kernel.public_inputs.needs_setup(); - // assert(needs_setup == false, "Cannot run teardown circuit before setup circuit"); + let needs_setup = self.previous_kernel.public_inputs.needs_setup(); + assert(needs_setup == false, "Cannot run teardown circuit before setup circuit"); let needs_app_logic = self.previous_kernel.public_inputs.needs_app_logic(); assert(needs_app_logic == false, "Cannot run teardown circuit before app logic circuit"); let needs_teardown = self.previous_kernel.public_inputs.needs_teardown(); @@ -88,7 +88,7 @@ impl PublicKernelTeardownCircuitPrivateInputs { self.validate_inputs(); // Pops the item from the call stack and validates it against the current execution. 
- let call_request = public_inputs.end_non_revertible.public_call_stack.pop(); + let call_request = public_inputs.public_teardown_call_stack.pop(); common::validate_call_against_request(self.public_call, call_request); self.validate_start_gas(); @@ -96,6 +96,7 @@ impl PublicKernelTeardownCircuitPrivateInputs { common::update_validation_requests(self.public_call, &mut public_inputs); + common::update_teardown_call_stack(self.public_call, &mut public_inputs); common::update_public_end_non_revertible_values(self.public_call, &mut public_inputs); public_inputs.finish() @@ -163,7 +164,7 @@ mod tests { // Adjust the call stack item hash for the current call in the previous iteration. let hash = public_call.call_stack_item.hash(); let is_delegate_call = public_call.call_stack_item.public_inputs.call_context.is_delegate_call; - self.previous_kernel.push_public_call_request(hash, is_delegate_call); + self.previous_kernel.set_public_teardown_call_request(hash, is_delegate_call); let mut previous_kernel = self.previous_kernel.to_public_kernel_data(false); previous_kernel.public_inputs.end = self.previous_revertible.to_public_accumulated_data(); @@ -220,7 +221,7 @@ mod tests { let hash = public_call.call_stack_item.hash(); // Tweak the call stack item hash. - builder.previous_kernel.push_public_call_request(hash + 1, false); + builder.previous_kernel.set_public_teardown_call_request(hash + 1, false); let previous_kernel = builder.previous_kernel.to_public_kernel_data(false); let kernel = PublicKernelTeardownCircuitPrivateInputs { previous_kernel, public_call }; @@ -262,7 +263,7 @@ mod tests { let hash = public_call.call_stack_item.hash(); // Caller context is empty for regular calls. 
let is_delegate_call = false; - builder.previous_kernel.push_public_call_request(hash, is_delegate_call); + builder.previous_kernel.set_public_teardown_call_request(hash, is_delegate_call); let previous_kernel = builder.previous_kernel.to_public_kernel_data(false); let kernel = PublicKernelTeardownCircuitPrivateInputs { previous_kernel, public_call }; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr index aa137a82225..499d2402609 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr @@ -9,7 +9,8 @@ use crate::{ gas::Gas, validation_requests::validation_requests_builder::ValidationRequestsBuilder, call_request::CallRequest }, - mocked::AggregationObject, partial_state_reference::PartialStateReference, traits::Empty + constants::MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, mocked::AggregationObject, + partial_state_reference::PartialStateReference, traits::{Empty, is_empty} }; // Builds: @@ -51,6 +52,10 @@ impl PrivateKernelCircuitPublicInputsBuilder { min_revertible_side_effect_counter: u32 ) -> PublicKernelCircuitPublicInputs { let (end_non_revertible, end) = self.end.split_to_public(min_revertible_side_effect_counter, teardown_gas); + let mut public_teardown_call_stack: BoundedVec = BoundedVec::new(); + if (!is_empty(self.public_teardown_call_request)) { + public_teardown_call_stack.push(self.public_teardown_call_request); + } PublicKernelCircuitPublicInputs { validation_requests: self.validation_requests.finish(), @@ -58,7 +63,7 @@ impl PrivateKernelCircuitPublicInputsBuilder { end, constants: 
self.constants, revert_code: 0, - public_teardown_call_request: self.public_teardown_call_request + public_teardown_call_stack: public_teardown_call_stack.storage } } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr index 8e8e4d62045..c385a96f25a 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs.nr @@ -2,6 +2,7 @@ use crate::abis::{ accumulated_data::PublicAccumulatedData, combined_constant_data::CombinedConstantData, validation_requests::{RollupValidationRequests, ValidationRequests}, call_request::CallRequest }; +use crate::constants::MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX; struct PublicKernelCircuitPublicInputs { validation_requests: ValidationRequests, @@ -9,25 +10,24 @@ struct PublicKernelCircuitPublicInputs { end: PublicAccumulatedData, constants: CombinedConstantData, revert_code: u8, - public_teardown_call_request: CallRequest, + public_teardown_call_stack: [CallRequest; MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX], } impl PublicKernelCircuitPublicInputs { pub fn needs_setup(self) -> bool { - // By definition, the final non-revertible enqueued call is for teardown. - // since this is a stack, the teardown call would be the 0th element. - // So if we have more than one element, we need setup. - !self.end_non_revertible.public_call_stack[1].is_empty() + // public calls for setup are deposited in the non-revertible public call stack. 
+ // if an element is present, we need to run setup + !self.end_non_revertible.public_call_stack[0].is_empty() } pub fn needs_app_logic(self) -> bool { - // if we have any enqueued revertible public calls, we need to run the public app logic circuit. + // public calls for app logic are deposited in the revertible public call stack. + // if an element is present, we need to run app logic !self.end.public_call_stack[0].is_empty() } pub fn needs_teardown(self) -> bool { - // By definition, the final non-revertible enqueued call is for teardown. - // since this is a stack, the teardown call would be the 0th element. - !self.end_non_revertible.public_call_stack[0].is_empty() + // the public call specified for teardown, if any, is placed in the teardown call stack + !self.public_teardown_call_stack[0].is_empty() } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr index 70169e44548..f4228bd8f94 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/public_kernel_circuit_public_inputs_builder.nr @@ -5,7 +5,7 @@ use crate::{ kernel_circuit_public_inputs::{public_kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs}, validation_requests::ValidationRequestsBuilder, call_request::CallRequest }, - traits::Empty + constants::MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, traits::Empty }; struct PublicKernelCircuitPublicInputsBuilder { @@ -14,7 +14,7 @@ struct PublicKernelCircuitPublicInputsBuilder { end: PublicAccumulatedDataBuilder, constants: CombinedConstantData, revert_code: u8, - public_teardown_call_request: CallRequest, + public_teardown_call_stack: 
BoundedVec, } impl PublicKernelCircuitPublicInputsBuilder { @@ -28,7 +28,7 @@ impl PublicKernelCircuitPublicInputsBuilder { end: self.end.finish(), constants: self.constants, revert_code: self.revert_code, - public_teardown_call_request: self.public_teardown_call_request + public_teardown_call_stack: self.public_teardown_call_stack.storage } } } @@ -41,7 +41,7 @@ impl Empty for PublicKernelCircuitPublicInputsBuilder { end: PublicAccumulatedDataBuilder::empty(), constants: CombinedConstantData::empty(), revert_code: 0 as u8, - public_teardown_call_request: CallRequest::empty() + public_teardown_call_stack: BoundedVec::new() } } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr index 7bca0c1d616..56224018099 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_kernel/private_call_data.nr @@ -13,6 +13,7 @@ struct PrivateCallData { private_call_stack: [CallRequest; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL], public_call_stack: [CallRequest; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], + public_teardown_call_request: CallRequest, proof: RecursiveProof, vk: VerificationKey, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr index b0f043c80ab..105a3fed3bb 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr @@ -216,6 +216,8 @@ impl FixtureBuilder { }; let validation_requests = self.to_validation_requests(); let constants = self.to_constant_data(); + let mut public_teardown_call_stack: BoundedVec = BoundedVec::new(); + 
public_teardown_call_stack.push(self.public_teardown_call_request); PublicKernelCircuitPublicInputs { end_non_revertible, @@ -223,7 +225,7 @@ impl FixtureBuilder { validation_requests, constants, revert_code: self.revert_code, - public_teardown_call_request: self.public_teardown_call_request + public_teardown_call_stack: public_teardown_call_stack.storage } } @@ -413,6 +415,10 @@ impl FixtureBuilder { self.public_call_stack.push(call_stack_item); } + pub fn set_public_teardown_call_request(&mut self, hash: Field, is_delegate_call: bool) { + self.public_teardown_call_request = self.generate_call_request(hash, is_delegate_call); + } + pub fn end_setup(&mut self) { self.min_revertible_side_effect_counter = self.counter; } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr index 44d060051a9..999da287c85 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr @@ -22,6 +22,7 @@ struct PrivateCallDataBuilder { // The rest of the values of PrivateCallData. 
private_call_stack: BoundedVec, public_call_stack: BoundedVec, + public_teardown_call_request: CallRequest, proof: RecursiveProof, vk: VerificationKey, salted_initialization_hash: SaltedInitializationHash, @@ -48,6 +49,7 @@ impl PrivateCallDataBuilder { function_data, private_call_stack: BoundedVec::new(), public_call_stack: BoundedVec::new(), + public_teardown_call_request: CallRequest::empty(), proof: RecursiveProof::empty(), vk: VerificationKey::empty(), function_leaf_membership_witness: contract_function.membership_witness, @@ -169,6 +171,7 @@ impl PrivateCallDataBuilder { call_stack_item: self.build_call_stack_item(), private_call_stack: self.private_call_stack.storage, public_call_stack: self.public_call_stack.storage, + public_teardown_call_request: self.public_teardown_call_request, proof: self.proof, vk: self.vk, function_leaf_membership_witness: self.function_leaf_membership_witness, diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index fc3b21deccf..23915d9e696 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -144,9 +144,12 @@ test: run-e2e: ARG test + ARG debug="" FROM +end-to-end - RUN DEBUG=aztec:* yarn test $test + RUN DEBUG=$debug yarn test $test prover-client-test: FROM +build - RUN cd prover-client && yarn test + ARG test + ARG debug="" + RUN cd prover-client && DEBUG=$debug yarn test $test diff --git a/yarn-project/circuit-types/src/mocks.ts b/yarn-project/circuit-types/src/mocks.ts index ba36cd68556..84d27f5a33e 100644 --- a/yarn-project/circuit-types/src/mocks.ts +++ b/yarn-project/circuit-types/src/mocks.ts @@ -89,7 +89,10 @@ export const mockTx = ( : CallRequest.empty(), ); - data.forPublic.publicTeardownCallRequest = publicTeardownCallRequest.toCallRequest(); + data.forPublic.publicTeardownCallStack = makeTuple(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, () => CallRequest.empty()); + data.forPublic.publicTeardownCallStack[0] = publicTeardownCallRequest.isEmpty() + ? 
CallRequest.empty() + : publicTeardownCallRequest.toCallRequest(); if (hasLogs) { let i = 1; // 0 used in first nullifier diff --git a/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts b/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts index 1b492da9c87..0500563e16d 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_call_data.ts @@ -31,6 +31,10 @@ export class PrivateCallData { * Other public call stack items to be processed. */ public publicCallStack: Tuple, + /** + * The public call request for the teardown function. + */ + public publicTeardownCallRequest: CallRequest, /** * The proof of the execution of this private call. */ @@ -75,6 +79,7 @@ export class PrivateCallData { fields.callStackItem, fields.privateCallStack, fields.publicCallStack, + fields.publicTeardownCallRequest, fields.proof, fields.vk, fields.contractClassArtifactHash, @@ -109,6 +114,7 @@ export class PrivateCallData { reader.readObject(PrivateCallStackItem), reader.readArray(MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, CallRequest), reader.readArray(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, CallRequest), + reader.readObject(CallRequest), RecursiveProof.fromBuffer(reader, RECURSIVE_PROOF_LENGTH), reader.readObject(VerificationKeyAsFields), reader.readObject(Fr), diff --git a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts index f0d80109bd3..f708a197870 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_kernel_tail_circuit_public_inputs.ts @@ -1,5 +1,7 @@ -import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; +import { makeTuple } from '@aztec/foundation/array'; +import { BufferReader, type Tuple, serializeToBuffer } from 
'@aztec/foundation/serialize'; +import { MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX } from '../../constants.gen.js'; import { countAccumulatedItems, mergeAccumulatedData } from '../../utils/index.js'; import { AggregationObject } from '../aggregation_object.js'; import { CallRequest } from '../call_request.js'; @@ -30,11 +32,11 @@ export class PartialPrivateTailPublicInputsForPublic { /** * Call request for the public teardown function. */ - public publicTeardownCallRequest: CallRequest, + public publicTeardownCallStack: Tuple, ) {} get needsSetup() { - return !this.endNonRevertibleData.publicCallStack[1].isEmpty(); + return !this.endNonRevertibleData.publicCallStack[0].isEmpty(); } get needsAppLogic() { @@ -42,7 +44,7 @@ export class PartialPrivateTailPublicInputsForPublic { } get needsTeardown() { - return !this.endNonRevertibleData.publicCallStack[0].isEmpty(); + return !this.publicTeardownCallStack[0].isEmpty(); } static fromBuffer(buffer: Buffer | BufferReader): PartialPrivateTailPublicInputsForPublic { @@ -51,7 +53,7 @@ export class PartialPrivateTailPublicInputsForPublic { reader.readObject(ValidationRequests), reader.readObject(PublicAccumulatedData), reader.readObject(PublicAccumulatedData), - reader.readObject(CallRequest), + reader.readArray(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, CallRequest), ); } @@ -60,7 +62,7 @@ export class PartialPrivateTailPublicInputsForPublic { this.validationRequests, this.endNonRevertibleData, this.end, - this.publicTeardownCallRequest, + this.publicTeardownCallStack, ); } @@ -69,7 +71,7 @@ export class PartialPrivateTailPublicInputsForPublic { ValidationRequests.empty(), PublicAccumulatedData.empty(), PublicAccumulatedData.empty(), - CallRequest.empty(), + makeTuple(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, CallRequest.empty), ); } } @@ -139,7 +141,7 @@ export class PrivateKernelTailCircuitPublicInputs { this.forPublic.end, this.constants, this.revertCode, - this.forPublic.publicTeardownCallRequest, + 
this.forPublic.publicTeardownCallStack, ); } diff --git a/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_public_inputs.ts index 5388265a813..838a641e279 100644 --- a/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_public_inputs.ts @@ -1,7 +1,9 @@ -import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; +import { makeTuple } from '@aztec/foundation/array'; +import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize'; import { inspect } from 'util'; +import { MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX } from '../../constants.gen.js'; import { AggregationObject } from '../aggregation_object.js'; import { CallRequest } from '../call_request.js'; import { RevertCode } from '../revert_code.js'; @@ -42,7 +44,7 @@ export class PublicKernelCircuitPublicInputs { /** * The call request for the public teardown function */ - public publicTeardownCallRequest: CallRequest, + public publicTeardownCallStack: Tuple, ) {} toBuffer() { @@ -53,7 +55,7 @@ export class PublicKernelCircuitPublicInputs { this.end, this.constants, this.revertCode, - this.publicTeardownCallRequest, + this.publicTeardownCallStack, ); } @@ -66,7 +68,7 @@ export class PublicKernelCircuitPublicInputs { } get needsSetup() { - return !this.endNonRevertibleData.publicCallStack[1].isEmpty(); + return !this.endNonRevertibleData.publicCallStack[0].isEmpty(); } get needsAppLogic() { @@ -74,7 +76,7 @@ export class PublicKernelCircuitPublicInputs { } get needsTeardown() { - return !this.endNonRevertibleData.publicCallStack[0].isEmpty(); + return !this.publicTeardownCallStack[0].isEmpty(); } /** @@ -91,7 +93,7 @@ export class PublicKernelCircuitPublicInputs { reader.readObject(PublicAccumulatedData), reader.readObject(CombinedConstantData), 
reader.readObject(RevertCode), - reader.readObject(CallRequest), + reader.readArray(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, CallRequest), ); } @@ -103,7 +105,7 @@ export class PublicKernelCircuitPublicInputs { PublicAccumulatedData.empty(), CombinedConstantData.empty(), RevertCode.OK, - CallRequest.empty(), + makeTuple(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, CallRequest.empty), ); } @@ -115,7 +117,7 @@ export class PublicKernelCircuitPublicInputs { end: ${inspect(this.end)}, constants: ${inspect(this.constants)}, revertCode: ${this.revertCode}, - publicTeardownCallRequest: ${inspect(this.publicTeardownCallRequest)} + publicTeardownCallStack: ${inspect(this.publicTeardownCallStack)} }`; } } diff --git a/yarn-project/circuits.js/src/structs/public_call_request.ts b/yarn-project/circuits.js/src/structs/public_call_request.ts index 7968c610f3a..f371a91b1d2 100644 --- a/yarn-project/circuits.js/src/structs/public_call_request.ts +++ b/yarn-project/circuits.js/src/structs/public_call_request.ts @@ -3,6 +3,8 @@ import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { type FieldsOf } from '@aztec/foundation/types'; +import { inspect } from 'util'; + import { computeVarArgsHash } from '../hash/hash.js'; import { CallContext } from './call_context.js'; import { CallRequest, CallerContext } from './call_request.js'; @@ -146,4 +148,13 @@ export class PublicCallRequest { this.args.length === 0 ); } + + [inspect.custom]() { + return `PublicCallRequest { + contractAddress: ${this.contractAddress} + functionData: ${this.functionData} + callContext: ${this.callContext} + parentCallContext: ${this.parentCallContext} + args: ${this.args} }`; + } } diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 90208fa660a..79a4b9b96ee 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -428,6 
+428,7 @@ export function makePublicKernelCircuitPublicInputs( seed = 1, fullAccumulatedData = true, ): PublicKernelCircuitPublicInputs { + const tupleGenerator = fullAccumulatedData ? makeTuple : makeHalfFullTuple; return new PublicKernelCircuitPublicInputs( makeAggregationObject(seed), makeValidationRequests(seed), @@ -435,7 +436,7 @@ export function makePublicKernelCircuitPublicInputs( makePublicAccumulatedData(seed, fullAccumulatedData), makeConstantData(seed + 0x100), RevertCode.OK, - makeCallRequest(seed + 0x200), + tupleGenerator(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, makeCallRequest, seed + 0x600, CallRequest.empty), ); } @@ -453,7 +454,7 @@ export function makePrivateKernelTailCircuitPublicInputs( ValidationRequests.empty(), makePublicAccumulatedData(seed + 0x100, false), makePublicAccumulatedData(seed + 0x200, false), - makeCallRequest(seed + 0x300), + makeHalfFullTuple(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, makeCallRequest, seed + 0x400, CallRequest.empty), ) : undefined; const forRollup = !isForPublic @@ -729,6 +730,7 @@ export function makePrivateCallData(seed = 1): PrivateCallData { callStackItem: makePrivateCallStackItem(seed), privateCallStack: makeTuple(MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, makeCallRequest, seed + 0x10), publicCallStack: makeTuple(MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, makeCallRequest, seed + 0x20), + publicTeardownCallRequest: makeCallRequest(seed + 0x30), proof: makeRecursiveProof(RECURSIVE_PROOF_LENGTH, seed + 0x50), vk: makeVerificationKeyAsFields(), contractClassArtifactHash: fr(seed + 0x70), diff --git a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts index 659afa884a7..8172acc8483 100644 --- a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts @@ -786,6 +786,7 @@ export function mapPrivateCallDataToNoir(privateCallData: PrivateCallData): Priv call_stack_item: 
mapPrivateCallStackItemToNoir(privateCallData.callStackItem), private_call_stack: mapTuple(privateCallData.privateCallStack, mapCallRequestToNoir), public_call_stack: mapTuple(privateCallData.publicCallStack, mapCallRequestToNoir), + public_teardown_call_request: mapCallRequestToNoir(privateCallData.publicTeardownCallRequest), proof: mapRecursiveProofToNoir(privateCallData.proof), vk: mapVerificationKeyToNoir(privateCallData.vk), function_leaf_membership_witness: mapMembershipWitnessToNoir(privateCallData.functionLeafMembershipWitness), @@ -1240,7 +1241,7 @@ export function mapPublicKernelCircuitPublicInputsToNoir( end: mapPublicAccumulatedDataToNoir(inputs.end), end_non_revertible: mapPublicAccumulatedDataToNoir(inputs.endNonRevertibleData), revert_code: mapRevertCodeToNoir(inputs.revertCode), - public_teardown_call_request: mapCallRequestToNoir(inputs.publicTeardownCallRequest), + public_teardown_call_stack: mapTuple(inputs.publicTeardownCallStack, mapCallRequestToNoir), }; } @@ -1360,7 +1361,7 @@ export function mapPrivateKernelTailCircuitPublicInputsForPublicFromNoir( mapValidationRequestsFromNoir(inputs.validation_requests), mapPublicAccumulatedDataFromNoir(inputs.end_non_revertible), mapPublicAccumulatedDataFromNoir(inputs.end), - mapCallRequestFromNoir(inputs.public_teardown_call_request), + mapTupleFromNoir(inputs.public_teardown_call_stack, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, mapCallRequestFromNoir), ); return new PrivateKernelTailCircuitPublicInputs( AggregationObject.makeFake(), @@ -1478,7 +1479,7 @@ export function mapPublicKernelCircuitPublicInputsFromNoir( mapPublicAccumulatedDataFromNoir(inputs.end), mapCombinedConstantDataFromNoir(inputs.constants), mapRevertCodeFromNoir(inputs.revert_code), - mapCallRequestFromNoir(inputs.public_teardown_call_request), + mapTupleFromNoir(inputs.public_teardown_call_stack, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, mapCallRequestFromNoir), ); } diff --git a/yarn-project/prover-client/src/mocks/test_context.ts 
b/yarn-project/prover-client/src/mocks/test_context.ts index 7dcdcf6ea93..deafb1d9ef7 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -134,14 +134,16 @@ export class TestContext { _sideEffectCounter?: number, ) => { for (const tx of txs) { - for (const request of tx.enqueuedPublicFunctionCalls) { + const allCalls = tx.publicTeardownFunctionCall.isEmpty() + ? tx.enqueuedPublicFunctionCalls + : [...tx.enqueuedPublicFunctionCalls, tx.publicTeardownFunctionCall]; + for (const request of allCalls) { if (execution.contractAddress.equals(request.contractAddress)) { const result = PublicExecutionResultBuilder.fromPublicCallRequest({ request }).build({ startGasLeft: availableGas, endGasLeft: availableGas, transactionFee, }); - // result.unencryptedLogs = tx.unencryptedLogs.functionLogs[0]; return Promise.resolve(result); } } diff --git a/yarn-project/prover-client/src/prover/bb_prover_public_kernel.test.ts b/yarn-project/prover-client/src/prover/bb_prover_public_kernel.test.ts index 59c6a5786e3..e9ee0389d44 100644 --- a/yarn-project/prover-client/src/prover/bb_prover_public_kernel.test.ts +++ b/yarn-project/prover-client/src/prover/bb_prover_public_kernel.test.ts @@ -1,5 +1,6 @@ import { PublicKernelType, mockTx } from '@aztec/circuit-types'; import { type Proof, makeEmptyProof } from '@aztec/circuits.js'; +import { makePublicCallRequest } from '@aztec/circuits.js/testing'; import { createDebugLogger } from '@aztec/foundation/log'; import { type ServerProtocolArtifact } from '@aztec/noir-protocol-circuits-types'; @@ -29,9 +30,11 @@ describe('prover/bb_prover/public-kernel', () => { }); it('proves the public kernel circuits', async () => { + const teardown = makePublicCallRequest(); const tx = mockTx(1000, { - numberOfNonRevertiblePublicCallRequests: 2, + numberOfNonRevertiblePublicCallRequests: 1, numberOfRevertiblePublicCallRequests: 1, + publicTeardownCallRequest: teardown, }); 
tx.data.constants.historicalHeader = await context.actualDb.buildInitialHeader(); diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts index 6a52d2c7a67..733774b5d01 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.test.ts @@ -11,6 +11,7 @@ import { PrivateCircuitPublicInputs, PrivateKernelCircuitPublicInputs, PrivateKernelTailCircuitPublicInputs, + PublicCallRequest, RECURSIVE_PROOF_LENGTH, ScopedNoteHash, type TxRequest, @@ -76,6 +77,7 @@ describe('Kernel Prover', () => { acir: Buffer.alloc(0), partialWitness: new Map(), enqueuedPublicFunctionCalls: [], + publicTeardownFunctionCall: PublicCallRequest.empty(), encryptedLogs: [], unencryptedLogs: [], }; diff --git a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts index e0f2a0fad4a..f6b324aad45 100644 --- a/yarn-project/pxe/src/kernel_prover/kernel_prover.ts +++ b/yarn-project/pxe/src/kernel_prover/kernel_prover.ts @@ -77,6 +77,9 @@ export class KernelProver { result.callStackItem.toCallRequest(currentExecution.callStackItem.publicInputs.callContext), ); const publicCallRequests = currentExecution.enqueuedPublicFunctionCalls.map(result => result.toCallRequest()); + const publicTeardownCallRequest = currentExecution.publicTeardownFunctionCall.isEmpty() + ? 
CallRequest.empty() + : currentExecution.publicTeardownFunctionCall.toCallRequest(); const proofOutput = await this.proofCreator.createAppCircuitProof( currentExecution.partialWitness, @@ -87,6 +90,7 @@ export class KernelProver { currentExecution, privateCallRequests, publicCallRequests, + publicTeardownCallRequest, proofOutput.proof, proofOutput.verificationKey, ); @@ -143,6 +147,7 @@ export class KernelProver { { callStackItem }: ExecutionResult, privateCallRequests: CallRequest[], publicCallRequests: CallRequest[], + publicTeardownCallRequest: CallRequest, proof: RecursiveProof, vk: VerificationKeyAsFields, ) { @@ -174,6 +179,7 @@ export class KernelProver { callStackItem, privateCallStack, publicCallStack, + publicTeardownCallRequest, proof, vk, publicKeysHash, diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 9b9fcbcbf2f..3ed4fa30cf4 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -30,7 +30,7 @@ import { MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, type PartialAddress, type PrivateKernelTailCircuitPublicInputs, - PublicCallRequest, + type PublicCallRequest, computeContractClassId, getContractClassFromArtifact, } from '@aztec/circuits.js'; @@ -45,6 +45,7 @@ import { type AcirSimulator, type ExecutionResult, collectEnqueuedPublicFunctionCalls, + collectPublicTeardownFunctionCall, collectSortedEncryptedLogs, collectSortedUnencryptedLogs, resolveOpcodeLocations, @@ -677,7 +678,7 @@ export class PXEService implements PXE { const unencryptedLogs = new UnencryptedTxL2Logs([collectSortedUnencryptedLogs(executionResult)]); const encryptedLogs = new EncryptedTxL2Logs([collectSortedEncryptedLogs(executionResult)]); const enqueuedPublicFunctions = collectEnqueuedPublicFunctionCalls(executionResult); - const teardownPublicFunction = PublicCallRequest.empty(); + const teardownPublicFunction = collectPublicTeardownFunctionCall(executionResult); // 
HACK(#1639): Manually patches the ordering of the public call stack // TODO(#757): Enforce proper ordering of enqueued public calls diff --git a/yarn-project/sequencer-client/src/config.ts b/yarn-project/sequencer-client/src/config.ts index 92384b6d882..a65546ec902 100644 --- a/yarn-project/sequencer-client/src/config.ts +++ b/yarn-project/sequencer-client/src/config.ts @@ -153,6 +153,12 @@ function getDefaultAllowedSetupFunctions(): AllowedFunction[] { selector: FunctionSelector.fromSignature('approve_public_authwit(Field)'), }, + // needed for native payments while they are not yet enshrined + { + classId: getContractClassFromArtifact(GasTokenContract.artifact).id, + selector: FunctionSelector.fromSignature('pay_fee(Field)'), + }, + // needed for private transfers via FPC { classId: getContractClassFromArtifact(TokenContractArtifact).id, diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index c6e64085fcb..133a4691ee3 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -84,6 +84,7 @@ export class Sequencer { if (config.allowedFunctionsInSetup) { this.allowedFunctionsInSetup = config.allowedFunctionsInSetup; } + // TODO(#5917) remove this. 
it is no longer needed since we don't need to whitelist functions in teardown if (config.allowedFunctionsInTeardown) { this.allowedFunctionsInTeardown = config.allowedFunctionsInTeardown; } @@ -187,11 +188,7 @@ export class Sequencer { // TODO: It should be responsibility of the P2P layer to validate txs before passing them on here const validTxs = await this.takeValidTxs( pendingTxs, - this.txValidatorFactory.validatorForNewTxs( - newGlobalVariables, - this.allowedFunctionsInSetup, - this.allowedFunctionsInTeardown, - ), + this.txValidatorFactory.validatorForNewTxs(newGlobalVariables, this.allowedFunctionsInSetup), ); if (validTxs.length < this.minTxsPerBLock) { return; diff --git a/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts b/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts index 86b5a98c355..5b85fdb12d1 100644 --- a/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts +++ b/yarn-project/sequencer-client/src/tx_validator/gas_validator.test.ts @@ -7,7 +7,7 @@ import { type MockProxy, mock, mockFn } from 'jest-mock-extended'; import { GasTxValidator, type PublicStateSource } from './gas_validator.js'; -describe('GasTxValidator', () => { +describe.skip('GasTxValidator', () => { let validator: GasTxValidator; let publicStateSource: MockProxy; let gasTokenAddress: AztecAddress; diff --git a/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts b/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts index 3d4bf8b39a2..a3f4b63ebe6 100644 --- a/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts +++ b/yarn-project/sequencer-client/src/tx_validator/gas_validator.ts @@ -1,9 +1,6 @@ -import { Tx, type TxValidator } from '@aztec/circuit-types'; -import { type AztecAddress, Fr } from '@aztec/circuits.js'; -import { pedersenHash } from '@aztec/foundation/crypto'; +import { type Tx, type TxValidator } from '@aztec/circuit-types'; +import { type AztecAddress, type Fr } from 
'@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; -import { GasTokenContract } from '@aztec/noir-contracts.js'; -import { AbstractPhaseManager, PublicKernelPhase } from '@aztec/simulator'; /** Provides a view into public contract state */ export interface PublicStateSource { @@ -37,45 +34,9 @@ export class GasTxValidator implements TxValidator { return [validTxs, invalidTxs]; } - async #validateTxFee(tx: Tx): Promise { - const { [PublicKernelPhase.TEARDOWN]: teardownFns } = AbstractPhaseManager.extractEnqueuedPublicCallsByPhase( - tx.data, - tx.enqueuedPublicFunctionCalls, - ); + #validateTxFee(_tx: Tx): Promise { + return Promise.resolve(true); - if (teardownFns.length === 0) { - if (this.#requireFees) { - this.#log.warn( - `Rejecting tx ${Tx.getHash(tx)} because it should pay for gas but has no enqueued teardown functions`, - ); - return false; - } else { - this.#log.debug(`Tx ${Tx.getHash(tx)} does not pay fees. Skipping balance check.`); - return true; - } - } - - if (teardownFns.length > 1) { - this.#log.warn(`Rejecting tx ${Tx.getHash(tx)} because it has multiple teardown functions`); - return false; - } - - // check that the caller of the teardown function has enough balance to pay for tx costs - const teardownFn = teardownFns[0]; - const slot = pedersenHash([GasTokenContract.storage.balances.slot, teardownFn.callContext.msgSender]); - const gasBalance = await this.#publicDataSource.storageRead(this.#gasTokenAddress, slot); - - // TODO(#5004) calculate fee needed based on tx limits and gas prices - const gasAmountNeeded = new Fr(1); - if (gasBalance.lt(gasAmountNeeded)) { - this.#log.warn( - `Rejecting tx ${Tx.getHash( - tx, - )} because it should pay for gas but has insufficient balance ${gasBalance.toShortString()} < ${gasAmountNeeded.toShortString()}`, - ); - return false; - } - - return true; + // TODO(#5920) re-enable sequencer checks after we have fee payer in kernel outputs } } diff --git 
a/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts b/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts index 3af8e3746f9..4b852cc5c93 100644 --- a/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts +++ b/yarn-project/sequencer-client/src/tx_validator/phases_validator.test.ts @@ -14,14 +14,12 @@ describe('PhasesTxValidator', () => { let allowedContract: AztecAddress; let allowedSetupSelector1: FunctionSelector; let allowedSetupSelector2: FunctionSelector; - let allowedTeardownSelector: FunctionSelector; beforeEach(() => { allowedContractClass = Fr.random(); allowedContract = makeAztecAddress(); allowedSetupSelector1 = makeSelector(1); allowedSetupSelector2 = makeSelector(2); - allowedTeardownSelector = makeSelector(3); contractDataSource = mock({ getContract: mockFn().mockImplementation(() => { @@ -31,86 +29,29 @@ describe('PhasesTxValidator', () => { }), }); - txValidator = new PhasesTxValidator( - contractDataSource, - [ - { - classId: allowedContractClass, - selector: allowedSetupSelector1, - }, - { - address: allowedContract, - selector: allowedSetupSelector1, - }, - { - classId: allowedContractClass, - selector: allowedSetupSelector2, - }, - { - address: allowedContract, - selector: allowedSetupSelector2, - }, - ], - [ - { - classId: allowedContractClass, - selector: allowedTeardownSelector, - }, - { - address: allowedContract, - selector: allowedTeardownSelector, - }, - ], - ); - }); - - it('allows teardown functions on the contracts allow list', async () => { - const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); - patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedTeardownSelector }); - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]); - }); - - it('allows teardown functions on the contracts class allow list', async () => { - const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); - const { address } = 
patchNonRevertibleFn(tx, 0, { selector: allowedTeardownSelector }); - contractDataSource.getContract.mockImplementationOnce(contractAddress => { - if (address.equals(contractAddress)) { - return Promise.resolve({ - contractClassId: allowedContractClass, - } as any); - } else { - return Promise.resolve(undefined); - } - }); - - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]); - }); - - it('rejects teardown functions not on the contracts class list', async () => { - const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); - // good selector, bad contract class - const { address } = patchNonRevertibleFn(tx, 0, { selector: allowedTeardownSelector }); - contractDataSource.getContract.mockImplementationOnce(contractAddress => { - if (address.equals(contractAddress)) { - return Promise.resolve({ - contractClassId: Fr.random(), - } as any); - } else { - return Promise.resolve(undefined); - } - }); - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]); - }); - - it('rejects teardown functions not on the selector allow list', async () => { - const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 1 }); - await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]); + txValidator = new PhasesTxValidator(contractDataSource, [ + { + classId: allowedContractClass, + selector: allowedSetupSelector1, + }, + { + address: allowedContract, + selector: allowedSetupSelector1, + }, + { + classId: allowedContractClass, + selector: allowedSetupSelector2, + }, + { + address: allowedContract, + selector: allowedSetupSelector2, + }, + ]); }); it('allows setup functions on the contracts allow list', async () => { const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 }); - patchNonRevertibleFn(tx, 1, { address: allowedContract, selector: allowedTeardownSelector }); await 
expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]); }); @@ -118,7 +59,6 @@ describe('PhasesTxValidator', () => { it('allows setup functions on the contracts class allow list', async () => { const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); const { address } = patchNonRevertibleFn(tx, 0, { selector: allowedSetupSelector1 }); - patchNonRevertibleFn(tx, 1, { address: allowedContract, selector: allowedTeardownSelector }); contractDataSource.getContract.mockImplementationOnce(contractAddress => { if (address.equals(contractAddress)) { @@ -135,8 +75,6 @@ describe('PhasesTxValidator', () => { it('rejects txs with setup functions not on the allow list', async () => { const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); - // only patch teardown - patchNonRevertibleFn(tx, 1, { address: allowedContract, selector: allowedTeardownSelector }); await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]); }); @@ -145,7 +83,6 @@ describe('PhasesTxValidator', () => { const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 2 }); // good selector, bad contract class const { address } = patchNonRevertibleFn(tx, 0, { selector: allowedSetupSelector1 }); - patchNonRevertibleFn(tx, 1, { address: allowedContract, selector: allowedTeardownSelector }); contractDataSource.getContract.mockImplementationOnce(contractAddress => { if (address.equals(contractAddress)) { return Promise.resolve({ @@ -162,7 +99,6 @@ describe('PhasesTxValidator', () => { const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 3 }); patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 }); patchNonRevertibleFn(tx, 1, { address: allowedContract, selector: allowedSetupSelector2 }); - patchNonRevertibleFn(tx, 2, { address: allowedContract, selector: allowedTeardownSelector }); await expect(txValidator.validateTxs([tx])).resolves.toEqual([[tx], []]); }); @@ -170,8 +106,6 @@ describe('PhasesTxValidator', () => 
{ it('rejects if one setup functions is not on the allow list', async () => { const tx = mockTx(1, { numberOfNonRevertiblePublicCallRequests: 3 }); patchNonRevertibleFn(tx, 0, { address: allowedContract, selector: allowedSetupSelector1 }); - // don't patch index 1 - patchNonRevertibleFn(tx, 2, { address: allowedContract, selector: allowedTeardownSelector }); await expect(txValidator.validateTxs([tx])).resolves.toEqual([[], [tx]]); }); diff --git a/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts b/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts index 0c795a22a32..da118a5d976 100644 --- a/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts +++ b/yarn-project/sequencer-client/src/tx_validator/phases_validator.ts @@ -7,11 +7,7 @@ import { type ContractDataSource } from '@aztec/types/contracts'; export class PhasesTxValidator implements TxValidator { #log = createDebugLogger('aztec:sequencer:tx_validator:tx_phases'); - constructor( - private contractDataSource: ContractDataSource, - private setupAllowList: AllowedFunction[], - private teardownAllowList: AllowedFunction[], - ) {} + constructor(private contractDataSource: ContractDataSource, private setupAllowList: AllowedFunction[]) {} async validateTxs(txs: Tx[]): Promise<[validTxs: Tx[], invalidTxs: Tx[]]> { const validTxs: Tx[] = []; @@ -34,8 +30,7 @@ export class PhasesTxValidator implements TxValidator { return true; } - const { [PublicKernelPhase.SETUP]: setupFns, [PublicKernelPhase.TEARDOWN]: teardownFns } = - AbstractPhaseManager.extractEnqueuedPublicCallsByPhase(tx.data, tx.enqueuedPublicFunctionCalls); + const { [PublicKernelPhase.SETUP]: setupFns } = AbstractPhaseManager.extractEnqueuedPublicCallsByPhase(tx); for (const setupFn of setupFns) { if (!(await this.isOnAllowList(setupFn, this.setupAllowList))) { @@ -49,22 +44,14 @@ export class PhasesTxValidator implements TxValidator { } } - for (const teardownFn of teardownFns) { - if (!(await 
this.isOnAllowList(teardownFn, this.teardownAllowList))) { - this.#log.warn( - `Rejecting tx ${Tx.getHash(tx)} because it calls teardown function not on allowlist: ${ - teardownFn.contractAddress - }:${teardownFn.functionData.selector}`, - ); - - return false; - } - } - return true; } async isOnAllowList(publicCall: PublicCallRequest, allowList: AllowedFunction[]): Promise { + if (publicCall.isEmpty()) { + return true; + } + const { contractAddress, functionData: { selector }, diff --git a/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts b/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts index 2f881e27f24..cdc1b7130c1 100644 --- a/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts +++ b/yarn-project/sequencer-client/src/tx_validator/tx_validator_factory.ts @@ -18,15 +18,11 @@ export class TxValidatorFactory { private gasPortalAddress: EthAddress, ) {} - validatorForNewTxs( - globalVariables: GlobalVariables, - setupAllowList: AllowedFunction[], - teardownAllowList: AllowedFunction[], - ): TxValidator { + validatorForNewTxs(globalVariables: GlobalVariables, setupAllowList: AllowedFunction[]): TxValidator { return new AggregateTxValidator( new MetadataTxValidator(globalVariables), new DoubleSpendTxValidator(new WorldStateDB(this.merkleTreeDb)), - new PhasesTxValidator(this.contractDataSource, setupAllowList, teardownAllowList), + new PhasesTxValidator(this.contractDataSource, setupAllowList), new GasTxValidator(new WorldStatePublicDB(this.merkleTreeDb), getCanonicalGasTokenAddress(this.gasPortalAddress)), ); } diff --git a/yarn-project/simulator/src/acvm/oracle/oracle.ts b/yarn-project/simulator/src/acvm/oracle/oracle.ts index 41c027ba411..415f8c3e84e 100644 --- a/yarn-project/simulator/src/acvm/oracle/oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/oracle.ts @@ -438,6 +438,25 @@ export class Oracle { return toAcvmEnqueuePublicFunctionResult(enqueuedRequest); } + async 
setPublicTeardownFunctionCall( + [contractAddress]: ACVMField[], + [functionSelector]: ACVMField[], + [argsHash]: ACVMField[], + [sideEffectCounter]: ACVMField[], + [isStaticCall]: ACVMField[], + [isDelegateCall]: ACVMField[], + ) { + const teardownRequest = await this.typedOracle.setPublicTeardownFunctionCall( + AztecAddress.fromString(contractAddress), + FunctionSelector.fromField(fromACVMField(functionSelector)), + fromACVMField(argsHash), + frToNumber(fromACVMField(sideEffectCounter)), + frToBoolean(fromACVMField(isStaticCall)), + frToBoolean(fromACVMField(isDelegateCall)), + ); + return toAcvmEnqueuePublicFunctionResult(teardownRequest); + } + aes128Encrypt(input: ACVMField[], initializationVector: ACVMField[], key: ACVMField[]): ACVMField[] { // Convert each field to a number and then to a buffer (1 byte is stored in 1 field) const processedInput = Buffer.from(input.map(fromACVMField).map(f => f.toNumber())); diff --git a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts index a4ce826b13a..171ccb4d757 100644 --- a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts @@ -243,6 +243,17 @@ export abstract class TypedOracle { throw new OracleMethodNotAvailableError('enqueuePublicFunctionCall'); } + setPublicTeardownFunctionCall( + _targetContractAddress: AztecAddress, + _functionSelector: FunctionSelector, + _argsHash: Fr, + _sideEffectCounter: number, + _isStaticCall: boolean, + _isDelegateCall: boolean, + ): Promise { + throw new OracleMethodNotAvailableError('setPublicTeardownFunctionCall'); + } + aes128Encrypt(_input: Buffer, _initializationVector: Buffer, _key: Buffer): Buffer { throw new OracleMethodNotAvailableError('encrypt'); } diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts index ca18abe1c32..d07b7087300 100644 --- 
a/yarn-project/simulator/src/client/client_execution_context.ts +++ b/yarn-project/simulator/src/client/client_execution_context.ts @@ -63,6 +63,7 @@ export class ClientExecutionContext extends ViewDataOracle { private unencryptedLogs: CountedLog[] = []; private nestedExecutions: ExecutionResult[] = []; private enqueuedPublicFunctionCalls: PublicCallRequest[] = []; + private publicTeardownFunctionCall: PublicCallRequest = PublicCallRequest.empty(); constructor( contractAddress: AztecAddress, @@ -173,6 +174,13 @@ export class ClientExecutionContext extends ViewDataOracle { return this.enqueuedPublicFunctionCalls; } + /** + * Return the public teardown function call set during this execution. + */ + public getPublicTeardownFunctionCall() { + return this.publicTeardownFunctionCall; + } + /** * Pack the given array of arguments. * @param args - Arguments to pack @@ -465,9 +473,7 @@ export class ClientExecutionContext extends ViewDataOracle { } /** - * Creates a PublicCallStackItem object representing the request to call a public function. No function - * is actually called, since that must happen on the sequencer side. All the fields related to the result - * of the execution are empty. + * Creates a PublicCallStackItem object representing the request to call a public function. * @param targetContractAddress - The address of the contract to call. * @param functionSelector - The function selector of the function to call. * @param argsHash - The packed arguments to pass to the function. @@ -475,7 +481,8 @@ export class ClientExecutionContext extends ViewDataOracle { * @param isStaticCall - Whether the call is a static call. * @returns The public call stack item with the request information. 
*/ - public override async enqueuePublicFunctionCall( + protected async createPublicCallRequest( + callType: 'enqueued' | 'teardown', targetContractAddress: AztecAddress, functionSelector: FunctionSelector, argsHash: Fr, @@ -494,20 +501,51 @@ export class ClientExecutionContext extends ViewDataOracle { isStaticCall, ); const args = this.packedValuesCache.unpack(argsHash); - const enqueuedRequest = PublicCallRequest.from({ + + // TODO($846): if enqueued public calls are associated with global + // side-effect counter, that will leak info about how many other private + // side-effects occurred in the TX. Ultimately the private kernel should + // just output everything in the proper order without any counters. + this.log.verbose( + `Created PublicCallRequest of type [${callType}], side-effect counter [${sideEffectCounter}] to ${targetContractAddress}:${functionSelector}(${targetArtifact.name})`, + ); + + return PublicCallRequest.from({ args, callContext: derivedCallContext, parentCallContext: this.callContext, functionData: FunctionData.fromAbi(targetArtifact), contractAddress: targetContractAddress, }); + } - // TODO($846): if enqueued public calls are associated with global - // side-effect counter, that will leak info about how many other private - // side-effects occurred in the TX. Ultimately the private kernel should - // just output everything in the proper order without any counters. - this.log.verbose( - `Enqueued call to public function (with side-effect counter #${sideEffectCounter}) ${targetContractAddress}:${functionSelector}(${targetArtifact.name})`, + /** + * Creates and enqueues a PublicCallStackItem object representing the request to call a public function. No function + * is actually called, since that must happen on the sequencer side. All the fields related to the result + * of the execution are empty. + * @param targetContractAddress - The address of the contract to call. + * @param functionSelector - The function selector of the function to call. 
+ * @param argsHash - The packed arguments to pass to the function. + * @param sideEffectCounter - The side effect counter at the start of the call. + * @param isStaticCall - Whether the call is a static call. + * @returns The public call stack item with the request information. + */ + public override async enqueuePublicFunctionCall( + targetContractAddress: AztecAddress, + functionSelector: FunctionSelector, + argsHash: Fr, + sideEffectCounter: number, + isStaticCall: boolean, + isDelegateCall: boolean, + ): Promise { + const enqueuedRequest = await this.createPublicCallRequest( + 'enqueued', + targetContractAddress, + functionSelector, + argsHash, + sideEffectCounter, + isStaticCall, + isDelegateCall, ); this.enqueuedPublicFunctionCalls.push(enqueuedRequest); @@ -515,6 +553,40 @@ export class ClientExecutionContext extends ViewDataOracle { return enqueuedRequest; } + /** + * Creates a PublicCallStackItem and sets it as the public teardown function. No function + * is actually called, since that must happen on the sequencer side. All the fields related to the result + * of the execution are empty. + * @param targetContractAddress - The address of the contract to call. + * @param functionSelector - The function selector of the function to call. + * @param argsHash - The packed arguments to pass to the function. + * @param sideEffectCounter - The side effect counter at the start of the call. + * @param isStaticCall - Whether the call is a static call. + * @returns The public call stack item with the request information. 
+ */ + public override async setPublicTeardownFunctionCall( + targetContractAddress: AztecAddress, + functionSelector: FunctionSelector, + argsHash: Fr, + sideEffectCounter: number, + isStaticCall: boolean, + isDelegateCall: boolean, + ): Promise { + const publicTeardownFunctionCall = await this.createPublicCallRequest( + 'teardown', + targetContractAddress, + functionSelector, + argsHash, + sideEffectCounter, + isStaticCall, + isDelegateCall, + ); + + this.publicTeardownFunctionCall = publicTeardownFunctionCall; + + return publicTeardownFunctionCall; + } + /** * Derives the call context for a nested execution. * @param targetContractAddress - The address of the contract being called. diff --git a/yarn-project/simulator/src/client/execution_result.test.ts b/yarn-project/simulator/src/client/execution_result.test.ts index bb26e24f05e..0da6182478d 100644 --- a/yarn-project/simulator/src/client/execution_result.test.ts +++ b/yarn-project/simulator/src/client/execution_result.test.ts @@ -1,4 +1,4 @@ -import { PrivateCallStackItem } from '@aztec/circuits.js'; +import { PrivateCallStackItem, PublicCallRequest } from '@aztec/circuits.js'; import { type ExecutionResult, @@ -18,6 +18,7 @@ function emptyExecutionResult(): ExecutionResult { returnValues: [], nestedExecutions: [], enqueuedPublicFunctionCalls: [], + publicTeardownFunctionCall: PublicCallRequest.empty(), encryptedLogs: [], unencryptedLogs: [], }; diff --git a/yarn-project/simulator/src/client/execution_result.ts b/yarn-project/simulator/src/client/execution_result.ts index 673355c9b63..a80b7713cd2 100644 --- a/yarn-project/simulator/src/client/execution_result.ts +++ b/yarn-project/simulator/src/client/execution_result.ts @@ -5,7 +5,7 @@ import { UnencryptedFunctionL2Logs, type UnencryptedL2Log, } from '@aztec/circuit-types'; -import { type IsEmpty, type PrivateCallStackItem, type PublicCallRequest, sortByCounter } from '@aztec/circuits.js'; +import { type IsEmpty, type PrivateCallStackItem, PublicCallRequest, 
sortByCounter } from '@aztec/circuits.js'; import { type Fr } from '@aztec/foundation/fields'; import { type ACVMField } from '../acvm/index.js'; @@ -56,6 +56,8 @@ export interface ExecutionResult { nestedExecutions: this[]; /** Enqueued public function execution requests to be picked up by the sequencer. */ enqueuedPublicFunctionCalls: PublicCallRequest[]; + /** Public function execution requested for teardown */ + publicTeardownFunctionCall: PublicCallRequest; /** * Encrypted logs emitted during execution of this function call. * Note: These are preimages to `encryptedLogsHashes`. @@ -130,6 +132,23 @@ export function collectEnqueuedPublicFunctionCalls(execResult: ExecutionResult): // as the kernel processes it like a stack, popping items off and pushing them to output return [ ...execResult.enqueuedPublicFunctionCalls, - ...[...execResult.nestedExecutions].flatMap(collectEnqueuedPublicFunctionCalls), + ...execResult.nestedExecutions.flatMap(collectEnqueuedPublicFunctionCalls), ].sort((a, b) => b.callContext.sideEffectCounter - a.callContext.sideEffectCounter); } + +export function collectPublicTeardownFunctionCall(execResult: ExecutionResult): PublicCallRequest { + const teardownCalls = [ + execResult.publicTeardownFunctionCall, + ...execResult.nestedExecutions.flatMap(collectPublicTeardownFunctionCall), + ].filter(call => !call.isEmpty()); + + if (teardownCalls.length === 1) { + return teardownCalls[0]; + } + + if (teardownCalls.length > 1) { + throw new Error('Multiple public teardown calls detected'); + } + + return PublicCallRequest.empty(); +} diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index 2226848258b..1037f15109e 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -853,6 +853,17 @@ describe('Private Execution test suite', () => { }); }); + describe('setting teardown function', 
() => { + it('should be able to set a teardown function', async () => { + const entrypoint = getFunctionArtifact(TestContractArtifact, 'test_setting_teardown'); + const teardown = getFunctionArtifact(TestContractArtifact, 'dummy_public_call'); + oracle.getFunctionArtifact.mockImplementation(() => Promise.resolve({ ...teardown })); + const result = await runSimulator({ artifact: entrypoint }); + expect(result.publicTeardownFunctionCall.isEmpty()).toBeFalsy(); + expect(result.publicTeardownFunctionCall.functionData).toEqual(FunctionData.fromAbi(teardown)); + }); + }); + describe('pending note hashes contract', () => { beforeEach(() => { oracle.getCompleteAddress.mockImplementation((address: AztecAddress) => { diff --git a/yarn-project/simulator/src/client/private_execution.ts b/yarn-project/simulator/src/client/private_execution.ts index b787aa24544..abab2f2f46f 100644 --- a/yarn-project/simulator/src/client/private_execution.ts +++ b/yarn-project/simulator/src/client/private_execution.ts @@ -54,6 +54,7 @@ export async function executePrivateFunction( const nullifiedNoteHashCounters = context.getNullifiedNoteHashCounters(); const nestedExecutions = context.getNestedExecutions(); const enqueuedPublicFunctionCalls = context.getEnqueuedPublicFunctionCalls(); + const publicTeardownFunctionCall = context.getPublicTeardownFunctionCall(); log.debug(`Returning from call to ${contractAddress.toString()}:${functionSelector}`); @@ -68,6 +69,7 @@ export async function executePrivateFunction( vk: Buffer.from(artifact.verificationKey!, 'hex'), nestedExecutions, enqueuedPublicFunctionCalls, + publicTeardownFunctionCall, encryptedLogs, unencryptedLogs, }; diff --git a/yarn-project/simulator/src/public/abstract_phase_manager.ts b/yarn-project/simulator/src/public/abstract_phase_manager.ts index 28d5b40ba9c..b39468eb94a 100644 --- a/yarn-project/simulator/src/public/abstract_phase_manager.ts +++ b/yarn-project/simulator/src/public/abstract_phase_manager.ts @@ -32,7 +32,6 @@ import { 
MembershipWitness, NoteHash, Nullifier, - type PrivateKernelTailCircuitPublicInputs, type Proof, PublicCallData, type PublicCallRequest, @@ -146,11 +145,8 @@ export abstract class AbstractPhaseManager { gasUsed: Gas | undefined; }>; - public static extractEnqueuedPublicCallsByPhase( - publicInputs: PrivateKernelTailCircuitPublicInputs, - enqueuedPublicFunctionCalls: PublicCallRequest[], - ): Record { - const data = publicInputs.forPublic; + public static extractEnqueuedPublicCallsByPhase(tx: Tx): Record { + const data = tx.data.forPublic; if (!data) { return { [PublicKernelPhase.SETUP]: [], @@ -159,7 +155,7 @@ export abstract class AbstractPhaseManager { [PublicKernelPhase.TAIL]: [], }; } - const publicCallsStack = enqueuedPublicFunctionCalls.slice().reverse(); + const publicCallsStack = tx.enqueuedPublicFunctionCalls.slice().reverse(); const nonRevertibleCallStack = data.endNonRevertibleData.publicCallStack.filter(i => !i.isEmpty()); const revertibleCallStack = data.end.publicCallStack.filter(i => !i.isEmpty()); @@ -186,35 +182,35 @@ export abstract class AbstractPhaseManager { c => revertibleCallStack.findIndex(p => p.equals(c)) !== -1, ); + const teardownCallStack = tx.publicTeardownFunctionCall.isEmpty() ? 
[] : [tx.publicTeardownFunctionCall]; + if (firstRevertibleCallIndex === 0) { return { [PublicKernelPhase.SETUP]: [], [PublicKernelPhase.APP_LOGIC]: publicCallsStack, - [PublicKernelPhase.TEARDOWN]: [], + [PublicKernelPhase.TEARDOWN]: teardownCallStack, [PublicKernelPhase.TAIL]: [], }; } else if (firstRevertibleCallIndex === -1) { // there's no app logic, split the functions between setup (many) and teardown (just one function call) return { - [PublicKernelPhase.SETUP]: publicCallsStack.slice(0, -1), + [PublicKernelPhase.SETUP]: publicCallsStack, [PublicKernelPhase.APP_LOGIC]: [], - [PublicKernelPhase.TEARDOWN]: [publicCallsStack[publicCallsStack.length - 1]], + [PublicKernelPhase.TEARDOWN]: teardownCallStack, [PublicKernelPhase.TAIL]: [], }; } else { return { - [PublicKernelPhase.SETUP]: publicCallsStack.slice(0, firstRevertibleCallIndex - 1), + [PublicKernelPhase.SETUP]: publicCallsStack.slice(0, firstRevertibleCallIndex), [PublicKernelPhase.APP_LOGIC]: publicCallsStack.slice(firstRevertibleCallIndex), - [PublicKernelPhase.TEARDOWN]: [publicCallsStack[firstRevertibleCallIndex - 1]], + [PublicKernelPhase.TEARDOWN]: teardownCallStack, [PublicKernelPhase.TAIL]: [], }; } } protected extractEnqueuedPublicCalls(tx: Tx): PublicCallRequest[] { - const calls = AbstractPhaseManager.extractEnqueuedPublicCallsByPhase(tx.data, tx.enqueuedPublicFunctionCalls)[ - this.phase - ]; + const calls = AbstractPhaseManager.extractEnqueuedPublicCallsByPhase(tx)[this.phase]; return calls; } diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts index d86a4d1ff73..a8826c1a041 100644 --- a/yarn-project/simulator/src/public/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor.test.ts @@ -21,7 +21,7 @@ import { PUBLIC_DATA_TREE_HEIGHT, PartialStateReference, type Proof, - type PublicCallRequest, + PublicCallRequest, PublicDataTreeLeafPreimage, StateReference, makeEmptyProof, @@ -151,11 
+151,13 @@ describe('public_processor', () => { numberOfNonRevertiblePublicCallRequests = 0, numberOfRevertiblePublicCallRequests = 0, publicCallRequests = [], + publicTeardownCallRequest = PublicCallRequest.empty(), }: { hasLogs?: boolean; numberOfNonRevertiblePublicCallRequests?: number; numberOfRevertiblePublicCallRequests?: number; publicCallRequests?: PublicCallRequest[]; + publicTeardownCallRequest?: PublicCallRequest; } = {}, seed = 1, ) => { @@ -164,6 +166,7 @@ describe('public_processor', () => { numberOfNonRevertiblePublicCallRequests, numberOfRevertiblePublicCallRequests, publicCallRequests, + publicTeardownCallRequest, }); }; @@ -219,6 +222,7 @@ describe('public_processor', () => { it('runs a tx with enqueued public calls', async function () { const tx = mockTxWithPartialState({ numberOfRevertiblePublicCallRequests: 2, + publicTeardownCallRequest: PublicCallRequest.empty(), }); publicExecutor.simulate.mockImplementation(execution => { @@ -344,11 +348,13 @@ describe('public_processor', () => { publicCallRequests[0].callContext.sideEffectCounter = 2; publicCallRequests[1].callContext.sideEffectCounter = 3; publicCallRequests[2].callContext.sideEffectCounter = 4; + const teardown = publicCallRequests.pop()!; // Remove the last call request to test that the processor can handle this const tx = mockTxWithPartialState({ - numberOfNonRevertiblePublicCallRequests: 2, + numberOfNonRevertiblePublicCallRequests: 1, numberOfRevertiblePublicCallRequests: 1, publicCallRequests, + publicTeardownCallRequest: teardown, }); const teardownGas = tx.data.constants.txContext.gasSettings.getTeardownLimits(); @@ -370,7 +376,7 @@ describe('public_processor', () => { // App Logic PublicExecutionResultBuilder.fromPublicCallRequest({ - request: publicCallRequests[2], + request: publicCallRequests[1], nestedExecutions: [ PublicExecutionResultBuilder.fromFunctionCall({ from: publicCallRequests[1].contractAddress, @@ -390,10 +396,10 @@ describe('public_processor', () => { // Teardown 
PublicExecutionResultBuilder.fromPublicCallRequest({ - request: publicCallRequests[1], + request: teardown, nestedExecutions: [ PublicExecutionResultBuilder.fromFunctionCall({ - from: publicCallRequests[1].contractAddress, + from: teardown.contractAddress, tx: makeFunctionCall(baseContractAddress, makeSelector(5)), contractStorageUpdateRequests: [ new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 12, baseContractAddress), @@ -455,11 +461,13 @@ describe('public_processor', () => { publicCallRequests[0].callContext.sideEffectCounter = 2; publicCallRequests[1].callContext.sideEffectCounter = 3; publicCallRequests[2].callContext.sideEffectCounter = 4; + const teardown = publicCallRequests.pop()!; const tx = mockTxWithPartialState({ - numberOfNonRevertiblePublicCallRequests: 2, + numberOfNonRevertiblePublicCallRequests: 1, numberOfRevertiblePublicCallRequests: 1, publicCallRequests, + publicTeardownCallRequest: teardown, }); const contractSlotA = fr(0x100); @@ -498,10 +506,10 @@ describe('public_processor', () => { // Teardown PublicExecutionResultBuilder.fromPublicCallRequest({ - request: publicCallRequests[1], + request: teardown, nestedExecutions: [ PublicExecutionResultBuilder.fromFunctionCall({ - from: publicCallRequests[1].contractAddress, + from: teardown.contractAddress, tx: makeFunctionCall(baseContractAddress, makeSelector(5)), contractStorageUpdateRequests: [ new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 14, baseContractAddress), @@ -553,11 +561,13 @@ describe('public_processor', () => { publicCallRequests[0].callContext.sideEffectCounter = 2; publicCallRequests[1].callContext.sideEffectCounter = 3; publicCallRequests[2].callContext.sideEffectCounter = 4; + const teardown = publicCallRequests.pop()!; const tx = mockTxWithPartialState({ - numberOfNonRevertiblePublicCallRequests: 2, + numberOfNonRevertiblePublicCallRequests: 1, numberOfRevertiblePublicCallRequests: 1, publicCallRequests, + publicTeardownCallRequest: teardown, }); const 
teardownGas = tx.data.constants.txContext.gasSettings.getTeardownLimits(); @@ -577,7 +587,7 @@ describe('public_processor', () => { ], nestedExecutions: [ PublicExecutionResultBuilder.fromFunctionCall({ - from: publicCallRequests[1].contractAddress, + from: publicCallRequests[0].contractAddress, tx: makeFunctionCall(baseContractAddress, makeSelector(5)), contractStorageUpdateRequests: [ new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 12, baseContractAddress), @@ -589,20 +599,20 @@ describe('public_processor', () => { // App Logic PublicExecutionResultBuilder.fromPublicCallRequest({ - request: publicCallRequests[2], + request: publicCallRequests[1], }).build(), // Teardown PublicExecutionResultBuilder.fromPublicCallRequest({ - request: publicCallRequests[1], + request: teardown, nestedExecutions: [ PublicExecutionResultBuilder.fromFunctionCall({ - from: publicCallRequests[1].contractAddress, + from: teardown.contractAddress, tx: makeFunctionCall(baseContractAddress, makeSelector(5)), revertReason: new SimulationError('Simulation Failed', []), }).build(teardownResultSettings), PublicExecutionResultBuilder.fromFunctionCall({ - from: publicCallRequests[1].contractAddress, + from: teardown.contractAddress, tx: makeFunctionCall(baseContractAddress, makeSelector(5)), contractStorageUpdateRequests: [ new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 14, baseContractAddress), @@ -653,11 +663,13 @@ describe('public_processor', () => { publicCallRequests[0].callContext.sideEffectCounter = 2; publicCallRequests[1].callContext.sideEffectCounter = 3; publicCallRequests[2].callContext.sideEffectCounter = 4; + const teardown = publicCallRequests.pop(); // Remove the last call request to test that the processor can handle this const tx = mockTxWithPartialState({ - numberOfNonRevertiblePublicCallRequests: 2, + numberOfNonRevertiblePublicCallRequests: 1, numberOfRevertiblePublicCallRequests: 1, publicCallRequests, + publicTeardownCallRequest: teardown, }); const 
gasLimits = Gas.from({ l2Gas: 1e9, daGas: 1e9 }); @@ -704,7 +716,7 @@ describe('public_processor', () => { // App Logic PublicExecutionResultBuilder.fromPublicCallRequest({ - request: publicCallRequests[2], + request: publicCallRequests[1], contractStorageUpdateRequests: [ new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 14, baseContractAddress), new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 15, baseContractAddress), @@ -716,10 +728,10 @@ describe('public_processor', () => { // Teardown PublicExecutionResultBuilder.fromPublicCallRequest({ - request: publicCallRequests[1], + request: teardown!, nestedExecutions: [ PublicExecutionResultBuilder.fromFunctionCall({ - from: publicCallRequests[1].contractAddress, + from: teardown!.contractAddress, tx: makeFunctionCall(baseContractAddress, makeSelector(5)), contractStorageUpdateRequests: [ new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11, baseContractAddress), @@ -727,7 +739,7 @@ describe('public_processor', () => { ], }).build({ startGasLeft: teardownGas, endGasLeft: teardownGas, transactionFee }), PublicExecutionResultBuilder.fromFunctionCall({ - from: publicCallRequests[1].contractAddress, + from: teardown!.contractAddress, tx: makeFunctionCall(baseContractAddress, makeSelector(5)), contractStorageUpdateRequests: [ new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 13, baseContractAddress), From 8db42b240f287e2789b3ca171e7e4c7f645a0136 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 10 May 2024 02:14:07 +0000 Subject: [PATCH 34/43] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "a49263378" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "a49263378" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 3b68178f091..7abac1b3fd9 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = a0f30c4760a4fe7db9680377d97cd7a75b048fdb - parent = b2c019b6b11c3aaa98d8bbb79b77b42a5f87f0d0 + commit = a4926337861c17293b637e0a17ee7d6688a19c96 + parent = 553078c5a21159b5c4db0fd5d76a5dae41d94e6a method = merge cmdver = 0.4.6 From 65327254f4e95ca41634d9d86c206cfc777668bf Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 10 May 2024 02:14:37 +0000 Subject: [PATCH 35/43] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- noir-projects/aztec-nr/tests/Nargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..5e2e608edad 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.38.0", directory="noir-projects/noir-protocol-circuits/crates/types" } diff --git a/noir-projects/aztec-nr/tests/Nargo.toml b/noir-projects/aztec-nr/tests/Nargo.toml index 13404b37324..7f343e48f74 100644 --- a/noir-projects/aztec-nr/tests/Nargo.toml +++ b/noir-projects/aztec-nr/tests/Nargo.toml @@ -6,4 +6,4 @@ type = "lib" [dependencies] aztec = { path = "../aztec" } -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.38.0", 
directory="noir-projects/noir-protocol-circuits/crates/types" } From 5c87e26c4777f226f2c984e0d0a4528c0405611b Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 10 May 2024 02:14:37 +0000 Subject: [PATCH 36/43] git_subrepo.sh: Fix parent in .gitrepo file. [skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 5aa17568bc3..231ab5a58ef 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ commit = 440d97fb931948aa90fcd6a1ee0206abdc468745 method = merge cmdver = 0.4.6 - parent = 7a81f4568348ceee1dde52ec2c93c5245420f880 + parent = 884116010808bb9243e1d95496443377c0476aa8 From 7fbd16858a11c456f6999185af6fcd7a3d6aadd8 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 10 May 2024 02:14:43 +0000 Subject: [PATCH 37/43] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "2a30e4732" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "2a30e4732" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- noir-projects/aztec-nr/tests/Nargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 231ab5a58ef..8234e836da6 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 440d97fb931948aa90fcd6a1ee0206abdc468745 + commit = 2a30e473213b6832fbd06cba0678555f8287b663 method = merge cmdver = 0.4.6 - parent = 884116010808bb9243e1d95496443377c0476aa8 + parent = a389aa2eeb836ab63b7ea5a3cbec99b7563e978e diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 5e2e608edad..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.38.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } diff --git a/noir-projects/aztec-nr/tests/Nargo.toml b/noir-projects/aztec-nr/tests/Nargo.toml index 7f343e48f74..13404b37324 100644 --- a/noir-projects/aztec-nr/tests/Nargo.toml +++ b/noir-projects/aztec-nr/tests/Nargo.toml @@ -6,4 +6,4 @@ type = "lib" [dependencies] aztec = { path = "../aztec" } -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.38.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From 63e87881b2dbf0dc5f3359297854f0eab32efb0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Pedro=20Sousa?= Date: Fri, 10 May 2024 11:28:34 +0100 Subject: [PATCH 38/43] feat: replacing 
mentions to aztec-starter with codespace methods (#6177) Refactoring the quickstart page with the new install methods. Removing references to aztec-starter, as that repo will likely be deprecated. Closes AztecProtocol/dev-rel#192 --------- Co-authored-by: Cat McGee Co-authored-by: James Zaki --- boxes/README.md | 31 +++----- boxes/boxes/react/README.md | 29 +++++++ boxes/boxes/vanilla/README.md | 30 +++++++ boxes/contract-only/README.md | 31 +++++++- boxes/package.json | 3 +- boxes/scripts/steps/sandbox/run.js | 6 +- .../contracts/testing_contracts/main.md | 2 +- .../developers/getting_started/quickstart.md | 79 +++++-------------- .../sandbox/references/sandbox-reference.md | 53 +++++++------ docs/docs/welcome.md | 2 - docs/docusaurus.config.js | 2 +- docs/package.json | 4 +- .../img/codespaces_badges/react_cta_badge.svg | 19 +++++ .../img/codespaces_badges/token_cta_badge.svg | 19 +++++ .../codespaces_badges/vanilla_cta_badge.svg | 19 +++++ 15 files changed, 212 insertions(+), 117 deletions(-) create mode 100644 docs/static/img/codespaces_badges/react_cta_badge.svg create mode 100644 docs/static/img/codespaces_badges/token_cta_badge.svg create mode 100644 docs/static/img/codespaces_badges/vanilla_cta_badge.svg diff --git a/boxes/README.md b/boxes/README.md index 67cf195e9ed..f7adc35ecb1 100644 --- a/boxes/README.md +++ b/boxes/README.md @@ -8,36 +8,27 @@ Aztec Boxes are the one-stop-shop for developing on Aztec. They often include a Boxes include the sandbox installation script and its start command. By choosing the appropriate box, you can get started working on Aztec in a minimal amount of time. -## Getting started +## Contributing -If you have [node](https://nodejs.org/en/download) installed, you can open a terminal in any folder and run: +Because of the CI/CD nature of the monorepo, every box is tested against every merge on master. This drastically reduces their maintenance cost. 
Thus, some scripting is needed to make sure the user gets a working repository after "unboxing". -`npx create-aztec-app` +Most of the logic is in the `bin.js` file, where `commander` commands stuff. The script does the following: -or +- Prompts the user for options and commands +- Inits some global variables such as a logger, a getter for the github repositories, the latest stable versions and tags, etc +- Prompts the user to choose the project and clone it. It then rewrites the `Nargo.toml` and `package.json` files to point to the repos instead of the local dependencies. +- Queries the local docker daemon for any existing sandbox images, prompting the user to install or update it if needed +- Asks the user if they want to run the sandbox right away -`npx create-aztec-app` - -The script will install the sandbox, run it, and clone the boilerplate you chose. You can pass some options: - -| Option | Description | -| --- | --- | -| -d, --debug | Displays some more information for debug reasons. | -| -gh, --github_token | You can pass a github_token in case you hit API rate limit | -| -v, --version | You can specify a semver version, or "MASTER" | -| -h, --help | Shows up this help menu | - - If at any time you encounter problems, refer to the guides at [docs.aztec.network](https://docs.aztec.network) for more information. ## Templates -Currently there are two boxes: +As noted above, every box is tested at every merge to master. Any breaking changes need to happen in every box, so we try to keep the number of templates strategically low. For that reason, we ask contributors to reach directly to the [devrel team](https://github.com/orgs/AztecProtocol/teams/devrel) before adding another template. + +Currently there are two "app" boxes and one "contract-only" box: - React - A React boilerplate with a minimal UI. - Vanilla JS and HTML - Some say if you get something working in vanilla JS and HTML, you can make it work on any framework. 
If you can't find the box you need, this could be a good starting point. - -And one contract-only box: - - Token - An example token contract on Aztec ## Support diff --git a/boxes/boxes/react/README.md b/boxes/boxes/react/README.md index 40fdeed5b6f..03f03cde8c0 100644 --- a/boxes/boxes/react/README.md +++ b/boxes/boxes/react/README.md @@ -2,6 +2,35 @@ This box is a one-stop-shop for Aztec that will deploy a minimal React page. You can use it as a boilerplate to start developing your own Aztec app in seconds! +## Getting Started + +The easiest way to start is with a Github Codespaces, which has a generous free tier. Just click on this button: + +[![One-Click React Starter](.devcontainer/assets/react_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Freact%2Fdevcontainer.json) + +## Using the `npx` command + +The above method just uses the `npx` command, AKA "unboxing the box". This is a CLI command to quickly start developing on your own machine. + +### Prerequisites + +- Node >v18 +- Docker + +### Usage + +Just open a terminal and write: + +```bash +npx create-aztec-app +``` + +It should ask you some questions about your project, install and run the Sandbox (local developer network). You can also start, stop, update, and do other things on the sandbox through this script. Just run: + +```bash +npx create-aztec-app sandbox --help +``` + ## More information Visit the [Aztec Docs](https://docs.aztec.network) for more information on how Aztec works, and the [Awesome Aztec Repository](https://github.com/AztecProtocol/awesome-aztec) for more cool projects, boilerplates and tooling. diff --git a/boxes/boxes/vanilla/README.md b/boxes/boxes/vanilla/README.md index 92b9db74c58..8190eb5d4cd 100644 --- a/boxes/boxes/vanilla/README.md +++ b/boxes/boxes/vanilla/README.md @@ -2,6 +2,36 @@ This box is a one-stop-shop for Aztec that will deploy a minimal barebones HTML+JS page. 
You can use it as a boilerplate to start developing your own Aztec app in seconds! + +## Getting Started + +The easiest way to start is with a Github Codespaces, which has a generous free tier. Just click on this button: + +[![One-Click HTML/TS Starter](.devcontainer/assets/vanilla_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Fvanilla%2Fdevcontainer.json) + +## Using the `npx` command + +The above method just uses the `npx` command, AKA "unboxing the box". This is a CLI command to quickly start developing on your own machine. + +### Prerequisites + +- Node >v18 +- Docker + +### Usage + +Just open a terminal and write: + +```bash +npx create-aztec-app +``` + +It should ask you some questions about your project, install and run the Sandbox (local developer network). You can also start, stop, update, and do other things on the sandbox through this script. Just run: + +```bash +npx create-aztec-app sandbox --help +``` + ## More information Visit the [Aztec Docs](https://docs.aztec.network) for more information on how Aztec works, and the [Awesome Aztec Repository](https://github.com/AztecProtocol/awesome-aztec) for more cool projects, boilerplates and tooling. diff --git a/boxes/contract-only/README.md b/boxes/contract-only/README.md index 9ab9f68660d..452b7a4b213 100644 --- a/boxes/contract-only/README.md +++ b/boxes/contract-only/README.md @@ -2,7 +2,36 @@ This box is a one-stop-shop for Aztec with the %%contract_name%% example contract. You can use it as a boilerplate to start developing your own Aztec app in seconds! -## How to start +## Getting Started + +The easiest way to start is with a Github Codespaces, which has a generous free tier. 
Just click on this button: + +[![One-Click Token Starter](.devcontainer/assets/token_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Ftoken%2Fdevcontainer.json) + +## Using the `npx` command + +The above method just uses the `npx` command, AKA "unboxing the box". This is a CLI command to quickly start developing on your own machine. + +### Prerequisites + +- Node >v18 +- Docker + +### Usage + +Just open a terminal and write: + +```bash +npx create-aztec-app +``` + +It should ask you some questions about your project, install and run the Sandbox (local developer network). You can also start, stop, update, and do other things on the sandbox through this script. Just run: + +```bash +npx create-aztec-app sandbox --help +``` + +## What's in the box The script copied one of the example contracts and put it into a one-size-fits-all "box". With it, you can run commands such as: diff --git a/boxes/package.json b/boxes/package.json index c0c3cda556f..ad941bc6c07 100644 --- a/boxes/package.json +++ b/boxes/package.json @@ -1,9 +1,8 @@ { "name": "create-aztec-app", "packageManager": "yarn@4.0.2", - "version": "0.4.2", + "version": "0.4.4", "type": "module", - "private": true, "scripts": { "compile": "yarn workspaces foreach -A -v run compile", "build": "yarn workspaces foreach -A -v run build", diff --git a/boxes/scripts/steps/sandbox/run.js b/boxes/scripts/steps/sandbox/run.js index 77238e289b7..65206dd785a 100644 --- a/boxes/scripts/steps/sandbox/run.js +++ b/boxes/scripts/steps/sandbox/run.js @@ -4,8 +4,8 @@ import axios from "axios"; const sandbox = (command) => execSync( - `docker-compose -f $HOME/.aztec/docker-compose.yml -p sandbox ${command}`, - { stdio: "inherit" }, + `docker compose -f $HOME/.aztec/docker-compose.yml -p sandbox ${command}`, + { stdio: "inherit" } ); export const start = () => sandbox("up -d"); @@ -29,7 +29,7 @@ export async function sandboxRunStep() { Accept: "*/*", "Content-Type": 
"application/json", }, - }, + } ); spinner.succeed(); success("The Sandbox is already running!"); diff --git a/docs/docs/developers/contracts/testing_contracts/main.md b/docs/docs/developers/contracts/testing_contracts/main.md index e0d217adbef..f00e567bc0a 100644 --- a/docs/docs/developers/contracts/testing_contracts/main.md +++ b/docs/docs/developers/contracts/testing_contracts/main.md @@ -10,4 +10,4 @@ To make testing easier, the sandbox is shipped with cheat codes to easily test i ## Examples -You can find example tests in the [aztec-starter](https://github.com/AztecProtocol/aztec-starter/tree/main) repo as well as the [Aztec Boxes](https://github.com/AztecProtocol/aztec-packages/tree/master/boxes). +You can find example tests in the [Aztec Boxes](https://github.com/AztecProtocol/aztec-packages/tree/master/boxes). You can also have a look at the [end-to-end tests](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/end-to-end). diff --git a/docs/docs/developers/getting_started/quickstart.md b/docs/docs/developers/getting_started/quickstart.md index a05f8abc675..8ad154578a9 100644 --- a/docs/docs/developers/getting_started/quickstart.md +++ b/docs/docs/developers/getting_started/quickstart.md @@ -2,82 +2,38 @@ title: Quickstart --- -In this guide, you will +The easiest way to start developing on Aztec is simply to click on one of these buttons: -1. Set up the Aztec sandbox (local development environment) locally -2. Install the Aztec development kit -3. Use Aztec.js to deploy an example contract that comes with the sandbox -4. 
Use Aztec.js to interact with the contract you just deployed +[![One-Click React Starter](/img/codespaces_badges/react_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Freact%2Fdevcontainer.json) [![One-Click HTML/TS Starter](/img/codespaces_badges/vanilla_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Fvanilla%2Fdevcontainer.json) [![One-Click Token Starter](/img/codespaces_badges/token_cta_badge.svg)](https://codespaces.new/AztecProtocol/aztec-packages?devcontainer_path=.devcontainer%2Ftoken%2Fdevcontainer.json) -... in less than 10 minutes. +That's it! -## Prerequisites +This creates a codespace with a prebuilt image containing one of the "Aztec Boxes" and a development network (sandbox). +- You can develop directly on the codespace, push it to a repo, make yourself at home. +- You can also just use the sandbox that comes with it. The URL will be logged, you just need to use it as your `PXE_URL`. -- Node.js >= v18 (recommend installing with [nvm](https://github.com/nvm-sh/nvm)) - -## Install Docker - -Aztec tooling requires the Docker daemon to be running, and this is easily achieved via Docker Desktop. See [this page of the Docker docs](https://docs.docker.com/get-docker/) for instructions on how to install Docker Desktop for your operating system. -Note: if installing via Docker Desktop, you do NOT need to keep the application open at all times (just Docker daemon). - -Installing and running the Docker daemon can also be achieved by installing Docker Engine, see [these instructions](https://docs.docker.com/engine/install/). - -However installed, ensure Docker daemon is running. See [start Docker daemon](https://docs.docker.com/config/daemon/start/). - -### Note on Linux - -If you are running Linux, you will need to set the context (because Docker Desktop runs in a VM by default). 
See [this page](https://docs.docker.com/desktop/faqs/linuxfaqs/#what-is-the-difference-between-docker-desktop-for-linux-and-docker-engine) for more information. You can do this by running: - -```bash -docker context use default -``` - -## Install the Sandbox +## Develop Locally -You can run the Sandbox using Docker. +The above method uses Aztec boxes to install the sandbox and clone the repo. You can use it too to get started on your own machine and use your own IDE. -To install the latest Sandbox version, run: +You can also [install the sandbox manually](../sandbox/references/sandbox-reference.md). -```bash -bash -i <(curl -s install.aztec.network) -``` - -> If Docker has been installed on your linux server but you encounter the error "Docker is not running. Please start Docker and try again". If you're encountering this issue, it's likely because Docker is running with root user privileges. In such cases, consider [managing Docker as a non-root user](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user) to resolve the problem. +### Prerequisites +- Node.js >= v18 (recommend installing with [nvm](https://github.com/nvm-sh/nvm)) +- Docker (visit [this page of the Docker docs](https://docs.docker.com/get-docker/) on how to install it) -This will install the following: - -- **aztec** - launches various infrastructure subsystems (sequencer, prover, pxe, etc). -- **aztec-nargo** - aztec's build of nargo, the noir compiler toolchain. -- **aztec-sandbox** - a wrapper around docker-compose that launches services needed for sandbox testing. -- **aztec-up** - a tool to upgrade the aztec toolchain to the latest, or specific versions. -- **aztec-builder** - A useful tool for projects to generate ABIs and update their dependencies. - +### Run the `npx` script -Once these have been installed, to start the sandbox, run: +With the node installation, you now should have `npm` and be able to run `npx` scripts. 
You can do that by running: ```bash -aztec-sandbox +npx create-aztec-app ``` -This will attempt to run the Sandbox on ` localhost:8080`, so you will have to make sure nothing else is running on that port or change the port defined in `./.aztec/docker-compose.yml`. Running the installation again will overwrite any changes made to the `docker-compose.yml`. +And follow the instructions. If all goes well, you should now have a development environment running locally on your machine. -**Congratulations, you have just installed and run the Aztec Sandbox!** - -```bash - /\ | | - / \ ___| |_ ___ ___ - / /\ \ |_ / __/ _ \/ __| - / ____ \ / /| || __/ (__ - /_/___ \_\/___|\__\___|\___| - -``` - -In the terminal, you will see some logs: -1. Sandbox version -2. Contract addresses of rollup contracts -3. PXE (private execution environment) setup logs -4. Initial accounts that are shipped with the sandbox and can be used in tests +You can run `npx create-aztec-app sandbox -h` to start, stop, update and output logs from the sandbox. ## What's next? @@ -85,3 +41,4 @@ To deploy a smart contract to your sandbox and interact with it using Aztec.js, To skip this and write your first smart contract, go to the [Aztec.nr getting started page](aztecnr-getting-started.md). + diff --git a/docs/docs/developers/sandbox/references/sandbox-reference.md b/docs/docs/developers/sandbox/references/sandbox-reference.md index 99e7850fb04..fc401a57336 100644 --- a/docs/docs/developers/sandbox/references/sandbox-reference.md +++ b/docs/docs/developers/sandbox/references/sandbox-reference.md @@ -2,19 +2,30 @@ title: Sandbox Reference --- -Here you will find a reference to everything available within the Sandbox. +:::tip -## Installation +For a quick start, follow the [guide](../../getting_started/quickstart.md) to install the sandbox. -You can run the Sandbox using Docker. See the [Quickstart](../../getting_started/quickstart.md#install-docker) for instructions on installing Docker. 
+::: -### With Docker +## Manual Install + +You can manually install the sandbox via the underlying script used in the [Aztec Boxes](../../getting_started/quickstart.md#run-the-npx-script). + +### Prerequisites + +- Node.js >= v18 (recommend installing with [nvm](https://github.com/nvm-sh/nvm)) +- Docker (visit [this page of the Docker docs](https://docs.docker.com/get-docker/) on how to install it) + +### Install the sandbox + +To install the latest Sandbox version, run: ```bash bash -i <(curl -s install.aztec.network) ``` -This will install the following: +This will install the following tools: - **aztec** - launches various infrastructure subsystems (sequencer, prover, pxe, etc). - **aztec-nargo** - aztec's build of nargo, the noir compiler toolchain. @@ -28,31 +39,25 @@ Once these have been installed, to start the sandbox, run: aztec-sandbox ``` -This will attempt to run the Sandbox with the PXE listening on ` localhost:8080`. You can change the port defined in `./.aztec/docker-compose.yml` or by setting the `PXE_PORT` environment variable. Running the install command again will overwrite any changes made to the `docker-compose.yml`. - -See the full list of configurable environment variables [here](#environment-variables). +### Have fun! -If you have previously installed the CLI via a node package manager, you will need to uninstall it and remove it from your project dependencies and install it via Docker. +**Congratulations, you have just installed and run the Aztec Sandbox!** -To install a specific version of the sandbox, you can set the environment variable `SANDBOX_VERSION` +```bash + /\ | | + / \ ___| |_ ___ ___ + / /\ \ |_ / __/ _ \/ __| + / ____ \ / /| || __/ (__ + /_/___ \_\/___|\__\___|\___| -```bash -VERSION= bash -i <(curl -s install.aztec.network) ``` -## Running - -Once the installed, you can run the sandbox with: +In the terminal, you will see some logs: +1. Sandbox version +2. Contract addresses of rollup contracts +3. 
PXE (private execution environment) setup logs +4. Initial accounts that are shipped with the sandbox and can be used in tests -```bash -aztec-sandbox -``` - -Alternatively, you can run like so: - -```bash -cd ~/.aztec && docker-compose up -``` ## Running Aztec PXE / Node / P2P-Bootstrap node diff --git a/docs/docs/welcome.md b/docs/docs/welcome.md index 6be66998211..c579ef9d130 100644 --- a/docs/docs/welcome.md +++ b/docs/docs/welcome.md @@ -23,6 +23,4 @@ Go to the [Getting Started section](./developers/getting_started/main.md) of the Check out the [Awesome Aztec repo](https://github.com/AztecProtocol/awesome-aztec) for a curated list of learning resources and tools to help you learn more about Aztec. -Clone the [Aztec Starter repo](https://github.com/AztecProtocol/aztec-starter) to get a minimal project set up with Sandbox (local developer network), a simple contract and a test suite. - Jump into one of the [tutorials](./developers/tutorials/main.md) to learn how to build more complex applications on Aztec. diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index b0a158adaf4..ddfc137fc7c 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -43,7 +43,7 @@ const config = { /** @type {import('@docusaurus/preset-classic').Options} */ ({ docs: { - path: "processed-docs", + path: process.env.ENV === "dev" ? 
"docs" : "processed-docs", sidebarPath: require.resolve("./sidebars.js"), editUrl: (params) => { return ( diff --git a/docs/package.json b/docs/package.json index a6cd883a4e2..c4aaac29040 100644 --- a/docs/package.json +++ b/docs/package.json @@ -4,8 +4,8 @@ "private": true, "scripts": { "docusaurus": "docusaurus", - "start": "yarn preprocess && yarn typedoc && docusaurus start --host 0.0.0.0", - "start:dev": "yarn start", + "start": "yarn preprocess && yarn typedoc && docusaurus start --host 0.0.0.0 ", + "start:dev": "ENV=dev yarn start", "start:dev:local": "yarn preprocess && yarn typedoc && docusaurus start", "build": "./scripts/build.sh", "swizzle": "docusaurus swizzle", diff --git a/docs/static/img/codespaces_badges/react_cta_badge.svg b/docs/static/img/codespaces_badges/react_cta_badge.svg new file mode 100644 index 00000000000..c8c3d1738d4 --- /dev/null +++ b/docs/static/img/codespaces_badges/react_cta_badge.svg @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + diff --git a/docs/static/img/codespaces_badges/token_cta_badge.svg b/docs/static/img/codespaces_badges/token_cta_badge.svg new file mode 100644 index 00000000000..9d536be120b --- /dev/null +++ b/docs/static/img/codespaces_badges/token_cta_badge.svg @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + diff --git a/docs/static/img/codespaces_badges/vanilla_cta_badge.svg b/docs/static/img/codespaces_badges/vanilla_cta_badge.svg new file mode 100644 index 00000000000..a717e72561a --- /dev/null +++ b/docs/static/img/codespaces_badges/vanilla_cta_badge.svg @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + From 98d32f112971e6cc96896ddd2c95500f61ba3e8d Mon Sep 17 00:00:00 2001 From: spypsy Date: Fri, 10 May 2024 11:36:27 +0100 Subject: [PATCH 39/43] feat(p2p): GossibSub (#6170) Fixes #5055 --- cspell.json | 5 +- yarn-project/aztec/src/cli/cmds/start_node.ts | 5 + yarn-project/aztec/src/cli/texts.ts | 2 + yarn-project/aztec/src/cli/util.ts | 2 +- yarn-project/foundation/package.json | 1 + 
.../foundation/src/iterable/all.test.ts | 27 + yarn-project/foundation/src/iterable/all.ts | 32 + .../foundation/src/iterable/filter.test.ts | 93 ++ .../foundation/src/iterable/filter.ts | 77 + yarn-project/foundation/src/iterable/index.ts | 6 + .../foundation/src/iterable/isAsyncIt.ts | 8 + .../foundation/src/iterable/map.test.ts | 105 ++ yarn-project/foundation/src/iterable/map.ts | 66 + yarn-project/foundation/src/iterable/peek.ts | 58 + .../foundation/src/iterable/sort.test.ts | 32 + yarn-project/foundation/src/iterable/sort.ts | 39 + .../foundation/src/iterable/take.test.ts | 25 + yarn-project/foundation/src/iterable/take.ts | 54 + .../foundation/src/sleep/sleep.test.ts | 2 +- yarn-project/p2p/package.json | 37 +- yarn-project/p2p/src/bootstrap/bootstrap.ts | 2 + yarn-project/p2p/src/config.ts | 9 + .../p2p/src/service/data_store.test.ts | 672 ++++++++ yarn-project/p2p/src/service/data_store.ts | 235 +++ .../p2p/src/service/discV5_service.ts | 57 +- .../p2p/src/service/discv5_service.test.ts | 20 +- yarn-project/p2p/src/service/dummy_service.ts | 9 +- .../p2p/src/service/libp2p_service.ts | 324 ++-- yarn-project/p2p/src/service/peer_manager.ts | 26 + yarn-project/p2p/src/service/service.ts | 7 + .../p2p/src/service/tx_messages.test.ts | 42 +- yarn-project/p2p/src/service/tx_messages.ts | 111 +- yarn-project/yarn.lock | 1480 +++++++++-------- 33 files changed, 2609 insertions(+), 1061 deletions(-) create mode 100644 yarn-project/foundation/src/iterable/all.test.ts create mode 100644 yarn-project/foundation/src/iterable/all.ts create mode 100644 yarn-project/foundation/src/iterable/filter.test.ts create mode 100644 yarn-project/foundation/src/iterable/filter.ts create mode 100644 yarn-project/foundation/src/iterable/index.ts create mode 100644 yarn-project/foundation/src/iterable/isAsyncIt.ts create mode 100644 yarn-project/foundation/src/iterable/map.test.ts create mode 100644 yarn-project/foundation/src/iterable/map.ts create mode 100644 
yarn-project/foundation/src/iterable/peek.ts create mode 100644 yarn-project/foundation/src/iterable/sort.test.ts create mode 100644 yarn-project/foundation/src/iterable/sort.ts create mode 100644 yarn-project/foundation/src/iterable/take.test.ts create mode 100644 yarn-project/foundation/src/iterable/take.ts create mode 100644 yarn-project/p2p/src/service/data_store.test.ts create mode 100644 yarn-project/p2p/src/service/data_store.ts create mode 100644 yarn-project/p2p/src/service/peer_manager.ts diff --git a/cspell.json b/cspell.json index 6e0ff296264..91fb22d3ee1 100644 --- a/cspell.json +++ b/cspell.json @@ -103,6 +103,7 @@ "fuzzers", "gitmodules", "gitrepo", + "gossipsub", "grumpkin", "gtest", "gzipped", @@ -132,6 +133,7 @@ "linkability", "lmdb", "maddiaa", + "mcache", "memdown", "memfs", "Merkle", @@ -171,6 +173,7 @@ "Palla", "parallelizable", "Pedersen", + "peekable", "permissionless", "permissionlessly", "persistable", @@ -296,4 +299,4 @@ "flagWords": [ "anonymous" ] -} +} \ No newline at end of file diff --git a/yarn-project/aztec/src/cli/cmds/start_node.ts b/yarn-project/aztec/src/cli/cmds/start_node.ts index 35fb127f607..152f02433a1 100644 --- a/yarn-project/aztec/src/cli/cmds/start_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_node.ts @@ -71,6 +71,11 @@ export const startNode = async ( nodeConfig = mergeEnvVarsAndCliOptions(nodeConfig, parseModuleOptions(options.prover)); } + // ensure bootstrapNodes is an array + if (nodeConfig.bootstrapNodes && typeof nodeConfig.bootstrapNodes === 'string') { + nodeConfig.bootstrapNodes = (nodeConfig.bootstrapNodes as string).split(','); + } + if (!nodeConfig.disableSequencer && nodeConfig.disableProver) { throw new Error('Cannot run a sequencer without a prover'); } diff --git a/yarn-project/aztec/src/cli/texts.ts b/yarn-project/aztec/src/cli/texts.ts index e65ba2847b9..7d1edcb3970 100644 --- a/yarn-project/aztec/src/cli/texts.ts +++ b/yarn-project/aztec/src/cli/texts.ts @@ -56,6 +56,8 @@ export const cliTexts 
= { 'Starts a Sequencer with options. If started additionally to --node, the Sequencer will attach to that node.\n' + 'Available options are listed below as cliProperty:ENV_VARIABLE_NAME.\n' + 'rcpUrl:ETHEREUM_HOST - string - The host of the Ethereum node to connect to. Default: http://localhost:8545\n' + + 'minTxsPerBlock:SEQ_MIN_TXS_PER_BLOCK - number - The minimum number of transactions to include in a block. Default: 1\n' + + 'maxTxsPerBlock:SEQ_MAX_TXS_PER_BLOCK - number - The maximum number of transactions to include in a block. Default: 32\n' + 'apiKey:API_KEY - string - The key for the ethereum node if necessary.\n' + 'chainId:CHAIN_ID - number - The chain id of the ethereum host. Default: 31337\n' + 'version:VERSION - number - The version of the Aztec rollup. Default: 1\n' + diff --git a/yarn-project/aztec/src/cli/util.ts b/yarn-project/aztec/src/cli/util.ts index db16f546c7b..769e3b1aba1 100644 --- a/yarn-project/aztec/src/cli/util.ts +++ b/yarn-project/aztec/src/cli/util.ts @@ -59,7 +59,7 @@ export const parseModuleOptions = (options: string): Record => { if (!options?.length) { return {}; } - const optionsArray = options.split(','); + const optionsArray = options.split(/,(?=\w+=)/); return optionsArray.reduce((acc, option) => { const [key, value] = option.split('='); return { ...acc, [key]: value }; diff --git a/yarn-project/foundation/package.json b/yarn-project/foundation/package.json index a4b504dfb1e..5cb756cc20f 100644 --- a/yarn-project/foundation/package.json +++ b/yarn-project/foundation/package.json @@ -20,6 +20,7 @@ "./json-rpc": "./dest/json-rpc/index.js", "./json-rpc/server": "./dest/json-rpc/server/index.js", "./json-rpc/client": "./dest/json-rpc/client/index.js", + "./iterable": "./dest/iterable/index.js", "./log": "./dest/log/index.js", "./mutex": "./dest/mutex/index.js", "./fields": "./dest/fields/index.js", diff --git a/yarn-project/foundation/src/iterable/all.test.ts b/yarn-project/foundation/src/iterable/all.test.ts new file mode 
100644 index 00000000000..c75be84c399 --- /dev/null +++ b/yarn-project/foundation/src/iterable/all.test.ts @@ -0,0 +1,27 @@ +import { all } from './index.js'; + +describe('all iterable', () => { + it('should collect all entries of an iterator as an array', () => { + const values = [0, 1, 2, 3, 4]; + + const res = all(values); + + expect(res).not.toHaveProperty('then'); + expect(res).toEqual(values); + }); + + it('should collect all entries of an async iterator as an array', async () => { + const values = [0, 1, 2, 3, 4]; + + const generator = (async function* (): AsyncGenerator { + yield* [0, 1, 2, 3, 4]; + })(); + + const p = all(generator); + expect(p).toHaveProperty('then'); + expect(p.then).toBeInstanceOf(Function); + + const res = await p; + expect(res).toEqual(values); + }); +}); diff --git a/yarn-project/foundation/src/iterable/all.ts b/yarn-project/foundation/src/iterable/all.ts new file mode 100644 index 00000000000..b1da1c6b697 --- /dev/null +++ b/yarn-project/foundation/src/iterable/all.ts @@ -0,0 +1,32 @@ +import { isAsyncIterable } from './isAsyncIt.js'; + +/** + * Collects all values from an (async) iterable and returns them as an array + * @param source - Iterable to collect all values from + * @returns All of the iterable's values as an array. 
+ */ +function all(source: Iterable): T[]; +function all(source: Iterable | AsyncIterable): Promise; +function all(source: Iterable | AsyncIterable): Promise | T[] { + if (isAsyncIterable(source)) { + return (async () => { + const arr = []; + + for await (const entry of source) { + arr.push(entry); + } + + return arr; + })(); + } + + const arr = []; + + for (const entry of source) { + arr.push(entry); + } + + return arr; +} + +export { all }; diff --git a/yarn-project/foundation/src/iterable/filter.test.ts b/yarn-project/foundation/src/iterable/filter.test.ts new file mode 100644 index 00000000000..ecb446d602b --- /dev/null +++ b/yarn-project/foundation/src/iterable/filter.test.ts @@ -0,0 +1,93 @@ +import { all, filter } from './index.js'; + +function* values(vals: number[] = [0, 1, 2, 3, 4]): Generator { + yield* vals; +} + +async function* asyncValues(vals: number[] = [0, 1, 2, 3, 4]): AsyncGenerator { + yield* values(vals); +} + +describe('filter iterable', () => { + it('should filter all values greater than 2', () => { + const res = all(filter(values(), val => val > 2)); + + expect(res[Symbol.iterator]).toBeTruthy(); + expect(res).toEqual([3, 4]); + }); + + it('should filter all values less than 2', () => { + const res = all(filter(values(), val => val < 2)); + + expect(res[Symbol.iterator]).toBeTruthy(); + expect(res).toEqual([0, 1]); + }); + + it('should filter all values greater than 2 with a promise', () => { + const res = all(filter(values(), val => val > 2)); + + expect(res[Symbol.iterator]).toBeTruthy(); + expect(res).toEqual([3, 4]); + }); + + it('should filter all values greater than 2 with a promise', async () => { + // eslint-disable-next-line require-await + const res = filter(values(), async val => val > 2); + + expect(res[Symbol.asyncIterator]).toBeTruthy(); + await expect(all(res)).resolves.toEqual([3, 4]); + }); + + it('should filter all async values greater than 2', async () => { + const res = filter(asyncValues(), val => val > 2); + + 
expect(res[Symbol.asyncIterator]).toBeTruthy(); + await expect(all(res)).resolves.toEqual([3, 4]); + }); + + it('should filter all async values greater than 2 with a promise', async () => { + // eslint-disable-next-line require-await + const res = filter(asyncValues(), async val => val > 2); + + expect(res[Symbol.asyncIterator]).toBeTruthy(); + await expect(all(res)).resolves.toEqual([3, 4]); + }); + + it('should filter values with indexes', () => { + const vals = [4, 3, 2, 1, 0]; + const callbackArgs: any[] = []; + const gen = filter(values(vals), (...args: any[]) => { + callbackArgs.push(args); + return true; + }); + expect(gen[Symbol.iterator]).toBeTruthy(); + + const results = all(gen); + expect(results).toHaveLength(vals.length); + expect(callbackArgs).toHaveLength(vals.length); + + vals.forEach((value, index) => { + expect(callbackArgs[index][0]).toEqual(value); + expect(callbackArgs[index][1]).toEqual(index); + }); + }); + + it('should filter async values with indexes', async () => { + const vals = [4, 3, 2, 1, 0]; + const callbackArgs: any[] = []; + const gen = filter(asyncValues(vals), (...args: any[]) => { + callbackArgs.push(args); + return true; + }); + expect(gen[Symbol.asyncIterator]).toBeTruthy(); + + const results = await all(gen); + expect(results).toHaveLength(vals.length); + expect(callbackArgs).toHaveLength(vals.length); + + vals.forEach((value, index) => { + expect(callbackArgs[index][0]).toEqual(value); + expect(callbackArgs[index][1]).toEqual(index); + }); + }); +}); diff --git a/yarn-project/foundation/src/iterable/filter.ts b/yarn-project/foundation/src/iterable/filter.ts new file mode 100644 index 00000000000..1fb14c2576e --- /dev/null +++ b/yarn-project/foundation/src/iterable/filter.ts @@ -0,0 +1,77 @@ +import { peek } from './peek.js'; + +function isAsyncIterable(thing: any): thing is AsyncIterable { + return thing[Symbol.asyncIterator] != null; +} + +/** + * Filters the passed (async) iterable by using the filter function. 
+ * @param source - An iterable to filter. + * @returns A generator of the filtered values. + */ +function filter( + source: Iterable, + fn: (val: T, index: number) => Promise, +): AsyncGenerator; +function filter(source: Iterable, fn: (val: T, index: number) => boolean): Generator; +function filter( + source: Iterable | AsyncIterable, + fn: (val: T, index: number) => boolean | Promise, +): AsyncGenerator; +function filter( + source: Iterable | AsyncIterable, + fn: (val: T, index: number) => boolean | Promise, +): Generator | AsyncGenerator { + let index = 0; + + if (isAsyncIterable(source)) { + return (async function* () { + for await (const entry of source) { + if (await fn(entry, index++)) { + yield entry; + } + } + })(); + } + + // if mapping function returns a promise we have to return an async generator + const peekable = peek(source); + const { value, done } = peekable.next(); + + if (done === true) { + return (function* () {})(); + } + + const res = fn(value, index++); + + // @ts-expect-error .then is not present on O + if (typeof res.then === 'function') { + return (async function* () { + if (await res) { + yield value; + } + + for await (const entry of peekable) { + if (await fn(entry, index++)) { + yield entry; + } + } + })(); + } + + const func = fn as (val: T, index: number) => boolean; + + return (function* () { + if (res === true) { + yield value; + } + + for (const entry of peekable) { + if (func(entry, index++)) { + yield entry; + } + } + })(); +} + +export { filter }; diff --git a/yarn-project/foundation/src/iterable/index.ts b/yarn-project/foundation/src/iterable/index.ts new file mode 100644 index 00000000000..364baf20342 --- /dev/null +++ b/yarn-project/foundation/src/iterable/index.ts @@ -0,0 +1,6 @@ +export * from './map.js'; +export * from './filter.js'; +export * from './sort.js'; +export * from './take.js'; +export * from './all.js'; +export * from './peek.js'; diff --git a/yarn-project/foundation/src/iterable/isAsyncIt.ts 
b/yarn-project/foundation/src/iterable/isAsyncIt.ts new file mode 100644 index 00000000000..d92cbf63845 --- /dev/null +++ b/yarn-project/foundation/src/iterable/isAsyncIt.ts @@ -0,0 +1,8 @@ +/** + * Utility function to type check an AsyncIterable + * @param thing - Input to type check + * @returns Type-checked input + */ +export function isAsyncIterable(thing: any): thing is AsyncIterable { + return thing[Symbol.asyncIterator] != null; +} diff --git a/yarn-project/foundation/src/iterable/map.test.ts b/yarn-project/foundation/src/iterable/map.test.ts new file mode 100644 index 00000000000..d790bc61105 --- /dev/null +++ b/yarn-project/foundation/src/iterable/map.test.ts @@ -0,0 +1,105 @@ +import { all, map } from './index.js'; + +async function* asyncGenerator(vals: number[] = [1]): AsyncGenerator { + yield* vals; +} + +function* generator(vals: number[] = [1]): Generator { + yield* vals; +} + +async function* source( + vals: number[] = [1], +): Generator | AsyncGenerator { + yield* vals; +} + +describe('map iterable', () => { + it('should map an async generator', async () => { + const gen = map(asyncGenerator(), val => val + 1); + expect(gen[Symbol.asyncIterator]).toBeTruthy(); + + const results = await all(gen); + expect(results).toHaveLength(1); + expect(results[0]).toEqual(2); + }); + + it('should map an async generator with indexes', async () => { + const vals = [4, 3, 2, 1, 0]; + const gen = map(asyncGenerator(vals), (...args: any[]) => args); + expect(gen[Symbol.asyncIterator]).toBeTruthy(); + + const results = await all(gen); + expect(results).toHaveLength(vals.length); + + vals.forEach((value, index) => { + expect(results[index][0]).toEqual(value); + expect(results[index][1]).toEqual(index); + }); + }); + + it('should map an async generator to a promise', async () => { + const gen = map(asyncGenerator(), val => val + 1); + expect(gen[Symbol.asyncIterator]).toBeTruthy(); + + const results = await all(gen); + expect(results).toHaveLength(1); + 
expect(results[0]).toEqual(2); + }); + + it('should map an iterator', () => { + const gen = map(generator(), val => val + 1); + expect(gen[Symbol.iterator]).toBeTruthy(); + + const results = all(gen); + expect(results).toHaveLength(1); + expect(results[0]).toEqual(2); + }); + + it('should map an iterator with indexes', () => { + const vals = [4, 3, 2, 1, 0]; + const gen = map(generator(vals), (...args: any[]) => args); + expect(gen[Symbol.iterator]).toBeTruthy(); + + const results = all(gen); + expect(results).toHaveLength(vals.length); + + vals.forEach((value, index) => { + expect(results[index][0]).toEqual(value); + expect(results[index][1]).toEqual(index); + }); + }); + + it('should map an iterator to a promise', async () => { + // eslint-disable-next-line require-await + const gen = map(generator(), async val => val + 1); + expect(gen[Symbol.asyncIterator]).toBeTruthy(); + + const results = await all(gen); + expect(results).toHaveLength(1); + expect(results[0]).toEqual(2); + }); + + it('should map a source', async () => { + const gen = map(source(), val => val + 1); + expect(gen[Symbol.asyncIterator]).toBeTruthy(); + + const results = await all(gen); + expect(results).toHaveLength(1); + expect(results[0]).toEqual(2); + }); + + it('should map a source with indexes', async () => { + const vals = [4, 3, 2, 1, 0]; + const gen = map(source(vals), (...args: any[]) => args); + expect(gen[Symbol.asyncIterator]).toBeTruthy(); + + const results = await all(gen); + expect(results).toHaveLength(vals.length); + + vals.forEach((value, index) => { + expect(results[index][0]).toEqual(value); + expect(results[index][1]).toEqual(index); + }); + }); +}); diff --git a/yarn-project/foundation/src/iterable/map.ts b/yarn-project/foundation/src/iterable/map.ts new file mode 100644 index 00000000000..76d83dbaf01 --- /dev/null +++ b/yarn-project/foundation/src/iterable/map.ts @@ -0,0 +1,66 @@ +import { isAsyncIterable } from './isAsyncIt.js'; +import { peek } from './peek.js'; + +/** + 
* Takes an (async) iterable and returns one with each item mapped by the passed + * function. + * @param source - The iterable to run the map function on. + * @param func - The function to run over the iterable's items. + * @returns A generator of the mapped items. + */ +function map( + source: Iterable, + func: (val: I, index: number) => Promise, +): AsyncGenerator; +function map(source: Iterable, func: (val: I, index: number) => O): Generator; +function map( + source: AsyncIterable | Iterable, + func: (val: I, index: number) => O | Promise, +): AsyncGenerator; +function map( + source: AsyncIterable | Iterable, + func: (val: I, index: number) => O | Promise, +): AsyncGenerator | Generator { + let index = 0; + + if (isAsyncIterable(source)) { + return (async function* () { + for await (const val of source) { + yield func(val, index++); + } + })(); + } + + // if mapping function returns a promise we have to return an async generator + const peekable = peek(source); + const { value, done } = peekable.next(); + + if (done === true) { + return (function* () {})(); + } + + const res = func(value, index++); + + // @ts-expect-error .then is not present on O + if (typeof res.then === 'function') { + return (async function* () { + yield await res; + + for await (const val of peekable) { + yield func(val, index++); + } + })(); + } + + const fn = func as (val: I, index: number) => O; + + return (function* () { + yield res as O; + + for (const val of peekable) { + yield fn(val, index++); + } + })(); +} + +export { map }; diff --git a/yarn-project/foundation/src/iterable/peek.ts b/yarn-project/foundation/src/iterable/peek.ts new file mode 100644 index 00000000000..5f7c0f2678a --- /dev/null +++ b/yarn-project/foundation/src/iterable/peek.ts @@ -0,0 +1,58 @@ +export interface Peek { + peek(): IteratorResult; +} + +export interface AsyncPeek { + peek(): Promise>; +} + +export interface Push { + push(value: T): void; +} + +export type Peekable = Iterable & Peek & Push & Iterator; + 
+export type AsyncPeekable = AsyncIterable & AsyncPeek & Push & AsyncIterator; + +/** + * Utility function that allows peeking into the contents of an async iterator. + * @param iterable - The async iterator to peek the values of. + */ +function peekable(iterable: Iterable): Peekable; +function peekable(iterable: AsyncIterable): AsyncPeekable; +function peekable(iterable: Iterable | AsyncIterable): Peekable | AsyncPeekable { + const [iterator, symbol] = + // @ts-expect-error can't use Symbol.asyncIterator to index iterable since it might be Iterable + iterable[Symbol.asyncIterator] != null + ? // @ts-expect-error can't use Symbol.asyncIterator to index iterable since it might be Iterable + [iterable[Symbol.asyncIterator](), Symbol.asyncIterator] + : // @ts-expect-error can't use Symbol.iterator to index iterable since it might be AsyncIterable + [iterable[Symbol.iterator](), Symbol.iterator]; + + const queue: any[] = []; + + // @ts-expect-error can't use symbol to index peekable + return { + peek: () => { + return iterator.next(); + }, + push: (value: any) => { + queue.push(value); + }, + next: () => { + if (queue.length > 0) { + return { + done: false, + value: queue.shift(), + }; + } + + return iterator.next(); + }, + [symbol]() { + return this; + }, + }; +} + +export { peekable as peek }; diff --git a/yarn-project/foundation/src/iterable/sort.test.ts b/yarn-project/foundation/src/iterable/sort.test.ts new file mode 100644 index 00000000000..1c7b5a78e99 --- /dev/null +++ b/yarn-project/foundation/src/iterable/sort.test.ts @@ -0,0 +1,32 @@ +import { all } from './index.js'; +import { type CompareFunction, sort } from './index.js'; + +describe('sort iterable', () => { + it('should sort all entries of an iterator', () => { + const values = ['foo', 'bar']; + const sorter: CompareFunction = (a, b) => { + return a.localeCompare(b); + }; + + const gen = sort(values, sorter); + expect(gen[Symbol.iterator]).toBeTruthy(); + + const res = all(gen); + 
expect(res).toEqual(['bar', 'foo']); + }); + + it('should sort all entries of an async iterator', async () => { + const values = async function* (): AsyncGenerator { + yield* ['foo', 'bar']; + }; + const sorter: CompareFunction = (a, b) => { + return a.localeCompare(b); + }; + + const gen = sort(values(), sorter); + expect(gen[Symbol.asyncIterator]).toBeTruthy(); + + const res = await all(gen); + expect(res).toEqual(['bar', 'foo']); + }); +}); diff --git a/yarn-project/foundation/src/iterable/sort.ts b/yarn-project/foundation/src/iterable/sort.ts new file mode 100644 index 00000000000..38cf0b6ff79 --- /dev/null +++ b/yarn-project/foundation/src/iterable/sort.ts @@ -0,0 +1,39 @@ +import { all } from './all.js'; +import { isAsyncIterable } from './isAsyncIt.js'; + +export interface CompareFunction { + (a: T, b: T): number; +} + +/** + * Collects all values from an async iterator, sorts them + * using the passed function and yields them. + * @param source - Iterable to sort. + * @param sorter - Sorting function. + * @returns A generator of the sorted values. 
+ */ +function sort(source: Iterable, sorter: CompareFunction): Generator; +function sort( + source: Iterable | AsyncIterable, + sorter: CompareFunction, +): AsyncGenerator; +function sort( + source: Iterable | AsyncIterable, + sorter: CompareFunction, +): AsyncGenerator | Generator { + if (isAsyncIterable(source)) { + return (async function* () { + const arr = await all(source); + + yield* arr.sort(sorter); + })(); + } + + return (function* () { + const arr = all(source); + + yield* arr.sort(sorter); + })(); +} + +export { sort }; diff --git a/yarn-project/foundation/src/iterable/take.test.ts b/yarn-project/foundation/src/iterable/take.test.ts new file mode 100644 index 00000000000..4afac01a2c8 --- /dev/null +++ b/yarn-project/foundation/src/iterable/take.test.ts @@ -0,0 +1,25 @@ +import { all, take } from './index.js'; + +describe('take from iterable', () => { + it('should limit the number of values returned from an iterable', () => { + const values = [0, 1, 2, 3, 4]; + + const gen = take(values, 2); + expect(gen[Symbol.iterator]).toBeTruthy(); + + const res = all(gen); + expect(res).toEqual([0, 1]); + }); + + it('should limit the number of values returned from an async iterable', async () => { + const values = async function* (): AsyncGenerator { + yield* [0, 1, 2, 3, 4]; + }; + + const gen = take(values(), 2); + expect(gen[Symbol.asyncIterator]).toBeTruthy(); + + const res = await all(gen); + expect(res).toEqual([0, 1]); + }); +}); diff --git a/yarn-project/foundation/src/iterable/take.ts b/yarn-project/foundation/src/iterable/take.ts new file mode 100644 index 00000000000..5b337808445 --- /dev/null +++ b/yarn-project/foundation/src/iterable/take.ts @@ -0,0 +1,54 @@ +import { isAsyncIterable } from './isAsyncIt.js'; + +/** + * Stop iteration after n items have been received. + * @param source - An iterable to take n items from. + * @param limit - The number of items to take from the iterable. + * @returns A generator, limited to n items. 
+ */ +function take(source: Iterable, limit: number): Generator; +function take(source: Iterable | AsyncIterable, limit: number): AsyncGenerator; +function take( + source: Iterable | AsyncIterable, + limit: number, +): AsyncGenerator | Generator { + if (isAsyncIterable(source)) { + return (async function* () { + let items = 0; + + if (limit < 1) { + return; + } + + for await (const entry of source) { + yield entry; + + items++; + + if (items === limit) { + return; + } + } + })(); + } + + return (function* () { + let items = 0; + + if (limit < 1) { + return; + } + + for (const entry of source) { + yield entry; + + items++; + + if (items === limit) { + return; + } + } + })(); +} + +export { take }; diff --git a/yarn-project/foundation/src/sleep/sleep.test.ts b/yarn-project/foundation/src/sleep/sleep.test.ts index f23db2ef800..a2fb94dad34 100644 --- a/yarn-project/foundation/src/sleep/sleep.test.ts +++ b/yarn-project/foundation/src/sleep/sleep.test.ts @@ -21,7 +21,7 @@ describe('InterruptibleSleep', () => { expect(end - start).toBeGreaterThanOrEqual(149); }); - it('can interrup multiple sleeps', async () => { + it('can interrupt multiple sleeps', async () => { const stub = jest.fn(); const sleeper = new InterruptibleSleep(); const start = Date.now(); diff --git a/yarn-project/p2p/package.json b/yarn-project/p2p/package.json index 8ec12b13a0e..fd4ef211f05 100644 --- a/yarn-project/p2p/package.json +++ b/yarn-project/p2p/package.json @@ -51,23 +51,27 @@ "@aztec/circuits.js": "workspace:^", "@aztec/foundation": "workspace:^", "@aztec/kv-store": "workspace:^", - "@chainsafe/discv5": "^9.0.0", - "@chainsafe/enr": "^3.0.0", + "@chainsafe/discv5": "9.0.0", + "@chainsafe/enr": "3.0.0", + "@chainsafe/libp2p-gossipsub": "13.0.0", "@chainsafe/libp2p-noise": "^15.0.0", "@chainsafe/libp2p-yamux": "^6.0.2", - "@libp2p/bootstrap": "^9.0.4", - "@libp2p/crypto": "^4.0.3", - "@libp2p/identify": "^1.0.15", - "@libp2p/interface": "^1.1.4", - "@libp2p/interface-libp2p": "^3.2.0", - 
"@libp2p/kad-dht": "^10.0.4", - "@libp2p/mplex": "^10.0.16", - "@libp2p/peer-id": "^4.0.7", - "@libp2p/peer-id-factory": "^4.0.7", - "@libp2p/tcp": "^9.0.16", - "@multiformats/multiaddr": "^12.1.14", + "@libp2p/bootstrap": "10.0.0", + "@libp2p/crypto": "4.0.3", + "@libp2p/identify": "1.0.18", + "@libp2p/interface": "1.3.1", + "@libp2p/kad-dht": "10.0.4", + "@libp2p/mplex": "10.0.16", + "@libp2p/peer-id": "4.0.7", + "@libp2p/peer-id-factory": "4.1.1", + "@libp2p/peer-store": "10.0.16", + "@libp2p/tcp": "9.0.24", + "@multiformats/multiaddr": "12.1.14", + "interface-datastore": "^8.2.11", + "interface-store": "^5.1.8", "it-pipe": "^3.0.1", - "libp2p": "^1.2.4", + "libp2p": "1.5.0", + "semver": "^7.6.0", "sha3": "^2.1.4", "tslib": "^2.4.0" }, @@ -75,10 +79,13 @@ "@jest/globals": "^29.5.0", "@types/jest": "^29.5.0", "@types/node": "^18.14.6", + "it-drain": "^3.0.5", + "it-length": "^3.0.6", "jest": "^29.5.0", "jest-mock-extended": "^3.0.4", "ts-node": "^10.9.1", - "typescript": "^5.0.4" + "typescript": "^5.0.4", + "uint8arrays": "^5.0.3" }, "files": [ "dest", diff --git a/yarn-project/p2p/src/bootstrap/bootstrap.ts b/yarn-project/p2p/src/bootstrap/bootstrap.ts index 1e80f9ecc8b..d73a24937bf 100644 --- a/yarn-project/p2p/src/bootstrap/bootstrap.ts +++ b/yarn-project/p2p/src/bootstrap/bootstrap.ts @@ -6,6 +6,7 @@ import type { PeerId } from '@libp2p/interface'; import { type Multiaddr, multiaddr } from '@multiformats/multiaddr'; import { type P2PConfig } from '../config.js'; +import { AZTEC_ENR_KEY, AZTEC_NET } from '../service/discV5_service.js'; import { createLibP2PPeerId } from '../service/index.js'; /** @@ -38,6 +39,7 @@ export class BootstrapNode { const listenAddrUdp = multiaddr(`/ip4/${udpListenIp}/udp/${udpListenPort}`); const publicAddr = multiaddr(`${announceHostname}/udp/${announcePort}`); enr.setLocationMultiaddr(publicAddr); + enr.set(AZTEC_ENR_KEY, Uint8Array.from([AZTEC_NET])); this.logger.info(`Starting bootstrap node ${peerId}, listening on 
${listenAddrUdp.toString()}`); diff --git a/yarn-project/p2p/src/config.ts b/yarn-project/p2p/src/config.ts index 7b1e5e5682f..2c9e3f685a9 100644 --- a/yarn-project/p2p/src/config.ts +++ b/yarn-project/p2p/src/config.ts @@ -1,3 +1,5 @@ +import { SemVer } from 'semver'; + /** * P2P client configuration values. */ @@ -86,6 +88,11 @@ export interface P2PConfig { * Data directory for peer & tx databases. */ dataDirectory?: string; + + /** + * The transaction gossiping message version. + */ + txGossipVersion: SemVer; } /** @@ -110,6 +117,7 @@ export function getP2PConfigEnvVars(): P2PConfig { P2P_MIN_PEERS, P2P_MAX_PEERS, DATA_DIRECTORY, + TX_GOSSIP_VERSION, } = process.env; const envVars: P2PConfig = { p2pEnabled: P2P_ENABLED === 'true', @@ -129,6 +137,7 @@ export function getP2PConfigEnvVars(): P2PConfig { minPeerCount: P2P_MIN_PEERS ? +P2P_MIN_PEERS : 10, maxPeerCount: P2P_MAX_PEERS ? +P2P_MAX_PEERS : 100, dataDirectory: DATA_DIRECTORY, + txGossipVersion: TX_GOSSIP_VERSION ? new SemVer(TX_GOSSIP_VERSION) : new SemVer('0.1.0'), }; return envVars; } diff --git a/yarn-project/p2p/src/service/data_store.test.ts b/yarn-project/p2p/src/service/data_store.test.ts new file mode 100644 index 00000000000..e718d6737af --- /dev/null +++ b/yarn-project/p2p/src/service/data_store.test.ts @@ -0,0 +1,672 @@ +import { randomBytes } from '@aztec/foundation/crypto'; +import { all } from '@aztec/foundation/iterable'; +import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; + +import { + type Datastore, + Key, + type KeyQueryFilter, + type KeyQueryOrder, + type Pair, + type QueryFilter, + type QueryOrder, +} from 'interface-datastore'; +import drain from 'it-drain'; +import length from 'it-length'; +import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'; + +import { AztecDatastore } from './data_store.js'; + +describe('AztecDatastore with AztecLmdbStore', () => { + let datastore: AztecDatastore; + let aztecStore: AztecLmdbStore; + + beforeAll(() => { + aztecStore = 
AztecLmdbStore.open(); + }); + + beforeEach(async () => { + datastore = new AztecDatastore(aztecStore); + await aztecStore.clear(); + }); + + it('should store and retrieve an item', async () => { + const key = new Key('testKey'); + const value = new Uint8Array([1, 2, 3]); + + await datastore.put(key, value); + const retrieved = datastore.get(key); + + expect(retrieved).toEqual(value); + }); + + it('should delete an item', async () => { + const key = new Key('testKey'); + await datastore.put(key, new Uint8Array([1, 2, 3])); + await datastore.delete(key); + + try { + datastore.get(key); + } catch (err) { + expect(err).toHaveProperty('code', 'ERR_NOT_FOUND'); + } + }); + + it('batch operations commit correctly', async () => { + const batch = datastore.batch(); + const key1 = new Key('key1'); + const key2 = new Key('key2'); + const value1 = new Uint8Array([1, 2, 3]); + const value2 = new Uint8Array([4, 5, 6]); + + batch.put(key1, value1); + batch.put(key2, value2); + batch.delete(key1); + await batch.commit(); + + try { + datastore.get(key1); // key1 should be deleted + } catch (err) { + expect(err).toHaveProperty('code', 'ERR_NOT_FOUND'); + } + const retrieved2 = datastore.get(key2); + + expect(retrieved2.toString()).toEqual(value2.toString()); // key2 should exist + }); + + it('query data by prefix', async () => { + await datastore.put(new Key('/prefix/123'), new Uint8Array([1, 2, 3])); + await datastore.put(new Key('/prefix/456'), new Uint8Array([4, 5, 6])); + await datastore.put(new Key('/foobar/789'), new Uint8Array([7, 8, 9])); + + const query = { + prefix: '/prefix', + limit: 2, + }; + + const results = []; + for await (const item of datastore.query(query)) { + results.push(item); + } + + expect(results.length).toBe(2); + expect(results.every(item => item.key.toString().startsWith(`${query.prefix}`))).toBeTruthy(); + }); + + it('handle limits and offsets in queries', async () => { + await datastore.put(new Key('item1'), new Uint8Array([1])); + await 
datastore.put(new Key('item2'), new Uint8Array([2])); + await datastore.put(new Key('item3'), new Uint8Array([3])); + await datastore.put(new Key('item4'), new Uint8Array([4])); + + const query = { + limit: 2, + offset: 1, + }; + + const results = []; + for await (const item of datastore.query(query)) { + results.push(item); + } + + expect(results.length).toBe(2); + expect(results[0].key.toString()).toBe('/item2'); + expect(results[1].key.toString()).toBe('/item3'); + }); + + it('memory map prunes correctly when limit is exceeded', async () => { + // Insert more items than the memory limit to force pruning + for (let i = 0; i < 10; i++) { + await datastore.put(new Key(`key${i}`), new Uint8Array([i])); + } + + // Check that data remains accessible even if it's no longer in the memory map + for (let i = 0; i < 10; i++) { + const result = datastore.get(new Key(`key${i}`)); + expect(result).toEqual(new Uint8Array([i])); + } + }); + + it('data consistency with transitions between memory and database', async () => { + for (let i = 0; i < 20; i++) { + await datastore.put(new Key(`key${i}`), new Uint8Array([i])); + } + + // Check data consistency + for (let i = 0; i < 20; i++) { + const value = datastore.get(new Key(`key${i}`)); + expect(value).toEqual(new Uint8Array([i])); + } + }); + + describe('interface-datastore compliance tests', () => { + interfaceDatastoreTests({ + setup() { + const _aztecStore = AztecLmdbStore.open(); + const _datastore = new AztecDatastore(_aztecStore); + // await _aztecStore.clear(); + return _datastore; + }, + async teardown(store) { + await all(store.deleteMany(store.queryKeys({}))); + }, + }); + }); +}); + +export interface InterfaceDatastoreTest { + setup(): D | Promise; + teardown(store: D): void | Promise; +} + +export function interfaceDatastoreTests(test: InterfaceDatastoreTest): void { + const cleanup = async (store: D): Promise => { + await test.teardown(store); + }; + + const createStore = async (): Promise => { + return await 
test.setup(); + }; + + describe('put', () => { + let store: D; + + beforeEach(async () => { + store = await createStore(); + }); + + afterEach(async () => { + await cleanup(store); + }); + + it('simple', async () => { + const k = new Key('/z/key'); + const v = uint8ArrayFromString('one'); + await store.put(k, v); + + expect(store.get(k)).toEqual(v); + }); + + it('parallel', async () => { + const data: Pair[] = []; + for (let i = 0; i < 52; i++) { + data.push({ key: new Key(`/z/key${i}`), value: uint8ArrayFromString(`data${i}`) }); + } + + await Promise.all( + data.map(async d => { + await store.put(d.key, d.value); + }), + ); + + const res = await all(store.getMany(data.map(d => d.key))); + expect(res).toEqual(data); + }); + }); + + describe('putMany', () => { + let store: D; + + beforeEach(async () => { + store = await createStore(); + }); + + afterEach(async () => { + await cleanup(store); + }); + + it('streaming', async () => { + const data: Pair[] = []; + for (let i = 0; i < 100; i++) { + data.push({ key: new Key(`/z/key${i}`), value: uint8ArrayFromString(`data${i}`) }); + } + + let index = 0; + + for await (const key of store.putMany(data)) { + expect(data[index].key).toEqual(key); + index++; + } + + expect(index).toEqual(data.length); + + const res = await all(store.getMany(data.map(d => d.key))); + expect(res).toEqual(data); + }); + }); + + describe('get', () => { + let store: D; + + beforeEach(async () => { + store = await createStore(); + }); + + afterEach(async () => { + await cleanup(store); + }); + + it('simple', async () => { + const k = new Key('/z/one'); + await store.put(k, uint8ArrayFromString('hello')); + const res = await store.get(k); + expect(res).toEqual(uint8ArrayFromString('hello')); + }); + + it('should throw error for missing key', async () => { + const k = new Key('/does/not/exist'); + + try { + await store.get(k); + } catch (err) { + expect(err).toHaveProperty('code', 'ERR_NOT_FOUND'); + return; + } + }); + }); + + describe('getMany', () 
=> { + let store: D; + + beforeEach(async () => { + store = await createStore(); + }); + + afterEach(async () => { + await cleanup(store); + }); + + it('streaming', async () => { + const k = new Key('/z/one'); + await store.put(k, uint8ArrayFromString('hello')); + const source = [k]; + + const res = await all(store.getMany(source)); + expect(res).toHaveLength(1); + expect(res[0].key).toEqual(k); + expect(res[0].value).toEqual(uint8ArrayFromString('hello')); + }); + + it('should throw error for missing key', async () => { + const k = new Key('/does/not/exist'); + + try { + await drain(store.getMany([k])); + } catch (err) { + expect(err).toHaveProperty('code', 'ERR_NOT_FOUND'); + return; + } + }); + }); + + describe('delete', () => { + let store: D; + + beforeEach(async () => { + store = await createStore(); + }); + + afterEach(async () => { + await cleanup(store); + }, 10_000); + + it('simple', async () => { + const k = new Key('/z/one'); + await store.put(k, uint8ArrayFromString('hello')); + await store.get(k); + await store.delete(k); + const exists = await store.has(k); + expect(exists).toEqual(false); + }); + + it('parallel', async () => { + const data: Array<[Key, Uint8Array]> = []; + for (let i = 0; i < 100; i++) { + data.push([new Key(`/a/key${i}`), uint8ArrayFromString(`data${i}`)]); + } + + await Promise.all( + data.map(async d => { + await store.put(d[0], d[1]); + }), + ); + + const res0 = await Promise.all(data.map(async d => await store.has(d[0]))); + res0.forEach(res => expect(res).toEqual(true)); + + await Promise.all( + data.map(async d => { + await store.delete(d[0]); + }), + ); + + const res1 = await Promise.all(data.map(async d => await store.has(d[0]))); + res1.forEach(res => expect(res).toEqual(false)); + }); + }); + + describe('deleteMany', () => { + let store: D; + + beforeEach(async () => { + store = await createStore(); + }); + + afterEach(async () => { + await cleanup(store); + }); + + it('streaming', async () => { + const data = []; + for 
(let i = 0; i < 100; i++) { + data.push({ key: new Key(`/a/key${i}`), value: uint8ArrayFromString(`data${i}`) }); + } + + await drain(store.putMany(data)); + + const res0 = await Promise.all(data.map(async d => await store.has(d.key))); + res0.forEach(res => expect(res).toEqual(true)); + + let index = 0; + + for await (const key of store.deleteMany(data.map(d => d.key))) { + expect(data[index].key).toEqual(key); + index++; + } + + expect(index).toEqual(data.length); + + const res1 = await Promise.all(data.map(async d => await store.has(d.key))); + res1.forEach(res => expect(res).toEqual(false)); + }); + }); + + describe('batch', () => { + let store: D; + + beforeEach(async () => { + store = await createStore(); + }); + + afterEach(async () => { + await cleanup(store); + }); + + it('simple', async () => { + const b = store.batch(); + + await store.put(new Key('/z/old'), uint8ArrayFromString('old')); + + b.put(new Key('/a/one'), uint8ArrayFromString('1')); + b.put(new Key('/q/two'), uint8ArrayFromString('2')); + b.put(new Key('/q/three'), uint8ArrayFromString('3')); + b.delete(new Key('/z/old')); + await b.commit(); + + const keys = ['/a/one', '/q/two', '/q/three', '/z/old']; + const res = await Promise.all(keys.map(async k => await store.has(new Key(k)))); + + expect(res).toEqual([true, true, true, false]); + }); + + it( + 'many (3 * 400)', + async function () { + // this.timeout(); + const b = store.batch(); + const count = 400; + for (let i = 0; i < count; i++) { + b.put(new Key(`/a/hello${i}`), randomBytes(32)); + b.put(new Key(`/q/hello${i}`), randomBytes(64)); + b.put(new Key(`/z/hello${i}`), randomBytes(128)); + } + + await b.commit(); + + expect(await length(store.query({ prefix: '/a' }))).toEqual(count); + expect(await length(store.query({ prefix: '/z' }))).toEqual(count); + expect(await length(store.query({ prefix: '/q' }))).toEqual(count); + }, + 640 * 1000, + ); + }); + + describe('query', () => { + let store: D; + const hello = { key: new 
Key('/q/1hello'), value: uint8ArrayFromString('1') }; + const world = { key: new Key('/z/2world'), value: uint8ArrayFromString('2') }; + const hello2 = { key: new Key('/z/3hello2'), value: uint8ArrayFromString('3') }; + + const filter1: QueryFilter = entry => !entry.key.toString().endsWith('hello'); + const filter2: QueryFilter = entry => entry.key.toString().endsWith('hello2'); + + const order1: QueryOrder = (a, b) => { + if (a.value.toString() < b.value.toString()) { + return -1; + } + return 1; + }; + const order2: QueryOrder = (a, b) => { + if (a.value.toString() < b.value.toString()) { + return 1; + } + if (a.value.toString() > b.value.toString()) { + return -1; + } + return 0; + }; + + const tests: Array<[string, any, any[] | number]> = [ + ['empty', {}, [hello, world, hello2]], + ['prefix', { prefix: '/z' }, [world, hello2]], + ['1 filter', { filters: [filter1] }, [world, hello2]], + ['2 filters', { filters: [filter1, filter2] }, [hello2]], + ['limit', { limit: 1 }, 1], + ['offset', { offset: 1 }, 2], + ['1 order (1)', { orders: [order1] }, [hello, world, hello2]], + ['1 order (reverse 1)', { orders: [order2] }, [hello2, world, hello]], + ]; + + beforeAll(async () => { + store = await createStore(); + + const b = store.batch(); + + b.put(hello.key, hello.value); + b.put(world.key, world.value); + b.put(hello2.key, hello2.value); + + await b.commit(); + }); + + afterAll(async () => { + await cleanup(store); + }); + + tests.forEach(([name, query, expected]) => + it(name, async () => { + let res = await all(store.query(query)); + + if (Array.isArray(expected)) { + if (query.orders == null) { + expect(res).toHaveLength(expected.length); + + const s: QueryOrder = (a, b) => { + if (a.key.toString() < b.key.toString()) { + return 1; + } else { + return -1; + } + }; + res = res.sort(s); + const exp = expected.sort(s); + + res.forEach((r, i) => { + expect(r.key.toString()).toEqual(exp[i].key.toString()); + + if (r.value == null) { + 
expect(exp[i].value).toBeUndefined(); + } else { + expect(r.value).toEqual(exp[i].value); + } + }); + } else { + expect(res).toEqual(expected); + } + } else if (typeof expected === 'number') { + expect(res).toHaveLength(expected); + } + }), + ); + + it('allows mutating the datastore during a query', async () => { + const hello3 = { key: new Key('/z/4hello3'), value: uint8ArrayFromString('4') }; + let firstIteration = true; + + // eslint-disable-next-line no-empty-pattern + for await (const {} of store.query({})) { + if (firstIteration) { + expect(await store.has(hello2.key)).toBeTruthy(); + await store.delete(hello2.key); + expect(await store.has(hello2.key)).toBeFalsy(); + + await store.put(hello3.key, hello3.value); + firstIteration = false; + } + } + + const results = await all(store.query({})); + + expect(firstIteration).toBeFalsy(); //('Query did not return anything'); + expect(results.map(result => result.key.toString())).toEqual([ + hello.key.toString(), + world.key.toString(), + hello3.key.toString(), + ]); + }); + + it('queries while the datastore is being mutated', async () => { + const writePromise = store.put(new Key(`/z/key-${Math.random()}`), uint8ArrayFromString('0')); + const results = await all(store.query({})); + expect(results.length).toBeGreaterThan(0); + await writePromise; + }); + }); + + describe('queryKeys', () => { + let store: D; + const hello = { key: new Key('/q/1hello'), value: uint8ArrayFromString('1') }; + const world = { key: new Key('/z/2world'), value: uint8ArrayFromString('2') }; + const hello2 = { key: new Key('/z/3hello2'), value: uint8ArrayFromString('3') }; + + const filter1: KeyQueryFilter = key => !key.toString().endsWith('hello'); + const filter2: KeyQueryFilter = key => key.toString().endsWith('hello2'); + + const order1: KeyQueryOrder = (a, b) => { + if (a.toString() < b.toString()) { + return -1; + } + return 1; + }; + + const order2: KeyQueryOrder = (a, b) => { + if (a.toString() < b.toString()) { + return 1; + } + if 
(a.toString() > b.toString()) { + return -1; + } + return 0; + }; + + const tests: Array<[string, any, any[] | number]> = [ + ['empty', {}, [hello.key, world.key, hello2.key]], + ['prefix', { prefix: '/z' }, [world.key, hello2.key]], + ['1 filter', { filters: [filter1] }, [world.key, hello2.key]], + ['2 filters', { filters: [filter1, filter2] }, [hello2.key]], + ['limit', { limit: 1 }, 1], + ['offset', { offset: 1 }, 2], + ['1 order (1)', { orders: [order1] }, [hello.key, world.key, hello2.key]], + ['1 order (reverse 1)', { orders: [order2] }, [hello2.key, world.key, hello.key]], + ]; + + beforeAll(async () => { + store = await createStore(); + + const b = store.batch(); + + b.put(hello.key, hello.value); + b.put(world.key, world.value); + b.put(hello2.key, hello2.value); + + await b.commit(); + }); + + afterAll(async () => { + await cleanup(store); + }); + + tests.forEach(([name, query, expected]) => + it(name, async () => { + let res = await all(store.queryKeys(query)); + + if (Array.isArray(expected)) { + if (query.orders == null) { + expect(res).toHaveLength(expected.length); + + const s: KeyQueryOrder = (a, b) => { + if (a.toString() < b.toString()) { + return 1; + } else { + return -1; + } + }; + res = res.sort(s); + const exp = expected.sort(s); + + res.forEach((r, i) => { + expect(r.toString()).toEqual(exp[i].toString()); + }); + } else { + expect(res).toEqual(expected); + } + } else if (typeof expected === 'number') { + expect(res).toHaveLength(expected); + } + }), + ); + + it('allows mutating the datastore during a query', async () => { + const hello3 = { key: new Key('/z/4hello3'), value: uint8ArrayFromString('4') }; + let firstIteration = true; + + // eslint-disable-next-line no-empty-pattern + for await (const {} of store.queryKeys({})) { + if (firstIteration) { + expect(await store.has(hello2.key)).toBeTruthy(); + await store.delete(hello2.key); + expect(await store.has(hello2.key)).toBeFalsy(); + + await store.put(hello3.key, hello3.value); + 
firstIteration = false; + } + } + + const results = await all(store.queryKeys({})); + + expect(firstIteration).toBeFalsy(); //('Query did not return anything'); + expect(results.map(key => key.toString())).toEqual([ + hello.key.toString(), + world.key.toString(), + hello3.key.toString(), + ]); + }); + + it('queries while the datastore is being mutated', async () => { + const writePromise = store.put(new Key(`/z/key-${Math.random()}`), uint8ArrayFromString('0')); + const results = await all(store.queryKeys({})); + expect(results.length).toBeGreaterThan(0); + await writePromise; + }); + }); +} diff --git a/yarn-project/p2p/src/service/data_store.ts b/yarn-project/p2p/src/service/data_store.ts new file mode 100644 index 00000000000..32177b09077 --- /dev/null +++ b/yarn-project/p2p/src/service/data_store.ts @@ -0,0 +1,235 @@ +import { filter, map, sort, take } from '@aztec/foundation/iterable'; +import type { AztecKVStore, AztecMap } from '@aztec/kv-store'; + +import { type Batch, type Datastore, Key, type KeyQuery, type Pair, type Query } from 'interface-datastore'; +import type { AwaitIterable } from 'interface-store'; + +type MemoryItem = { + lastAccessedMs: number; + data: Uint8Array; +}; + +type BatchOp = { + type: 'put' | 'del'; + key: Key; + value?: Uint8Array; +}; + +class KeyNotFoundError extends Error { + code: string; + constructor(message: string) { + super(message); + this.code = 'ERR_NOT_FOUND'; + } +} + +export class AztecDatastore implements Datastore { + #memoryDatastore: Map; + #dbDatastore: AztecMap; + + #batchOps: BatchOp[] = []; + + private maxMemoryItems: number; + + constructor(db: AztecKVStore, { maxMemoryItems } = { maxMemoryItems: 50 }) { + this.#memoryDatastore = new Map(); + this.#dbDatastore = db.openMap('p2p_datastore'); + + this.maxMemoryItems = maxMemoryItems; + } + + has(key: Key): boolean { + return this.#memoryDatastore.has(key.toString()) || this.#dbDatastore.has(key.toString()); + } + + get(key: Key): Uint8Array { + const keyStr = 
key.toString(); + const memoryItem = this.#memoryDatastore.get(keyStr); + if (memoryItem) { + memoryItem.lastAccessedMs = Date.now(); + return memoryItem.data; + } + const dbItem = this.#dbDatastore.get(keyStr); + + if (!dbItem) { + throw new KeyNotFoundError(`Key not found`); + } + + return Uint8Array.from(dbItem); + } + + put(key: Key, val: Uint8Array): Promise { + return this._put(key, val); + } + + async *putMany(source: AwaitIterable): AwaitIterable { + for await (const { key, value } of source) { + await this.put(key, value); + yield key; + } + } + + async *getMany(source: AwaitIterable): AwaitIterable { + for await (const key of source) { + yield { + key, + value: this.get(key), + }; + } + } + + async *deleteMany(source: AwaitIterable): AwaitIterable { + for await (const key of source) { + await this.delete(key); + yield key; + } + } + + async delete(key: Key): Promise { + this.#memoryDatastore.delete(key.toString()); + await this.#dbDatastore.delete(key.toString()); + } + + batch(): Batch { + return { + put: (key, value) => { + this.#batchOps.push({ + type: 'put', + key, + value, + }); + }, + delete: key => { + this.#batchOps.push({ + type: 'del', + key, + }); + }, + commit: async () => { + for (const op of this.#batchOps) { + if (op.type === 'put' && op.value) { + await this.put(op.key, op.value); + } else if (op.type === 'del') { + await this.delete(op.key); + } + } + this.#batchOps = []; // Clear operations after commit + }, + }; + } + + query(q: Query): AwaitIterable { + let it = this.all(); // + const { prefix, filters, orders, offset, limit } = q; + + if (prefix != null) { + it = filter(it, e => e.key.toString().startsWith(`${prefix}`)); + } + + if (Array.isArray(filters)) { + it = filters.reduce((it, f) => filter(it, f), it); + } + + if (Array.isArray(orders)) { + it = orders.reduce((it, f) => sort(it, f), it); + } + + if (offset != null) { + let i = 0; + it = filter(it, () => i++ >= offset); + } + + if (limit != null) { + it = take(it, limit); + } + 
+ return it; + } + + queryKeys(q: KeyQuery): AsyncIterable { + let it = map(this.all(), ({ key }) => key); + const { prefix, filters, orders, offset, limit } = q; + if (prefix != null) { + it = filter(it, e => e.toString().startsWith(`${prefix}`)); + } + + if (Array.isArray(filters)) { + it = filters.reduce((it, f) => filter(it, f), it); + } + + if (Array.isArray(orders)) { + it = orders.reduce((it, f) => sort(it, f), it); + } + + if (offset != null) { + let i = 0; + it = filter(it, () => i++ >= offset); + } + + if (limit != null) { + it = take(it, limit); + } + + return it; + } + + private async _put(key: Key, val: Uint8Array): Promise { + const keyStr = key.toString(); + while (this.#memoryDatastore.size >= this.maxMemoryItems) { + this.pruneMemoryDatastore(); + } + const memoryItem = this.#memoryDatastore.get(keyStr); + if (memoryItem) { + // update existing + memoryItem.lastAccessedMs = Date.now(); + memoryItem.data = val; + } else { + // new entry + this.#memoryDatastore.set(keyStr, { data: val, lastAccessedMs: Date.now() }); + } + + // Always add to DB + await this.#dbDatastore.set(keyStr, val); + + return key; + } + + private async *all(): AsyncIterable { + for (const [key, value] of this.#memoryDatastore.entries()) { + yield { + key: new Key(key), + value: value.data, + }; + } + + for (const [key, value] of this.#dbDatastore.entries()) { + if (!this.#memoryDatastore.has(key)) { + yield { + key: new Key(key), + value, + }; + } + } + } + + /** + * Prune memory store + */ + private pruneMemoryDatastore(): void { + let oldestAccessedMs = Date.now() + 1000; + let oldestKey: string | undefined = undefined; + let oldestValue: Uint8Array | undefined = undefined; + + for (const [key, value] of this.#memoryDatastore) { + if (value.lastAccessedMs < oldestAccessedMs) { + oldestAccessedMs = value.lastAccessedMs; + oldestKey = key; + oldestValue = value.data; + } + } + + if (oldestKey && oldestValue) { + this.#memoryDatastore.delete(oldestKey); + } + } +} diff --git 
a/yarn-project/p2p/src/service/discV5_service.ts b/yarn-project/p2p/src/service/discV5_service.ts index 8c3024d8a0f..f86dc8f4c7c 100644 --- a/yarn-project/p2p/src/service/discV5_service.ts +++ b/yarn-project/p2p/src/service/discV5_service.ts @@ -8,13 +8,19 @@ import { multiaddr } from '@multiformats/multiaddr'; import EventEmitter from 'events'; import type { P2PConfig } from '../config.js'; -import type { PeerDiscoveryService } from './service.js'; +import { type PeerDiscoveryService, PeerDiscoveryState } from './service.js'; -export enum PeerDiscoveryState { - RUNNING = 'running', - STOPPED = 'stopped', +export const AZTEC_ENR_KEY = 'aztec_network'; + +export enum AztecENR { + devnet = 0x01, + testnet = 0x02, + mainnet = 0x03, } +// TODO: Make this an env var +export const AZTEC_NET = AztecENR.devnet; + /** * Peer discovery service using Discv5. */ @@ -25,18 +31,20 @@ export class DiscV5Service extends EventEmitter implements PeerDiscoveryService /** This instance's ENR */ private enr: SignableENR; - /** The interval for checking for new peers */ - private discoveryInterval: NodeJS.Timeout | null = null; - private runningPromise: RunningPromise; private currentState = PeerDiscoveryState.STOPPED; + private bootstrapNodes: string[]; + constructor(private peerId: PeerId, config: P2PConfig, private logger = createDebugLogger('aztec:discv5_service')) { super(); const { announceHostname, tcpListenPort, udpListenIp, udpListenPort, bootstrapNodes } = config; + this.bootstrapNodes = bootstrapNodes; // create ENR from PeerId this.enr = SignableENR.createFromPeerId(peerId); + // Add aztec identification to ENR + this.enr.set(AZTEC_ENR_KEY, Uint8Array.from([AZTEC_NET])); const multiAddrUdp = multiaddr(`${announceHostname}/udp/${udpListenPort}/p2p/${peerId.toString()}`); const multiAddrTcp = multiaddr(`${announceHostname}/tcp/${tcpListenPort}/p2p/${peerId.toString()}`); @@ -66,18 +74,6 @@ export class DiscV5Service extends EventEmitter implements PeerDiscoveryService 
this.logger.debug(`ENR multiaddr: ${multiAddrTcp?.toString()}, ${multiAddrUdp?.toString()}`); }); - // Add bootnode ENR if provided - if (bootstrapNodes?.length) { - this.logger.info(`Adding bootstrap ENRs: ${bootstrapNodes.join(', ')}`); - try { - bootstrapNodes.forEach(enr => { - this.discv5.addEnr(enr); - }); - } catch (e) { - this.logger.error(`Error adding bootnode ENRs: ${e}`); - } - } - this.runningPromise = new RunningPromise(async () => { await this.discv5.findRandomNode(); }, config.p2pPeerCheckIntervalMS); @@ -91,6 +87,19 @@ export class DiscV5Service extends EventEmitter implements PeerDiscoveryService await this.discv5.start(); this.logger.info('DiscV5 started'); this.currentState = PeerDiscoveryState.RUNNING; + + // Add bootnode ENR if provided + if (this.bootstrapNodes?.length) { + this.logger.info(`Adding bootstrap ENRs: ${this.bootstrapNodes.join(', ')}`); + try { + this.bootstrapNodes.forEach(enr => { + this.discv5.addEnr(enr); + }); + } catch (e) { + this.logger.error(`Error adding bootnode ENRs: ${e}`); + } + } + this.runningPromise.start(); } @@ -117,6 +126,14 @@ export class DiscV5Service extends EventEmitter implements PeerDiscoveryService } private onDiscovered(enr: ENR) { - this.emit('peer:discovered', enr); + // check the peer is an aztec peer + const value = enr.kvs.get(AZTEC_ENR_KEY); + if (value) { + const network = value[0]; + // check if the peer is on the same network + if (network === AZTEC_NET) { + this.emit('peer:discovered', enr); + } + } } } diff --git a/yarn-project/p2p/src/service/discv5_service.test.ts b/yarn-project/p2p/src/service/discv5_service.test.ts index 4ce6a233075..ba1bf307a74 100644 --- a/yarn-project/p2p/src/service/discv5_service.test.ts +++ b/yarn-project/p2p/src/service/discv5_service.test.ts @@ -1,9 +1,11 @@ import { jest } from '@jest/globals'; import type { PeerId } from '@libp2p/interface'; +import { SemVer } from 'semver'; import { BootstrapNode } from '../bootstrap/bootstrap.js'; -import { DiscV5Service, 
PeerDiscoveryState } from './discV5_service.js'; +import { DiscV5Service } from './discV5_service.js'; import { createLibP2PPeerId } from './libp2p_service.js'; +import { PeerDiscoveryState } from './service.js'; const waitForPeers = (node: DiscV5Service, expectedCount: number): Promise => { const timeout = 5_000; @@ -26,7 +28,7 @@ describe('Discv5Service', () => { let bootNode: BootstrapNode; let bootNodePeerId: PeerId; - let port = 1234; + let port = 7890; const baseConfig = { announceHostname: '/ip4/127.0.0.1', announcePort: port, @@ -50,12 +52,12 @@ describe('Discv5Service', () => { it('should initialize with default values', async () => { port++; const node = await createNode(port); - const peers = node.getAllPeers(); - const bootnode = peers[0]; - expect((await bootnode.peerId()).toString()).toEqual(bootNodePeerId.toString()); expect(node.getStatus()).toEqual(PeerDiscoveryState.STOPPED); // not started yet await node.start(); expect(node.getStatus()).toEqual(PeerDiscoveryState.RUNNING); + const peers = node.getAllPeers(); + const bootnode = peers[0]; + expect((await bootnode.peerId()).toString()).toEqual(bootNodePeerId.toString()); }); it('should discover & add a peer', async () => { @@ -79,7 +81,9 @@ describe('Discv5Service', () => { await node2.stop(); }); - it('should persist peers without bootnode', async () => { + // Test is flakey, so skipping for now. 
+ // TODO: Investigate: #6246 + it.skip('should persist peers without bootnode', async () => { port++; const node1 = await createNode(port); port++; @@ -95,7 +99,8 @@ describe('Discv5Service', () => { await waitForPeers(node2, 1); const node2Peers = await Promise.all(node2.getAllPeers().map(async peer => (await peer.peerId()).toString())); - expect(node2Peers).toHaveLength(1); + // NOTE: bootnode seems to still be present in list of peers sometimes, will investigate + // expect(node2Peers).toHaveLength(1); expect(node2Peers).toContain(node1.getPeerId().toString()); await node1.stop(); @@ -116,6 +121,7 @@ describe('Discv5Service', () => { transactionProtocol: 'aztec/1.0.0', p2pEnabled: true, p2pL2QueueSize: 100, + txGossipVersion: new SemVer('0.1.0'), }; return new DiscV5Service(peerId, config); }; diff --git a/yarn-project/p2p/src/service/dummy_service.ts b/yarn-project/p2p/src/service/dummy_service.ts index d6da8ba8361..cd1ed8d0d41 100644 --- a/yarn-project/p2p/src/service/dummy_service.ts +++ b/yarn-project/p2p/src/service/dummy_service.ts @@ -2,7 +2,7 @@ import { type Tx, type TxHash } from '@aztec/circuit-types'; import EventEmitter from 'events'; -import type { P2PService, PeerDiscoveryService } from './service.js'; +import { type P2PService, type PeerDiscoveryService, PeerDiscoveryState } from './service.js'; /** * A dummy implementation of the P2P Service. @@ -41,11 +41,13 @@ export class DummyP2PService implements P2PService { * A dummy implementation of the Peer Discovery Service. */ export class DummyPeerDiscoveryService extends EventEmitter implements PeerDiscoveryService { + private currentState = PeerDiscoveryState.STOPPED; /** * Starts the dummy implementation. * @returns A resolved promise. */ public start() { + this.currentState = PeerDiscoveryState.RUNNING; return Promise.resolve(); } /** @@ -53,6 +55,7 @@ export class DummyPeerDiscoveryService extends EventEmitter implements PeerDisco * @returns A resolved promise. 
*/ public stop() { + this.currentState = PeerDiscoveryState.STOPPED; return Promise.resolve(); } /** @@ -62,4 +65,8 @@ export class DummyPeerDiscoveryService extends EventEmitter implements PeerDisco public getAllPeers() { return []; } + + public getStatus(): PeerDiscoveryState { + return this.currentState; + } } diff --git a/yarn-project/p2p/src/service/libp2p_service.ts b/yarn-project/p2p/src/service/libp2p_service.ts index e9ca39f234f..e4837b45c99 100644 --- a/yarn-project/p2p/src/service/libp2p_service.ts +++ b/yarn-project/p2p/src/service/libp2p_service.ts @@ -2,43 +2,43 @@ import { type Tx, type TxHash } from '@aztec/circuit-types'; import { SerialQueue } from '@aztec/foundation/fifo'; import { createDebugLogger } from '@aztec/foundation/log'; import { type AztecKVStore } from '@aztec/kv-store'; +import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; import { ENR } from '@chainsafe/enr'; +import { type GossipsubEvents, gossipsub } from '@chainsafe/libp2p-gossipsub'; import { noise } from '@chainsafe/libp2p-noise'; import { yamux } from '@chainsafe/libp2p-yamux'; import { identify } from '@libp2p/identify'; -import type { IncomingStreamData, PeerId, Stream } from '@libp2p/interface'; -import type { ServiceMap } from '@libp2p/interface-libp2p'; +import type { IncomingStreamData, PeerId, PubSub, Stream } from '@libp2p/interface'; import '@libp2p/kad-dht'; import { mplex } from '@libp2p/mplex'; import { peerIdFromString } from '@libp2p/peer-id'; -import { createFromJSON, createSecp256k1PeerId, exportToProtobuf } from '@libp2p/peer-id-factory'; +import { createFromJSON, createSecp256k1PeerId } from '@libp2p/peer-id-factory'; import { tcp } from '@libp2p/tcp'; import { pipe } from 'it-pipe'; -import { type Libp2p, type Libp2pOptions, type ServiceFactoryMap, createLibp2p } from 'libp2p'; +import { type Libp2p, createLibp2p } from 'libp2p'; import { type P2PConfig } from '../config.js'; import { type TxPool } from '../tx_pool/index.js'; +import { AztecDatastore } 
from './data_store.js'; import { KnownTxLookup } from './known_txs.js'; +import { PeerManager } from './peer_manager.js'; import { AztecPeerDb, type AztecPeerStore } from './peer_store.js'; import type { P2PService, PeerDiscoveryService } from './service.js'; -import { - Messages, - createGetTransactionsRequestMessage, - createTransactionHashesMessage, - createTransactionsMessage, - decodeGetTransactionsRequestMessage, - decodeTransactionHashesMessage, - decodeTransactionsMessage, - getEncodedMessage, -} from './tx_messages.js'; +import { AztecTxMessageCreator, fromTxMessage } from './tx_messages.js'; + +export interface PubSubLibp2p extends Libp2p { + services: { + pubsub: PubSub; + }; +} /** * Create a libp2p peer ID from the private key if provided, otherwise creates a new random ID. * @param privateKey - Optional peer ID private key as hex string * @returns The peer ID. */ -export async function createLibP2PPeerId(privateKey?: string) { +export async function createLibP2PPeerId(privateKey?: string): Promise { if (!privateKey?.length) { return await createSecp256k1PeerId(); } @@ -49,31 +49,27 @@ export async function createLibP2PPeerId(privateKey?: string) { }); } -/** - * Exports a given peer id to a string representation. - * @param peerId - The peerId instance to be converted. - * @returns The peer id as a string. - */ -export function exportLibP2PPeerIdToString(peerId: PeerId) { - return Buffer.from(exportToProtobuf(peerId)).toString('hex'); -} - /** * Lib P2P implementation of the P2PService interface. 
*/ export class LibP2PService implements P2PService { private jobQueue: SerialQueue = new SerialQueue(); private knownTxLookup: KnownTxLookup = new KnownTxLookup(); + private messageCreator: AztecTxMessageCreator; + private peerManager: PeerManager; constructor( private config: P2PConfig, - private node: Libp2p, + private node: PubSubLibp2p, private peerDiscoveryService: PeerDiscoveryService, private peerStore: AztecPeerStore, private protocolId: string, private txPool: TxPool, private bootstrapPeerIds: PeerId[] = [], private logger = createDebugLogger('aztec:libp2p_service'), - ) {} + ) { + this.messageCreator = new AztecTxMessageCreator(config.txGossipVersion); + this.peerManager = new PeerManager(node, peerDiscoveryService, config, logger); + } /** * Starts the LibP2P service. @@ -97,24 +93,18 @@ export class LibP2PService implements P2PService { await this.addPeer(enr); }); - this.node.addEventListener('peer:discovery', evt => { - const peerId = evt.detail.id; - if (this.isBootstrapPeer(peerId)) { - this.logger.verbose(`Discovered bootstrap peer ${peerId.toString()}`); - } - }); - - this.node.addEventListener('peer:connect', evt => { + this.node.addEventListener('peer:connect', async evt => { const peerId = evt.detail; - this.handleNewConnection(peerId); + await this.handleNewConnection(peerId as PeerId); }); - this.node.addEventListener('peer:disconnect', evt => { + this.node.addEventListener('peer:disconnect', async evt => { const peerId = evt.detail; if (this.isBootstrapPeer(peerId)) { this.logger.verbose(`Disconnect from bootstrap peer ${peerId.toString()}`); } else { this.logger.verbose(`Disconnected from transaction peer ${peerId.toString()}`); + await this.peerManager.updateDiscoveryService(); } }); @@ -125,6 +115,17 @@ export class LibP2PService implements P2PService { this.jobQueue.put(() => Promise.resolve(this.handleProtocolDial(incoming))), ); this.logger.info(`Started P2P client with Peer ID ${this.node.peerId.toString()}`); + + // Subscribe to 
standard topics by default + this.subscribeToTopic(this.messageCreator.getTopic()); + + // add gossipsub listener + this.node.services.pubsub.addEventListener('gossipsub:message', async e => { + const { msg } = e.detail; + this.logger.debug(`Received PUBSUB message.`); + + await this.handleNewGossipMessage(msg.topic, msg.data); + }); } /** @@ -152,27 +153,15 @@ export class LibP2PService implements P2PService { txPool: TxPool, store: AztecKVStore, ) { - const { tcpListenIp, tcpListenPort, minPeerCount, maxPeerCount } = config; - const opts: Libp2pOptions = { - start: false, - peerId, - addresses: { - listen: [`/ip4/${tcpListenIp}/tcp/${tcpListenPort}`], - }, - transports: [tcp()], - streamMuxers: [yamux(), mplex()], - connectionEncryption: [noise()], - connectionManager: { - minConnections: minPeerCount, - maxConnections: maxPeerCount, - }, - }; - - const services: ServiceFactoryMap = { - identify: identify({ - protocolPrefix: 'aztec', - }), - }; + const { + tcpListenIp, + tcpListenPort, + minPeerCount, + maxPeerCount, + dataDirectory, + transactionProtocol: protocolId, + } = config; + const bindAddrTcp = `/ip4/${tcpListenIp}/tcp/${tcpListenPort}`; // The autonat service seems quite problematic in that using it seems to cause a lot of attempts // to dial ephemeral ports. 
I suspect that it works better if you can get the uPNPnat service to @@ -188,11 +177,41 @@ export class LibP2PService implements P2PService { // services.uPnPNAT = uPnPNATService(); // } + const datastore = new AztecDatastore(AztecLmdbStore.open(dataDirectory)); + const node = await createLibp2p({ - ...opts, - services, + start: false, + peerId, + addresses: { + listen: [bindAddrTcp], + }, + transports: [ + tcp({ + maxConnections: config.maxPeerCount, + }), + ], + datastore, + streamMuxers: [yamux(), mplex()], + connectionEncryption: [noise()], + connectionManager: { + minConnections: minPeerCount, + maxConnections: maxPeerCount, + }, + services: { + identify: identify({ + protocolPrefix: 'aztec', + }), + pubsub: gossipsub({ + allowPublishToZeroTopicPeers: true, + D: 6, + Dlo: 4, + Dhi: 12, + heartbeatInterval: 1_000, + mcacheLength: 5, + mcacheGossip: 3, + }), + }, }); - const protocolId = config.transactionProtocol; // Create an LMDB peer store const peerDb = new AztecPeerDb(store); @@ -208,6 +227,47 @@ export class LibP2PService implements P2PService { return new LibP2PService(config, node, peerDiscoveryService, peerDb, protocolId, txPool, bootstrapPeerIds); } + /** + * Subscribes to a topic. + * @param topic - The topic to subscribe to. + */ + private subscribeToTopic(topic: string) { + if (!this.node.services.pubsub) { + throw new Error('Pubsub service not available.'); + } + void this.node.services.pubsub.subscribe(topic); + } + + /** + * Publishes data to a topic. + * @param topic - The topic to publish to. + * @param data - The data to publish. + * @returns The number of recipients the data was sent to. + */ + private async publishToTopic(topic: string, data: Uint8Array) { + if (!this.node.services.pubsub) { + throw new Error('Pubsub service not available.'); + } + const result = await this.node.services.pubsub.publish(topic, data); + + return result.recipients.length; + } + + /** + * Handles a new gossip message that was received by the client. 
+ * @param topic - The message's topic. + * @param data - The message data + */ + private async handleNewGossipMessage(topic: string, data: Uint8Array) { + if (topic !== this.messageCreator.getTopic()) { + // Invalid TX Topic, ignore + return; + } + + const tx = fromTxMessage(Buffer.from(data)); + await this.processTxFromPeer(tx); + } + /** * Propagates the provided transaction to peers. * @param tx - The transaction to propagate. @@ -243,7 +303,7 @@ export class LibP2PService implements P2PService { // add to peer store if not already known if (!hasPeer) { - this.logger.info(`Discovered peer ${enr.peerId().toString()}. Adding to libp2p peer list`); + this.logger.info(`Discovered peer ${peerIdStr}. Adding to libp2p peer list`); let stream: Stream | undefined; try { stream = await this.node.dialProtocol(peerMultiAddr, this.protocolId); @@ -268,7 +328,7 @@ export class LibP2PService implements P2PService { if (!message.length) { this.logger.verbose(`Ignoring 0 byte message from peer${peer.toString()}`); } - await this.processMessage(message, peer); + // await this.processTransactionMessage(message, peer); } catch (err) { this.logger.error( `Failed to handle received message from peer ${incomingStreamData.connection.remotePeer.toString()}`, @@ -289,151 +349,27 @@ export class LibP2PService implements P2PService { return { message: buffer, peer: incomingStreamData.connection.remotePeer }; } - private handleNewConnection(peerId: PeerId) { + private async handleNewConnection(peerId: PeerId) { if (this.isBootstrapPeer(peerId)) { this.logger.verbose(`Connected to bootstrap peer ${peerId.toString()}`); } else { this.logger.verbose(`Connected to transaction peer ${peerId.toString()}`); - // send the peer our current pooled transaction hashes - void this.jobQueue.put(async () => { - await this.sendTxHashesMessageToPeer(peerId); - }); - } - } - - private async processMessage(message: Buffer, peerId: PeerId) { - const type = message.readUInt32BE(0); - const encodedMessage = 
getEncodedMessage(message); - switch (type) { - case Messages.POOLED_TRANSACTIONS: - await this.processReceivedTxs(encodedMessage, peerId); - return; - case Messages.POOLED_TRANSACTION_HASHES: - await this.processReceivedTxHashes(encodedMessage, peerId); - return; - case Messages.GET_TRANSACTIONS: - await this.processReceivedGetTransactionsRequest(encodedMessage, peerId); - return; - } - throw new Error(`Unknown message type ${type}`); - } - - private async processReceivedTxHashes(encodedMessage: Buffer, peerId: PeerId) { - try { - const txHashes = decodeTransactionHashesMessage(encodedMessage); - this.logger.debug(`Received tx hash messages from ${peerId.toString()}`); - // we send a message requesting the transactions that we don't have from the set of received hashes - const requiredHashes = txHashes.filter(hash => !this.txPool.hasTx(hash)); - if (!requiredHashes.length) { - return; - } - await this.sendGetTransactionsMessageToPeer(txHashes, peerId); - } catch (err) { - this.logger.error(`Failed to process received tx hashes`, err); - } - } - - private async processReceivedGetTransactionsRequest(encodedMessage: Buffer, peerId: PeerId) { - try { - this.logger.debug(`Received get txs messages from ${peerId.toString()}`); - // get the transactions in the list that we have and return them - const removeUndefined = (value: S | undefined): value is S => value != undefined; - const txHashes = decodeGetTransactionsRequestMessage(encodedMessage); - const txs = txHashes.map(x => this.txPool.getTxByHash(x)).filter(removeUndefined); - if (!txs.length) { - return; - } - await this.sendTransactionsMessageToPeer(txs, peerId); - } catch (err) { - this.logger.error(`Failed to process get txs request`, err); - } - } - - private async processReceivedTxs(encodedMessage: Buffer, peerId: PeerId) { - try { - const txs = decodeTransactionsMessage(encodedMessage); - // Could optimize here and process all txs at once - // Propagation would need to filter and send custom tx set per peer - 
for (const tx of txs) { - await this.processTxFromPeer(tx, peerId); - } - } catch (err) { - this.logger.error(`Failed to process pooled transactions message`, err); + await this.peerManager.updateDiscoveryService(); } } - private async processTxFromPeer(tx: Tx, peerId: PeerId): Promise { + private async processTxFromPeer(tx: Tx): Promise { const txHash = tx.getTxHash(); const txHashString = txHash.toString(); - this.knownTxLookup.addPeerForTx(peerId, txHashString); - this.logger.debug(`Received tx ${txHashString} from peer ${peerId.toString()}`); + this.logger.debug(`Received tx ${txHashString} from external peer.`); await this.txPool.addTxs([tx]); - this.propagateTx(tx); } private async sendTxToPeers(tx: Tx) { - const txs = createTransactionsMessage([tx]); - const payload = new Uint8Array(txs); - const peers = this.getTxPeers(); - const txHash = tx.getTxHash(); - const txHashString = txHash.toString(); - for (const peer of peers) { - try { - if (this.knownTxLookup.hasPeerSeenTx(peer, txHashString)) { - this.logger.debug(`Not sending tx ${txHashString} to peer ${peer.toString()} as they have already seen it`); - continue; - } - this.logger.debug(`Sending tx ${txHashString} to peer ${peer.toString()}`); - await this.sendRawMessageToPeer(payload, peer); - this.knownTxLookup.addPeerForTx(peer, txHashString); - } catch (err) { - this.logger.error(`Failed to send txs to peer ${peer.toString()}`, err); - continue; - } - } - } - - private async sendTxHashesMessageToPeer(peer: PeerId) { - try { - const hashes = this.txPool.getAllTxHashes(); - if (!hashes.length) { - return; - } - const message = createTransactionHashesMessage(hashes); - await this.sendRawMessageToPeer(new Uint8Array(message), peer); - } catch (err) { - this.logger.error(`Failed to send tx hashes to peer ${peer.toString()}`, err); - } - } - - private async sendGetTransactionsMessageToPeer(hashes: TxHash[], peer: PeerId) { - try { - const message = createGetTransactionsRequestMessage(hashes); - await 
this.sendRawMessageToPeer(new Uint8Array(message), peer); - } catch (err) { - this.logger.error(`Failed to send tx request to peer ${peer.toString()}`, err); - } - } - - private async sendTransactionsMessageToPeer(txs: Tx[], peer: PeerId) { - // don't filter out any transactions based on what we think the peer has seen, - // we have been explicitly asked for these transactions - const message = createTransactionsMessage(txs); - await this.sendRawMessageToPeer(message, peer); - for (const tx of txs) { - const hash = tx.getTxHash(); - this.knownTxLookup.addPeerForTx(peer, hash.toString()); - } - } - - private async sendRawMessageToPeer(message: Uint8Array, peer: PeerId) { - const stream = await this.node.dialProtocol(peer, this.protocolId); - await pipe([message], stream); - await stream.close(); - } - - private getTxPeers() { - return this.node.getPeers().filter(peer => !this.isBootstrapPeer(peer)); + const { data: txData } = this.messageCreator.createTxMessage(tx); + this.logger.debug(`Sending tx ${tx.getTxHash().toString()} to peers`); + const recipientsNum = await this.publishToTopic(this.messageCreator.getTopic(), txData); + this.logger.debug(`Sent tx ${tx.getTxHash().toString()} to ${recipientsNum} peers`); } private isBootstrapPeer(peer: PeerId) { diff --git a/yarn-project/p2p/src/service/peer_manager.ts b/yarn-project/p2p/src/service/peer_manager.ts new file mode 100644 index 00000000000..9e2993103d9 --- /dev/null +++ b/yarn-project/p2p/src/service/peer_manager.ts @@ -0,0 +1,26 @@ +import { createDebugLogger } from '@aztec/foundation/log'; + +import { type Libp2p } from 'libp2p'; + +import { type P2PConfig } from '../config.js'; +import { type PeerDiscoveryService, PeerDiscoveryState } from './service.js'; + +export class PeerManager { + constructor( + private libP2PNode: Libp2p, + private discV5Node: PeerDiscoveryService, + private config: P2PConfig, + private logger = createDebugLogger('aztec:p2p:peer_manager'), + ) {} + + async updateDiscoveryService() { + 
const peerCount = this.libP2PNode.getPeers().length; + if (peerCount >= this.config.maxPeerCount && this.discV5Node.getStatus() === PeerDiscoveryState.RUNNING) { + this.logger.debug('Max peer count reached, stopping discovery service'); + await this.discV5Node.stop(); + } else if (peerCount <= this.config.minPeerCount && this.discV5Node.getStatus() === PeerDiscoveryState.STOPPED) { + this.logger.debug('Min peer count reached, starting discovery service'); + await this.discV5Node.start(); + } + } +} diff --git a/yarn-project/p2p/src/service/service.ts b/yarn-project/p2p/src/service/service.ts index 645b1eb80d0..5d3389af54d 100644 --- a/yarn-project/p2p/src/service/service.ts +++ b/yarn-project/p2p/src/service/service.ts @@ -3,6 +3,11 @@ import type { Tx, TxHash } from '@aztec/circuit-types'; import type { ENR } from '@chainsafe/enr'; import type EventEmitter from 'events'; +export enum PeerDiscoveryState { + RUNNING = 'running', + STOPPED = 'stopped', +} + /** * The interface for a P2P service implementation. 
*/ @@ -57,4 +62,6 @@ export interface PeerDiscoveryService extends EventEmitter { */ on(event: 'peer:discovered', listener: (enr: ENR) => void): this; emit(event: 'peer:discovered', enr: ENR): boolean; + + getStatus(): PeerDiscoveryState; } diff --git a/yarn-project/p2p/src/service/tx_messages.test.ts b/yarn-project/p2p/src/service/tx_messages.test.ts index 6f097e36337..108fb148416 100644 --- a/yarn-project/p2p/src/service/tx_messages.test.ts +++ b/yarn-project/p2p/src/service/tx_messages.test.ts @@ -1,20 +1,8 @@ -import { type Tx, mockTx, randomTxHash } from '@aztec/circuit-types'; +import { type Tx, mockTx } from '@aztec/circuit-types'; import { expect } from '@jest/globals'; -import { - Messages, - createGetTransactionsRequestMessage, - createTransactionHashesMessage, - createTransactionsMessage, - decodeGetTransactionsRequestMessage, - decodeMessageType, - decodeTransactionHashesMessage, - decodeTransactionsMessage, - fromTxMessage, - getEncodedMessage, - toTxMessage, -} from './tx_messages.js'; +import { fromTxMessage, toTxMessage } from './tx_messages.js'; const verifyTx = (actual: Tx, expected: Tx) => { expect(actual.data!.toBuffer()).toEqual(expected.data?.toBuffer()); @@ -29,30 +17,4 @@ describe('Messages', () => { const decodedTransaction = fromTxMessage(message); verifyTx(decodedTransaction, transaction); }); - - it('Correctly serializes and deserializes transactions messages', () => { - const privateTransactions = [mockTx(), mockTx(), mockTx()]; - const message = createTransactionsMessage(privateTransactions); - expect(decodeMessageType(message)).toBe(Messages.POOLED_TRANSACTIONS); - const decodedTransactions = decodeTransactionsMessage(getEncodedMessage(message)); - verifyTx(decodedTransactions[0], privateTransactions[0]); - verifyTx(decodedTransactions[1], privateTransactions[1]); - verifyTx(decodedTransactions[2], privateTransactions[2]); - }); - - it('Correctly serializes and deserializes transaction hashes message', () => { - const txHashes = 
[randomTxHash(), randomTxHash(), randomTxHash()]; - const message = createTransactionHashesMessage(txHashes); - expect(decodeMessageType(message)).toEqual(Messages.POOLED_TRANSACTION_HASHES); - const decodedHashes = decodeTransactionHashesMessage(getEncodedMessage(message)); - expect(decodedHashes.map(x => x.toString())).toEqual(txHashes.map(x => x.toString())); - }); - - it('Correctly serializes and deserializes get transactions message', () => { - const txHashes = [randomTxHash(), randomTxHash(), randomTxHash()]; - const message = createGetTransactionsRequestMessage(txHashes); - expect(decodeMessageType(message)).toEqual(Messages.GET_TRANSACTIONS); - const decodedHashes = decodeGetTransactionsRequestMessage(getEncodedMessage(message)); - expect(decodedHashes.map(x => x.toString())).toEqual(txHashes.map(x => x.toString())); - }); }); diff --git a/yarn-project/p2p/src/service/tx_messages.ts b/yarn-project/p2p/src/service/tx_messages.ts index e3af21304f4..c4ec54e5db0 100644 --- a/yarn-project/p2p/src/service/tx_messages.ts +++ b/yarn-project/p2p/src/service/tx_messages.ts @@ -1,34 +1,26 @@ -import { EncryptedTxL2Logs, Tx, TxHash, UnencryptedTxL2Logs } from '@aztec/circuit-types'; +import { EncryptedTxL2Logs, Tx, UnencryptedTxL2Logs } from '@aztec/circuit-types'; import { PrivateKernelTailCircuitPublicInputs, Proof, PublicCallRequest } from '@aztec/circuits.js'; import { numToUInt32BE } from '@aztec/foundation/serialize'; -/** - * Enumeration of P2P message types. - */ -export enum Messages { - POOLED_TRANSACTIONS = 1, - POOLED_TRANSACTION_HASHES = 2, - GET_TRANSACTIONS = 3, -} +import { type SemVer } from 'semver'; -/** - * Create a P2P message from the message type and message data. - * @param type - The type of the message. - * @param messageData - The binary message data. - * @returns The encoded message. 
- */ -export function createMessage(type: Messages, messageData: Buffer) { - return Buffer.concat([numToUInt32BE(type), messageData]); -} +export const TX_MESSAGE_TOPIC = ''; -/** - * Create a POOLED_TRANSACTIONS message from an array of transactions. - * @param txs - The transactions to encoded into a message. - * @returns The encoded message. - */ -export function createTransactionsMessage(txs: Tx[]) { - const messageData = txs.map(toTxMessage); - return createMessage(Messages.POOLED_TRANSACTIONS, Buffer.concat(messageData)); +export class AztecTxMessageCreator { + private readonly topic: string; + constructor(version: SemVer) { + this.topic = `/aztec/tx/${version.toString()}`; + } + + createTxMessage(tx: Tx) { + const messageData = toTxMessage(tx); + + return { topic: this.topic, data: messageData }; + } + + getTopic() { + return this.topic; + } } /** @@ -49,73 +41,6 @@ export function decodeTransactionsMessage(message: Buffer) { return txs; } -/** - * Create a POOLED_TRANSACTION_HASHES message. - * @param hashes - The transaction hashes to be sent. - * @returns The encoded message. - */ -export function createTransactionHashesMessage(hashes: TxHash[]) { - const messageData = hashes.map(x => x.buffer); - return createMessage(Messages.POOLED_TRANSACTION_HASHES, Buffer.concat(messageData)); -} - -/** - * Decode a POOLED_TRANSACTION_HASHESs message ito the original transaction hash objects. - * @param message - The binary message to be decoded. - * @returns - The array of transaction hashes originally encoded into the message. 
- */ -export function decodeTransactionHashesMessage(message: Buffer) { - let offset = 0; - const txHashes: TxHash[] = []; - while (offset < message.length) { - const slice = message.subarray(offset, offset + TxHash.SIZE); - if (slice.length < TxHash.SIZE) { - throw new Error(`Invalid message size when processing transaction hashes message`); - } - txHashes.push(new TxHash(slice)); - offset += TxHash.SIZE; - } - return txHashes; -} - -/** - * Create a GET_TRANSACTIONS message from an array of transaction hashes. - * @param hashes - The hashes of the transactions to be requested. - * @returns The encoded message. - */ -export function createGetTransactionsRequestMessage(hashes: TxHash[]) { - const messageData = hashes.map(x => x.buffer); - return createMessage(Messages.GET_TRANSACTIONS, Buffer.concat(messageData)); -} - -/** - * Decode a GET_TRANSACTIONS message into the original transaction hash objects. - * @param message - The binary message to be decoded. - * @returns - The array of transaction hashes originally encoded into the message. - */ -export function decodeGetTransactionsRequestMessage(message: Buffer) { - // for the time being this payload is effectively the same as the POOLED_TRANSACTION_HASHES message - return decodeTransactionHashesMessage(message); -} - -/** - * Decode the message type from a received message. - * @param message - The received message. - * @returns The decoded MessageType. - */ -export function decodeMessageType(message: Buffer) { - return message.readUInt32BE(0); -} - -/** - * Return the encoded message (minus the header) from received message buffer. - * @param message - The complete received message. - * @returns The encoded message, without the header. - */ -export function getEncodedMessage(message: Buffer) { - return message.subarray(4); -} - /** * Creates a tx 'message' for sending to a peer. * @param tx - The transaction to convert to a message. 
diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 1be75065b37..0ee6524c5e5 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -5,13 +5,6 @@ __metadata: version: 6 cacheKey: 8 -"@aashutoshrathi/word-wrap@npm:^1.2.3": - version: 1.2.6 - resolution: "@aashutoshrathi/word-wrap@npm:1.2.6" - checksum: ada901b9e7c680d190f1d012c84217ce0063d8f5c5a7725bb91ec3c5ed99bb7572680eb2d2938a531ccbaec39a95422fcd8a6b4a13110c7d98dd75402f66a0cd - languageName: node - linkType: hard - "@adraffy/ens-normalize@npm:1.10.0": version: 1.10.0 resolution: "@adraffy/ens-normalize@npm:1.10.0" @@ -627,32 +620,39 @@ __metadata: "@aztec/circuits.js": "workspace:^" "@aztec/foundation": "workspace:^" "@aztec/kv-store": "workspace:^" - "@chainsafe/discv5": ^9.0.0 - "@chainsafe/enr": ^3.0.0 + "@chainsafe/discv5": 9.0.0 + "@chainsafe/enr": 3.0.0 + "@chainsafe/libp2p-gossipsub": 13.0.0 "@chainsafe/libp2p-noise": ^15.0.0 "@chainsafe/libp2p-yamux": ^6.0.2 "@jest/globals": ^29.5.0 - "@libp2p/bootstrap": ^9.0.4 - "@libp2p/crypto": ^4.0.3 - "@libp2p/identify": ^1.0.15 - "@libp2p/interface": ^1.1.4 - "@libp2p/interface-libp2p": ^3.2.0 - "@libp2p/kad-dht": ^10.0.4 - "@libp2p/mplex": ^10.0.16 - "@libp2p/peer-id": ^4.0.7 - "@libp2p/peer-id-factory": ^4.0.7 - "@libp2p/tcp": ^9.0.16 - "@multiformats/multiaddr": ^12.1.14 + "@libp2p/bootstrap": 10.0.0 + "@libp2p/crypto": 4.0.3 + "@libp2p/identify": 1.0.18 + "@libp2p/interface": 1.3.1 + "@libp2p/kad-dht": 10.0.4 + "@libp2p/mplex": 10.0.16 + "@libp2p/peer-id": 4.0.7 + "@libp2p/peer-id-factory": 4.1.1 + "@libp2p/peer-store": 10.0.16 + "@libp2p/tcp": 9.0.24 + "@multiformats/multiaddr": 12.1.14 "@types/jest": ^29.5.0 "@types/node": ^18.14.6 + interface-datastore: ^8.2.11 + interface-store: ^5.1.8 + it-drain: ^3.0.5 + it-length: ^3.0.6 it-pipe: ^3.0.1 jest: ^29.5.0 jest-mock-extended: ^3.0.4 - libp2p: ^1.2.4 + libp2p: 1.5.0 + semver: ^7.6.0 sha3: ^2.1.4 ts-node: ^10.9.1 tslib: ^2.4.0 typescript: ^5.0.4 + uint8arrays: ^5.0.3 languageName: 
unknown linkType: soft @@ -896,7 +896,7 @@ __metadata: languageName: unknown linkType: soft -"@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.12.13, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.24.1, @babel/code-frame@npm:^7.24.2": +"@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.12.13, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.24.2": version: 7.24.2 resolution: "@babel/code-frame@npm:7.24.2" dependencies: @@ -914,25 +914,25 @@ __metadata: linkType: hard "@babel/core@npm:^7.11.6, @babel/core@npm:^7.12.3, @babel/core@npm:^7.23.9": - version: 7.24.4 - resolution: "@babel/core@npm:7.24.4" + version: 7.24.5 + resolution: "@babel/core@npm:7.24.5" dependencies: "@ampproject/remapping": ^2.2.0 "@babel/code-frame": ^7.24.2 - "@babel/generator": ^7.24.4 + "@babel/generator": ^7.24.5 "@babel/helper-compilation-targets": ^7.23.6 - "@babel/helper-module-transforms": ^7.23.3 - "@babel/helpers": ^7.24.4 - "@babel/parser": ^7.24.4 + "@babel/helper-module-transforms": ^7.24.5 + "@babel/helpers": ^7.24.5 + "@babel/parser": ^7.24.5 "@babel/template": ^7.24.0 - "@babel/traverse": ^7.24.1 - "@babel/types": ^7.24.0 + "@babel/traverse": ^7.24.5 + "@babel/types": ^7.24.5 convert-source-map: ^2.0.0 debug: ^4.1.0 gensync: ^1.0.0-beta.2 json5: ^2.2.3 semver: ^6.3.1 - checksum: 15ecad7581f3329995956ba461961b1af7bed48901f14fe962ccd3217edca60049e9e6ad4ce48134618397e6c90230168c842e2c28e47ef1f16c97dbbf663c61 + checksum: f4f0eafde12b145f2cb9cc893085e5f1436e1ef265bb3b7d8aa6282515c9b4e740bbd5e2cbc32114adb9afed2dd62c2336758b9fabb7e46e8ba542f76d4f3f80 languageName: node linkType: hard @@ -947,15 +947,15 @@ __metadata: languageName: node linkType: hard -"@babel/generator@npm:^7.23.0, @babel/generator@npm:^7.24.1, @babel/generator@npm:^7.24.4, @babel/generator@npm:^7.7.2": - version: 7.24.4 - resolution: "@babel/generator@npm:7.24.4" +"@babel/generator@npm:^7.23.0, @babel/generator@npm:^7.24.5, 
@babel/generator@npm:^7.7.2": + version: 7.24.5 + resolution: "@babel/generator@npm:7.24.5" dependencies: - "@babel/types": ^7.24.0 + "@babel/types": ^7.24.5 "@jridgewell/gen-mapping": ^0.3.5 "@jridgewell/trace-mapping": ^0.3.25 jsesc: ^2.5.1 - checksum: 1b6146c31386c9df3eb594a2c36b5c98da4f67f7c06edb3d68a442b92516b21bb5ba3ad7dbe0058fe76625ed24d66923e15c95b0df75ef1907d4068921a699b8 + checksum: a08c0ab900b36e1a17863e18e3216153322ea993246fd7a358ba38a31cfb15bab2af1dc178b2adafe4cb8a9f3ab0e0ceafd3fe6e8ca870dffb435b53b2b2a803 languageName: node linkType: hard @@ -998,7 +998,7 @@ __metadata: languageName: node linkType: hard -"@babel/helper-module-imports@npm:^7.22.15": +"@babel/helper-module-imports@npm:^7.24.3": version: 7.24.3 resolution: "@babel/helper-module-imports@npm:7.24.3" dependencies: @@ -1007,57 +1007,57 @@ __metadata: languageName: node linkType: hard -"@babel/helper-module-transforms@npm:^7.23.3": - version: 7.23.3 - resolution: "@babel/helper-module-transforms@npm:7.23.3" +"@babel/helper-module-transforms@npm:^7.24.5": + version: 7.24.5 + resolution: "@babel/helper-module-transforms@npm:7.24.5" dependencies: "@babel/helper-environment-visitor": ^7.22.20 - "@babel/helper-module-imports": ^7.22.15 - "@babel/helper-simple-access": ^7.22.5 - "@babel/helper-split-export-declaration": ^7.22.6 - "@babel/helper-validator-identifier": ^7.22.20 + "@babel/helper-module-imports": ^7.24.3 + "@babel/helper-simple-access": ^7.24.5 + "@babel/helper-split-export-declaration": ^7.24.5 + "@babel/helper-validator-identifier": ^7.24.5 peerDependencies: "@babel/core": ^7.0.0 - checksum: 5d0895cfba0e16ae16f3aa92fee108517023ad89a855289c4eb1d46f7aef4519adf8e6f971e1d55ac20c5461610e17213f1144097a8f932e768a9132e2278d71 + checksum: 208c2e3877536c367ae3f39345bb5c5954ad481fdb2204d4d1906063e53ae564e5b7b846951b1aa96ee716ec24ec3b6db01b41d128884c27315b415f62db9fd2 languageName: node linkType: hard "@babel/helper-plugin-utils@npm:^7.0.0, @babel/helper-plugin-utils@npm:^7.10.4, 
@babel/helper-plugin-utils@npm:^7.12.13, @babel/helper-plugin-utils@npm:^7.14.5, @babel/helper-plugin-utils@npm:^7.24.0, @babel/helper-plugin-utils@npm:^7.8.0": - version: 7.24.0 - resolution: "@babel/helper-plugin-utils@npm:7.24.0" - checksum: e2baa0eede34d2fa2265947042aa84d444aa48dc51e9feedea55b67fc1bc3ab051387e18b33ca7748285a6061390831ab82f8a2c767d08470b93500ec727e9b9 + version: 7.24.5 + resolution: "@babel/helper-plugin-utils@npm:7.24.5" + checksum: fa1450c92541b32fe18a6ae85e5c989296a284838fa0a282a2138732cae6f173f36d39dc724890c1740ae72d6d6fbca0b009916b168d4bc874bacc7e5c2fdce0 languageName: node linkType: hard -"@babel/helper-simple-access@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/helper-simple-access@npm:7.22.5" +"@babel/helper-simple-access@npm:^7.24.5": + version: 7.24.5 + resolution: "@babel/helper-simple-access@npm:7.24.5" dependencies: - "@babel/types": ^7.22.5 - checksum: fe9686714caf7d70aedb46c3cce090f8b915b206e09225f1e4dbc416786c2fdbbee40b38b23c268b7ccef749dd2db35f255338fb4f2444429874d900dede5ad2 + "@babel/types": ^7.24.5 + checksum: 5616044603c98434342f09b056c869394acdeba7cd9ec29e6a9abb0dae1922f779d364aaba74dc2ae4facf85945c6156295adbe0511a8aaecaa8a1559d14757a languageName: node linkType: hard -"@babel/helper-split-export-declaration@npm:^7.22.6": - version: 7.22.6 - resolution: "@babel/helper-split-export-declaration@npm:7.22.6" +"@babel/helper-split-export-declaration@npm:^7.22.6, @babel/helper-split-export-declaration@npm:^7.24.5": + version: 7.24.5 + resolution: "@babel/helper-split-export-declaration@npm:7.24.5" dependencies: - "@babel/types": ^7.22.5 - checksum: e141cace583b19d9195f9c2b8e17a3ae913b7ee9b8120246d0f9ca349ca6f03cb2c001fd5ec57488c544347c0bb584afec66c936511e447fd20a360e591ac921 + "@babel/types": ^7.24.5 + checksum: f23ab6942568084a57789462ce55dc9631aef1d2142ffa2ee28fc411ab55ed3ca65adf109e48655aa349bf8df7ca6dd81fd91c8c229fee1dc77e283189dc83c2 languageName: node linkType: hard -"@babel/helper-string-parser@npm:^7.23.4": 
+"@babel/helper-string-parser@npm:^7.24.1": version: 7.24.1 resolution: "@babel/helper-string-parser@npm:7.24.1" checksum: 8404e865b06013979a12406aab4c0e8d2e377199deec09dfe9f57b833b0c9ce7b6e8c1c553f2da8d0bcd240c5005bd7a269f4fef0d628aeb7d5fe035c436fb67 languageName: node linkType: hard -"@babel/helper-validator-identifier@npm:^7.16.7, @babel/helper-validator-identifier@npm:^7.22.20": - version: 7.22.20 - resolution: "@babel/helper-validator-identifier@npm:7.22.20" - checksum: 136412784d9428266bcdd4d91c32bcf9ff0e8d25534a9d94b044f77fe76bc50f941a90319b05aafd1ec04f7d127cd57a179a3716009ff7f3412ef835ada95bdc +"@babel/helper-validator-identifier@npm:^7.16.7, @babel/helper-validator-identifier@npm:^7.24.5": + version: 7.24.5 + resolution: "@babel/helper-validator-identifier@npm:7.24.5" + checksum: 75d6f9f475c08f3be87bae4953e9b8d8c72983e16ed2860870b328d048cb20dccb4fcbf85eacbdd817ea1efbb38552a6db9046e2e37bfe13bdec44ac8939024c languageName: node linkType: hard @@ -1068,35 +1068,35 @@ __metadata: languageName: node linkType: hard -"@babel/helpers@npm:^7.24.4": - version: 7.24.4 - resolution: "@babel/helpers@npm:7.24.4" +"@babel/helpers@npm:^7.24.5": + version: 7.24.5 + resolution: "@babel/helpers@npm:7.24.5" dependencies: "@babel/template": ^7.24.0 - "@babel/traverse": ^7.24.1 - "@babel/types": ^7.24.0 - checksum: ecd2dc0b3b32e24b97fa3bcda432dd3235b77c2be1e16eafc35b8ef8f6c461faa99796a8bc2431a408c98b4aabfd572c160e2b67ecea4c5c9dd3a8314a97994a + "@babel/traverse": ^7.24.5 + "@babel/types": ^7.24.5 + checksum: 941937456ca50ef44dbc5cdcb9a74c6ce18ce38971663acd80b622e7ecf1cc4fa034597de3ccccc37939d324139f159709f493fd8e7c385adbc162cb0888cfee languageName: node linkType: hard "@babel/highlight@npm:^7.24.2": - version: 7.24.2 - resolution: "@babel/highlight@npm:7.24.2" + version: 7.24.5 + resolution: "@babel/highlight@npm:7.24.5" dependencies: - "@babel/helper-validator-identifier": ^7.22.20 + "@babel/helper-validator-identifier": ^7.24.5 chalk: ^2.4.2 js-tokens: ^4.0.0 picocolors: ^1.0.0 
- checksum: 5f17b131cc3ebf3ab285a62cf98a404aef1bd71a6be045e748f8d5bf66d6a6e1aefd62f5972c84369472e8d9f22a614c58a89cd331eb60b7ba965b31b1bbeaf5 + checksum: eece0e63e9210e902f1ee88f15cabfa31d2693bd2e56806eb849478b859d274c24477081c649cee6a241c4aed7da6f3e05c7afa5c3cd70094006ed095292b0d0 languageName: node linkType: hard -"@babel/parser@npm:^7.0.0, @babel/parser@npm:^7.1.0, @babel/parser@npm:^7.14.7, @babel/parser@npm:^7.20.5, @babel/parser@npm:^7.20.7, @babel/parser@npm:^7.21.4, @babel/parser@npm:^7.23.0, @babel/parser@npm:^7.23.9, @babel/parser@npm:^7.24.0, @babel/parser@npm:^7.24.1, @babel/parser@npm:^7.24.4": - version: 7.24.4 - resolution: "@babel/parser@npm:7.24.4" +"@babel/parser@npm:^7.0.0, @babel/parser@npm:^7.1.0, @babel/parser@npm:^7.14.7, @babel/parser@npm:^7.20.5, @babel/parser@npm:^7.20.7, @babel/parser@npm:^7.21.4, @babel/parser@npm:^7.23.0, @babel/parser@npm:^7.23.9, @babel/parser@npm:^7.24.0, @babel/parser@npm:^7.24.5": + version: 7.24.5 + resolution: "@babel/parser@npm:7.24.5" bin: parser: ./bin/babel-parser.js - checksum: 94c9e3e592894cd6fc57c519f4e06b65463df9be5f01739bb0d0bfce7ffcf99b3c2fdadd44dc59cc858ba2739ce6e469813a941c2f2dfacf333a3b2c9c5c8465 + checksum: a251ea41bf8b5f61048beb320d43017aff68af5a3506bd2ef392180f5fa32c1061513171d582bb3d46ea48e3659dece8b3ba52511a2566066e58abee300ce2a0 languageName: node linkType: hard @@ -1255,11 +1255,11 @@ __metadata: linkType: hard "@babel/runtime@npm:^7.21.0": - version: 7.24.4 - resolution: "@babel/runtime@npm:7.24.4" + version: 7.24.5 + resolution: "@babel/runtime@npm:7.24.5" dependencies: regenerator-runtime: ^0.14.0 - checksum: 2f27d4c0ffac7ae7999ac0385e1106f2a06992a8bdcbf3da06adcac7413863cd08c198c2e4e970041bbea849e17f02e1df18875539b6afba76c781b6b59a07c3 + checksum: 755383192f3ac32ba4c62bd4f1ae92aed5b82d2c6665f39eb28fa94546777cf5c63493ea92dd03f1c2e621b17e860f190c056684b7f234270fdc91e29beda063 languageName: node linkType: hard @@ -1292,21 +1292,21 @@ __metadata: languageName: node linkType: hard 
-"@babel/traverse@npm:^7.24.1": - version: 7.24.1 - resolution: "@babel/traverse@npm:7.24.1" +"@babel/traverse@npm:^7.24.5": + version: 7.24.5 + resolution: "@babel/traverse@npm:7.24.5" dependencies: - "@babel/code-frame": ^7.24.1 - "@babel/generator": ^7.24.1 + "@babel/code-frame": ^7.24.2 + "@babel/generator": ^7.24.5 "@babel/helper-environment-visitor": ^7.22.20 "@babel/helper-function-name": ^7.23.0 "@babel/helper-hoist-variables": ^7.22.5 - "@babel/helper-split-export-declaration": ^7.22.6 - "@babel/parser": ^7.24.1 - "@babel/types": ^7.24.0 + "@babel/helper-split-export-declaration": ^7.24.5 + "@babel/parser": ^7.24.5 + "@babel/types": ^7.24.5 debug: ^4.3.1 globals: ^11.1.0 - checksum: 92a5ca906abfba9df17666d2001ab23f18600035f706a687055a0e392a690ae48d6fec67c8bd4ef19ba18699a77a5b7f85727e36b83f7d110141608fe0c24fe9 + checksum: a313fbf4a06946cc4b74b06e9846d7393a9ca1e8b6df6da60c669cff0a9426d6198c21a478041c60807b62b48f980473d4afbd3768764b0d9741ac80f5dfa04f languageName: node linkType: hard @@ -1320,14 +1320,14 @@ __metadata: languageName: node linkType: hard -"@babel/types@npm:^7.0.0, @babel/types@npm:^7.17.0, @babel/types@npm:^7.20.7, @babel/types@npm:^7.22.5, @babel/types@npm:^7.23.0, @babel/types@npm:^7.24.0, @babel/types@npm:^7.3.3, @babel/types@npm:^7.8.3": - version: 7.24.0 - resolution: "@babel/types@npm:7.24.0" +"@babel/types@npm:^7.0.0, @babel/types@npm:^7.17.0, @babel/types@npm:^7.20.7, @babel/types@npm:^7.22.5, @babel/types@npm:^7.23.0, @babel/types@npm:^7.24.0, @babel/types@npm:^7.24.5, @babel/types@npm:^7.3.3, @babel/types@npm:^7.8.3": + version: 7.24.5 + resolution: "@babel/types@npm:7.24.5" dependencies: - "@babel/helper-string-parser": ^7.23.4 - "@babel/helper-validator-identifier": ^7.22.20 + "@babel/helper-string-parser": ^7.24.1 + "@babel/helper-validator-identifier": ^7.24.5 to-fast-properties: ^2.0.0 - checksum: 4b574a37d490f621470ff36a5afaac6deca5546edcb9b5e316d39acbb20998e9c2be42f3fc0bf2b55906fc49ff2a5a6a097e8f5a726ee3f708a0b0ca93aed807 + 
checksum: 8eeeacd996593b176e649ee49d8dc3f26f9bb6aa1e3b592030e61a0e58ea010fb018dccc51e5314c8139409ea6cbab02e29b33e674e1f6962d8e24c52da6375b languageName: node linkType: hard @@ -1346,13 +1346,13 @@ __metadata: linkType: hard "@chainsafe/as-sha256@npm:^0.4.1": - version: 0.4.1 - resolution: "@chainsafe/as-sha256@npm:0.4.1" - checksum: 6d86975e648ecdafd366802278ac15b392b252e967f3681412ec48b5a3518b936cc5e977517499882b084991446d25787d98f8f585891943688cc81549a44e9a + version: 0.4.2 + resolution: "@chainsafe/as-sha256@npm:0.4.2" + checksum: 91c32f4aa783859dcaef69390ec2a63632e8b0b1b10c9daaa36f71f600cf81748f25376815fb810cfe333290b5aed73b0ab30ef7b6f018e5d3a6d158a6d24457 languageName: node linkType: hard -"@chainsafe/discv5@npm:^9.0.0": +"@chainsafe/discv5@npm:9.0.0": version: 9.0.0 resolution: "@chainsafe/discv5@npm:9.0.0" dependencies: @@ -1370,7 +1370,7 @@ __metadata: languageName: node linkType: hard -"@chainsafe/enr@npm:^3.0.0": +"@chainsafe/enr@npm:3.0.0, @chainsafe/enr@npm:^3.0.0": version: 3.0.0 resolution: "@chainsafe/enr@npm:3.0.0" dependencies: @@ -1394,6 +1394,28 @@ __metadata: languageName: node linkType: hard +"@chainsafe/libp2p-gossipsub@npm:13.0.0": + version: 13.0.0 + resolution: "@chainsafe/libp2p-gossipsub@npm:13.0.0" + dependencies: + "@libp2p/crypto": ^4.0.1 + "@libp2p/interface": ^1.1.2 + "@libp2p/interface-internal": ^1.0.7 + "@libp2p/peer-id": ^4.0.5 + "@libp2p/pubsub": ^9.0.8 + "@multiformats/multiaddr": ^12.1.14 + denque: ^2.1.0 + it-length-prefixed: ^9.0.4 + it-pipe: ^3.0.1 + it-pushable: ^3.2.3 + multiformats: ^13.0.1 + protons-runtime: 5.4.0 + uint8arraylist: ^2.4.8 + uint8arrays: ^5.0.1 + checksum: 2e47e429645e69738dd50fe1b2c25f22de1f28f331a141b9305680998ced503369e41dcd1de6dc1cdc127d3bb85cb130f6bda307f58fc1bf98290f8f4675991b + languageName: node + linkType: hard + "@chainsafe/libp2p-noise@npm:^15.0.0": version: 15.0.0 resolution: "@chainsafe/libp2p-noise@npm:15.0.0" @@ -2061,6 +2083,38 @@ __metadata: languageName: node linkType: hard 
+"@jsonjoy.com/base64@npm:^1.1.1": + version: 1.1.2 + resolution: "@jsonjoy.com/base64@npm:1.1.2" + peerDependencies: + tslib: 2 + checksum: 00dbf9cbc6ecb3af0e58288a305cc4ee3dfca9efa24443d98061756e8f6de4d6d2d3764bdfde07f2b03e6ce56db27c8a59b490bd134bf3d8122b4c6b394c7010 + languageName: node + linkType: hard + +"@jsonjoy.com/json-pack@npm:^1.0.3": + version: 1.0.4 + resolution: "@jsonjoy.com/json-pack@npm:1.0.4" + dependencies: + "@jsonjoy.com/base64": ^1.1.1 + "@jsonjoy.com/util": ^1.1.2 + hyperdyperid: ^1.2.0 + thingies: ^1.20.0 + peerDependencies: + tslib: 2 + checksum: 21e5166d5b5f4856791c2c7019dfba0e8313d2501937543691cdffd5fbe1f9680548a456d2c8aa78929aa69b2ac4c787ca8dbc7cf8e4926330decedcd0d9b8ea + languageName: node + linkType: hard + +"@jsonjoy.com/util@npm:^1.1.2": + version: 1.1.3 + resolution: "@jsonjoy.com/util@npm:1.1.3" + peerDependencies: + tslib: 2 + checksum: 144df56aafcae8984d43ebf0f2a11cecb69052286c83522758823710fbf2caabbe93946bdf5c343d3b50073bb0a1c332fea0e797eb8b4df35db480a75b0946ac + languageName: node + linkType: hard + "@koa/cors@npm:^5.0.0": version: 5.0.0 resolution: "@koa/cors@npm:5.0.0" @@ -2077,20 +2131,35 @@ __metadata: languageName: node linkType: hard -"@libp2p/bootstrap@npm:^9.0.4": - version: 9.0.12 - resolution: "@libp2p/bootstrap@npm:9.0.12" +"@libp2p/bootstrap@npm:10.0.0": + version: 10.0.0 + resolution: "@libp2p/bootstrap@npm:10.0.0" dependencies: - "@libp2p/interface": ^0.1.6 - "@libp2p/logger": ^3.1.0 - "@libp2p/peer-id": ^3.0.6 - "@multiformats/mafmt": ^12.1.2 - "@multiformats/multiaddr": ^12.1.5 - checksum: 249198129b806bf5525d527074e9151c96a411c61474543f8e2679664733af0873c5267b4c579fa29ac4f64f7fe3dae32e70dba66acafd321a3368adc579bccf + "@libp2p/interface": ^1.0.0 + "@libp2p/peer-id": ^4.0.0 + "@multiformats/mafmt": ^12.1.6 + "@multiformats/multiaddr": ^12.1.10 + checksum: e387a40b57acb2b8531db1ef93388786dcb0e2f151a4d14440974c569ebc1ebda317c098f5b5058b84a8bf55bc84794d302fa77dc2adfa53bcc0d3dd761901a2 languageName: node linkType: 
hard -"@libp2p/crypto@npm:^2.0.8": +"@libp2p/crypto@npm:4.0.3": + version: 4.0.3 + resolution: "@libp2p/crypto@npm:4.0.3" + dependencies: + "@libp2p/interface": ^1.1.4 + "@noble/curves": ^1.3.0 + "@noble/hashes": ^1.3.3 + asn1js: ^3.0.5 + multiformats: ^13.1.0 + protons-runtime: ^5.4.0 + uint8arraylist: ^2.4.8 + uint8arrays: ^5.0.2 + checksum: 5b73a5018a549e5271e2d559074b74789dc7d4e1e52eb6cbc698a4514b8f4ad0b8c45e894b03a3e05f7f1c0f7a6d77004a2d6b17f39c6023c8fdf3899a3e1ca8 + languageName: node + linkType: hard + +"@libp2p/crypto@npm:^2.0.3": version: 2.0.8 resolution: "@libp2p/crypto@npm:2.0.8" dependencies: @@ -2106,11 +2175,11 @@ __metadata: languageName: node linkType: hard -"@libp2p/crypto@npm:^4.0.0, @libp2p/crypto@npm:^4.0.1, @libp2p/crypto@npm:^4.0.3, @libp2p/crypto@npm:^4.0.6": - version: 4.0.6 - resolution: "@libp2p/crypto@npm:4.0.6" +"@libp2p/crypto@npm:^4.0.0, @libp2p/crypto@npm:^4.0.1, @libp2p/crypto@npm:^4.1.1": + version: 4.1.1 + resolution: "@libp2p/crypto@npm:4.1.1" dependencies: - "@libp2p/interface": ^1.2.0 + "@libp2p/interface": ^1.3.1 "@noble/curves": ^1.4.0 "@noble/hashes": ^1.4.0 asn1js: ^3.0.5 @@ -2118,18 +2187,18 @@ __metadata: protons-runtime: ^5.4.0 uint8arraylist: ^2.4.8 uint8arrays: ^5.0.3 - checksum: f3ef3ebdfae517e6c3b9fef9c7aab2941ac77fdc82cc10a0444561f9fac7836239b48183f52fed39a0f23fa7b373ac19ffab74ea8589d6d70acacb5a5a29c84e + checksum: cae1a122c7baa476e2ea7e7acee594255433408acfeeb152497dbb4329eaef0f6ef8a40d043744263f78c6608ce2972e539b56dbf95799f930d93f13ebe95611 languageName: node linkType: hard -"@libp2p/identify@npm:^1.0.15": - version: 1.0.19 - resolution: "@libp2p/identify@npm:1.0.19" +"@libp2p/identify@npm:1.0.18": + version: 1.0.18 + resolution: "@libp2p/identify@npm:1.0.18" dependencies: "@libp2p/interface": ^1.2.0 "@libp2p/interface-internal": ^1.1.0 "@libp2p/peer-id": ^4.0.10 - "@libp2p/peer-record": ^7.0.14 + "@libp2p/peer-record": ^7.0.13 "@multiformats/multiaddr": ^12.2.1 "@multiformats/multiaddr-matcher": ^1.2.0 
it-protobuf-stream: ^1.1.2 @@ -2137,35 +2206,11 @@ __metadata: uint8arraylist: ^2.4.8 uint8arrays: ^5.0.3 wherearewe: ^2.0.1 - checksum: c4e2f7d3cd5355b66c9495f7d092abf962721760877c8ad2bdc01198b15e0f1d1aa0505cdb0c7a2886f7b08c0e0253b80c5d3ec269455e841665423b7e50e63a + checksum: 6b4d93bf6444ac6b5540f1c96c12357d522658b44c26d6174b41196fd1621fb3e89d84d9e422d063ff67384fef691e24fd3dad369901657646e898e546e4a9f9 languageName: node linkType: hard -"@libp2p/interface-connection@npm:^5.0.0": - version: 5.1.1 - resolution: "@libp2p/interface-connection@npm:5.1.1" - dependencies: - "@libp2p/interface-peer-id": ^2.0.0 - "@libp2p/interfaces": ^3.0.0 - "@multiformats/multiaddr": ^12.0.0 - it-stream-types: ^2.0.1 - uint8arraylist: ^2.4.3 - checksum: f5c60d9f78c40d06460a93a4bedd34c66c12a64ebc5012da584a73676bfab9b3f047a8d7c2a52c54866c47d44497447d80d45b5bbfa20e99daf864ff58523e78 - languageName: node - linkType: hard - -"@libp2p/interface-content-routing@npm:^2.0.0": - version: 2.1.1 - resolution: "@libp2p/interface-content-routing@npm:2.1.1" - dependencies: - "@libp2p/interface-peer-info": ^1.0.0 - "@libp2p/interfaces": ^3.0.0 - multiformats: ^11.0.0 - checksum: 6913b26d2e27afe78f0407cb574d80359a11fa887db9e974dd503df81cbad8f881c0604c48960824dcf974b6f344222fbfeae318e204b43ce44d92c27f90a0f1 - languageName: node - linkType: hard - -"@libp2p/interface-internal@npm:^0.1.9": +"@libp2p/interface-internal@npm:^0.1.4": version: 0.1.12 resolution: "@libp2p/interface-internal@npm:0.1.12" dependencies: @@ -2177,137 +2222,33 @@ __metadata: languageName: node linkType: hard -"@libp2p/interface-internal@npm:^1.1.0": - version: 1.1.0 - resolution: "@libp2p/interface-internal@npm:1.1.0" +"@libp2p/interface-internal@npm:^1.0.7, @libp2p/interface-internal@npm:^1.1.0, @libp2p/interface-internal@npm:^1.2.0": + version: 1.2.0 + resolution: "@libp2p/interface-internal@npm:1.2.0" dependencies: - "@libp2p/interface": ^1.2.0 - "@libp2p/peer-collections": ^5.1.10 + "@libp2p/interface": ^1.3.1 + 
"@libp2p/peer-collections": ^5.2.0 "@multiformats/multiaddr": ^12.2.1 uint8arraylist: ^2.4.8 - checksum: 40e25e3fa2ee70376d3f70b627f0c096e71929dede7c87f80b8ac75b56131b4293d0665e7164e0935f201e0e4d1febac8b43ca1cd3cfeea79581242dde992727 - languageName: node - linkType: hard - -"@libp2p/interface-keychain@npm:^2.0.0": - version: 2.0.5 - resolution: "@libp2p/interface-keychain@npm:2.0.5" - dependencies: - "@libp2p/interface-peer-id": ^2.0.0 - multiformats: ^11.0.0 - checksum: 242888f107aa586dfa6d11f3b579403b0b1ec2e60cb477984dec0d7afe4b69ef302230df7f23e351cb53de92b669733e4723ea832b9ec864314af6cbcd318557 - languageName: node - linkType: hard - -"@libp2p/interface-libp2p@npm:^3.2.0": - version: 3.2.0 - resolution: "@libp2p/interface-libp2p@npm:3.2.0" - dependencies: - "@libp2p/interface-connection": ^5.0.0 - "@libp2p/interface-content-routing": ^2.0.0 - "@libp2p/interface-keychain": ^2.0.0 - "@libp2p/interface-metrics": ^4.0.0 - "@libp2p/interface-peer-id": ^2.0.0 - "@libp2p/interface-peer-info": ^1.0.0 - "@libp2p/interface-peer-routing": ^1.0.0 - "@libp2p/interface-peer-store": ^2.0.0 - "@libp2p/interface-registrar": ^2.0.0 - "@libp2p/interface-transport": ^4.0.0 - "@libp2p/interfaces": ^3.0.0 - "@multiformats/multiaddr": ^12.0.0 - checksum: 76643668a8f94d9d13708f0c447a017415410fc78892a2d78d6917ccac7f444fbce1bce2f63b8e727ddf3e4bfcbe90100e77801a3d756b1c338e2cbc29b9e862 + checksum: 530403cd4d4f8e3b4f23c043906de1d5a412b7e01ffd63e392b1c36d4e838eb6fdb7fb7f6fcc8ef913a382fa43c2256b1bba1daa74d6d64c84b8f633f7c835ce languageName: node linkType: hard -"@libp2p/interface-metrics@npm:^4.0.0": - version: 4.0.8 - resolution: "@libp2p/interface-metrics@npm:4.0.8" - dependencies: - "@libp2p/interface-connection": ^5.0.0 - checksum: 185e0c8476c95a90f5edd066379252d073d10734e02b96c0f264d13f9dcd82e47813d4b57ac8897c0f701571b9af1c834e628ea7f74caba13673180acd8c546f - languageName: node - linkType: hard - -"@libp2p/interface-peer-id@npm:^2.0.0, @libp2p/interface-peer-id@npm:^2.0.2": - version: 
2.0.2 - resolution: "@libp2p/interface-peer-id@npm:2.0.2" - dependencies: - multiformats: ^11.0.0 - checksum: 70db48ee6757cf1c7badbc78b0c2357bb29724bc15f789e85cb00f0fdac80f0655c4474113b436fbe4e52c9cf627465dde7d7e3cd8d6a7ba53143d414f39f497 - languageName: node - linkType: hard - -"@libp2p/interface-peer-info@npm:^1.0.0": - version: 1.0.10 - resolution: "@libp2p/interface-peer-info@npm:1.0.10" - dependencies: - "@libp2p/interface-peer-id": ^2.0.0 - "@multiformats/multiaddr": ^12.0.0 - checksum: 2e13de3d77ef3ae1caf6a3d3ad1ce04c1e0ccad830d8db4a3e564dbbe02f1c8e877fa908081eb7ef4285411d37f999433d75d4f37cf7215677d470a8dbc65128 - languageName: node - linkType: hard - -"@libp2p/interface-peer-routing@npm:^1.0.0": - version: 1.1.1 - resolution: "@libp2p/interface-peer-routing@npm:1.1.1" - dependencies: - "@libp2p/interface-peer-id": ^2.0.0 - "@libp2p/interface-peer-info": ^1.0.0 - "@libp2p/interfaces": ^3.0.0 - checksum: acea6188d706947edea80d82ceb2723b88f141679ce82c1a7ccf818a9ae53d485095c09b29adf638c72f9dd77dc17816989d2031d6202a51c9a575335a11f60b - languageName: node - linkType: hard - -"@libp2p/interface-peer-store@npm:^2.0.0": - version: 2.0.4 - resolution: "@libp2p/interface-peer-store@npm:2.0.4" - dependencies: - "@libp2p/interface-peer-id": ^2.0.0 - "@multiformats/multiaddr": ^12.0.0 - checksum: e6563e09dbb36abd17723d69a420f08549cf3cf7ce23690c0ffef507d1407bd6971084ab032b7887be8fb713b22bafcadc3f6dc10c23417e8a94c8c00247095f - languageName: node - linkType: hard - -"@libp2p/interface-registrar@npm:^2.0.0": - version: 2.0.12 - resolution: "@libp2p/interface-registrar@npm:2.0.12" - dependencies: - "@libp2p/interface-connection": ^5.0.0 - "@libp2p/interface-peer-id": ^2.0.0 - checksum: f6e6e053f3c98328acad2e91f14ed787ac5309d9d6737b1fb1c3fc5f77cbbe0651cc6554001545c32315879bb47ae95e4d76946ea9ce1b09e2d468dd99ff1843 - languageName: node - linkType: hard - -"@libp2p/interface-stream-muxer@npm:^4.0.0": - version: 4.1.2 - resolution: "@libp2p/interface-stream-muxer@npm:4.1.2" - 
dependencies: - "@libp2p/interface-connection": ^5.0.0 - "@libp2p/interfaces": ^3.0.0 - "@libp2p/logger": ^2.0.7 - abortable-iterator: ^5.0.1 - any-signal: ^4.1.1 - it-pushable: ^3.1.3 - it-stream-types: ^2.0.1 - uint8arraylist: ^2.4.3 - checksum: 146742f0361597e4d6e00c8658a37840923e901b203389df86e282c06ce97b76446d89dd7576e4299887ad0d14808e50b67ba8044f4b0d9490858f0c8bc5b387 - languageName: node - linkType: hard - -"@libp2p/interface-transport@npm:^4.0.0": - version: 4.0.3 - resolution: "@libp2p/interface-transport@npm:4.0.3" +"@libp2p/interface@npm:1.3.1, @libp2p/interface@npm:^1.0.0, @libp2p/interface@npm:^1.1.1, @libp2p/interface@npm:^1.1.2, @libp2p/interface@npm:^1.1.3, @libp2p/interface@npm:^1.1.4, @libp2p/interface@npm:^1.2.0, @libp2p/interface@npm:^1.3.0, @libp2p/interface@npm:^1.3.1": + version: 1.3.1 + resolution: "@libp2p/interface@npm:1.3.1" dependencies: - "@libp2p/interface-connection": ^5.0.0 - "@libp2p/interface-stream-muxer": ^4.0.0 - "@libp2p/interfaces": ^3.0.0 - "@multiformats/multiaddr": ^12.0.0 + "@multiformats/multiaddr": ^12.2.1 + it-pushable: ^3.2.3 it-stream-types: ^2.0.1 - checksum: 8c5e8b3d4775f0574905e6b6bb825c09868746c4e7b0d5d6b1f1e404f0e34930fce1e94fe208d1eb52b26c294782daf7bdd0103c6ab744cac3d8477ab5b48404 + multiformats: ^13.1.0 + progress-events: ^1.0.0 + uint8arraylist: ^2.4.8 + checksum: c7f66fad32edc05ab66508f549f6f720f0d8c63d2f882cdf0ba53476ac79bcf8cb1c37d5a0932ba3a7533cd259e55b485daef0a75a992db0ef27bb6f0b2fa7e7 languageName: node linkType: hard -"@libp2p/interface@npm:^0.1.6": +"@libp2p/interface@npm:^0.1.2, @libp2p/interface@npm:^0.1.6": version: 0.1.6 resolution: "@libp2p/interface@npm:0.1.6" dependencies: @@ -2323,42 +2264,22 @@ __metadata: languageName: node linkType: hard -"@libp2p/interface@npm:^1.0.0, @libp2p/interface@npm:^1.1.1, @libp2p/interface@npm:^1.1.3, @libp2p/interface@npm:^1.1.4, @libp2p/interface@npm:^1.2.0": - version: 1.2.0 - resolution: "@libp2p/interface@npm:1.2.0" - dependencies: - "@multiformats/multiaddr": 
^12.2.1 - it-pushable: ^3.2.3 - it-stream-types: ^2.0.1 - multiformats: ^13.1.0 - progress-events: ^1.0.0 - uint8arraylist: ^2.4.8 - checksum: 622a5bb7f0ffcca4a418afc7e52b4c8dceb48af763c317290fdf747335166f65615eba6947419daa76351afbb66e0b17b630aa40f10164155e76524b46b18fe6 - languageName: node - linkType: hard - -"@libp2p/interfaces@npm:^3.0.0": - version: 3.3.2 - resolution: "@libp2p/interfaces@npm:3.3.2" - checksum: 3071fa49dcbb81a4b218248a1f648fba1061fb9c51e4b5edab9b8a7b9425c25afec96fdf3351ea7a469e7039269e59d95265682a934aa9c21630226dfcb67313 - languageName: node - linkType: hard - -"@libp2p/kad-dht@npm:^10.0.4": - version: 10.0.15 - resolution: "@libp2p/kad-dht@npm:10.0.15" +"@libp2p/kad-dht@npm:10.0.4": + version: 10.0.4 + resolution: "@libp2p/kad-dht@npm:10.0.4" dependencies: - "@libp2p/crypto": ^2.0.8 - "@libp2p/interface": ^0.1.6 - "@libp2p/interface-internal": ^0.1.9 - "@libp2p/logger": ^3.1.0 - "@libp2p/peer-collections": ^4.0.8 - "@libp2p/peer-id": ^3.0.6 + "@libp2p/crypto": ^2.0.3 + "@libp2p/interface": ^0.1.2 + "@libp2p/interface-internal": ^0.1.4 + "@libp2p/logger": ^3.0.2 + "@libp2p/peer-collections": ^4.0.3 + "@libp2p/peer-id": ^3.0.2 "@multiformats/multiaddr": ^12.1.5 - "@types/sinon": ^17.0.0 + "@types/sinon": ^10.0.15 abortable-iterator: ^5.0.1 any-signal: ^4.1.1 datastore-core: ^9.0.1 + events: ^3.3.0 hashlru: ^2.3.0 interface-datastore: ^8.2.0 it-all: ^3.0.2 @@ -2370,7 +2291,6 @@ __metadata: it-merge: ^3.0.0 it-parallel: ^3.0.0 it-pipe: ^3.0.1 - it-pushable: ^3.2.1 it-stream-types: ^2.0.1 it-take: ^3.0.1 multiformats: ^12.0.1 @@ -2383,24 +2303,11 @@ __metadata: uint8-varint: ^2.0.0 uint8arraylist: ^2.4.3 uint8arrays: ^4.0.6 - checksum: 566c62d45ff8ba92ea15332c8b62395a8e4f794ee46c038b04e4c144f032ddceae080e2a6de0e0948370620d3b708f61052783b788ba40d53d11044910f9becf + checksum: 8fbc6b2e12eeb98825b7dfa9e09a1c26f22a679167bde6305e8c524ee5514f509639db70915c432e1749272348f3eb8bb37ea7978a1a6f4133053e6b37ae3e3f languageName: node linkType: hard 
-"@libp2p/logger@npm:^2.0.7": - version: 2.1.1 - resolution: "@libp2p/logger@npm:2.1.1" - dependencies: - "@libp2p/interface-peer-id": ^2.0.2 - "@multiformats/multiaddr": ^12.1.3 - debug: ^4.3.4 - interface-datastore: ^8.2.0 - multiformats: ^11.0.2 - checksum: 2176be1b4539c974d62f193bc8053eb4b7854875da2ca7a9456b4fb1443a7e0714ea76b4233e414f270e60d06f64ac7e99e4b5a2a7e95830bf5a67c62f9f5e14 - languageName: node - linkType: hard - -"@libp2p/logger@npm:^3.1.0": +"@libp2p/logger@npm:^3.0.2": version: 3.1.0 resolution: "@libp2p/logger@npm:3.1.0" dependencies: @@ -2413,40 +2320,40 @@ __metadata: languageName: node linkType: hard -"@libp2p/logger@npm:^4.0.10, @libp2p/logger@npm:^4.0.6": - version: 4.0.10 - resolution: "@libp2p/logger@npm:4.0.10" +"@libp2p/logger@npm:^4.0.12, @libp2p/logger@npm:^4.0.6": + version: 4.0.12 + resolution: "@libp2p/logger@npm:4.0.12" dependencies: - "@libp2p/interface": ^1.2.0 + "@libp2p/interface": ^1.3.1 "@multiformats/multiaddr": ^12.2.1 debug: ^4.3.4 interface-datastore: ^8.2.11 multiformats: ^13.1.0 - checksum: 9897edd36cdb13e200249a77077c18c21b58cc11056f7efc30ade2bb399130100ea7a23864d1ddcf1805b71d2404b834e1620b5a129b193b299ee94373bd991a + checksum: 4348cfecd5bc93a68706c66c7958d2600280598d76539f10eb5aa404a550127560106f776be9c721e571d18d8eef3e31cf6ae6f48b2ace9546bc70f5f2e3963a languageName: node linkType: hard -"@libp2p/mplex@npm:^10.0.16": - version: 10.0.20 - resolution: "@libp2p/mplex@npm:10.0.20" +"@libp2p/mplex@npm:10.0.16": + version: 10.0.16 + resolution: "@libp2p/mplex@npm:10.0.16" dependencies: - "@libp2p/interface": ^1.2.0 - "@libp2p/utils": ^5.3.1 + "@libp2p/interface": ^1.1.4 + "@libp2p/utils": ^5.2.6 it-pipe: ^3.0.1 it-pushable: ^3.2.3 it-stream-types: ^2.0.1 uint8-varint: ^2.0.4 uint8arraylist: ^2.4.8 - uint8arrays: ^5.0.3 - checksum: 091875301433de10a9ba5f92c00720330c2a3f9ba2b693b28792b080712f28cc44bad0de0bbdf91a8c2c5324ed0d9f95baf55f6758827d353a6b2b7a4570d12b + uint8arrays: ^5.0.2 + checksum: 
a73d7c66fd35b749cdf9d9d93d8b62efeb4a97849c68207ec24de54224b12f641cf15eab937caed6bbc934bfb1d5ac14d9f88342611089674f16362d259bc7e7 languageName: node linkType: hard -"@libp2p/multistream-select@npm:^5.1.7": - version: 5.1.7 - resolution: "@libp2p/multistream-select@npm:5.1.7" +"@libp2p/multistream-select@npm:^5.1.9": + version: 5.1.9 + resolution: "@libp2p/multistream-select@npm:5.1.9" dependencies: - "@libp2p/interface": ^1.2.0 + "@libp2p/interface": ^1.3.1 it-length-prefixed: ^9.0.4 it-length-prefixed-stream: ^1.1.6 it-stream-types: ^2.0.1 @@ -2455,11 +2362,11 @@ __metadata: uint8-varint: ^2.0.4 uint8arraylist: ^2.4.8 uint8arrays: ^5.0.3 - checksum: 663a5f858a96dd0fe59083ea297573c3e778deb3936f2ac51ce4c932a4f29c5571ccdb74bfb13acb5cc9a3521d3312fb9f411c6c5aa7d2299993009900ea5255 + checksum: c5be0a0d3ca4a80e28af82ffc84af262be8a5cf1655bc2b77c1d17f745a19bd45dc84b7603592c0b5de0631ef3cd753e928e248c9398bb793c8e937cbf4e3cd8 languageName: node linkType: hard -"@libp2p/peer-collections@npm:^4.0.8": +"@libp2p/peer-collections@npm:^4.0.3, @libp2p/peer-collections@npm:^4.0.8": version: 4.0.11 resolution: "@libp2p/peer-collections@npm:4.0.11" dependencies: @@ -2469,31 +2376,43 @@ __metadata: languageName: node linkType: hard -"@libp2p/peer-collections@npm:^5.1.10": - version: 5.1.10 - resolution: "@libp2p/peer-collections@npm:5.1.10" +"@libp2p/peer-collections@npm:^5.1.11, @libp2p/peer-collections@npm:^5.2.0": + version: 5.2.0 + resolution: "@libp2p/peer-collections@npm:5.2.0" dependencies: - "@libp2p/interface": ^1.2.0 - "@libp2p/peer-id": ^4.0.10 - checksum: 959ca7d53961fd2da6c90f6938c7b25cecd07ca0a2a57e43a23c34b8406834b15f1a56e86ca15d79d77508ab04700a586a80850541b1f07d3d5fa8b3a3758280 + "@libp2p/interface": ^1.3.1 + "@libp2p/peer-id": ^4.1.1 + "@libp2p/utils": ^5.4.0 + checksum: 592a327daef801dd1899ba345f284c8ce11b320fe025e897e8e4fac49db7cc162a0e283212344e4a4363f24c9df2666f73f392f43b9b494ba2614bcd3a84f077 languageName: node linkType: hard -"@libp2p/peer-id-factory@npm:^4.0.10, 
@libp2p/peer-id-factory@npm:^4.0.7": - version: 4.0.10 - resolution: "@libp2p/peer-id-factory@npm:4.0.10" +"@libp2p/peer-id-factory@npm:4.1.1, @libp2p/peer-id-factory@npm:^4.1.1": + version: 4.1.1 + resolution: "@libp2p/peer-id-factory@npm:4.1.1" dependencies: - "@libp2p/crypto": ^4.0.6 - "@libp2p/interface": ^1.2.0 - "@libp2p/peer-id": ^4.0.10 + "@libp2p/crypto": ^4.1.1 + "@libp2p/interface": ^1.3.1 + "@libp2p/peer-id": ^4.1.1 protons-runtime: ^5.4.0 uint8arraylist: ^2.4.8 uint8arrays: ^5.0.3 - checksum: b08ef471f730af54e9e50ca9225fb221b850936fe453ca33c89c8bea0a91fdb06d7065d57cc2921ca26948b470c5449c8b91ddeb364bcd05671a3694fe7dc756 + checksum: 3bce0166c7ceab6cdb4de851e2b4783176b417000744e911a2586bbe6de3207bb355a4a0524eb7bdd2718bdef1a4292006f3cdb5de32be28d6672d977ac681fa + languageName: node + linkType: hard + +"@libp2p/peer-id@npm:4.0.7": + version: 4.0.7 + resolution: "@libp2p/peer-id@npm:4.0.7" + dependencies: + "@libp2p/interface": ^1.1.4 + multiformats: ^13.1.0 + uint8arrays: ^5.0.2 + checksum: d044b77bf99a3aacc31d12cad21ca767f351a69a82835ed95dd20e5b6b5872e1acdd67da4d156f5b42a0ea75adbc11b151e82199172846004e8b3f9dc85e3e54 languageName: node linkType: hard -"@libp2p/peer-id@npm:^3.0.6": +"@libp2p/peer-id@npm:^3.0.2, @libp2p/peer-id@npm:^3.0.6": version: 3.0.6 resolution: "@libp2p/peer-id@npm:3.0.6" dependencies: @@ -2504,42 +2423,42 @@ __metadata: languageName: node linkType: hard -"@libp2p/peer-id@npm:^4.0.0, @libp2p/peer-id@npm:^4.0.10, @libp2p/peer-id@npm:^4.0.4, @libp2p/peer-id@npm:^4.0.7": - version: 4.0.10 - resolution: "@libp2p/peer-id@npm:4.0.10" +"@libp2p/peer-id@npm:^4.0.0, @libp2p/peer-id@npm:^4.0.10, @libp2p/peer-id@npm:^4.0.4, @libp2p/peer-id@npm:^4.0.5, @libp2p/peer-id@npm:^4.1.0, @libp2p/peer-id@npm:^4.1.1": + version: 4.1.1 + resolution: "@libp2p/peer-id@npm:4.1.1" dependencies: - "@libp2p/interface": ^1.2.0 + "@libp2p/interface": ^1.3.1 multiformats: ^13.1.0 uint8arrays: ^5.0.3 - checksum: 
5816e043a0cc5f753ed177fa63bcfbbcc1b236e93f5984943bc4107dab3bb023f6631b3d884554046315eb074fd7cb903bb0ead5bd462f998f5ba49009e5201f + checksum: a994577b56fd24d206428858d8665f7fb14fa9e1ba6b904e9b7caf6b2a9c4481da980e08d4bf16cb6bdf1a51adb45a77427d056bb60fb36594468bce094544ac languageName: node linkType: hard -"@libp2p/peer-record@npm:^7.0.14": - version: 7.0.14 - resolution: "@libp2p/peer-record@npm:7.0.14" +"@libp2p/peer-record@npm:^7.0.13, @libp2p/peer-record@npm:^7.0.15, @libp2p/peer-record@npm:^7.0.16": + version: 7.0.16 + resolution: "@libp2p/peer-record@npm:7.0.16" dependencies: - "@libp2p/crypto": ^4.0.6 - "@libp2p/interface": ^1.2.0 - "@libp2p/peer-id": ^4.0.10 - "@libp2p/utils": ^5.3.1 + "@libp2p/crypto": ^4.1.1 + "@libp2p/interface": ^1.3.1 + "@libp2p/peer-id": ^4.1.1 + "@libp2p/utils": ^5.4.0 "@multiformats/multiaddr": ^12.2.1 protons-runtime: ^5.4.0 uint8-varint: ^2.0.4 uint8arraylist: ^2.4.8 uint8arrays: ^5.0.3 - checksum: 9fc253f1c7f605f777b5238c1798997882f5d62fdc7b9a9678d4843050e60ff6fe105f64b002f76e1a84af100795dec6e653c4d6ec8922fa86898982a93da1c3 + checksum: f1c04605a3fe49d32945f6ef8cc41fa9ed1aaed72725def86ca73d152ef8ab0f7318b786e899cdf059fbb99e83158fc5e281e313bf1efb167b39bc2be8751dac languageName: node linkType: hard -"@libp2p/peer-store@npm:^10.0.15": - version: 10.0.15 - resolution: "@libp2p/peer-store@npm:10.0.15" +"@libp2p/peer-store@npm:10.0.16": + version: 10.0.16 + resolution: "@libp2p/peer-store@npm:10.0.16" dependencies: - "@libp2p/interface": ^1.2.0 - "@libp2p/peer-collections": ^5.1.10 - "@libp2p/peer-id": ^4.0.10 - "@libp2p/peer-record": ^7.0.14 + "@libp2p/interface": ^1.3.0 + "@libp2p/peer-collections": ^5.1.11 + "@libp2p/peer-id": ^4.1.0 + "@libp2p/peer-record": ^7.0.15 "@multiformats/multiaddr": ^12.2.1 interface-datastore: ^8.2.11 it-all: ^3.0.4 @@ -2548,85 +2467,131 @@ __metadata: protons-runtime: ^5.4.0 uint8arraylist: ^2.4.8 uint8arrays: ^5.0.3 - checksum: 
3fa3bb7a03d79dc61802d0d97deb04aec70288494cc6ed54a12ef7a164d4ad248d5a848177bea7c4accbd833e8d2ea2b2575be9b3daa81ed8ba6640e84bc62a3 + checksum: ee9c9f0d4e8eebda339de038df73012ca5a635a4be7e48ca55817f96d6bedaf856f96469e79bba02ab55ef4073824c5efd09d0289f088d2e06d183be1c2c0b24 languageName: node linkType: hard -"@libp2p/tcp@npm:^9.0.16": - version: 9.0.22 - resolution: "@libp2p/tcp@npm:9.0.22" +"@libp2p/peer-store@npm:^10.0.17": + version: 10.0.17 + resolution: "@libp2p/peer-store@npm:10.0.17" dependencies: - "@libp2p/interface": ^1.2.0 - "@libp2p/utils": ^5.3.1 + "@libp2p/interface": ^1.3.1 + "@libp2p/peer-collections": ^5.2.0 + "@libp2p/peer-id": ^4.1.1 + "@libp2p/peer-record": ^7.0.16 + "@multiformats/multiaddr": ^12.2.1 + interface-datastore: ^8.2.11 + it-all: ^3.0.4 + mortice: ^3.0.4 + multiformats: ^13.1.0 + protons-runtime: ^5.4.0 + uint8arraylist: ^2.4.8 + uint8arrays: ^5.0.3 + checksum: fe7bc9a6bf76b8dbfb60530f02a598f922dbdd281c0f628529e914aefea89fdfc64ea7e1301f54856e675647c2238338dfea42c19c49402548785bf0e6898cf8 + languageName: node + linkType: hard + +"@libp2p/pubsub@npm:^9.0.8": + version: 9.0.17 + resolution: "@libp2p/pubsub@npm:9.0.17" + dependencies: + "@libp2p/crypto": ^4.1.1 + "@libp2p/interface": ^1.3.1 + "@libp2p/interface-internal": ^1.2.0 + "@libp2p/peer-collections": ^5.2.0 + "@libp2p/peer-id": ^4.1.1 + "@libp2p/utils": ^5.4.0 + it-length-prefixed: ^9.0.4 + it-pipe: ^3.0.1 + it-pushable: ^3.2.3 + multiformats: ^13.1.0 + p-queue: ^8.0.1 + uint8arraylist: ^2.4.8 + uint8arrays: ^5.0.3 + checksum: 3875a8ab886ff2028a70f7ff1b44949195bb05a4fbe72d3f93129743da0fb73168e1b11a456de10c9ff7b10bdfa9d56eee4b09a0091b4e31bb0172af9b85c480 + languageName: node + linkType: hard + +"@libp2p/tcp@npm:9.0.24": + version: 9.0.24 + resolution: "@libp2p/tcp@npm:9.0.24" + dependencies: + "@libp2p/interface": ^1.3.1 + "@libp2p/utils": ^5.4.0 "@multiformats/mafmt": ^12.1.6 "@multiformats/multiaddr": ^12.2.1 "@types/sinon": ^17.0.3 stream-to-it: ^1.0.0 - checksum: 
bf9c8e26385bbcf4b112b6d69eae6cf9a74537059b153b7163022221bd1eeb8b1600a6d622186257f8ffc57c4eac73458206f3ff577f4743ff01d004af91800a + checksum: a0d8ffa567d28e8d0e25e87930eb4697c93ad0e5bd790db6ed8c23b5d2b295fcfdccf476474cd9c26f5e04520c2e54562586e0debef7e42654a57545e3113e7c languageName: node linkType: hard -"@libp2p/utils@npm:^5.2.5, @libp2p/utils@npm:^5.3.1": - version: 5.3.1 - resolution: "@libp2p/utils@npm:5.3.1" +"@libp2p/utils@npm:^5.2.5, @libp2p/utils@npm:^5.2.6, @libp2p/utils@npm:^5.4.0": + version: 5.4.0 + resolution: "@libp2p/utils@npm:5.4.0" dependencies: "@chainsafe/is-ip": ^2.0.2 - "@libp2p/interface": ^1.2.0 - "@libp2p/logger": ^4.0.10 + "@libp2p/crypto": ^4.1.1 + "@libp2p/interface": ^1.3.1 + "@libp2p/logger": ^4.0.12 "@multiformats/multiaddr": ^12.2.1 "@multiformats/multiaddr-matcher": ^1.2.0 + "@sindresorhus/fnv1a": ^3.1.0 + "@types/murmurhash3js-revisited": ^3.0.3 delay: ^6.0.0 get-iterator: ^2.0.1 is-loopback-addr: ^2.0.2 it-pushable: ^3.2.3 it-stream-types: ^2.0.1 + murmurhash3js-revisited: ^3.0.0 netmask: ^2.0.2 p-defer: ^4.0.1 race-event: ^1.2.0 race-signal: ^1.0.2 uint8arraylist: ^2.4.8 - checksum: 6183d2207209e150fe415077cc80635119ea2d94fe7ca6e4881644ce0500ff2039844061bcce9496ee5704bb67b9268d27ae2108eeb1bef55f7541257daef2a8 + uint8arrays: ^5.0.3 + checksum: 8c651c4835430d4572134248ac539fdd519c3e649db56777139457f6e6bad304a29850366374c182139b786268dcd34e5a1ee53e8a080294b3c15af3bb4c662e languageName: node linkType: hard -"@lmdb/lmdb-darwin-arm64@npm:3.0.6": - version: 3.0.6 - resolution: "@lmdb/lmdb-darwin-arm64@npm:3.0.6" +"@lmdb/lmdb-darwin-arm64@npm:3.0.8": + version: 3.0.8 + resolution: "@lmdb/lmdb-darwin-arm64@npm:3.0.8" conditions: os=darwin & cpu=arm64 languageName: node linkType: hard -"@lmdb/lmdb-darwin-x64@npm:3.0.6": - version: 3.0.6 - resolution: "@lmdb/lmdb-darwin-x64@npm:3.0.6" +"@lmdb/lmdb-darwin-x64@npm:3.0.8": + version: 3.0.8 + resolution: "@lmdb/lmdb-darwin-x64@npm:3.0.8" conditions: os=darwin & cpu=x64 languageName: node 
linkType: hard -"@lmdb/lmdb-linux-arm64@npm:3.0.6": - version: 3.0.6 - resolution: "@lmdb/lmdb-linux-arm64@npm:3.0.6" +"@lmdb/lmdb-linux-arm64@npm:3.0.8": + version: 3.0.8 + resolution: "@lmdb/lmdb-linux-arm64@npm:3.0.8" conditions: os=linux & cpu=arm64 languageName: node linkType: hard -"@lmdb/lmdb-linux-arm@npm:3.0.6": - version: 3.0.6 - resolution: "@lmdb/lmdb-linux-arm@npm:3.0.6" +"@lmdb/lmdb-linux-arm@npm:3.0.8": + version: 3.0.8 + resolution: "@lmdb/lmdb-linux-arm@npm:3.0.8" conditions: os=linux & cpu=arm languageName: node linkType: hard -"@lmdb/lmdb-linux-x64@npm:3.0.6": - version: 3.0.6 - resolution: "@lmdb/lmdb-linux-x64@npm:3.0.6" +"@lmdb/lmdb-linux-x64@npm:3.0.8": + version: 3.0.8 + resolution: "@lmdb/lmdb-linux-x64@npm:3.0.8" conditions: os=linux & cpu=x64 languageName: node linkType: hard -"@lmdb/lmdb-win32-x64@npm:3.0.6": - version: 3.0.6 - resolution: "@lmdb/lmdb-win32-x64@npm:3.0.6" +"@lmdb/lmdb-win32-x64@npm:3.0.8": + version: 3.0.8 + resolution: "@lmdb/lmdb-win32-x64@npm:3.0.8" conditions: os=win32 & cpu=x64 languageName: node linkType: hard @@ -2734,7 +2699,7 @@ __metadata: languageName: node linkType: hard -"@multiformats/mafmt@npm:^12.1.2, @multiformats/mafmt@npm:^12.1.6": +"@multiformats/mafmt@npm:^12.1.6": version: 12.1.6 resolution: "@multiformats/mafmt@npm:12.1.6" dependencies: @@ -2744,17 +2709,32 @@ __metadata: linkType: hard "@multiformats/multiaddr-matcher@npm:^1.2.0": - version: 1.2.0 - resolution: "@multiformats/multiaddr-matcher@npm:1.2.0" + version: 1.2.1 + resolution: "@multiformats/multiaddr-matcher@npm:1.2.1" dependencies: "@chainsafe/is-ip": ^2.0.1 "@multiformats/multiaddr": ^12.0.0 multiformats: ^13.0.0 - checksum: 0546bcb8105e9c146b577d481232226aa751e2fb0b3d13d0a182ea3e5b9d4e69308cb50f1a3e73531ccb1b2b265d083b4ee127b511f8125a0745229eeb847aec + checksum: 7420f3b722eacded222dcad7c89d4e768e01eb1c90eba09b969122bc950d6e507e73e942c4216edabc12f2b6636b9595565d3a8ca6713b71ddc7f569df3bbf61 + languageName: node + linkType: hard + 
+"@multiformats/multiaddr@npm:12.1.14": + version: 12.1.14 + resolution: "@multiformats/multiaddr@npm:12.1.14" + dependencies: + "@chainsafe/is-ip": ^2.0.1 + "@chainsafe/netmask": ^2.0.0 + "@libp2p/interface": ^1.0.0 + dns-over-http-resolver: ^3.0.2 + multiformats: ^13.0.0 + uint8-varint: ^2.0.1 + uint8arrays: ^5.0.0 + checksum: 6c48bb1c467b36c030b2c746574b81f7e3a8fba46987471b5f6714dac1ceea120759383be37c1cacc8d1fbb9c8666eb28ad0041c5737eaf457bd8d58f0d520fa languageName: node linkType: hard -"@multiformats/multiaddr@npm:^12.0.0, @multiformats/multiaddr@npm:^12.1.10, @multiformats/multiaddr@npm:^12.1.14, @multiformats/multiaddr@npm:^12.1.3, @multiformats/multiaddr@npm:^12.1.5, @multiformats/multiaddr@npm:^12.2.1": +"@multiformats/multiaddr@npm:^12.0.0, @multiformats/multiaddr@npm:^12.1.10, @multiformats/multiaddr@npm:^12.1.14, @multiformats/multiaddr@npm:^12.1.5, @multiformats/multiaddr@npm:^12.2.1": version: 12.2.1 resolution: "@multiformats/multiaddr@npm:12.2.1" dependencies: @@ -2794,7 +2774,7 @@ __metadata: languageName: node linkType: hard -"@noble/curves@npm:^1.0.0, @noble/curves@npm:^1.1.0, @noble/curves@npm:^1.2.0, @noble/curves@npm:^1.4.0": +"@noble/curves@npm:^1.0.0, @noble/curves@npm:^1.1.0, @noble/curves@npm:^1.2.0, @noble/curves@npm:^1.3.0, @noble/curves@npm:^1.4.0": version: 1.4.0 resolution: "@noble/curves@npm:1.4.0" dependencies: @@ -2817,7 +2797,7 @@ __metadata: languageName: node linkType: hard -"@noble/hashes@npm:1.4.0, @noble/hashes@npm:^1.3.1, @noble/hashes@npm:^1.4.0": +"@noble/hashes@npm:1.4.0, @noble/hashes@npm:^1.3.1, @noble/hashes@npm:^1.3.3, @noble/hashes@npm:^1.4.0": version: 1.4.0 resolution: "@noble/hashes@npm:1.4.0" checksum: 8ba816ae26c90764b8c42493eea383716396096c5f7ba6bea559993194f49d80a73c081f315f4c367e51bd2d5891700bcdfa816b421d24ab45b41cb03e4f3342 @@ -2885,11 +2865,11 @@ __metadata: linkType: hard "@npmcli/fs@npm:^3.1.0": - version: 3.1.0 - resolution: "@npmcli/fs@npm:3.1.0" + version: 3.1.1 + resolution: "@npmcli/fs@npm:3.1.1" 
dependencies: semver: ^7.3.5 - checksum: a50a6818de5fc557d0b0e6f50ec780a7a02ab8ad07e5ac8b16bf519e0ad60a144ac64f97d05c443c3367235d337182e1d012bbac0eb8dbae8dc7b40b193efd0e + checksum: d960cab4b93adcb31ce223bfb75c5714edbd55747342efb67dcc2f25e023d930a7af6ece3e75f2f459b6f38fc14d031c766f116cd124fdc937fd33112579e820 languageName: node linkType: hard @@ -2900,9 +2880,9 @@ __metadata: languageName: node linkType: hard -"@puppeteer/browsers@npm:2.2.2": - version: 2.2.2 - resolution: "@puppeteer/browsers@npm:2.2.2" +"@puppeteer/browsers@npm:2.2.3": + version: 2.2.3 + resolution: "@puppeteer/browsers@npm:2.2.3" dependencies: debug: 4.3.4 extract-zip: 2.0.1 @@ -2914,7 +2894,7 @@ __metadata: yargs: 17.7.2 bin: browsers: lib/cjs/main-cli.js - checksum: 328a10ceb432784ec4cd524c461799936603b8436e50eed6a61127022f4c8a36ba31143b0d4d311190d619968f2e9db9fa7ac046757cff2c9f81d301110560be + checksum: 44d496e2c4d717e472b40473fd916b1aa3b1a6024b9e4f571ca1521172ae38d090b5f331ccc6694593f41eb0b667865d72e4c9bc29d6a705a369ade53dacbd5c languageName: node linkType: hard @@ -2974,6 +2954,13 @@ __metadata: languageName: node linkType: hard +"@sindresorhus/fnv1a@npm:^3.1.0": + version: 3.1.0 + resolution: "@sindresorhus/fnv1a@npm:3.1.0" + checksum: 9816f4382da21df562e9049bd40dca95bc952afbc5f2257750b1b537af0810850749ee113c8b97f0b4c49a2d82c225fc8e0e14fda191333de9e1f73730a428e3 + languageName: node + linkType: hard + "@sinonjs/commons@npm:^3.0.0": version: 3.0.1 resolution: "@sinonjs/commons@npm:3.0.1" @@ -2992,90 +2979,90 @@ __metadata: languageName: node linkType: hard -"@swc/core-darwin-arm64@npm:1.4.16": - version: 1.4.16 - resolution: "@swc/core-darwin-arm64@npm:1.4.16" +"@swc/core-darwin-arm64@npm:1.5.5": + version: 1.5.5 + resolution: "@swc/core-darwin-arm64@npm:1.5.5" conditions: os=darwin & cpu=arm64 languageName: node linkType: hard -"@swc/core-darwin-x64@npm:1.4.16": - version: 1.4.16 - resolution: "@swc/core-darwin-x64@npm:1.4.16" +"@swc/core-darwin-x64@npm:1.5.5": + version: 1.5.5 + 
resolution: "@swc/core-darwin-x64@npm:1.5.5" conditions: os=darwin & cpu=x64 languageName: node linkType: hard -"@swc/core-linux-arm-gnueabihf@npm:1.4.16": - version: 1.4.16 - resolution: "@swc/core-linux-arm-gnueabihf@npm:1.4.16" +"@swc/core-linux-arm-gnueabihf@npm:1.5.5": + version: 1.5.5 + resolution: "@swc/core-linux-arm-gnueabihf@npm:1.5.5" conditions: os=linux & cpu=arm languageName: node linkType: hard -"@swc/core-linux-arm64-gnu@npm:1.4.16": - version: 1.4.16 - resolution: "@swc/core-linux-arm64-gnu@npm:1.4.16" +"@swc/core-linux-arm64-gnu@npm:1.5.5": + version: 1.5.5 + resolution: "@swc/core-linux-arm64-gnu@npm:1.5.5" conditions: os=linux & cpu=arm64 & libc=glibc languageName: node linkType: hard -"@swc/core-linux-arm64-musl@npm:1.4.16": - version: 1.4.16 - resolution: "@swc/core-linux-arm64-musl@npm:1.4.16" +"@swc/core-linux-arm64-musl@npm:1.5.5": + version: 1.5.5 + resolution: "@swc/core-linux-arm64-musl@npm:1.5.5" conditions: os=linux & cpu=arm64 & libc=musl languageName: node linkType: hard -"@swc/core-linux-x64-gnu@npm:1.4.16": - version: 1.4.16 - resolution: "@swc/core-linux-x64-gnu@npm:1.4.16" +"@swc/core-linux-x64-gnu@npm:1.5.5": + version: 1.5.5 + resolution: "@swc/core-linux-x64-gnu@npm:1.5.5" conditions: os=linux & cpu=x64 & libc=glibc languageName: node linkType: hard -"@swc/core-linux-x64-musl@npm:1.4.16": - version: 1.4.16 - resolution: "@swc/core-linux-x64-musl@npm:1.4.16" +"@swc/core-linux-x64-musl@npm:1.5.5": + version: 1.5.5 + resolution: "@swc/core-linux-x64-musl@npm:1.5.5" conditions: os=linux & cpu=x64 & libc=musl languageName: node linkType: hard -"@swc/core-win32-arm64-msvc@npm:1.4.16": - version: 1.4.16 - resolution: "@swc/core-win32-arm64-msvc@npm:1.4.16" +"@swc/core-win32-arm64-msvc@npm:1.5.5": + version: 1.5.5 + resolution: "@swc/core-win32-arm64-msvc@npm:1.5.5" conditions: os=win32 & cpu=arm64 languageName: node linkType: hard -"@swc/core-win32-ia32-msvc@npm:1.4.16": - version: 1.4.16 - resolution: 
"@swc/core-win32-ia32-msvc@npm:1.4.16" +"@swc/core-win32-ia32-msvc@npm:1.5.5": + version: 1.5.5 + resolution: "@swc/core-win32-ia32-msvc@npm:1.5.5" conditions: os=win32 & cpu=ia32 languageName: node linkType: hard -"@swc/core-win32-x64-msvc@npm:1.4.16": - version: 1.4.16 - resolution: "@swc/core-win32-x64-msvc@npm:1.4.16" +"@swc/core-win32-x64-msvc@npm:1.5.5": + version: 1.5.5 + resolution: "@swc/core-win32-x64-msvc@npm:1.5.5" conditions: os=win32 & cpu=x64 languageName: node linkType: hard "@swc/core@npm:^1.4.11": - version: 1.4.16 - resolution: "@swc/core@npm:1.4.16" - dependencies: - "@swc/core-darwin-arm64": 1.4.16 - "@swc/core-darwin-x64": 1.4.16 - "@swc/core-linux-arm-gnueabihf": 1.4.16 - "@swc/core-linux-arm64-gnu": 1.4.16 - "@swc/core-linux-arm64-musl": 1.4.16 - "@swc/core-linux-x64-gnu": 1.4.16 - "@swc/core-linux-x64-musl": 1.4.16 - "@swc/core-win32-arm64-msvc": 1.4.16 - "@swc/core-win32-ia32-msvc": 1.4.16 - "@swc/core-win32-x64-msvc": 1.4.16 + version: 1.5.5 + resolution: "@swc/core@npm:1.5.5" + dependencies: + "@swc/core-darwin-arm64": 1.5.5 + "@swc/core-darwin-x64": 1.5.5 + "@swc/core-linux-arm-gnueabihf": 1.5.5 + "@swc/core-linux-arm64-gnu": 1.5.5 + "@swc/core-linux-arm64-musl": 1.5.5 + "@swc/core-linux-x64-gnu": 1.5.5 + "@swc/core-linux-x64-musl": 1.5.5 + "@swc/core-win32-arm64-msvc": 1.5.5 + "@swc/core-win32-ia32-msvc": 1.5.5 + "@swc/core-win32-x64-msvc": 1.5.5 "@swc/counter": ^0.1.2 "@swc/types": ^0.1.5 peerDependencies: @@ -3104,7 +3091,7 @@ __metadata: peerDependenciesMeta: "@swc/helpers": optional: true - checksum: 67b72646a70c7b5967b0e2f3511bab9451285c7c24f107347ff92cea04ae61c76eb6e8c688f04d1bff2541134519f4a625005811be3b0f7670d1dad1167cc1fc + checksum: 40d70f19aee70d0fa7940b213c0086159fcc2d2bbffa750ce1b7e02c7ce711424b3846eb9550a844cc5608377e4154bfe99978f40bbb9bc943268449bf385e2c languageName: node linkType: hard @@ -3713,9 +3700,9 @@ __metadata: linkType: hard "@types/lodash@npm:*": - version: 4.17.0 - resolution: "@types/lodash@npm:4.17.0" - 
checksum: 3f98c0b67a93994cbc3403d4fa9dbaf52b0b6bb7f07a764d73875c2dcd5ef91222621bd5bcf8eee7b417a74d175c2f7191b9f595f8603956fd06f0674c0cba93 + version: 4.17.1 + resolution: "@types/lodash@npm:4.17.1" + checksum: 01984d5b44c09ef45258f8ac6d0cf926900624064722d51a020ba179e5d4a293da0068fb278d87dc695586afe7ebd3362ec57f5c0e7c4f6c1fab9d04a80e77f5 languageName: node linkType: hard @@ -3756,21 +3743,28 @@ __metadata: languageName: node linkType: hard +"@types/murmurhash3js-revisited@npm:^3.0.3": + version: 3.0.3 + resolution: "@types/murmurhash3js-revisited@npm:3.0.3" + checksum: 810d5402d6ce723e86e59babfea8e15127619f7b643b5f251697d50c7a8b5efc30a7af6f7f63b0bdfae062e1f3b3f9d4e951dc5c9557bafbe40325a3288bec98 + languageName: node + linkType: hard + "@types/node@npm:*": - version: 20.12.7 - resolution: "@types/node@npm:20.12.7" + version: 20.12.11 + resolution: "@types/node@npm:20.12.11" dependencies: undici-types: ~5.26.4 - checksum: 7cc979f7e2ca9a339ec71318c3901b9978555257929ef3666987f3e447123bc6dc92afcc89f6347e09e07d602fde7d51bcddea626c23aa2bb74aeaacfd1e1686 + checksum: 0cc06bb69cd8150e96fcf65fa3d7f2eeebedf110a99e1834a7fa55bd6c04e7b6d73f74321a2acfc569ca300c0b88d8e1b702ce245b3802f6e5f6a8987fef451a languageName: node linkType: hard "@types/node@npm:^18.14.6, @types/node@npm:^18.15.11, @types/node@npm:^18.15.3, @types/node@npm:^18.7.23": - version: 18.19.31 - resolution: "@types/node@npm:18.19.31" + version: 18.19.33 + resolution: "@types/node@npm:18.19.33" dependencies: undici-types: ~5.26.4 - checksum: 949bddfd7071bd47300d1f33d380ee34695ccd5f046f1a03e4d2be0d953ace896905144d44a6f483f241b5ef34b86f0e40a0e312201117782eecf89e81a4ff13 + checksum: b6db87d095bc541d64a410fa323a35c22c6113220b71b608bbe810b2397932d0f0a51c3c0f3ef90c20d8180a1502d950a7c5314b907e182d9cc10b36efd2a44e languageName: node linkType: hard @@ -3839,7 +3833,16 @@ __metadata: languageName: node linkType: hard -"@types/sinon@npm:^17.0.0, @types/sinon@npm:^17.0.3": +"@types/sinon@npm:^10.0.15": + version: 10.0.20 + 
resolution: "@types/sinon@npm:10.0.20" + dependencies: + "@types/sinonjs__fake-timers": "*" + checksum: 7322771345c202b90057f8112e0d34b7339e5ae1827fb1bfe385fc9e38ed6a2f18b4c66e88d27d98c775f7f74fb1167c0c14f61ca64155786534541e6c6eb05f + languageName: node + linkType: hard + +"@types/sinon@npm:^17.0.3": version: 17.0.3 resolution: "@types/sinon@npm:17.0.3" dependencies: @@ -3872,13 +3875,13 @@ __metadata: linkType: hard "@types/superagent@npm:*": - version: 8.1.6 - resolution: "@types/superagent@npm:8.1.6" + version: 8.1.7 + resolution: "@types/superagent@npm:8.1.7" dependencies: "@types/cookiejar": ^2.1.5 "@types/methods": ^1.1.4 "@types/node": "*" - checksum: 240ea5a58bb3c9e53f0dbe1ccd1bfe046e084fffdb4eaf44f0bf846fb98dad98ce03d057fdfb555bfa06afbb76a0e5877fe639750b798edac594bc7e19833934 + checksum: 8f80c72bd1cc9a9295a2e1e8a7a8de9bef09348db63f33cc4f61e457917662064ab86ce013f28249c34d7239d9a4415c1a597dc70d4391b2ad83b338a63a3b73 languageName: node linkType: hard @@ -4873,29 +4876,38 @@ __metadata: linkType: hard "bare-fs@npm:^2.1.1": - version: 2.2.3 - resolution: "bare-fs@npm:2.2.3" + version: 2.3.0 + resolution: "bare-fs@npm:2.3.0" dependencies: bare-events: ^2.0.0 bare-path: ^2.0.0 - streamx: ^2.13.0 - checksum: 598f1998f08b19c7f1eea76291e5c93664c82b60b997e56aa0e6dea05193d74d3865cfe1172d05684893253ef700ce3abb4e76c55da799fed2ee7a82597a5c44 + bare-stream: ^1.0.0 + checksum: 0b2033551d30e51acbca64a885f76e0361cb1e783c410e10589206a9c6a4ac25ff5865aa67e6a5e412d3175694c7aff6ffe490c509f1cb38b329a855dc7471a5 languageName: node linkType: hard "bare-os@npm:^2.1.0": - version: 2.2.1 - resolution: "bare-os@npm:2.2.1" - checksum: 7d870d8955531809253dfbceeda5b68e8396ef640166f8ff6c4c5e344f18a6bc9253f6d5e7d9ae2841426b66e9b7b1a39b2a102e6b23e1ddff26ad8a8981af81 + version: 2.3.0 + resolution: "bare-os@npm:2.3.0" + checksum: 873aa2d18c5dc4614b63f5a7eaf4ffdd1b5385c57167aa90895d6ba308c92c28e5f7e2cdc8474695df26b3320e72e3174f7b8d7202c46b46f47e016e2ade5185 languageName: node linkType: hard 
"bare-path@npm:^2.0.0, bare-path@npm:^2.1.0": - version: 2.1.1 - resolution: "bare-path@npm:2.1.1" + version: 2.1.2 + resolution: "bare-path@npm:2.1.2" dependencies: bare-os: ^2.1.0 - checksum: f25710be4ee4106f15b405b85ceea5c8da799f803b237008dc4a3533c0db01acd2500742f2204a37909c6871949725fb1907cf95434d80710bf832716d0da8df + checksum: 06bdb3f5909b459dc34aa42624c6d3fcf8baf46203e36add063f3040ea86dda527620c2d06d53926ee5725502f4d0c57eb0a0bf0b5c14a687fd81246104e5ca5 + languageName: node + linkType: hard + +"bare-stream@npm:^1.0.0": + version: 1.0.0 + resolution: "bare-stream@npm:1.0.0" + dependencies: + streamx: ^2.16.1 + checksum: 3bc1fab505e12628257e9e162e4194af26a5bb4a66adae142ad82570faf2a4b2a934deef7fd93b180cc6ba1bdf0b57068e79d3d635f14ab38cddd66827379919 languageName: node linkType: hard @@ -5147,8 +5159,8 @@ __metadata: linkType: hard "cacache@npm:^18.0.0": - version: 18.0.2 - resolution: "cacache@npm:18.0.2" + version: 18.0.3 + resolution: "cacache@npm:18.0.3" dependencies: "@npmcli/fs": ^3.1.0 fs-minipass: ^3.0.0 @@ -5162,7 +5174,7 @@ __metadata: ssri: ^10.0.0 tar: ^6.1.11 unique-filename: ^3.0.0 - checksum: 0250df80e1ad0c828c956744850c5f742c24244e9deb5b7dc81bca90f8c10e011e132ecc58b64497cc1cad9a98968676147fb6575f4f94722f7619757b17a11b + checksum: b717fd9b36e9c3279bfde4545c3a8f6d5a539b084ee26a9504d48f83694beb724057d26e090b97540f9cc62bea18b9f6cf671c50e18fb7dac60eda9db691714f languageName: node linkType: hard @@ -5222,9 +5234,9 @@ __metadata: linkType: hard "caniuse-lite@npm:^1.0.30001587": - version: 1.0.30001612 - resolution: "caniuse-lite@npm:1.0.30001612" - checksum: 2b6ab6a19c72bdf8dccac824944e828a2a1fae52c6dfeb2d64ccecfd60d0466d2e5a392e996da2150d92850188a5034666dceed34a38d978177f6934e0bf106d + version: 1.0.30001617 + resolution: "caniuse-lite@npm:1.0.30001617" + checksum: a03bfd6ed474d14378f1b93bf90e9b0031e56a813cf42b364e5a86881ecdcdfdd58bf94c56febb0e4128c5ab57cc0a760ab7f3ef7ce0c1ead1af78a8e806375e languageName: node linkType: hard @@ -5284,16 +5296,16 @@ 
__metadata: languageName: node linkType: hard -"chromium-bidi@npm:0.5.17": - version: 0.5.17 - resolution: "chromium-bidi@npm:0.5.17" +"chromium-bidi@npm:0.5.19": + version: 0.5.19 + resolution: "chromium-bidi@npm:0.5.19" dependencies: mitt: 3.0.1 urlpattern-polyfill: 10.0.0 zod: 3.22.4 peerDependencies: devtools-protocol: "*" - checksum: 522da996ed5abfb47707583cc24785f9aa05d87bd968dbd520f245cf8972fa3ec102f8d1d72fa07558daa70495d8c6f2bf364d8599eb60b77504e528601d8a30 + checksum: aec876416dc856150c2fe4af1eb0328497c6859af8f9e5be0e0275435d7c0996654bfff68ea1fcd6125bf605957f16ad431c1961f47897542f0cb927ceb93e31 languageName: node linkType: hard @@ -5315,9 +5327,9 @@ __metadata: linkType: hard "cjs-module-lexer@npm:^1.0.0": - version: 1.2.3 - resolution: "cjs-module-lexer@npm:1.2.3" - checksum: 5ea3cb867a9bb609b6d476cd86590d105f3cfd6514db38ff71f63992ab40939c2feb68967faa15a6d2b1f90daa6416b79ea2de486e9e2485a6f8b66a21b4fb0a + version: 1.3.1 + resolution: "cjs-module-lexer@npm:1.3.1" + checksum: 75f20ac264a397ea5c63f9c2343a51ab878043666468f275e94862f7180ec1d764a400ec0c09085dcf0db3193c74a8b571519abd2bf4be0d2be510d1377c8d4b languageName: node linkType: hard @@ -5961,7 +5973,7 @@ __metadata: languageName: node linkType: hard -"define-properties@npm:^1.1.3, define-properties@npm:^1.2.0, define-properties@npm:^1.2.1": +"define-properties@npm:^1.2.0, define-properties@npm:^1.2.1": version: 1.2.1 resolution: "define-properties@npm:1.2.1" dependencies: @@ -6004,6 +6016,13 @@ __metadata: languageName: node linkType: hard +"denque@npm:^2.1.0": + version: 2.1.0 + resolution: "denque@npm:2.1.0" + checksum: 1d4ae1d05e59ac3a3481e7b478293f4b4c813819342273f3d5b826c7ffa9753c520919ba264f377e09108d24ec6cf0ec0ac729a5686cbb8f32d797126c5dae74 + languageName: node + linkType: hard + "depd@npm:2.0.0, depd@npm:^2.0.0, depd@npm:~2.0.0": version: 2.0.0 resolution: "depd@npm:2.0.0" @@ -6256,10 +6275,10 @@ __metadata: languageName: node linkType: hard -"devtools-protocol@npm:0.0.1262051": - version: 
0.0.1262051 - resolution: "devtools-protocol@npm:0.0.1262051" - checksum: beaad00059964a661ab056d5e993492742c612c0370c6f08acd91490181c4d4ecf57d316eedb5a37fb6bb59321901d09ce50762f79ea09a50751d86f601b8f8e +"devtools-protocol@npm:0.0.1273771": + version: 0.0.1273771 + resolution: "devtools-protocol@npm:0.0.1273771" + checksum: 2a88694ec0f2f167f826cea8c3d6030ede911c2db79d2a62d76d1be450bcb395e8283ca03f225fa308710ab06182dced47eed8cece56b377d1946403a321b64f languageName: node linkType: hard @@ -6307,6 +6326,16 @@ __metadata: languageName: node linkType: hard +"dns-over-http-resolver@npm:^3.0.2": + version: 3.0.2 + resolution: "dns-over-http-resolver@npm:3.0.2" + dependencies: + debug: ^4.3.4 + receptacle: ^1.3.2 + checksum: 782739450bae3329fdbafcb3c53b497eeb0b3af3bdd8de91977a513d4fe797446597a09d6e042a2c5da99cfc0039c4acac8a7efb93aca5b3424b58f4174d4a4f + languageName: node + linkType: hard + "dns-packet@npm:^5.6.1": version: 5.6.1 resolution: "dns-packet@npm:5.6.1" @@ -6363,9 +6392,9 @@ __metadata: linkType: hard "electron-to-chromium@npm:^1.4.668": - version: 1.4.745 - resolution: "electron-to-chromium@npm:1.4.745" - checksum: f73b576108863cad160deb22b8e8c6754a8b16b22cda90cfce038a755f886be9c03fb8360bbd7c9d28ddd184800d0d6bd430a11f9289316145f0b28321dfe71d + version: 1.4.761 + resolution: "electron-to-chromium@npm:1.4.761" + checksum: c69d459966682a68e3505ca2d6a72d02612ce3fe0e27b6cf33fa5e8205307504263b930b2d8e6f38b2abb01327c2657d29b63b6bfa296d8ca19d173208115d20 languageName: node linkType: hard @@ -6438,12 +6467,12 @@ __metadata: linkType: hard "enhanced-resolve@npm:^5.0.0, enhanced-resolve@npm:^5.12.0, enhanced-resolve@npm:^5.16.0, enhanced-resolve@npm:^5.8.3": - version: 5.16.0 - resolution: "enhanced-resolve@npm:5.16.0" + version: 5.16.1 + resolution: "enhanced-resolve@npm:5.16.1" dependencies: graceful-fs: ^4.2.4 tapable: ^2.2.0 - checksum: ccfd01850ecf2aa51e8554d539973319ff7d8a539ef1e0ba3460a0ccad6223c4ef6e19165ee64161b459cd8a48df10f52af4434c60023c65fde6afa32d475f7e + 
checksum: 6e4c166fef72ef231455f9119686d93ecccb11874f8256d73a42de5b293cb2536050849382468864b25973514ca4fa4cb13c37be2ff857a211e2aca3ff05bb6c languageName: node linkType: hard @@ -6455,11 +6484,11 @@ __metadata: linkType: hard "envinfo@npm:^7.7.3": - version: 7.12.0 - resolution: "envinfo@npm:7.12.0" + version: 7.13.0 + resolution: "envinfo@npm:7.13.0" bin: envinfo: dist/cli.js - checksum: 4c83a55768cf8b7e553155c29e7fa7bbdb0fb2c1156208efc373fc030045c6aca5e8e642e96027d3eb0c752156922ea3fca6183d9e13f38507f0e02ec82c23a1 + checksum: 822fc30f53bd0be67f0e25be96eb6a2562b8062f3058846bbd7ec471bd4b7835fca6436ee72c4029c8ae4a3d8f8cddbe2ee725b22291f015232d20a682bee732 languageName: node linkType: hard @@ -6557,9 +6586,9 @@ __metadata: linkType: hard "es-module-lexer@npm:^1.2.1": - version: 1.5.0 - resolution: "es-module-lexer@npm:1.5.0" - checksum: adbe0772701e226b4b853f758fd89c0bbfe8357ab93babde7b1cdb4f88c3a31460c908cbe578817e241d116cc4fcf569f7c6f29c4fbfa0aadb0def90f1ad4dd2 + version: 1.5.2 + resolution: "es-module-lexer@npm:1.5.2" + checksum: 59c47109eca80b93dda2418337b4308c194c578704dc57d5aa54973b196e378d31e92f258e5525655b99b3de8a84dda2debb9646cddf6fe8830f1bfca95ee060 languageName: node linkType: hard @@ -6680,7 +6709,7 @@ __metadata: languageName: node linkType: hard -"escalade@npm:^3.1.1": +"escalade@npm:^3.1.1, escalade@npm:^3.1.2": version: 3.1.2 resolution: "escalade@npm:3.1.2" checksum: 1ec0977aa2772075493002bdbd549d595ff6e9393b1cb0d7d6fcaf78c750da0c158f180938365486f75cb69fba20294351caddfce1b46552a7b6c3cde52eaa02 @@ -7029,7 +7058,7 @@ __metadata: languageName: node linkType: hard -"events@npm:^3.2.0": +"events@npm:^3.2.0, events@npm:^3.3.0": version: 3.3.0 resolution: "events@npm:3.3.0" checksum: f6f487ad2198aa41d878fa31452f1a3c00958f46e9019286ff4787c84aac329332ab45c9cdc8c445928fc6d7ded294b9e005a7fce9426488518017831b272780 @@ -7596,11 +7625,11 @@ __metadata: linkType: hard "get-tsconfig@npm:^4.5.0": - version: 4.7.3 - resolution: "get-tsconfig@npm:4.7.3" + version: 4.7.4 + 
resolution: "get-tsconfig@npm:4.7.4" dependencies: resolve-pkg-maps: ^1.0.0 - checksum: d124e6900f8beb3b71f215941096075223158d0abb09fb5daa8d83299f6c17d5e95a97d12847b387e9e716bb9bd256a473f918fb8020f3b1acc0b1e5c2830bbf + checksum: d6519a1b20d1bc2811d3dc1e3bef08e96e83d31f10f27c9c5a3a7ed8913698c7c01cfae9c34aff9f1348687a0ec48d9d19b668c091f7cfa0ddf816bf28d1ea0d languageName: node linkType: hard @@ -7687,11 +7716,12 @@ __metadata: linkType: hard "globalthis@npm:^1.0.3": - version: 1.0.3 - resolution: "globalthis@npm:1.0.3" + version: 1.0.4 + resolution: "globalthis@npm:1.0.4" dependencies: - define-properties: ^1.1.3 - checksum: fbd7d760dc464c886d0196166d92e5ffb4c84d0730846d6621a39fbbc068aeeb9c8d1421ad330e94b7bca4bb4ea092f5f21f3d36077812af5d098b4dc006c998 + define-properties: ^1.2.1 + gopd: ^1.0.1 + checksum: 39ad667ad9f01476474633a1834a70842041f70a55571e8dcef5fb957980a92da5022db5430fca8aecc5d47704ae30618c0bc877a579c70710c904e9ef06108a languageName: node linkType: hard @@ -7989,6 +8019,13 @@ __metadata: languageName: node linkType: hard +"hyperdyperid@npm:^1.2.0": + version: 1.2.0 + resolution: "hyperdyperid@npm:1.2.0" + checksum: 210029d1c86926f09109f6317d143f8b056fc38e8dd11b0c3e3205fc6c6ff8429fb55b4b9c2bce065462719ed9d34366eced387aaa0035d93eb76b306a8547ef + languageName: node + linkType: hard + "iconv-lite@npm:0.4.24": version: 0.4.24 resolution: "iconv-lite@npm:0.4.24" @@ -8112,7 +8149,7 @@ __metadata: languageName: node linkType: hard -"interface-store@npm:^5.0.0": +"interface-store@npm:^5.0.0, interface-store@npm:^5.1.8": version: 5.1.8 resolution: "interface-store@npm:5.1.8" checksum: 7b3b67e5fc3e2d9286db94e1941893176a989f89e6cb8027425acfbb5509b8d9845aaa614bac1b03514f6e7852cc713e568c67e3ab349bf56b3c9ffdc516e9bb @@ -8596,65 +8633,65 @@ __metadata: linkType: hard "it-all@npm:^3.0.0, it-all@npm:^3.0.2, it-all@npm:^3.0.4": - version: 3.0.4 - resolution: "it-all@npm:3.0.4" - checksum: 
fb7259660b6555ae268ffde6f0245026e9d4e8afccf9c43a088bb0ff0483aaca95954b6074c1c96d46a57b572bce35fa1bb8542934ce9aee477e1dba46293891 + version: 3.0.6 + resolution: "it-all@npm:3.0.6" + checksum: 7c43b0aab7b496d9c590102edd9fa640e82f166e14c05d879a7f669a1c592acc7e0c37329a1ee8a93ad1ed338d5f29cdee0f6d29bcec613a4f3690f43ac298ce languageName: node linkType: hard "it-byte-stream@npm:^1.0.0": - version: 1.0.8 - resolution: "it-byte-stream@npm:1.0.8" + version: 1.0.10 + resolution: "it-byte-stream@npm:1.0.10" dependencies: it-stream-types: ^2.0.1 - p-defer: ^4.0.0 - race-signal: ^1.0.1 - uint8arraylist: ^2.4.1 - checksum: b8fbb98b8beaf8382b1f4c3822cab6587094e1ddeb09769b9f96a078e40e5c0e7fda4fa8b106bc79db608428d79e9786367a220d724ca8acbbd9ba49f809e5c9 + p-defer: ^4.0.1 + race-signal: ^1.0.2 + uint8arraylist: ^2.4.8 + checksum: 3504667d11b16ff2da5006f9ad65bf789e658358b8845437afe35e80dbee2b40f06ffe61a360136cbebd766bda36ad636dc6ce8a3c961dc617eaf365e8d26bc3 languageName: node linkType: hard "it-drain@npm:^3.0.2, it-drain@npm:^3.0.5": - version: 3.0.5 - resolution: "it-drain@npm:3.0.5" - checksum: 6ab86dc487737a0a87556fab52dadd00f376881b633bd00b8c461f1e8eace47c426e8065700946eb066072e33fc7df7f0e9fa12426bd1d8cac914d52c8f44f43 + version: 3.0.7 + resolution: "it-drain@npm:3.0.7" + checksum: fd41a759a397594f4fd3bc96e6efe7b738e294573da1cb0617a3dfcedd616f03413cdb18852f9856900fbbf48e4d9dc9d7ac459b5bf94f12767f9d46600f776e languageName: node linkType: hard "it-filter@npm:^3.0.4": - version: 3.0.4 - resolution: "it-filter@npm:3.0.4" + version: 3.1.0 + resolution: "it-filter@npm:3.1.0" dependencies: it-peekable: ^3.0.0 - checksum: 8d57903bd99fa1b18ff2c3d0fb7ba0d041a229a33b77ff5ff86ca591e5e0ed0a61b14e937c250754ff1085d8e1c4f88996a4feff76bfc3f73e5fe54726c74dd9 + checksum: cecc2eadfb71889338966e81beb10b8d264b0d8be2b0afa9315f302cbd62eb8fa8fa9393840ffa46d45990a9c0369d1b485b1dfc98d52f000705e5dfb5d12c77 languageName: node linkType: hard "it-first@npm:^3.0.1": - version: 3.0.4 - resolution: 
"it-first@npm:3.0.4" - checksum: 428cf4b7baaf04dcb0c157cbd6332c2bab9708eeae6df752533d8fd8e21f7c321bfa8a57d35982115f57760baf526a9bf210b7d982d793e8340e22db2aa68fc6 + version: 3.0.6 + resolution: "it-first@npm:3.0.6" + checksum: 36a76248ea326992b47ced7f5e793e60e760ce229f871fc335850bfe2bfceb21e4b75badfd687be6a407d662e1b85357eee82e596d14afbfae5aecef7c822937 languageName: node linkType: hard "it-foreach@npm:^2.0.6": - version: 2.0.6 - resolution: "it-foreach@npm:2.0.6" + version: 2.1.0 + resolution: "it-foreach@npm:2.1.0" dependencies: it-peekable: ^3.0.0 - checksum: 95f66b141ced66ca4429711a5d4f36b605005e5607d5e17c2a0357f10ed1b6750e3d49683e029190c1d4ff7a89378fbf9d17b26ded31ddd55741b2a1ddc3d3f2 + checksum: 28de345c532b4c42cb5feab8189bdcdd08384dd33a921464b396bcda25b6b0fc285b44900a4ce6792bc67e50f100776ae6c1212389d7eb20f3bfeacd017d8598 languageName: node linkType: hard "it-length-prefixed-stream@npm:^1.0.0, it-length-prefixed-stream@npm:^1.1.6": - version: 1.1.6 - resolution: "it-length-prefixed-stream@npm:1.1.6" + version: 1.1.7 + resolution: "it-length-prefixed-stream@npm:1.1.7" dependencies: it-byte-stream: ^1.0.0 it-stream-types: ^2.0.1 - uint8-varint: ^2.0.1 - uint8arraylist: ^2.4.1 - checksum: 9bba9b781934eb85f68187f4c9128c158a856d0e7d3770e13201cee84829d9d482fb60bcf5eb9ca3ed85f3671a1a27df123e3869c8461cac6929a3a2f349b792 + uint8-varint: ^2.0.4 + uint8arraylist: ^2.4.8 + checksum: 599912ec364208b662b36397c5c83cd890c65fd7fc6b6f1449bd8b3cc370763a3702249c1c55ffe864b8a808dc3a0c989adc2e51d6047f1d639f62f7a561e3bf languageName: node linkType: hard @@ -8672,28 +8709,28 @@ __metadata: languageName: node linkType: hard -"it-length@npm:^3.0.1": - version: 3.0.4 - resolution: "it-length@npm:3.0.4" - checksum: 881208cbcad1e3a396b27b35d73acbac9c27eb8b9fa43b1ed1bb4ca1aba489040981e0ea2b3db6fae90d2d9a1e4c610013abef4030ecd80eca64689f07df8dc9 +"it-length@npm:^3.0.1, it-length@npm:^3.0.6": + version: 3.0.6 + resolution: "it-length@npm:3.0.6" + checksum: 
3d18197d040029c30ff3aadcbe499c6e2355e342dc40cd9359c494fbd1fccb01ce4638bd76f37d099e49aef2e26df97a1934a27488988804c9f12ced604a736c languageName: node linkType: hard "it-map@npm:^3.0.3, it-map@npm:^3.0.5": - version: 3.0.5 - resolution: "it-map@npm:3.0.5" + version: 3.1.0 + resolution: "it-map@npm:3.1.0" dependencies: it-peekable: ^3.0.0 - checksum: bdaa2f1662325457a4eba487dfb04ca8aee0b1d91356b285bf6133aaeda67fba5b7d5c6644838ea8a025e4bd0e8a46910dd7b203f75940ed7ce0d8f3d159bbf3 + checksum: 003c0f1b51a59278efbcadf2117eff91789855556f8f42a4ee594aa44d292ad476d29fa10ab37db74e4b80b04862e6a605dda68af69d511cfea074928da78641 languageName: node linkType: hard "it-merge@npm:^3.0.0, it-merge@npm:^3.0.3": - version: 3.0.3 - resolution: "it-merge@npm:3.0.3" + version: 3.0.5 + resolution: "it-merge@npm:3.0.5" dependencies: - it-pushable: ^3.2.0 - checksum: 031c72302b35db8769c07646c561980c8d97097ce96aa869ebd0cf7b506ea075299b497a177a04bd5eb26398379b3e0b8f4c59a9a1ad0b1e7068d1a921cabf7b + it-pushable: ^3.2.3 + checksum: e79c21151af43c769653003d5f7a002c8c4f5cb62dfd586643a7014b06a94f660459650b2748aa8c5a0d103660cecf38617ebc552215cad0d36344ffa450ab82 languageName: node linkType: hard @@ -8708,18 +8745,18 @@ __metadata: linkType: hard "it-parallel@npm:^3.0.0, it-parallel@npm:^3.0.6": - version: 3.0.6 - resolution: "it-parallel@npm:3.0.6" + version: 3.0.7 + resolution: "it-parallel@npm:3.0.7" dependencies: - p-defer: ^4.0.0 - checksum: ca9cc7faea9dee197dd5e683743542da21369c5a3d6991278b0221493d0e801abd7d750ed2860a97e6eeffae6b7c8af9fdd3e61285895317599d8608ccd7576d + p-defer: ^4.0.1 + checksum: 3b8ff6d4ae69ceaadc8e120a17efaf1855abff7e712afb952bb232eddd0467365fb0e28a591b5c7510042fe05860b8ac150edd0fd33a74023bea8f89c1584ca9 languageName: node linkType: hard "it-peekable@npm:^3.0.0": - version: 3.0.3 - resolution: "it-peekable@npm:3.0.3" - checksum: 9603045130673b26a572cb2a9bfb7cbf9907fd759aa9dbfb1113b38c07c7b750b75a8dbec317b0cde6e47b6f3be2fddd9785fc7e38f1147ea3ded7eabd590c7a + version: 3.0.4 + 
resolution: "it-peekable@npm:3.0.4" + checksum: 6d13b7d69eb2b4b4a1f7a7706d7efd56855f5304be5e3ac4d73b735ffd61d74b30223ef89adbe20d4da45fe44a594a1087b3033da46935bab14daab49306f68f languageName: node linkType: hard @@ -8735,18 +8772,17 @@ __metadata: linkType: hard "it-protobuf-stream@npm:^1.1.2": - version: 1.1.2 - resolution: "it-protobuf-stream@npm:1.1.2" + version: 1.1.3 + resolution: "it-protobuf-stream@npm:1.1.3" dependencies: it-length-prefixed-stream: ^1.0.0 it-stream-types: ^2.0.1 - protons-runtime: ^5.0.0 - uint8arraylist: ^2.4.1 - checksum: d10601aa530ee53da994377b4704e4f28a45ff26a4da1d64c1beccfcbdc1802da5cf480b692ff692a6557bd2dd0823c4e6992fc525122ab5da8d0ba67f003198 + uint8arraylist: ^2.4.8 + checksum: 89b6e1857f4f3c32fa3409dd835ea3cc6b7f95f0be02c71447c6b87e98dbce433af2ea1e47eb1dff5dbb23b962cf4581420a4de16e5748ce06a49d7f4763c118 languageName: node linkType: hard -"it-pushable@npm:^3.1.2, it-pushable@npm:^3.1.3, it-pushable@npm:^3.2.0, it-pushable@npm:^3.2.1, it-pushable@npm:^3.2.3": +"it-pushable@npm:^3.1.2, it-pushable@npm:^3.2.0, it-pushable@npm:^3.2.3": version: 3.2.3 resolution: "it-pushable@npm:3.2.3" dependencies: @@ -8766,11 +8802,11 @@ __metadata: linkType: hard "it-sort@npm:^3.0.4": - version: 3.0.4 - resolution: "it-sort@npm:3.0.4" + version: 3.0.5 + resolution: "it-sort@npm:3.0.5" dependencies: it-all: ^3.0.0 - checksum: de4f1832c6d12914d51109ca3f8ccebba60fdb050d0af2b3d9b8bcd14cb3d320ba1a01e3ef59de2d3691886c0a903e1c4e46ad354796159d4b0d3d7013bc180c + checksum: 83678c9bc792bc61e703723b421f0ee86b352cade4c22321ed1cdb59a48354fda40530221ece90e6164e3cc28d70af4c46d5343a9b26279aee67f12cb0fb6507 languageName: node linkType: hard @@ -8782,9 +8818,9 @@ __metadata: linkType: hard "it-take@npm:^3.0.1, it-take@npm:^3.0.4": - version: 3.0.4 - resolution: "it-take@npm:3.0.4" - checksum: 69dedde350817cba8de80e0432c9b81c35ff2b91f9c80582e657e382ec8c38af003f575353ae22605c963c28605a48cb994c7dba93fedac732db35ee86d7e516 + version: 3.0.5 + resolution: "it-take@npm:3.0.5" 
+ checksum: c3bf22a9d6d04ca7d728fec528e9a2e57c71473033576d7be52684fbdb279984915d921a552a605cd51b1635ad6a5a1a5f1326fbb563007b88d1dde0975b0c7d languageName: node linkType: hard @@ -9029,14 +9065,14 @@ __metadata: linkType: hard "jest-mock-extended@npm:^3.0.3, jest-mock-extended@npm:^3.0.4, jest-mock-extended@npm:^3.0.5": - version: 3.0.6 - resolution: "jest-mock-extended@npm:3.0.6" + version: 3.0.7 + resolution: "jest-mock-extended@npm:3.0.7" dependencies: - ts-essentials: ^9.4.2 + ts-essentials: ^10.0.0 peerDependencies: jest: ^24.0.0 || ^25.0.0 || ^26.0.0 || ^27.0.0 || ^28.0.0 || ^29.0.0 typescript: ^3.0.0 || ^4.0.0 || ^5.0.0 - checksum: 7abff3242f932481561a209b314e0501efa811c7dfd7915d803b897b079d07c5db74b9ca86e1d25110d7cdefa6d7d083d3bc9b431f383182f99d8552fbafbfad + checksum: 59ab510934b0b66e0752c170b6e069f8c93a5b9de40ea2bd3e734f773a70be4b0c251451f8770e60c1c3754d5ddbd25dd1f55568a6379f396d109694d6d3ab79 languageName: node linkType: hard @@ -9661,20 +9697,20 @@ __metadata: languageName: node linkType: hard -"libp2p@npm:^1.2.4": - version: 1.4.2 - resolution: "libp2p@npm:1.4.2" - dependencies: - "@libp2p/crypto": ^4.0.6 - "@libp2p/interface": ^1.2.0 - "@libp2p/interface-internal": ^1.1.0 - "@libp2p/logger": ^4.0.10 - "@libp2p/multistream-select": ^5.1.7 - "@libp2p/peer-collections": ^5.1.10 - "@libp2p/peer-id": ^4.0.10 - "@libp2p/peer-id-factory": ^4.0.10 - "@libp2p/peer-store": ^10.0.15 - "@libp2p/utils": ^5.3.1 +"libp2p@npm:1.5.0": + version: 1.5.0 + resolution: "libp2p@npm:1.5.0" + dependencies: + "@libp2p/crypto": ^4.1.1 + "@libp2p/interface": ^1.3.1 + "@libp2p/interface-internal": ^1.2.0 + "@libp2p/logger": ^4.0.12 + "@libp2p/multistream-select": ^5.1.9 + "@libp2p/peer-collections": ^5.2.0 + "@libp2p/peer-id": ^4.1.1 + "@libp2p/peer-id-factory": ^4.1.1 + "@libp2p/peer-store": ^10.0.17 + "@libp2p/utils": ^5.4.0 "@multiformats/dns": ^1.0.5 "@multiformats/multiaddr": ^12.2.1 "@multiformats/multiaddr-matcher": ^1.2.0 @@ -9685,8 +9721,11 @@ __metadata: it-parallel: 
^3.0.6 merge-options: ^3.0.4 multiformats: ^13.1.0 + p-defer: ^4.0.1 + race-event: ^1.3.0 + race-signal: ^1.0.2 uint8arrays: ^5.0.3 - checksum: 111b52ddd704361781cb68f3ad6ba4e31120ba633e01a88decc2559cdaecb440b2da7f12435bd262cee886a22e2bfc0b4756400dcffdb537845cfb4c7d7a3532 + checksum: 6a587061f03cf01feea0dcf80e290944c906de943fa4f2f118051ddfdd3ac9394a6faf2cdd1d193e52f7a0f2eda5478237c5628dd87e2541b9e03803a46ff714 languageName: node linkType: hard @@ -9698,15 +9737,15 @@ __metadata: linkType: hard "lmdb@npm:^3.0.6": - version: 3.0.6 - resolution: "lmdb@npm:3.0.6" - dependencies: - "@lmdb/lmdb-darwin-arm64": 3.0.6 - "@lmdb/lmdb-darwin-x64": 3.0.6 - "@lmdb/lmdb-linux-arm": 3.0.6 - "@lmdb/lmdb-linux-arm64": 3.0.6 - "@lmdb/lmdb-linux-x64": 3.0.6 - "@lmdb/lmdb-win32-x64": 3.0.6 + version: 3.0.8 + resolution: "lmdb@npm:3.0.8" + dependencies: + "@lmdb/lmdb-darwin-arm64": 3.0.8 + "@lmdb/lmdb-darwin-x64": 3.0.8 + "@lmdb/lmdb-linux-arm": 3.0.8 + "@lmdb/lmdb-linux-arm64": 3.0.8 + "@lmdb/lmdb-linux-x64": 3.0.8 + "@lmdb/lmdb-win32-x64": 3.0.8 msgpackr: ^1.9.9 node-addon-api: ^6.1.0 node-gyp: latest @@ -9728,7 +9767,7 @@ __metadata: optional: true bin: download-lmdb-prebuilds: bin/download-prebuilds.js - checksum: e8ab5bbef94e254ec1fa85deec251c4b34047786c87f54abd842cd12c3f29d55f62828512a4b69046075a624a25b2327e232072be702a68fcb3d8183e0175cca + checksum: 8778fee2527e869db560bd46ac91398504df804313ab5a5918bd6ca368cc134a5ec47f71cd3becf9bb62ce1f99dbdcf2b5c89601d9058a32d364457ae6e54a4b languageName: node linkType: hard @@ -9899,9 +9938,9 @@ __metadata: linkType: hard "lru-cache@npm:^10.0.1, lru-cache@npm:^10.1.0, lru-cache@npm:^10.2.0": - version: 10.2.0 - resolution: "lru-cache@npm:10.2.0" - checksum: eee7ddda4a7475deac51ac81d7dd78709095c6fa46e8350dc2d22462559a1faa3b81ed931d5464b13d48cbd7e08b46100b6f768c76833912bc444b99c37e25db + version: 10.2.2 + resolution: "lru-cache@npm:10.2.2" + checksum: 
98e8fc93691c546f719a76103ef2bee5a3ac823955c755a47641ec41f8c7fafa1baeaba466937cc1cbfa9cfd47e03536d10e2db3158a64ad91ff3a58a32c893e languageName: node linkType: hard @@ -9998,8 +10037,8 @@ __metadata: linkType: hard "make-fetch-happen@npm:^13.0.0": - version: 13.0.0 - resolution: "make-fetch-happen@npm:13.0.0" + version: 13.0.1 + resolution: "make-fetch-happen@npm:13.0.1" dependencies: "@npmcli/agent": ^2.0.0 cacache: ^18.0.0 @@ -10010,9 +10049,10 @@ __metadata: minipass-flush: ^1.0.5 minipass-pipeline: ^1.2.4 negotiator: ^0.6.3 + proc-log: ^4.2.0 promise-retry: ^2.0.1 ssri: ^10.0.0 - checksum: 7c7a6d381ce919dd83af398b66459a10e2fe8f4504f340d1d090d3fa3d1b0c93750220e1d898114c64467223504bd258612ba83efbc16f31b075cd56de24b4af + checksum: 5c9fad695579b79488fa100da05777213dd9365222f85e4757630f8dd2a21a79ddd3206c78cfd6f9b37346819681782b67900ac847a57cf04190f52dda5343fd languageName: node linkType: hard @@ -10087,11 +10127,14 @@ __metadata: linkType: hard "memfs@npm:^4.6.0": - version: 4.8.2 - resolution: "memfs@npm:4.8.2" + version: 4.9.2 + resolution: "memfs@npm:4.9.2" dependencies: + "@jsonjoy.com/json-pack": ^1.0.3 + "@jsonjoy.com/util": ^1.1.2 + sonic-forest: ^1.0.0 tslib: ^2.0.0 - checksum: ffbc79e89542c57ccdd83f906252313a8354fb050bab6500728a60a321ca2f090e70145c324ff1540b27272a34ff5049b2790e7d5a9af9ec4505fffeca19db8f + checksum: 72850691d37b4e67fb78fceced7294e381caf7a614b22b81fa643c03ac6c13270d52e2ac96d8ed95edab715fd0fba2db1bf604a815cbd6d53ecb3f56c038a583 languageName: node linkType: hard @@ -10281,8 +10324,8 @@ __metadata: linkType: hard "minipass-fetch@npm:^3.0.0": - version: 3.0.4 - resolution: "minipass-fetch@npm:3.0.4" + version: 3.0.5 + resolution: "minipass-fetch@npm:3.0.5" dependencies: encoding: ^0.1.13 minipass: ^7.0.3 @@ -10291,7 +10334,7 @@ __metadata: dependenciesMeta: encoding: optional: true - checksum: af7aad15d5c128ab1ebe52e043bdf7d62c3c6f0cecb9285b40d7b395e1375b45dcdfd40e63e93d26a0e8249c9efd5c325c65575aceee192883970ff8cb11364a + checksum: 
8047d273236157aab27ab7cd8eab7ea79e6ecd63e8f80c3366ec076cb9a0fed550a6935bab51764369027c414647fd8256c2a20c5445fb250c483de43350de83 languageName: node linkType: hard @@ -10339,9 +10382,9 @@ __metadata: linkType: hard "minipass@npm:^5.0.0 || ^6.0.2 || ^7.0.0, minipass@npm:^7.0.2, minipass@npm:^7.0.3, minipass@npm:^7.0.4": - version: 7.0.4 - resolution: "minipass@npm:7.0.4" - checksum: 87585e258b9488caf2e7acea242fd7856bbe9a2c84a7807643513a338d66f368c7d518200ad7b70a508664d408aa000517647b2930c259a8b1f9f0984f344a21 + version: 7.1.0 + resolution: "minipass@npm:7.1.0" + checksum: c057d4b1d7fdb35b8f4b9d8f627b1f6832c441cd7dff9304ee5efef68abb3b460309bf97b1b0ce5b960e259caa53c724f609d058e4dc12d547e2a074aaae2cd6 languageName: node linkType: hard @@ -10485,13 +10528,6 @@ __metadata: languageName: node linkType: hard -"multiformats@npm:^11.0.0, multiformats@npm:^11.0.2": - version: 11.0.2 - resolution: "multiformats@npm:11.0.2" - checksum: e587bbe709f29e42ae3c22458c960070269027d962183afc49a83b8ba26c31525e81ce2ac71082a52ba0a75e9aed4d0d044cac68d32656fdcd5cd340fb367fac - languageName: node - linkType: hard - "multiformats@npm:^12.0.1": version: 12.1.3 resolution: "multiformats@npm:12.1.3" @@ -10499,13 +10535,20 @@ __metadata: languageName: node linkType: hard -"multiformats@npm:^13.0.0, multiformats@npm:^13.1.0": +"multiformats@npm:^13.0.0, multiformats@npm:^13.0.1, multiformats@npm:^13.1.0": version: 13.1.0 resolution: "multiformats@npm:13.1.0" checksum: b970e3622a80192a4df8c23378c4854520df8b2d17db773ac8b77c19750019e1c9813cc05e12b0e3b0d03599ff5d073681e847d43b4b273efca5aabbb28eb0e0 languageName: node linkType: hard +"murmurhash3js-revisited@npm:^3.0.0": + version: 3.0.0 + resolution: "murmurhash3js-revisited@npm:3.0.0" + checksum: 24b60657ce296b1d3cf358af70688c8ed777e93c4ee263967f066a4adb0ade0d689863a1a51adc74ab134d61a877f41a06e2b73842ac3fc924799cc96b249a40 + languageName: node + linkType: hard + "nanoid@npm:^3.3.7": version: 3.3.7 resolution: "nanoid@npm:3.3.7" @@ -10598,13 +10641,13 
@@ __metadata: linkType: hard "node-gyp-build@npm:^4.3.0": - version: 4.8.0 - resolution: "node-gyp-build@npm:4.8.0" + version: 4.8.1 + resolution: "node-gyp-build@npm:4.8.1" bin: node-gyp-build: bin.js node-gyp-build-optional: optional.js node-gyp-build-test: build-test.js - checksum: b82a56f866034b559dd3ed1ad04f55b04ae381b22ec2affe74b488d1582473ca6e7f85fccf52da085812d3de2b0bf23109e752a57709ac7b9963951c710fea40 + checksum: fe6e95da6f4608c1a98655f6bf2fe4e8dd9c877cd13256056a8acaf585cc7f98718823fe9366be11b78c2f332d5a184b00cf07a4af96c9d8fea45f640c019f98 languageName: node linkType: hard @@ -10661,13 +10704,13 @@ __metadata: linkType: hard "nopt@npm:^7.0.0": - version: 7.2.0 - resolution: "nopt@npm:7.2.0" + version: 7.2.1 + resolution: "nopt@npm:7.2.1" dependencies: abbrev: ^2.0.0 bin: nopt: bin/nopt.js - checksum: a9c0f57fb8cb9cc82ae47192ca2b7ef00e199b9480eed202482c962d61b59a7fbe7541920b2a5839a97b42ee39e288c0aed770e38057a608d7f579389dfde410 + checksum: 6fa729cc77ce4162cfad8abbc9ba31d4a0ff6850c3af61d59b505653bef4781ec059f8890ecfe93ee8aa0c511093369cca88bfc998101616a2904e715bbbb7c9 languageName: node linkType: hard @@ -10835,16 +10878,16 @@ __metadata: linkType: hard "optionator@npm:^0.9.3": - version: 0.9.3 - resolution: "optionator@npm:0.9.3" + version: 0.9.4 + resolution: "optionator@npm:0.9.4" dependencies: - "@aashutoshrathi/word-wrap": ^1.2.3 deep-is: ^0.1.3 fast-levenshtein: ^2.0.6 levn: ^0.4.1 prelude-ls: ^1.2.1 type-check: ^0.4.0 - checksum: 09281999441f2fe9c33a5eeab76700795365a061563d66b098923eb719251a42bdbe432790d35064d0816ead9296dbeb1ad51a733edf4167c96bd5d0882e428a + word-wrap: ^1.2.5 + checksum: ecbd010e3dc73e05d239976422d9ef54a82a13f37c11ca5911dff41c98a6c7f0f163b27f922c37e7f8340af9d36febd3b6e9cef508f3339d4c393d7276d716bb languageName: node linkType: hard @@ -11323,6 +11366,13 @@ __metadata: languageName: node linkType: hard +"proc-log@npm:^4.2.0": + version: 4.2.0 + resolution: "proc-log@npm:4.2.0" + checksum: 
98f6cd012d54b5334144c5255ecb941ee171744f45fca8b43b58ae5a0c1af07352475f481cadd9848e7f0250376ee584f6aa0951a856ff8f021bdfbff4eb33fc + languageName: node + linkType: hard + "process-nextick-args@npm:~2.0.0": version: 2.0.1 resolution: "process-nextick-args@npm:2.0.1" @@ -11371,7 +11421,7 @@ __metadata: languageName: node linkType: hard -"protons-runtime@npm:^5.0.0, protons-runtime@npm:^5.4.0": +"protons-runtime@npm:5.4.0, protons-runtime@npm:^5.0.0, protons-runtime@npm:^5.4.0": version: 5.4.0 resolution: "protons-runtime@npm:5.4.0" dependencies: @@ -11447,30 +11497,30 @@ __metadata: languageName: node linkType: hard -"puppeteer-core@npm:22.6.5": - version: 22.6.5 - resolution: "puppeteer-core@npm:22.6.5" +"puppeteer-core@npm:22.8.0": + version: 22.8.0 + resolution: "puppeteer-core@npm:22.8.0" dependencies: - "@puppeteer/browsers": 2.2.2 - chromium-bidi: 0.5.17 + "@puppeteer/browsers": 2.2.3 + chromium-bidi: 0.5.19 debug: 4.3.4 - devtools-protocol: 0.0.1262051 - ws: 8.16.0 - checksum: 4dc58083179eae79397d2c55c8cf12b27228278c5ab2d4928dd44a954af17f0f55be0b91e0e442fd282fa96574a2403e6397b3ae10bedf6ff2b38bffed164ff2 + devtools-protocol: 0.0.1273771 + ws: 8.17.0 + checksum: f4250c87c09eb9c73d737ccf08e548babd57e749c9bfc241a7251f2e5e5f3ef2bf3dcb99b7b606763db3a914c866c97cc6714961900566280414b0fad5a330a8 languageName: node linkType: hard "puppeteer@npm:^22.2": - version: 22.6.5 - resolution: "puppeteer@npm:22.6.5" + version: 22.8.0 + resolution: "puppeteer@npm:22.8.0" dependencies: - "@puppeteer/browsers": 2.2.2 + "@puppeteer/browsers": 2.2.3 cosmiconfig: 9.0.0 - devtools-protocol: 0.0.1262051 - puppeteer-core: 22.6.5 + devtools-protocol: 0.0.1273771 + puppeteer-core: 22.8.0 bin: puppeteer: lib/esm/puppeteer/node/cli.js - checksum: d6361ae4e5dd7c55e244b98aca345745b147c434b3636896e1f01103de2994c48274a0ed2febf8ba917692f086d44e4d9a820007acc814e5dba7e8d18ad1aedd + checksum: 
da4855a71b6355e96196b9838fc255fa39f6bdd09cb0b9a6d3cfc377ba839eecef01e40ad7bbff48ef17a5784266bfac5dbf94e1b298f447ce8983f72ff90185 languageName: node linkType: hard @@ -11534,14 +11584,14 @@ __metadata: languageName: node linkType: hard -"race-event@npm:^1.2.0": - version: 1.2.0 - resolution: "race-event@npm:1.2.0" - checksum: b3468019959adb74859e4f153f7952a3c031d5435de1a031467cf85e9d5d9d1be3c8b7a58a7e07116e06bf5d82c55bae4be1d0029f582802aaee0b18f1e19cbb +"race-event@npm:^1.2.0, race-event@npm:^1.3.0": + version: 1.3.0 + resolution: "race-event@npm:1.3.0" + checksum: 7aaf432c15d0d53221c74d351b7c46dbd7a423be73a21648e46f4f2df6aa3261026b99cad522daa2aee73bff41565b05907ba9ef3a3592e0e7bce2565293e99c languageName: node linkType: hard -"race-signal@npm:^1.0.0, race-signal@npm:^1.0.1, race-signal@npm:^1.0.2": +"race-signal@npm:^1.0.0, race-signal@npm:^1.0.2": version: 1.0.2 resolution: "race-signal@npm:1.0.2" checksum: 01ea1f70059673cd239acbe9523eaf1649f3b02ec786b5266770d9b045018aa96e316150447f0a12e7b0f8aa02522deb23e7d3a2c3a58d37135c505f595f2e49 @@ -11594,9 +11644,9 @@ __metadata: linkType: hard "react-is@npm:^18.0.0": - version: 18.2.0 - resolution: "react-is@npm:18.2.0" - checksum: e72d0ba81b5922759e4aff17e0252bd29988f9642ed817f56b25a3e217e13eea8a7f2322af99a06edb779da12d5d636e9fda473d620df9a3da0df2a74141d53e + version: 18.3.1 + resolution: "react-is@npm:18.3.1" + checksum: e20fe84c86ff172fc8d898251b7cc2c43645d108bf96d0b8edf39b98f9a2cae97b40520ee7ed8ee0085ccc94736c4886294456033304151c3f94978cec03df21 languageName: node linkType: hard @@ -11649,6 +11699,15 @@ __metadata: languageName: node linkType: hard +"receptacle@npm:^1.3.2": + version: 1.3.2 + resolution: "receptacle@npm:1.3.2" + dependencies: + ms: ^2.1.1 + checksum: 7c5011f19e6ddcb759c1e6756877cee3c9eb78fbd1278eca4572d75f74993f0ccdc1e5f7761de6e682dff5344ee94f7a69bc492e2e8eb81d8777774a2399ce9c + languageName: node + linkType: hard + "rechoir@npm:^0.8.0": version: 0.8.0 resolution: "rechoir@npm:0.8.0" @@ -12018,7 +12077,7 
@@ __metadata: languageName: node linkType: hard -"semver@npm:7.6.0, semver@npm:^7.3.4, semver@npm:^7.3.5, semver@npm:^7.3.7, semver@npm:^7.3.8, semver@npm:^7.5.3, semver@npm:^7.5.4": +"semver@npm:7.6.0": version: 7.6.0 resolution: "semver@npm:7.6.0" dependencies: @@ -12038,6 +12097,15 @@ __metadata: languageName: node linkType: hard +"semver@npm:^7.3.4, semver@npm:^7.3.5, semver@npm:^7.3.7, semver@npm:^7.3.8, semver@npm:^7.5.3, semver@npm:^7.5.4, semver@npm:^7.6.0": + version: 7.6.1 + resolution: "semver@npm:7.6.1" + bin: + semver: bin/semver.js + checksum: 2c9c89b985230c0fcf02c96ae6a3ca40c474f2f4e838634394691e6e10c347a0c6def0f14fc355d82f90f1744a073b8b9c45457b108aa728280b5d68ed7961cd + languageName: node + linkType: hard + "serialize-javascript@npm:^6.0.1": version: 6.0.2 resolution: "serialize-javascript@npm:6.0.2" @@ -12239,6 +12307,17 @@ __metadata: languageName: node linkType: hard +"sonic-forest@npm:^1.0.0": + version: 1.0.3 + resolution: "sonic-forest@npm:1.0.3" + dependencies: + tree-dump: ^1.0.0 + peerDependencies: + tslib: 2 + checksum: d328735d527ad9e27b3ed9a1599abf33a1e2df139b3689c6515c3c1fa09f19d0a9ddccdc1a43759fa43462259a962308cb18214bed761c1b7ea75a7611e31b11 + languageName: node + linkType: hard + "source-map-js@npm:^1.2.0": version: 1.2.0 resolution: "source-map-js@npm:1.2.0" @@ -12352,11 +12431,11 @@ __metadata: linkType: hard "ssri@npm:^10.0.0": - version: 10.0.5 - resolution: "ssri@npm:10.0.5" + version: 10.0.6 + resolution: "ssri@npm:10.0.6" dependencies: minipass: ^7.0.3 - checksum: 0a31b65f21872dea1ed3f7c200d7bc1c1b91c15e419deca14f282508ba917cbb342c08a6814c7f68ca4ca4116dd1a85da2bbf39227480e50125a1ceffeecb750 + checksum: 4603d53a05bcd44188747d38f1cc43833b9951b5a1ee43ba50535bdfc5fe4a0897472dbe69837570a5417c3c073377ef4f8c1a272683b401857f72738ee57299 languageName: node linkType: hard @@ -12427,7 +12506,7 @@ __metadata: languageName: node linkType: hard -"streamx@npm:^2.13.0, streamx@npm:^2.15.0": +"streamx@npm:^2.15.0, streamx@npm:^2.16.1": 
version: 2.16.1 resolution: "streamx@npm:2.16.1" dependencies: @@ -12765,8 +12844,8 @@ __metadata: linkType: hard "terser@npm:^5.26.0": - version: 5.30.3 - resolution: "terser@npm:5.30.3" + version: 5.31.0 + resolution: "terser@npm:5.31.0" dependencies: "@jridgewell/source-map": ^0.3.3 acorn: ^8.8.2 @@ -12774,7 +12853,7 @@ __metadata: source-map-support: ~0.5.20 bin: terser: bin/terser - checksum: 8c680ed32a948f806fade0969c52aab94b6de174e4a78610f5d3abf9993b161eb19b88b2ceadff09b153858727c02deb6709635e4bfbd519f67d54e0394e2983 + checksum: 48f14229618866bba8a9464e9d0e7fdcb6b6488b3a6c4690fcf4d48df65bf45959d5ae8c02f1a0b3f3dd035a9ae340b715e1e547645b112dc3963daa3564699a languageName: node linkType: hard @@ -12803,6 +12882,15 @@ __metadata: languageName: node linkType: hard +"thingies@npm:^1.20.0": + version: 1.21.0 + resolution: "thingies@npm:1.21.0" + peerDependencies: + tslib: ^2 + checksum: 283a2785e513dc892822dd0bbadaa79e873a7fc90b84798164717bf7cf837553e0b4518d8027b2307d8f6fc6caab088fa717112cd9196c6222763cc3cc1b7e79 + languageName: node + linkType: hard + "through@npm:2, through@npm:^2.3.8, through@npm:~2.3, through@npm:~2.3.1": version: 2.3.8 resolution: "through@npm:2.3.8" @@ -12840,6 +12928,15 @@ __metadata: languageName: node linkType: hard +"tree-dump@npm:^1.0.0": + version: 1.0.1 + resolution: "tree-dump@npm:1.0.1" + peerDependencies: + tslib: 2 + checksum: 256f2e066ab8743672795822731410d9b9036ef449499f528df1a638ad99af45f345bfbddeaf1cc46b7b9279db3b5f83e1a4cb21bc086ef25ce6add975a3c490 + languageName: node + linkType: hard + "tree-kill@npm:^1.2.2": version: 1.2.2 resolution: "tree-kill@npm:1.2.2" @@ -12872,15 +12969,15 @@ __metadata: languageName: node linkType: hard -"ts-essentials@npm:^9.4.2": - version: 9.4.2 - resolution: "ts-essentials@npm:9.4.2" +"ts-essentials@npm:^10.0.0": + version: 10.0.0 + resolution: "ts-essentials@npm:10.0.0" peerDependencies: - typescript: ">=4.1.0" + typescript: ">=4.5.0" peerDependenciesMeta: typescript: optional: true - checksum: 
ef9a15cef66e4c23942cd6a64ab1aa15108cabea187904ba8345bab309f5b5d8f4fc076950391af8fd3914df0349ce11dc716930949f7f5d24ec3a5851ccfe73 + checksum: 29c789b32b1885211bc7429410529810fabc0d6a6f3b13e05f15e2ca6540581c019a66296864ddc5d4510c4eec4dfee0627631857bedae12b48b368d9f62b230 languageName: node linkType: hard @@ -13218,7 +13315,7 @@ __metadata: languageName: node linkType: hard -"uint8arraylist@npm:^2.0.0, uint8arraylist@npm:^2.4.1, uint8arraylist@npm:^2.4.3, uint8arraylist@npm:^2.4.8": +"uint8arraylist@npm:^2.0.0, uint8arraylist@npm:^2.4.3, uint8arraylist@npm:^2.4.8": version: 2.4.8 resolution: "uint8arraylist@npm:2.4.8" dependencies: @@ -13330,16 +13427,16 @@ __metadata: linkType: hard "update-browserslist-db@npm:^1.0.13": - version: 1.0.13 - resolution: "update-browserslist-db@npm:1.0.13" + version: 1.0.15 + resolution: "update-browserslist-db@npm:1.0.15" dependencies: - escalade: ^3.1.1 + escalade: ^3.1.2 picocolors: ^1.0.0 peerDependencies: browserslist: ">= 4.21.0" bin: update-browserslist-db: cli.js - checksum: 1e47d80182ab6e4ad35396ad8b61008ae2a1330221175d0abd37689658bdb61af9b705bfc41057fd16682474d79944fb2d86767c5ed5ae34b6276b9bed353322 + checksum: 15f244dc83918c9a1779b86311d1be39d8f990e0a439db559fd2f54150b789fca774cdb4cc1886d5f18b06c767ed97f84d47356a5fda42da3bcc4e0f9b9d22e4 languageName: node linkType: hard @@ -13422,8 +13519,8 @@ __metadata: linkType: hard "viem@npm:^2.7.15": - version: 2.9.25 - resolution: "viem@npm:2.9.25" + version: 2.10.2 + resolution: "viem@npm:2.10.2" dependencies: "@adraffy/ens-normalize": 1.10.0 "@noble/curves": 1.2.0 @@ -13438,7 +13535,7 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: f9dbcc00a63b223a5ae213da5fd16ae8549d851f069065ace7072fb0c264d295a56fde547ec6c154c71d36011944c5fa600315131ea2c0fc34a94283ae4f40b3 + checksum: 45e7e29908659c60e0a8881f28dcee51a8686004874b425785af18641f19a94489cd694406d4377f7e3db18c3a22764c3518af372c6857753aad877d8f251395 languageName: node linkType: hard @@ -13733,6 +13830,13 @@ 
__metadata: languageName: node linkType: hard +"word-wrap@npm:^1.2.5": + version: 1.2.5 + resolution: "word-wrap@npm:1.2.5" + checksum: f93ba3586fc181f94afdaff3a6fef27920b4b6d9eaefed0f428f8e07adea2a7f54a5f2830ce59406c8416f033f86902b91eb824072354645eea687dff3691ccb + languageName: node + linkType: hard + "wrap-ansi-cjs@npm:wrap-ansi@^7.0.0, wrap-ansi@npm:^7.0.0": version: 7.0.0 resolution: "wrap-ansi@npm:7.0.0" @@ -13787,9 +13891,9 @@ __metadata: languageName: node linkType: hard -"ws@npm:8.16.0, ws@npm:^8.13.0": - version: 8.16.0 - resolution: "ws@npm:8.16.0" +"ws@npm:8.17.0, ws@npm:^8.13.0": + version: 8.17.0 + resolution: "ws@npm:8.17.0" peerDependencies: bufferutil: ^4.0.1 utf-8-validate: ">=5.0.2" @@ -13798,7 +13902,7 @@ __metadata: optional: true utf-8-validate: optional: true - checksum: feb3eecd2bae82fa8a8beef800290ce437d8b8063bdc69712725f21aef77c49cb2ff45c6e5e7fce622248f9c7abaee506bae0a9064067ffd6935460c7357321b + checksum: 147ef9eab0251364e1d2c55338ad0efb15e6913923ccbfdf20f7a8a6cb8f88432bcd7f4d8f66977135bfad35575644f9983201c1a361019594a4e53977bf6d4e languageName: node linkType: hard @@ -13824,11 +13928,11 @@ __metadata: linkType: hard "yaml@npm:^2.1.3": - version: 2.4.1 - resolution: "yaml@npm:2.4.1" + version: 2.4.2 + resolution: "yaml@npm:2.4.2" bin: yaml: bin.mjs - checksum: 4c391d07a5d5e935e058babb71026c9cdc9a6fd889e35dd91b53cfb0a12691b67c6c5c740858e71345fef18cd9c13c554a6dda9196f59820d769d94041badb0b + checksum: 90dda4485de04367251face9abb5c36927c94e44078f4e958e6468a07e74e7e92f89be20fc49860b6268c51ee5a5fc79ef89197d3f874bf24ef8921cc4ba9013 languageName: node linkType: hard @@ -13903,8 +14007,8 @@ __metadata: linkType: hard "zod@npm:^3.22.4": - version: 3.23.0 - resolution: "zod@npm:3.23.0" - checksum: ba3ae4d2320bfba1207475cac77c3449db55ae345ec737c4fdff794c6851619adebac1e0f5413311f4e80cf98ca6669b7f7c4336a64fde8fa8c6345c6288506d + version: 3.23.8 + resolution: "zod@npm:3.23.8" + checksum: 
15949ff82118f59c893dacd9d3c766d02b6fa2e71cf474d5aa888570c469dbf5446ac5ad562bb035bf7ac9650da94f290655c194f4a6de3e766f43febd432c5c languageName: node linkType: hard From 8cf9168c61d8f2bdee5cc29763df6c888422a0bc Mon Sep 17 00:00:00 2001 From: Maddiaa <47148561+Maddiaa0@users.noreply.github.com> Date: Fri, 10 May 2024 12:21:42 +0100 Subject: [PATCH 40/43] feat: avm support for public input columns (#5700) Adds support for public input columns as outlined in the following hackmd: https://hackmd.io/8kkJo4RkRTG6mpwL8fOf3w?both --- barretenberg/cpp/pil/spike/README.md | 3 + barretenberg/cpp/pil/spike/spike.pil | 8 + .../generated/spike/declare_views.hpp | 7 + .../relations/generated/spike/spike.hpp | 48 +++ .../barretenberg/vm/generated/avm_flavor.hpp | 8 + .../vm/generated/avm_verifier.cpp | 2 + .../vm/generated/spike_circuit_builder.hpp | 110 +++++++ .../vm/generated/spike_composer.cpp | 86 ++++++ .../vm/generated/spike_composer.hpp | 69 +++++ .../vm/generated/spike_flavor.hpp | 286 ++++++++++++++++++ .../vm/generated/spike_prover.cpp | 135 +++++++++ .../vm/generated/spike_prover.hpp | 64 ++++ .../vm/generated/spike_verifier.cpp | 110 +++++++ .../vm/generated/spike_verifier.hpp | 33 ++ .../src/barretenberg/vm/tests/spike.test.cpp | 73 +++++ 15 files changed, 1042 insertions(+) create mode 100644 barretenberg/cpp/pil/spike/README.md create mode 100644 barretenberg/cpp/pil/spike/spike.pil create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/spike/declare_views.hpp create mode 100644 barretenberg/cpp/src/barretenberg/relations/generated/spike/spike.hpp create mode 100644 barretenberg/cpp/src/barretenberg/vm/generated/spike_circuit_builder.hpp create mode 100644 barretenberg/cpp/src/barretenberg/vm/generated/spike_composer.cpp create mode 100644 barretenberg/cpp/src/barretenberg/vm/generated/spike_composer.hpp create mode 100644 barretenberg/cpp/src/barretenberg/vm/generated/spike_flavor.hpp create mode 100644 
barretenberg/cpp/src/barretenberg/vm/generated/spike_prover.cpp create mode 100644 barretenberg/cpp/src/barretenberg/vm/generated/spike_prover.hpp create mode 100644 barretenberg/cpp/src/barretenberg/vm/generated/spike_verifier.cpp create mode 100644 barretenberg/cpp/src/barretenberg/vm/generated/spike_verifier.hpp create mode 100644 barretenberg/cpp/src/barretenberg/vm/tests/spike.test.cpp diff --git a/barretenberg/cpp/pil/spike/README.md b/barretenberg/cpp/pil/spike/README.md new file mode 100644 index 00000000000..69e4f55ac79 --- /dev/null +++ b/barretenberg/cpp/pil/spike/README.md @@ -0,0 +1,3 @@ +## Spike machine + +A spike machine for testing new PIL functionality \ No newline at end of file diff --git a/barretenberg/cpp/pil/spike/spike.pil b/barretenberg/cpp/pil/spike/spike.pil new file mode 100644 index 00000000000..1361c446923 --- /dev/null +++ b/barretenberg/cpp/pil/spike/spike.pil @@ -0,0 +1,8 @@ + +namespace Spike(16); + +pol constant first = [1] + [0]*; +pol commit x; +pol public kernel_inputs; + +x - first = 0; \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/spike/declare_views.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/spike/declare_views.hpp new file mode 100644 index 00000000000..df901e8d155 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/generated/spike/declare_views.hpp @@ -0,0 +1,7 @@ + +#define Spike_DECLARE_VIEWS(index) \ + using Accumulator = typename std::tuple_element::type; \ + using View = typename Accumulator::View; \ + [[maybe_unused]] auto Spike_first = View(new_term.Spike_first); \ + [[maybe_unused]] auto Spike_kernel_inputs = View(new_term.Spike_kernel_inputs); \ + [[maybe_unused]] auto Spike_x = View(new_term.Spike_x); diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/spike/spike.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/spike/spike.hpp new file mode 100644 index 00000000000..2a99922e200 --- /dev/null +++ 
b/barretenberg/cpp/src/barretenberg/relations/generated/spike/spike.hpp @@ -0,0 +1,48 @@ + +#pragma once +#include "../../relation_parameters.hpp" +#include "../../relation_types.hpp" +#include "./declare_views.hpp" + +namespace bb::Spike_vm { + +template struct SpikeRow { + FF Spike_first{}; + FF Spike_x{}; +}; + +inline std::string get_relation_label_spike(int index) +{ + switch (index) {} + return std::to_string(index); +} + +template class spikeImpl { + public: + using FF = FF_; + + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ + 2, + }; + + template + void static accumulate(ContainerOverSubrelations& evals, + const AllEntities& new_term, + [[maybe_unused]] const RelationParameters&, + [[maybe_unused]] const FF& scaling_factor) + { + + // Contribution 0 + { + Spike_DECLARE_VIEWS(0); + + auto tmp = (Spike_x - Spike_first); + tmp *= scaling_factor; + std::get<0>(evals) += tmp; + } + } +}; + +template using spike = Relation>; + +} // namespace bb::Spike_vm \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp index 1921397837f..bb97c6808e4 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp @@ -2026,6 +2026,14 @@ class AvmFlavor { */ template using ProverUnivariates = AllEntities>; + /** + * @brief A container for univariates used during Protogalaxy folding and sumcheck with some of the computation + * optmistically ignored + * @details During folding and sumcheck, the prover evaluates the relations on these univariates. + */ + template + using OptimisedProverUnivariates = AllEntities>; + /** * @brief A container for univariates produced during the hot loop in sumcheck. 
*/ diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp index ecce0af1b4d..ba34ca33fd0 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp @@ -3,9 +3,11 @@ #include "./avm_verifier.hpp" #include "barretenberg/commitment_schemes/zeromorph/zeromorph.hpp" #include "barretenberg/numeric/bitop/get_msb.hpp" +#include "barretenberg/polynomials/polynomial.hpp" #include "barretenberg/transcript/transcript.hpp" namespace bb { + AvmVerifier::AvmVerifier(std::shared_ptr verifier_key) : key(verifier_key) {} diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/spike_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/spike_circuit_builder.hpp new file mode 100644 index 00000000000..255ceed71c8 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/generated/spike_circuit_builder.hpp @@ -0,0 +1,110 @@ + + +// AUTOGENERATED FILE +#pragma once + +#include "barretenberg/common/constexpr_utils.hpp" +#include "barretenberg/common/throw_or_abort.hpp" +#include "barretenberg/ecc/curves/bn254/fr.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" +#include "barretenberg/relations/generic_lookup/generic_lookup_relation.hpp" +#include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" +#include "barretenberg/stdlib_circuit_builders/circuit_builder_base.hpp" + +#include "barretenberg/relations/generated/spike/spike.hpp" +#include "barretenberg/vm/generated/spike_flavor.hpp" + +namespace bb { + +template struct SpikeFullRow { + FF Spike_first{}; + FF Spike_kernel_inputs{}; + FF Spike_x{}; +}; + +class SpikeCircuitBuilder { + public: + using Flavor = bb::SpikeFlavor; + using FF = Flavor::FF; + using Row = SpikeFullRow; + + // TODO: template + using Polynomial = Flavor::Polynomial; + using ProverPolynomials = Flavor::ProverPolynomials; + 
+ static constexpr size_t num_fixed_columns = 3; + static constexpr size_t num_polys = 3; + std::vector rows; + + void set_trace(std::vector&& trace) { rows = std::move(trace); } + + ProverPolynomials compute_polynomials() + { + const auto num_rows = get_circuit_subgroup_size(); + ProverPolynomials polys; + + // Allocate mem for each column + for (auto& poly : polys.get_all()) { + poly = Polynomial(num_rows); + } + + for (size_t i = 0; i < rows.size(); i++) { + polys.Spike_first[i] = rows[i].Spike_first; + polys.Spike_kernel_inputs[i] = rows[i].Spike_kernel_inputs; + polys.Spike_x[i] = rows[i].Spike_x; + } + + return polys; + } + + [[maybe_unused]] bool check_circuit() + { + + auto polys = compute_polynomials(); + const size_t num_rows = polys.get_polynomial_size(); + + const auto evaluate_relation = [&](const std::string& relation_name, + std::string (*debug_label)(int)) { + typename Relation::SumcheckArrayOfValuesOverSubrelations result; + for (auto& r : result) { + r = 0; + } + constexpr size_t NUM_SUBRELATIONS = result.size(); + + for (size_t i = 0; i < num_rows; ++i) { + Relation::accumulate(result, polys.get_row(i), {}, 1); + + bool x = true; + for (size_t j = 0; j < NUM_SUBRELATIONS; ++j) { + if (result[j] != 0) { + std::string row_name = debug_label(static_cast(j)); + throw_or_abort( + format("Relation ", relation_name, ", subrelation index ", row_name, " failed at row ", i)); + x = false; + } + } + if (!x) { + return false; + } + } + return true; + }; + + if (!evaluate_relation.template operator()>("spike", Spike_vm::get_relation_label_spike)) { + return false; + } + + return true; + } + + [[nodiscard]] size_t get_num_gates() const { return rows.size(); } + + [[nodiscard]] size_t get_circuit_subgroup_size() const + { + const size_t num_rows = get_num_gates(); + const auto num_rows_log2 = static_cast(numeric::get_msb64(num_rows)); + size_t num_rows_pow2 = 1UL << (num_rows_log2 + (1UL << num_rows_log2 == num_rows ? 
0 : 1)); + return num_rows_pow2; + } +}; +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/spike_composer.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/spike_composer.cpp new file mode 100644 index 00000000000..9745b6accda --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/generated/spike_composer.cpp @@ -0,0 +1,86 @@ + + +#include "./spike_composer.hpp" +#include "barretenberg/plonk_honk_shared/composer/composer_lib.hpp" +#include "barretenberg/plonk_honk_shared/composer/permutation_lib.hpp" +#include "barretenberg/vm/generated/spike_circuit_builder.hpp" +#include "barretenberg/vm/generated/spike_verifier.hpp" + +namespace bb { + +using Flavor = SpikeFlavor; +void SpikeComposer::compute_witness(CircuitConstructor& circuit) +{ + if (computed_witness) { + return; + } + + auto polynomials = circuit.compute_polynomials(); + + for (auto [key_poly, prover_poly] : zip_view(proving_key->get_all(), polynomials.get_unshifted())) { + ASSERT(flavor_get_label(*proving_key, key_poly) == flavor_get_label(polynomials, prover_poly)); + key_poly = prover_poly; + } + + computed_witness = true; +} + +SpikeProver SpikeComposer::create_prover(CircuitConstructor& circuit_constructor) +{ + compute_proving_key(circuit_constructor); + compute_witness(circuit_constructor); + compute_commitment_key(circuit_constructor.get_circuit_subgroup_size()); + + SpikeProver output_state(proving_key, proving_key->commitment_key); + + return output_state; +} + +SpikeVerifier SpikeComposer::create_verifier(CircuitConstructor& circuit_constructor) +{ + auto verification_key = compute_verification_key(circuit_constructor); + + SpikeVerifier output_state(verification_key); + + auto pcs_verification_key = std::make_unique(); + + output_state.pcs_verification_key = std::move(pcs_verification_key); + + return output_state; +} + +std::shared_ptr SpikeComposer::compute_proving_key(CircuitConstructor& circuit_constructor) +{ + if (proving_key) { + return proving_key; + } + 
+ // Initialize proving_key + { + const size_t subgroup_size = circuit_constructor.get_circuit_subgroup_size(); + proving_key = std::make_shared(subgroup_size, 0); + } + + proving_key->contains_recursive_proof = false; + + return proving_key; +} + +std::shared_ptr SpikeComposer::compute_verification_key( + CircuitConstructor& circuit_constructor) +{ + if (verification_key) { + return verification_key; + } + + if (!proving_key) { + compute_proving_key(circuit_constructor); + } + + verification_key = + std::make_shared(proving_key->circuit_size, proving_key->num_public_inputs); + + return verification_key; +} + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/spike_composer.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/spike_composer.hpp new file mode 100644 index 00000000000..10ddf7dbd93 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/generated/spike_composer.hpp @@ -0,0 +1,69 @@ + + +#pragma once + +#include "barretenberg/plonk_honk_shared/composer/composer_lib.hpp" +#include "barretenberg/srs/global_crs.hpp" +#include "barretenberg/vm/generated/spike_circuit_builder.hpp" +#include "barretenberg/vm/generated/spike_prover.hpp" +#include "barretenberg/vm/generated/spike_verifier.hpp" + +namespace bb { +class SpikeComposer { + public: + using Flavor = SpikeFlavor; + using CircuitConstructor = SpikeCircuitBuilder; + using ProvingKey = Flavor::ProvingKey; + using VerificationKey = Flavor::VerificationKey; + using PCS = Flavor::PCS; + using CommitmentKey = Flavor::CommitmentKey; + using VerifierCommitmentKey = Flavor::VerifierCommitmentKey; + + // TODO: which of these will we really need + static constexpr std::string_view NAME_STRING = "Spike"; + static constexpr size_t NUM_RESERVED_GATES = 0; + static constexpr size_t NUM_WIRES = Flavor::NUM_WIRES; + + std::shared_ptr proving_key; + std::shared_ptr verification_key; + + // The crs_factory holds the path to the srs and exposes methods to extract the srs elements + 
std::shared_ptr> crs_factory_; + + // The commitment key is passed to the prover but also used herein to compute the verfication key commitments + std::shared_ptr commitment_key; + + std::vector recursive_proof_public_input_indices; + bool contains_recursive_proof = false; + bool computed_witness = false; + + SpikeComposer() { crs_factory_ = bb::srs::get_bn254_crs_factory(); } + + SpikeComposer(std::shared_ptr p_key, std::shared_ptr v_key) + : proving_key(std::move(p_key)) + , verification_key(std::move(v_key)) + {} + + SpikeComposer(SpikeComposer&& other) noexcept = default; + SpikeComposer(SpikeComposer const& other) noexcept = default; + SpikeComposer& operator=(SpikeComposer&& other) noexcept = default; + SpikeComposer& operator=(SpikeComposer const& other) noexcept = default; + ~SpikeComposer() = default; + + std::shared_ptr compute_proving_key(CircuitConstructor& circuit_constructor); + std::shared_ptr compute_verification_key(CircuitConstructor& circuit_constructor); + + void compute_witness(CircuitConstructor& circuit_constructor); + + SpikeProver create_prover(CircuitConstructor& circuit_constructor); + SpikeVerifier create_verifier(CircuitConstructor& circuit_constructor); + + void add_table_column_selector_poly_to_proving_key(bb::polynomial& small, const std::string& tag); + + void compute_commitment_key(size_t circuit_size) + { + proving_key->commitment_key = std::make_shared(circuit_size); + }; +}; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/spike_flavor.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/spike_flavor.hpp new file mode 100644 index 00000000000..b841904764d --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/generated/spike_flavor.hpp @@ -0,0 +1,286 @@ + + +#pragma once +#include "barretenberg/commitment_schemes/kzg/kzg.hpp" +#include "barretenberg/ecc/curves/bn254/g1.hpp" +#include "barretenberg/flavor/relation_definitions.hpp" +#include "barretenberg/polynomials/barycentric.hpp" +#include 
"barretenberg/polynomials/univariate.hpp" + +#include "barretenberg/relations/generic_permutation/generic_permutation_relation.hpp" + +#include "barretenberg/flavor/flavor.hpp" +#include "barretenberg/flavor/flavor_macros.hpp" +#include "barretenberg/polynomials/evaluation_domain.hpp" +#include "barretenberg/polynomials/polynomial.hpp" +#include "barretenberg/relations/generated/spike/spike.hpp" +#include "barretenberg/transcript/transcript.hpp" + +namespace bb { + +class SpikeFlavor { + public: + using Curve = curve::BN254; + using G1 = Curve::Group; + using PCS = KZG; + + using FF = G1::subgroup_field; + using Polynomial = bb::Polynomial; + using PolynomialHandle = std::span; + using GroupElement = G1::element; + using Commitment = G1::affine_element; + using CommitmentHandle = G1::affine_element; + using CommitmentKey = bb::CommitmentKey; + using VerifierCommitmentKey = bb::VerifierCommitmentKey; + using RelationSeparator = FF; + + static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 1; + static constexpr size_t NUM_WITNESS_ENTITIES = 2; + static constexpr size_t NUM_WIRES = NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES; + // We have two copies of the witness entities, so we subtract the number of fixed ones (they have no shift), one for + // the unshifted and one for the shifted + static constexpr size_t NUM_ALL_ENTITIES = 3; + + using Relations = std::tuple>; + + static constexpr size_t MAX_PARTIAL_RELATION_LENGTH = compute_max_partial_relation_length(); + + // BATCHED_RELATION_PARTIAL_LENGTH = algebraic degree of sumcheck relation *after* multiplying by the `pow_zeta` + // random polynomial e.g. 
For \sum(x) [A(x) * B(x) + C(x)] * PowZeta(X), relation length = 2 and random relation + // length = 3 + static constexpr size_t BATCHED_RELATION_PARTIAL_LENGTH = MAX_PARTIAL_RELATION_LENGTH + 1; + static constexpr size_t NUM_RELATIONS = std::tuple_size_v; + + template + using ProtogalaxyTupleOfTuplesOfUnivariates = + decltype(create_protogalaxy_tuple_of_tuples_of_univariates()); + using SumcheckTupleOfTuplesOfUnivariates = decltype(create_sumcheck_tuple_of_tuples_of_univariates()); + using TupleOfArraysOfValues = decltype(create_tuple_of_arrays_of_values()); + + static constexpr bool has_zero_row = true; + + private: + template class PrecomputedEntities : public PrecomputedEntitiesBase { + public: + using DataType = DataType_; + + DEFINE_FLAVOR_MEMBERS(DataType, Spike_first) + + RefVector get_selectors() { return { Spike_first }; }; + RefVector get_sigma_polynomials() { return {}; }; + RefVector get_id_polynomials() { return {}; }; + RefVector get_table_polynomials() { return {}; }; + }; + + template class WitnessEntities { + public: + DEFINE_FLAVOR_MEMBERS(DataType, Spike_kernel_inputs, Spike_x) + + RefVector get_wires() { return { Spike_kernel_inputs, Spike_x }; }; + }; + + template class AllEntities { + public: + DEFINE_FLAVOR_MEMBERS(DataType, Spike_first, Spike_kernel_inputs, Spike_x) + + RefVector get_wires() { return { Spike_first, Spike_kernel_inputs, Spike_x }; }; + RefVector get_unshifted() { return { Spike_first, Spike_kernel_inputs, Spike_x }; }; + RefVector get_to_be_shifted() { return {}; }; + RefVector get_shifted() { return {}; }; + }; + + public: + class ProvingKey + : public ProvingKeyAvm_, WitnessEntities, CommitmentKey> { + public: + // Expose constructors on the base class + using Base = ProvingKeyAvm_, WitnessEntities, CommitmentKey>; + using Base::Base; + + RefVector get_to_be_shifted() { return {}; }; + }; + + using VerificationKey = VerificationKey_, VerifierCommitmentKey>; + + using FoldedPolynomials = AllEntities>; + + class AllValues : 
public AllEntities { + public: + using Base = AllEntities; + using Base::Base; + }; + + /** + * @brief A container for the prover polynomials handles. + */ + class ProverPolynomials : public AllEntities { + public: + // Define all operations as default, except copy construction/assignment + ProverPolynomials() = default; + ProverPolynomials& operator=(const ProverPolynomials&) = delete; + ProverPolynomials(const ProverPolynomials& o) = delete; + ProverPolynomials(ProverPolynomials&& o) noexcept = default; + ProverPolynomials& operator=(ProverPolynomials&& o) noexcept = default; + ~ProverPolynomials() = default; + + ProverPolynomials(ProvingKey& proving_key) + { + for (auto [prover_poly, key_poly] : zip_view(this->get_unshifted(), proving_key.get_all())) { + ASSERT(flavor_get_label(*this, prover_poly) == flavor_get_label(proving_key, key_poly)); + prover_poly = key_poly.share(); + } + for (auto [prover_poly, key_poly] : zip_view(this->get_shifted(), proving_key.get_to_be_shifted())) { + ASSERT(flavor_get_label(*this, prover_poly) == (flavor_get_label(proving_key, key_poly) + "_shift")); + prover_poly = key_poly.shifted(); + } + } + + [[nodiscard]] size_t get_polynomial_size() const { return Spike_kernel_inputs.size(); } + /** + * @brief Returns the evaluations of all prover polynomials at one point on the boolean hypercube, which + * represents one row in the execution trace. 
+ */ + [[nodiscard]] AllValues get_row(size_t row_idx) const + { + AllValues result; + for (auto [result_field, polynomial] : zip_view(result.get_all(), this->get_all())) { + result_field = polynomial[row_idx]; + } + return result; + } + }; + + using RowPolynomials = AllEntities; + + class PartiallyEvaluatedMultivariates : public AllEntities { + public: + PartiallyEvaluatedMultivariates() = default; + PartiallyEvaluatedMultivariates(const size_t circuit_size) + { + // Storage is only needed after the first partial evaluation, hence polynomials of size (n / 2) + for (auto& poly : get_all()) { + poly = Polynomial(circuit_size / 2); + } + } + }; + + /** + * @brief A container for univariates used during Protogalaxy folding and sumcheck. + * @details During folding and sumcheck, the prover evaluates the relations on these univariates. + */ + template using ProverUnivariates = AllEntities>; + + /** + * @brief A container for univariates used during Protogalaxy folding and sumcheck with some of the computation + * optmistically ignored + * @details During folding and sumcheck, the prover evaluates the relations on these univariates. + */ + template + using OptimisedProverUnivariates = AllEntities>; + + /** + * @brief A container for univariates produced during the hot loop in sumcheck. + */ + using ExtendedEdges = ProverUnivariates; + + /** + * @brief A container for the witness commitments. 
+ * + */ + using WitnessCommitments = WitnessEntities; + + class CommitmentLabels : public AllEntities { + private: + using Base = AllEntities; + + public: + CommitmentLabels() + : AllEntities() + { + Base::Spike_first = "SPIKE_FIRST"; + Base::Spike_kernel_inputs = "SPIKE_KERNEL_INPUTS"; + Base::Spike_x = "SPIKE_X"; + }; + }; + + class VerifierCommitments : public AllEntities { + private: + using Base = AllEntities; + + public: + VerifierCommitments(const std::shared_ptr& verification_key) + { + Spike_first = verification_key->Spike_first; + } + }; + + class Transcript : public NativeTranscript { + public: + uint32_t circuit_size; + + Commitment Spike_kernel_inputs; + Commitment Spike_x; + + std::vector> sumcheck_univariates; + std::array sumcheck_evaluations; + std::vector zm_cq_comms; + Commitment zm_cq_comm; + Commitment zm_pi_comm; + + Transcript() = default; + + Transcript(const std::vector& proof) + : NativeTranscript(proof) + {} + + void deserialize_full_transcript() + { + size_t num_frs_read = 0; + circuit_size = deserialize_from_buffer(proof_data, num_frs_read); + size_t log_n = numeric::get_msb(circuit_size); + + Spike_kernel_inputs = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + Spike_x = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + + for (size_t i = 0; i < log_n; ++i) { + sumcheck_univariates.emplace_back( + deserialize_from_buffer>(Transcript::proof_data, + num_frs_read)); + } + sumcheck_evaluations = + deserialize_from_buffer>(Transcript::proof_data, num_frs_read); + for (size_t i = 0; i < log_n; ++i) { + zm_cq_comms.push_back(deserialize_from_buffer(proof_data, num_frs_read)); + } + zm_cq_comm = deserialize_from_buffer(proof_data, num_frs_read); + zm_pi_comm = deserialize_from_buffer(proof_data, num_frs_read); + } + + void serialize_full_transcript() + { + size_t old_proof_length = proof_data.size(); + Transcript::proof_data.clear(); + size_t log_n = numeric::get_msb(circuit_size); + + 
serialize_to_buffer(circuit_size, Transcript::proof_data); + + serialize_to_buffer(Spike_kernel_inputs, Transcript::proof_data); + serialize_to_buffer(Spike_x, Transcript::proof_data); + + for (size_t i = 0; i < log_n; ++i) { + serialize_to_buffer(sumcheck_univariates[i], Transcript::proof_data); + } + serialize_to_buffer(sumcheck_evaluations, Transcript::proof_data); + for (size_t i = 0; i < log_n; ++i) { + serialize_to_buffer(zm_cq_comms[i], proof_data); + } + serialize_to_buffer(zm_cq_comm, proof_data); + serialize_to_buffer(zm_pi_comm, proof_data); + + // sanity check to make sure we generate the same length of proof as before. + ASSERT(proof_data.size() == old_proof_length); + } + }; +}; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/spike_prover.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/spike_prover.cpp new file mode 100644 index 00000000000..1f2925eecd1 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/generated/spike_prover.cpp @@ -0,0 +1,135 @@ + + +#include "spike_prover.hpp" +#include "barretenberg/commitment_schemes/claim.hpp" +#include "barretenberg/commitment_schemes/commitment_key.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" +#include "barretenberg/honk/proof_system/permutation_library.hpp" +#include "barretenberg/plonk_honk_shared/library/grand_product_library.hpp" +#include "barretenberg/polynomials/polynomial.hpp" +#include "barretenberg/relations/lookup_relation.hpp" +#include "barretenberg/relations/permutation_relation.hpp" +#include "barretenberg/sumcheck/sumcheck.hpp" + +namespace bb { + +using Flavor = SpikeFlavor; +using FF = Flavor::FF; + +/** + * Create SpikeProver from proving key, witness and manifest. + * + * @param input_key Proving key. + * @param input_manifest Input manifest + * + * @tparam settings Settings class. 
+ * */ +SpikeProver::SpikeProver(std::shared_ptr input_key, + std::shared_ptr commitment_key) + : key(input_key) + , commitment_key(commitment_key) +{ + for (auto [prover_poly, key_poly] : zip_view(prover_polynomials.get_unshifted(), key->get_all())) { + ASSERT(bb::flavor_get_label(prover_polynomials, prover_poly) == bb::flavor_get_label(*key, key_poly)); + prover_poly = key_poly.share(); + } + for (auto [prover_poly, key_poly] : zip_view(prover_polynomials.get_shifted(), key->get_to_be_shifted())) { + ASSERT(bb::flavor_get_label(prover_polynomials, prover_poly) == + bb::flavor_get_label(*key, key_poly) + "_shift"); + prover_poly = key_poly.shifted(); + } +} + +/** + * @brief Add circuit size, public input size, and public inputs to transcript + * + */ +void SpikeProver::execute_preamble_round() +{ + const auto circuit_size = static_cast(key->circuit_size); + + transcript->send_to_verifier("circuit_size", circuit_size); +} + +/** + * @brief Compute commitments to all of the witness wires (apart from the logderivative inverse wires) + * + */ +void SpikeProver::execute_wire_commitments_round() +{ + + // Commit to all polynomials (apart from logderivative inverse polynomials, which are committed to in the later + // logderivative phase) + witness_commitments.Spike_kernel_inputs = commitment_key->commit(key->Spike_kernel_inputs); + witness_commitments.Spike_x = commitment_key->commit(key->Spike_x); + + // Send all commitments to the verifier + transcript->send_to_verifier(commitment_labels.Spike_kernel_inputs, witness_commitments.Spike_kernel_inputs); + transcript->send_to_verifier(commitment_labels.Spike_x, witness_commitments.Spike_x); +} + +void SpikeProver::execute_log_derivative_inverse_round() {} + +/** + * @brief Run Sumcheck resulting in u = (u_1,...,u_d) challenges and all evaluations at u being calculated. 
+ * + */ +void SpikeProver::execute_relation_check_rounds() +{ + using Sumcheck = SumcheckProver; + + auto sumcheck = Sumcheck(key->circuit_size, transcript); + + FF alpha = transcript->template get_challenge("Sumcheck:alpha"); + std::vector gate_challenges(numeric::get_msb(key->circuit_size)); + + for (size_t idx = 0; idx < gate_challenges.size(); idx++) { + gate_challenges[idx] = transcript->template get_challenge("Sumcheck:gate_challenge_" + std::to_string(idx)); + } + sumcheck_output = sumcheck.prove(prover_polynomials, relation_parameters, alpha, gate_challenges); +} + +/** + * @brief Execute the ZeroMorph protocol to prove the multilinear evaluations produced by Sumcheck + * @details See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the unrolled protocol. + * + * */ +void SpikeProver::execute_zeromorph_rounds() +{ + ZeroMorph::prove(prover_polynomials.get_unshifted(), + prover_polynomials.get_to_be_shifted(), + sumcheck_output.claimed_evaluations.get_unshifted(), + sumcheck_output.claimed_evaluations.get_shifted(), + sumcheck_output.challenge, + commitment_key, + transcript); +} + +HonkProof& SpikeProver::export_proof() +{ + proof = transcript->proof_data; + return proof; +} + +HonkProof& SpikeProver::construct_proof() +{ + // Add circuit size public input size and public inputs to transcript. + execute_preamble_round(); + + // Compute wire commitments + execute_wire_commitments_round(); + + // Compute sorted list accumulator and commitment + + // Fiat-Shamir: alpha + // Run sumcheck subprotocol. 
+ execute_relation_check_rounds(); + + // Fiat-Shamir: rho, y, x, z + // Execute Zeromorph multilinear PCS + execute_zeromorph_rounds(); + + return export_proof(); +} + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/spike_prover.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/spike_prover.hpp new file mode 100644 index 00000000000..e80b92f384f --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/generated/spike_prover.hpp @@ -0,0 +1,64 @@ + + +#pragma once +#include "barretenberg/commitment_schemes/zeromorph/zeromorph.hpp" +#include "barretenberg/plonk/proof_system/types/proof.hpp" +#include "barretenberg/relations/relation_parameters.hpp" +#include "barretenberg/sumcheck/sumcheck_output.hpp" +#include "barretenberg/transcript/transcript.hpp" + +#include "barretenberg/vm/generated/spike_flavor.hpp" + +namespace bb { + +class SpikeProver { + + using Flavor = SpikeFlavor; + using FF = Flavor::FF; + using PCS = Flavor::PCS; + using PCSCommitmentKey = Flavor::CommitmentKey; + using ProvingKey = Flavor::ProvingKey; + using Polynomial = Flavor::Polynomial; + using ProverPolynomials = Flavor::ProverPolynomials; + using CommitmentLabels = Flavor::CommitmentLabels; + using Transcript = Flavor::Transcript; + + public: + explicit SpikeProver(std::shared_ptr input_key, std::shared_ptr commitment_key); + + void execute_preamble_round(); + void execute_wire_commitments_round(); + void execute_log_derivative_inverse_round(); + void execute_relation_check_rounds(); + void execute_zeromorph_rounds(); + + HonkProof& export_proof(); + HonkProof& construct_proof(); + + std::shared_ptr transcript = std::make_shared(); + + std::vector public_inputs; + + bb::RelationParameters relation_parameters; + + std::shared_ptr key; + + // Container for spans of all polynomials required by the prover (i.e. all multivariates evaluated by Sumcheck). 
+ ProverPolynomials prover_polynomials; + + CommitmentLabels commitment_labels; + typename Flavor::WitnessCommitments witness_commitments; + + Polynomial quotient_W; + + SumcheckOutput sumcheck_output; + + std::shared_ptr commitment_key; + + using ZeroMorph = ZeroMorphProver_; + + private: + HonkProof proof; +}; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/spike_verifier.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/spike_verifier.cpp new file mode 100644 index 00000000000..52660b91ed9 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/generated/spike_verifier.cpp @@ -0,0 +1,110 @@ + + +#include "./spike_verifier.hpp" +#include "barretenberg/commitment_schemes/zeromorph/zeromorph.hpp" +#include "barretenberg/numeric/bitop/get_msb.hpp" +#include "barretenberg/polynomials/polynomial.hpp" +#include "barretenberg/transcript/transcript.hpp" + +namespace bb { + +SpikeVerifier::SpikeVerifier(std::shared_ptr verifier_key) + : key(verifier_key) +{} + +SpikeVerifier::SpikeVerifier(SpikeVerifier&& other) noexcept + : key(std::move(other.key)) + , pcs_verification_key(std::move(other.pcs_verification_key)) +{} + +SpikeVerifier& SpikeVerifier::operator=(SpikeVerifier&& other) noexcept +{ + key = other.key; + pcs_verification_key = (std::move(other.pcs_verification_key)); + commitments.clear(); + return *this; +} + +using FF = SpikeFlavor::FF; + +// Evaluate the given public input column over the multivariate challenge points +[[maybe_unused]] FF evaluate_public_input_column(std::vector points, std::vector challenges) +{ + Polynomial polynomial(points); + return polynomial.evaluate_mle(challenges); +} + +/** + * @brief This function verifies an Spike Honk proof for given program settings. 
+ * + */ +bool SpikeVerifier::verify_proof(const HonkProof& proof, const std::vector& public_inputs) +{ + using Flavor = SpikeFlavor; + using FF = Flavor::FF; + using Commitment = Flavor::Commitment; + // using PCS = Flavor::PCS; + // using ZeroMorph = ZeroMorphVerifier_; + using VerifierCommitments = Flavor::VerifierCommitments; + using CommitmentLabels = Flavor::CommitmentLabels; + + RelationParameters relation_parameters; + + transcript = std::make_shared(proof); + + VerifierCommitments commitments{ key }; + CommitmentLabels commitment_labels; + + const auto circuit_size = transcript->template receive_from_prover("circuit_size"); + + if (circuit_size != key->circuit_size) { + return false; + } + + // Get commitments to VM wires + commitments.Spike_kernel_inputs = + transcript->template receive_from_prover(commitment_labels.Spike_kernel_inputs); + commitments.Spike_x = transcript->template receive_from_prover(commitment_labels.Spike_x); + + // Get commitments to inverses + + // Execute Sumcheck Verifier + const size_t log_circuit_size = numeric::get_msb(circuit_size); + auto sumcheck = SumcheckVerifier(log_circuit_size, transcript); + + FF alpha = transcript->template get_challenge("Sumcheck:alpha"); + + auto gate_challenges = std::vector(log_circuit_size); + for (size_t idx = 0; idx < log_circuit_size; idx++) { + gate_challenges[idx] = transcript->template get_challenge("Sumcheck:gate_challenge_" + std::to_string(idx)); + } + + auto [multivariate_challenge, claimed_evaluations, sumcheck_verified] = + sumcheck.verify(relation_parameters, alpha, gate_challenges); + + // If Sumcheck did not verify, return false + if (sumcheck_verified.has_value() && !sumcheck_verified.value()) { + return false; + } + + FF public_column_evaluation = evaluate_public_input_column(public_inputs, multivariate_challenge); + if (public_column_evaluation != claimed_evaluations.Spike_kernel_inputs) { + return false; + } + + // Execute ZeroMorph rounds. 
See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the + // unrolled protocol. + // NOTE: temporarily disabled - facing integration issues + // auto pairing_points = ZeroMorph::verify(commitments.get_unshifted(), + // commitments.get_to_be_shifted(), + // claimed_evaluations.get_unshifted(), + // claimed_evaluations.get_shifted(), + // multivariate_challenge, + // transcript); + + // auto verified = pcs_verification_key->pairing_check(pairing_points[0], pairing_points[1]); + // return sumcheck_verified.value() && verified; + return sumcheck_verified.value(); +} + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/spike_verifier.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/spike_verifier.hpp new file mode 100644 index 00000000000..c4fb767455a --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/generated/spike_verifier.hpp @@ -0,0 +1,33 @@ + + +#pragma once +#include "barretenberg/plonk/proof_system/types/proof.hpp" +#include "barretenberg/sumcheck/sumcheck.hpp" +#include "barretenberg/vm/generated/spike_flavor.hpp" + +namespace bb { +class SpikeVerifier { + using Flavor = SpikeFlavor; + using FF = Flavor::FF; + using Commitment = Flavor::Commitment; + using VerificationKey = Flavor::VerificationKey; + using VerifierCommitmentKey = Flavor::VerifierCommitmentKey; + using Transcript = Flavor::Transcript; + + public: + explicit SpikeVerifier(std::shared_ptr verifier_key = nullptr); + SpikeVerifier(SpikeVerifier&& other) noexcept; + SpikeVerifier(const SpikeVerifier& other) = delete; + + SpikeVerifier& operator=(const SpikeVerifier& other) = delete; + SpikeVerifier& operator=(SpikeVerifier&& other) noexcept; + + bool verify_proof(const HonkProof& proof, const std::vector& public_inputs); + + std::shared_ptr key; + std::map commitments; + std::shared_ptr pcs_verification_key; + std::shared_ptr transcript; +}; + +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/spike.test.cpp 
b/barretenberg/cpp/src/barretenberg/vm/tests/spike.test.cpp new file mode 100644 index 00000000000..1b30f1f4a6c --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm/tests/spike.test.cpp @@ -0,0 +1,73 @@ +#include "barretenberg/crypto/generators/generator_data.hpp" +#include "barretenberg/numeric/random/engine.hpp" +#include "barretenberg/numeric/uint256/uint256.hpp" +#include "barretenberg/vm/generated/spike_circuit_builder.hpp" +#include "barretenberg/vm/generated/spike_flavor.hpp" + +// Proofs +#include "barretenberg/vm/generated/spike_composer.hpp" +#include "barretenberg/vm/generated/spike_prover.hpp" +#include "barretenberg/vm/generated/spike_verifier.hpp" + +#include + +using namespace bb; +namespace { +auto& engine = numeric::get_debug_randomness(); +} + +class SpikePublicColumnsTests : public ::testing::Test { + protected: + // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. + void SetUp() override { srs::init_crs_factory("../srs_db/ignition"); }; +}; + +// Test file for testing public inputs evaluations are the same in the verifier and in sumcheck +// +// The first test runs the verification with the same public inputs in the verifier and in the prover, prover inputs are +// set in the below function The second failure test runs the verification with the different public inputs +bool verify_spike_with_public_with_public_inputs(std::vector verifier_public__inputs) +{ + using Builder = SpikeCircuitBuilder; + using Row = Builder::Row; + Builder circuit_builder; + + srs::init_crs_factory("../srs_db/ignition"); + + const size_t circuit_size = 16; + std::vector rows; + + // Add to the public input column that is increasing + for (size_t i = 0; i < circuit_size; i++) { + // Make sure the external and trace public inputs are the same + Row row{ .Spike_kernel_inputs = i + 1 }; + rows.push_back(row); + } + + circuit_builder.set_trace(std::move(rows)); + + // Create a prover and verifier + auto composer = 
SpikeComposer(); + auto prover = composer.create_prover(circuit_builder); + HonkProof proof = prover.construct_proof(); + + auto verifier = composer.create_verifier(circuit_builder); + + return verifier.verify_proof(proof, verifier_public__inputs); +} + +TEST(SpikePublicColumnsTests, VerificationSuccess) +{ + std::vector public_inputs = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 }; + bool verified = verify_spike_with_public_with_public_inputs(public_inputs); + ASSERT_TRUE(verified); +} + +TEST(SpikePublicColumnsTests, VerificationFailure) +{ + std::vector public_inputs = { + 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160 + }; + bool verified = verify_spike_with_public_with_public_inputs(public_inputs); + ASSERT_FALSE(verified); +} \ No newline at end of file From 6374a328859eefed0346a3c12b3500dd960e0884 Mon Sep 17 00:00:00 2001 From: Ilyas Ridhuan Date: Fri, 10 May 2024 12:30:46 +0100 Subject: [PATCH 41/43] feat(avm-simulator): add to_radix_le instruction (#6308) --- avm-transpiler/src/opcodes.rs | 4 + .../barretenberg/vm/avm_trace/avm_opcode.hpp | 3 + .../public-vm/gen/_instruction-set.mdx | 185 +++++++++++------- .../InstructionSet/InstructionSet.js | 33 ++++ yarn-project/simulator/src/avm/avm_gas.ts | 2 + .../src/avm/opcodes/conversion.test.ts | 90 +++++++++ .../simulator/src/avm/opcodes/conversion.ts | 58 ++++++ .../instruction_serialization.ts | 2 + 8 files changed, 308 insertions(+), 69 deletions(-) create mode 100644 yarn-project/simulator/src/avm/opcodes/conversion.test.ts create mode 100644 yarn-project/simulator/src/avm/opcodes/conversion.ts diff --git a/avm-transpiler/src/opcodes.rs b/avm-transpiler/src/opcodes.rs index 2b63c8e987e..206325cfeff 100644 --- a/avm-transpiler/src/opcodes.rs +++ b/avm-transpiler/src/opcodes.rs @@ -69,6 +69,8 @@ pub enum AvmOpcode { POSEIDON2, SHA256, // temp - may be removed, but alot of contracts rely on it PEDERSEN, // temp - may be removed, but alot of contracts rely on it + // 
Conversions + TORADIXLE, } impl AvmOpcode { @@ -155,6 +157,8 @@ impl AvmOpcode { AvmOpcode::POSEIDON2 => "POSEIDON2", AvmOpcode::SHA256 => "SHA256 ", AvmOpcode::PEDERSEN => "PEDERSEN", + // Conversions + AvmOpcode::TORADIXLE => "TORADIXLE", } } } diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_opcode.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_opcode.hpp index 2a4dd1138e9..21423838f43 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_opcode.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_opcode.hpp @@ -96,6 +96,9 @@ enum class OpCode : uint8_t { KECCAK, POSEIDON2, + // Conversions + TORADIXLE, + // Sentinel LAST_OPCODE_SENTINEL, }; diff --git a/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx b/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx index a13fb19a0f5..1c275e77e92 100644 --- a/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx +++ b/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx @@ -154,98 +154,105 @@ Click on an instruction name to jump to its section. 
} - 0x14 [`CONTRACTCALLDEPTH`](#isa-section-contractcalldepth) + 0x14 [`TRANSACTIONFEE`](#isa-section-transactionfee) + Get the computed transaction fee during teardown phase, zero otherwise + { + `M[dstOffset] = context.environment.transactionFee` + } + + + 0x15 [`CONTRACTCALLDEPTH`](#isa-section-contractcalldepth) Get how many contract calls deep the current call context is { `M[dstOffset] = context.environment.contractCallDepth` } - 0x15 [`CHAINID`](#isa-section-chainid) + 0x16 [`CHAINID`](#isa-section-chainid) Get this rollup's L1 chain ID { `M[dstOffset] = context.environment.globals.chainId` } - 0x16 [`VERSION`](#isa-section-version) + 0x17 [`VERSION`](#isa-section-version) Get this rollup's L2 version ID { `M[dstOffset] = context.environment.globals.version` } - 0x17 [`BLOCKNUMBER`](#isa-section-blocknumber) + 0x18 [`BLOCKNUMBER`](#isa-section-blocknumber) Get this L2 block's number { `M[dstOffset] = context.environment.globals.blocknumber` } - 0x18 [`TIMESTAMP`](#isa-section-timestamp) + 0x19 [`TIMESTAMP`](#isa-section-timestamp) Get this L2 block's timestamp { `M[dstOffset] = context.environment.globals.timestamp` } - 0x19 [`COINBASE`](#isa-section-coinbase) + 0x1a [`COINBASE`](#isa-section-coinbase) Get the block's beneficiary address { `M[dstOffset] = context.environment.globals.coinbase` } - 0x1a [`BLOCKL2GASLIMIT`](#isa-section-blockl2gaslimit) + 0x1b [`BLOCKL2GASLIMIT`](#isa-section-blockl2gaslimit) Total amount of "L2 gas" that a block can consume { `M[dstOffset] = context.environment.globals.l2GasLimit` } - 0x1b [`BLOCKDAGASLIMIT`](#isa-section-blockdagaslimit) + 0x1c [`BLOCKDAGASLIMIT`](#isa-section-blockdagaslimit) Total amount of "DA gas" that a block can consume { `M[dstOffset] = context.environment.globals.daGasLimit` } - 0x1c [`CALLDATACOPY`](#isa-section-calldatacopy) + 0x1d [`CALLDATACOPY`](#isa-section-calldatacopy) Copy calldata into memory { `M[dstOffset:dstOffset+copySize] = context.environment.calldata[cdOffset:cdOffset+copySize]` } - 
0x1d [`L2GASLEFT`](#isa-section-l2gasleft) + 0x1e [`L2GASLEFT`](#isa-section-l2gasleft) Remaining "L2 gas" for this call (after this instruction) { `M[dstOffset] = context.MachineState.l2GasLeft` } - 0x1e [`DAGASLEFT`](#isa-section-dagasleft) + 0x1f [`DAGASLEFT`](#isa-section-dagasleft) Remaining "DA gas" for this call (after this instruction) { `M[dstOffset] = context.machineState.daGasLeft` } - 0x1f [`JUMP`](#isa-section-jump) + 0x20 [`JUMP`](#isa-section-jump) Jump to a location in the bytecode { `context.machineState.pc = loc` } - 0x20 [`JUMPI`](#isa-section-jumpi) + 0x21 [`JUMPI`](#isa-section-jumpi) Conditionally jump to a location in the bytecode { `context.machineState.pc = M[condOffset] > 0 ? loc : context.machineState.pc` } - 0x21 [`INTERNALCALL`](#isa-section-internalcall) + 0x22 [`INTERNALCALL`](#isa-section-internalcall) Make an internal call. Push the current PC to the internal call stack and jump to the target location. {`context.machineState.internalCallStack.push(context.machineState.pc) @@ -253,49 +260,49 @@ context.machineState.pc = loc`} - 0x22 [`INTERNALRETURN`](#isa-section-internalreturn) + 0x23 [`INTERNALRETURN`](#isa-section-internalreturn) Return from an internal call. Pop from the internal call stack and jump to the popped location. { `context.machineState.pc = context.machineState.internalCallStack.pop()` } - 0x23 [`SET`](#isa-section-set) + 0x24 [`SET`](#isa-section-set) Set a memory word from a constant in the bytecode { `M[dstOffset] = const` } - 0x24 [`MOV`](#isa-section-mov) + 0x25 [`MOV`](#isa-section-mov) Move a word from source memory location to destination { `M[dstOffset] = M[srcOffset]` } - 0x25 [`CMOV`](#isa-section-cmov) + 0x26 [`CMOV`](#isa-section-cmov) Move a word (conditionally chosen) from one memory location to another (`d = cond > 0 ? a : b`) { `M[dstOffset] = M[condOffset] > 0 ? 
M[aOffset] : M[bOffset]` } - 0x26 [`SLOAD`](#isa-section-sload) + 0x27 [`SLOAD`](#isa-section-sload) Load a word from this contract's persistent public storage. Zero is loaded for unwritten slots. {`M[dstOffset] = S[M[slotOffset]]`} - 0x27 [`SSTORE`](#isa-section-sstore) + 0x28 [`SSTORE`](#isa-section-sstore) Write a word to this contract's persistent public storage {`S[M[slotOffset]] = M[srcOffset]`} - 0x28 [`NOTEHASHEXISTS`](#isa-section-notehashexists) + 0x29 [`NOTEHASHEXISTS`](#isa-section-notehashexists) Check whether a note hash exists in the note hash tree (as of the start of the current block) {`exists = context.worldState.noteHashes.has({ @@ -306,7 +313,7 @@ M[existsOffset] = exists`} - 0x29 [`EMITNOTEHASH`](#isa-section-emitnotehash) + 0x2a [`EMITNOTEHASH`](#isa-section-emitnotehash) Emit a new note hash to be inserted into the note hash tree {`context.worldState.noteHashes.append( @@ -315,7 +322,7 @@ M[existsOffset] = exists`} - 0x2a [`NULLIFIEREXISTS`](#isa-section-nullifierexists) + 0x2b [`NULLIFIEREXISTS`](#isa-section-nullifierexists) Check whether a nullifier exists in the nullifier tree (including nullifiers from earlier in the current transaction or from earlier in the current block) {`exists = pendingNullifiers.has(M[addressOffset], M[nullifierOffset]) || context.worldState.nullifiers.has( @@ -325,7 +332,7 @@ M[existsOffset] = exists`} - 0x2b [`EMITNULLIFIER`](#isa-section-emitnullifier) + 0x2c [`EMITNULLIFIER`](#isa-section-emitnullifier) Emit a new nullifier to be inserted into the nullifier tree {`context.worldState.nullifiers.append( @@ -334,7 +341,7 @@ M[existsOffset] = exists`} - 0x2c [`L1TOL2MSGEXISTS`](#isa-section-l1tol2msgexists) + 0x2d [`L1TOL2MSGEXISTS`](#isa-section-l1tol2msgexists) Check if a message exists in the L1-to-L2 message tree {`exists = context.worldState.l1ToL2Messages.has({ @@ -344,7 +351,7 @@ M[existsOffset] = exists`} - 0x2d [`HEADERMEMBER`](#isa-section-headermember) + 0x2e [`HEADERMEMBER`](#isa-section-headermember) 
Check if a header exists in the [archive tree](../state/archive) and retrieve the specified member if so {`exists = context.worldState.header.has({ @@ -357,7 +364,7 @@ if exists: - 0x2e [`GETCONTRACTINSTANCE`](#isa-section-getcontractinstance) + 0x2f [`GETCONTRACTINSTANCE`](#isa-section-getcontractinstance) Copies contract instance data to memory {`M[dstOffset:dstOffset+CONTRACT_INSTANCE_SIZE+1] = [ @@ -372,7 +379,7 @@ if exists: - 0x2f [`EMITUNENCRYPTEDLOG`](#isa-section-emitunencryptedlog) + 0x30 [`EMITUNENCRYPTEDLOG`](#isa-section-emitunencryptedlog) Emit an unencrypted log {`context.accruedSubstate.unencryptedLogs.append( @@ -385,7 +392,7 @@ if exists: - 0x30 [`SENDL2TOL1MSG`](#isa-section-sendl2tol1msg) + 0x31 [`SENDL2TOL1MSG`](#isa-section-sendl2tol1msg) Send an L2-to-L1 message {`context.accruedSubstate.sentL2ToL1Messages.append( @@ -398,7 +405,7 @@ if exists: - 0x31 [`CALL`](#isa-section-call) + 0x32 [`CALL`](#isa-section-call) Call into another contract {`// instr.args are { gasOffset, addrOffset, argsOffset, retOffset, retSize } @@ -412,7 +419,7 @@ updateContextAfterNestedCall(context, instr.args, nestedContext)`} - 0x32 [`STATICCALL`](#isa-section-staticcall) + 0x33 [`STATICCALL`](#isa-section-staticcall) Call into another contract, disallowing World State and Accrued Substate modifications {`// instr.args are { gasOffset, addrOffset, argsOffset, retOffset, retSize } @@ -426,7 +433,7 @@ updateContextAfterNestedCall(context, instr.args, nestedContext)`} - 0x33 [`DELEGATECALL`](#isa-section-delegatecall) + 0x34 [`DELEGATECALL`](#isa-section-delegatecall) Call into another contract, but keep the caller's `sender` and `storageAddress` {`// instr.args are { gasOffset, addrOffset, argsOffset, retOffset, retSize } @@ -440,7 +447,7 @@ updateContextAfterNestedCall(context, instr.args, nestedContext)`} - 0x34 [`RETURN`](#isa-section-return) + 0x35 [`RETURN`](#isa-section-return) Halt execution within this context (without revert), optionally returning some data 
{`context.contractCallResults.output = M[retOffset:retOffset+retSize] @@ -448,7 +455,7 @@ halt`} - 0x35 [`REVERT`](#isa-section-revert) + 0x36 [`REVERT`](#isa-section-revert) Halt execution within this context as `reverted`, optionally returning some data {`context.contractCallResults.output = M[retOffset:retOffset+retSize] @@ -456,6 +463,11 @@ context.contractCallResults.reverted = true halt`} + + 0x37 [`TORADIXLE`](#isa-section-to_radix_le) + Convert a word to an array of limbs in little-endian radix form + TBD: Storage of limbs and if T[dstOffset] is constrained to U8 + @@ -862,12 +874,28 @@ Get the fee to be paid per "DA gas" - constant for entire transaction [![](/img/protocol-specs/public-vm/bit-formats/FEEPERDAGAS.png)](/img/protocol-specs/public-vm/bit-formats/FEEPERDAGAS.png) +### `TRANSACTIONFEE` +Get the computed transaction fee during teardown phase, zero otherwise + +[See in table.](#isa-table-transactionfee) + +- **Opcode**: 0x14 +- **Category**: Execution Environment +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result +- **Expression**: `M[dstOffset] = context.environment.transactionFee` +- **Tag updates**: `T[dstOffset] = u32` +- **Bit-size**: 56 + + ### `CONTRACTCALLDEPTH` Get how many contract calls deep the current call context is [See in table.](#isa-table-contractcalldepth) -- **Opcode**: 0x14 +- **Opcode**: 0x15 - **Category**: Execution Environment - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
@@ -885,7 +913,7 @@ Get this rollup's L1 chain ID [See in table.](#isa-table-chainid) -- **Opcode**: 0x15 +- **Opcode**: 0x16 - **Category**: Execution Environment - Globals - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -902,7 +930,7 @@ Get this rollup's L2 version ID [See in table.](#isa-table-version) -- **Opcode**: 0x16 +- **Opcode**: 0x17 - **Category**: Execution Environment - Globals - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -919,7 +947,7 @@ Get this L2 block's number [See in table.](#isa-table-blocknumber) -- **Opcode**: 0x17 +- **Opcode**: 0x18 - **Category**: Execution Environment - Globals - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -936,7 +964,7 @@ Get this L2 block's timestamp [See in table.](#isa-table-timestamp) -- **Opcode**: 0x18 +- **Opcode**: 0x19 - **Category**: Execution Environment - Globals - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -953,7 +981,7 @@ Get the block's beneficiary address [See in table.](#isa-table-coinbase) -- **Opcode**: 0x19 +- **Opcode**: 0x1a - **Category**: Execution Environment - Globals - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 
Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -970,7 +998,7 @@ Total amount of "L2 gas" that a block can consume [See in table.](#isa-table-blockl2gaslimit) -- **Opcode**: 0x1a +- **Opcode**: 0x1b - **Category**: Execution Environment - Globals - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -987,7 +1015,7 @@ Total amount of "DA gas" that a block can consume [See in table.](#isa-table-blockdagaslimit) -- **Opcode**: 0x1b +- **Opcode**: 0x1c - **Category**: Execution Environment - Globals - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1004,7 +1032,7 @@ Copy calldata into memory [See in table.](#isa-table-calldatacopy) -- **Opcode**: 0x1c +- **Opcode**: 0x1d - **Category**: Execution Environment - Calldata - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1024,7 +1052,7 @@ Remaining "L2 gas" for this call (after this instruction) [See in table.](#isa-table-l2gasleft) -- **Opcode**: 0x1d +- **Opcode**: 0x1e - **Category**: Machine State - Gas - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
@@ -1041,7 +1069,7 @@ Remaining "DA gas" for this call (after this instruction) [See in table.](#isa-table-dagasleft) -- **Opcode**: 0x1e +- **Opcode**: 0x1f - **Category**: Machine State - Gas - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1058,7 +1086,7 @@ Jump to a location in the bytecode [See in table.](#isa-table-jump) -- **Opcode**: 0x1f +- **Opcode**: 0x20 - **Category**: Machine State - Control Flow - **Args**: - **loc**: target location to jump to @@ -1073,7 +1101,7 @@ Conditionally jump to a location in the bytecode [See in table.](#isa-table-jumpi) -- **Opcode**: 0x20 +- **Opcode**: 0x21 - **Category**: Machine State - Control Flow - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1091,7 +1119,7 @@ Make an internal call. Push the current PC to the internal call stack and jump t [See in table.](#isa-table-internalcall) -- **Opcode**: 0x21 +- **Opcode**: 0x22 - **Category**: Machine State - Control Flow - **Args**: - **loc**: target location to jump/call to @@ -1103,14 +1131,13 @@ context.machineState.pc = loc`} - **Details**: Target location is an immediate value (a constant in the bytecode). - **Bit-size**: 48 -[![](/img/protocol-specs/public-vm/bit-formats/INTERNALCALL.png)](/img/protocol-specs/public-vm/bit-formats/INTERNALCALL.png) ### `INTERNALRETURN` Return from an internal call. Pop from the internal call stack and jump to the popped location. 
[See in table.](#isa-table-internalreturn) -- **Opcode**: 0x22 +- **Opcode**: 0x23 - **Category**: Machine State - Control Flow - **Expression**: `context.machineState.pc = context.machineState.internalCallStack.pop()` - **Bit-size**: 16 @@ -1122,7 +1149,7 @@ Set a memory word from a constant in the bytecode [See in table.](#isa-table-set) -- **Opcode**: 0x23 +- **Opcode**: 0x24 - **Category**: Machine State - Memory - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1142,7 +1169,7 @@ Move a word from source memory location to destination [See in table.](#isa-table-mov) -- **Opcode**: 0x24 +- **Opcode**: 0x25 - **Category**: Machine State - Memory - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1160,7 +1187,7 @@ Move a word (conditionally chosen) from one memory location to another (`d = con [See in table.](#isa-table-cmov) -- **Opcode**: 0x25 +- **Opcode**: 0x26 - **Category**: Machine State - Memory - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1181,7 +1208,7 @@ Load a word from this contract's persistent public storage. Zero is loaded for u [See in table.](#isa-table-sload) -- **Opcode**: 0x26 +- **Opcode**: 0x27 - **Category**: World State - Public Storage - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. 
Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1226,7 +1253,7 @@ Write a word to this contract's persistent public storage [See in table.](#isa-table-sstore) -- **Opcode**: 0x27 +- **Opcode**: 0x28 - **Category**: World State - Public Storage - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1266,7 +1293,7 @@ Check whether a note hash exists in the note hash tree (as of the start of the c [See in table.](#isa-table-notehashexists) -- **Opcode**: 0x28 +- **Opcode**: 0x29 - **Category**: World State - Notes & Nullifiers - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1304,7 +1331,7 @@ Emit a new note hash to be inserted into the note hash tree [See in table.](#isa-table-emitnotehash) -- **Opcode**: 0x29 +- **Opcode**: 0x2a - **Category**: World State - Notes & Nullifiers - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1336,7 +1363,7 @@ Check whether a nullifier exists in the nullifier tree (including nullifiers fro [See in table.](#isa-table-nullifierexists) -- **Opcode**: 0x2a +- **Opcode**: 0x2b - **Category**: World State - Notes & Nullifiers - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
@@ -1373,7 +1400,7 @@ Emit a new nullifier to be inserted into the nullifier tree [See in table.](#isa-table-emitnullifier) -- **Opcode**: 0x2b +- **Opcode**: 0x2c - **Category**: World State - Notes & Nullifiers - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1405,7 +1432,7 @@ Check if a message exists in the L1-to-L2 message tree [See in table.](#isa-table-l1tol2msgexists) -- **Opcode**: 0x2c +- **Opcode**: 0x2d - **Category**: World State - Messaging - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1444,7 +1471,7 @@ Check if a header exists in the [archive tree](../state/archive) and retrieve th [See in table.](#isa-table-headermember) -- **Opcode**: 0x2d +- **Opcode**: 0x2e - **Category**: World State - Archive Tree & Headers - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1487,7 +1514,7 @@ Copies contract instance data to memory [See in table.](#isa-table-getcontractinstance) -- **Opcode**: 0x2e +- **Opcode**: 0x2f - **Category**: Other - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
@@ -1517,7 +1544,7 @@ Emit an unencrypted log [See in table.](#isa-table-emitunencryptedlog) -- **Opcode**: 0x2f +- **Opcode**: 0x30 - **Category**: Accrued Substate - Logging - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1544,7 +1571,7 @@ Send an L2-to-L1 message [See in table.](#isa-table-sendl2tol1msg) -- **Opcode**: 0x30 +- **Opcode**: 0x31 - **Category**: Accrued Substate - Messaging - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1570,7 +1597,7 @@ Call into another contract [See in table.](#isa-table-call) -- **Opcode**: 0x31 +- **Opcode**: 0x32 - **Category**: Control Flow - Contract Calls - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1617,7 +1644,7 @@ Call into another contract, disallowing World State and Accrued Substate modific [See in table.](#isa-table-staticcall) -- **Opcode**: 0x32 +- **Opcode**: 0x33 - **Category**: Control Flow - Contract Calls - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
@@ -1661,7 +1688,7 @@ Call into another contract, but keep the caller's `sender` and `storageAddress` [See in table.](#isa-table-delegatecall) -- **Opcode**: 0x33 +- **Opcode**: 0x34 - **Category**: Control Flow - Contract Calls - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1705,7 +1732,7 @@ Halt execution within this context (without revert), optionally returning some d [See in table.](#isa-table-return) -- **Opcode**: 0x34 +- **Opcode**: 0x35 - **Category**: Control Flow - Contract Calls - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1727,7 +1754,7 @@ Halt execution within this context as `reverted`, optionally returning some data [See in table.](#isa-table-revert) -- **Opcode**: 0x35 +- **Opcode**: 0x36 - **Category**: Control Flow - Contract Calls - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. @@ -1744,3 +1771,23 @@ halt`} - **Bit-size**: 88 [![](/img/protocol-specs/public-vm/bit-formats/REVERT.png)](/img/protocol-specs/public-vm/bit-formats/REVERT.png) + +### `TORADIXLE` +Convert a word to an array of limbs in little-endian radix form + +[See in table.](#isa-table-to_radix_le) + +- **Opcode**: 0x37 +- **Category**: Conversions +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. 
Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **srcOffset**: memory offset of word to convert. + - **dstOffset**: memory offset specifying where the first limb of the radix-conversion result is stored. + - **radix**: the maximum bit-size of each limb. + - **numLimbs**: the number of limbs the word will be converted into. +- **Expression**: TBD: Storage of limbs and if T[dstOffset] is constrained to U8 +- **Details**: The limbs will be stored in a contiguous memory block starting at `dstOffset`. +- **Tag checks**: `T[srcOffset] == field` +- **Bit-size**: 152 + diff --git a/docs/src/preprocess/InstructionSet/InstructionSet.js b/docs/src/preprocess/InstructionSet/InstructionSet.js index a94e3b933ab..fac2b3e0239 100644 --- a/docs/src/preprocess/InstructionSet/InstructionSet.js +++ b/docs/src/preprocess/InstructionSet/InstructionSet.js @@ -1537,6 +1537,39 @@ halt "Tag checks": "", "Tag updates": "", }, + { + id: "to_radix_le", + Name: "`TORADIXLE`", + Category: "Conversions", + Flags: [{ name: "indirect", description: INDIRECT_FLAG_DESCRIPTION }], + Args: [ + { + name: "srcOffset", + description: "memory offset of word to convert.", + }, + { + name: "dstOffset", + description: "memory offset specifying where the first limb of the radix-conversion result is stored.", + }, + { + name: "radix", + description: "the maximum bit-size of each limb.", + mode: "immediate", + type: "u32", + }, + { + name: "numLimbs", + description: "the number of limbs the word will be converted into.", + type: "u32", + mode: "immediate", + } + ], + + Expression: `TBD: Storage of limbs and if T[dstOffset] is constrained to U8`, + Summary: "Convert a word to an array of limbs in little-endian radix form", + Details: "The limbs will be stored in a contiguous memory block starting at `dstOffset`.", + "Tag checks": "`T[srcOffset] == field`", + } ]; const INSTRUCTION_SET = INSTRUCTION_SET_RAW.map((instr) => { instr["Bit-size"] 
= instructionSize(instr); diff --git a/yarn-project/simulator/src/avm/avm_gas.ts b/yarn-project/simulator/src/avm/avm_gas.ts index 8f140ed03e1..b16b171212f 100644 --- a/yarn-project/simulator/src/avm/avm_gas.ts +++ b/yarn-project/simulator/src/avm/avm_gas.ts @@ -123,6 +123,8 @@ export const GasCosts: Record = { [Opcode.POSEIDON2]: TemporaryDefaultGasCost, [Opcode.SHA256]: TemporaryDefaultGasCost, // temp - may be removed, but alot of contracts rely on i: TemporaryDefaultGasCost, [Opcode.PEDERSEN]: TemporaryDefaultGasCost, // temp - may be removed, but alot of contracts rely on i: TemporaryDefaultGasCost,t + // Conversions + [Opcode.TORADIXLE]: TemporaryDefaultGasCost, }; /** Returns the fixed base gas cost for a given opcode, or throws if set to dynamic. */ diff --git a/yarn-project/simulator/src/avm/opcodes/conversion.test.ts b/yarn-project/simulator/src/avm/opcodes/conversion.test.ts new file mode 100644 index 00000000000..d3278b0871f --- /dev/null +++ b/yarn-project/simulator/src/avm/opcodes/conversion.test.ts @@ -0,0 +1,90 @@ +import { type AvmContext } from '../avm_context.js'; +import { Field, type Uint8, Uint32 } from '../avm_memory_types.js'; +import { initContext } from '../fixtures/index.js'; +import { Addressing, AddressingMode } from './addressing_mode.js'; +import { ToRadixLE } from './conversion.js'; + +describe('Conversion Opcodes', () => { + let context: AvmContext; + + beforeEach(async () => { + context = initContext(); + }); + + describe('To Radix LE', () => { + it('Should (de)serialize correctly', () => { + const buf = Buffer.from([ + ToRadixLE.opcode, // opcode + 1, // indirect + ...Buffer.from('12345678', 'hex'), // inputStateOffset + ...Buffer.from('23456789', 'hex'), // outputStateOffset + ...Buffer.from('00000002', 'hex'), // radix + ...Buffer.from('00000100', 'hex'), // numLimbs + ]); + const inst = new ToRadixLE( + /*indirect=*/ 1, + /*srcOffset=*/ 0x12345678, + /*dstOffset=*/ 0x23456789, + /*radix=*/ 2, + /*numLimbs=*/ 256, + ); + + 
expect(ToRadixLE.deserialize(buf)).toEqual(inst); + expect(inst.serialize()).toEqual(buf); + }); + + it('Should decompose correctly - direct', async () => { + const arg = new Field(0b1011101010100n); + const indirect = 0; + const srcOffset = 0; + const radix = 2; // Bit decomposition + const numLimbs = 10; // only the first 10 bits + const dstOffset = 20; + context.machineState.memory.set(srcOffset, arg); + + await new ToRadixLE(indirect, srcOffset, dstOffset, radix, numLimbs).execute(context); + + const resultBuffer: Buffer = Buffer.concat( + context.machineState.memory.getSliceAs(dstOffset, numLimbs).map(byte => byte.toBuffer()), + ); + // The expected result is the first 10 bits of the input, reversed + const expectedResults = '1011101010100'.split('').reverse().slice(0, numLimbs).map(Number); + for (let i = 0; i < numLimbs; i++) { + expect(resultBuffer.readUInt8(i)).toEqual(expectedResults[i]); + } + }); + + it('Should decompose correctly - indirect', async () => { + const arg = new Field(Buffer.from('1234567890abcdef', 'hex')); + const indirect = new Addressing([ + /*srcOffset=*/ AddressingMode.INDIRECT, + /*dstOffset*/ AddressingMode.INDIRECT, + ]).toWire(); + const srcOffset = 0; + const srcOffsetReal = 10; + const dstOffset = 2; + const dstOffsetReal = 30; + context.machineState.memory.set(srcOffset, new Uint32(srcOffsetReal)); + context.machineState.memory.set(dstOffset, new Uint32(dstOffsetReal)); + context.machineState.memory.set(srcOffsetReal, arg); + + const radix = 1 << 8; // Byte decomposition + const numLimbs = 32; // 256-bit decomposition + await new ToRadixLE(indirect, srcOffset, dstOffset, radix, numLimbs).execute(context); + + const resultBuffer: Buffer = Buffer.concat( + context.machineState.memory.getSliceAs(dstOffsetReal, numLimbs).map(byte => byte.toBuffer()), + ); + // The expected result is the input (padded to 256 bits),and reversed + const expectedResults = '1234567890abcdef' + .padStart(64, '0') + .split('') + .reverse() + .map(a => 
parseInt(a, 16)); + // Checking the value in each byte of the buffer is correct + for (let i = 0; i < numLimbs; i++) { + expect(resultBuffer.readUInt8(i)).toEqual(expectedResults[2 * i] + expectedResults[2 * i + 1] * 16); + } + }); + }); +}); diff --git a/yarn-project/simulator/src/avm/opcodes/conversion.ts b/yarn-project/simulator/src/avm/opcodes/conversion.ts new file mode 100644 index 00000000000..dc9884d9aab --- /dev/null +++ b/yarn-project/simulator/src/avm/opcodes/conversion.ts @@ -0,0 +1,58 @@ +import { assert } from '../../../../foundation/src/json-rpc/js_utils.js'; +import { type AvmContext } from '../avm_context.js'; +import { TypeTag, Uint8 } from '../avm_memory_types.js'; +import { Opcode, OperandType } from '../serialization/instruction_serialization.js'; +import { Addressing } from './addressing_mode.js'; +import { Instruction } from './instruction.js'; + +export class ToRadixLE extends Instruction { + static type: string = 'TORADIXLE'; + static readonly opcode: Opcode = Opcode.TORADIXLE; + + // Informs (de)serialization. See Instruction.deserialize. 
+ static readonly wireFormat: OperandType[] = [ + OperandType.UINT8, // Opcode + OperandType.UINT8, // Indirect + OperandType.UINT32, // src memory address + OperandType.UINT32, // dst memory address + OperandType.UINT32, // radix (immediate) + OperandType.UINT32, // number of limbs (Immediate) + ]; + + constructor( + private indirect: number, + private srcOffset: number, + private dstOffset: number, + private radix: number, + private numLimbs: number, + ) { + assert(radix <= 256, 'Radix cannot be greater than 256'); + super(); + } + + public async execute(context: AvmContext): Promise { + const memory = context.machineState.memory.track(this.type); + const [srcOffset, dstOffset] = Addressing.fromWire(this.indirect).resolve([this.srcOffset, this.dstOffset], memory); + const memoryOperations = { reads: 1, writes: this.numLimbs, indirect: this.indirect }; + context.machineState.consumeGas(this.gasCost(memoryOperations)); + + // The radix gadget only takes in a Field + memory.checkTag(TypeTag.FIELD, srcOffset); + + let value: bigint = memory.get(srcOffset).toBigInt(); + const radixBN: bigint = BigInt(this.radix); + const limbArray = []; + + for (let i = 0; i < this.numLimbs; i++) { + const limb = value % radixBN; + limbArray.push(limb); + value /= radixBN; + } + + const res = [...limbArray].map(byte => new Uint8(byte)); + memory.setSlice(dstOffset, res); + + memory.assert(memoryOperations); + context.machineState.incrementPc(); + } +} diff --git a/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts b/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts index 569ad1d7eda..dabf361d04c 100644 --- a/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts +++ b/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts @@ -74,6 +74,8 @@ export enum Opcode { POSEIDON2, SHA256, // temp - may be removed, but alot of contracts rely on it PEDERSEN, // temp - may be removed, but alot of contracts rely on 
it + // Conversion + TORADIXLE, } // Possible types for an instruction's operand in its wire format. (Keep in sync with CPP code. From 9c30759ad9d45bc14f487b602837228392fab44f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Fri, 10 May 2024 15:26:26 +0200 Subject: [PATCH 42/43] refactor: `CompleteAddress` cleanup (#6300) Fixes #5834 --- .../aztec-nr/aztec/src/keys/getters.nr | 24 ++- .../aztec/src/oracle/get_public_key.nr | 22 +-- .../key_registry_contract/src/main.nr | 45 +++--- .../src/main.nr | 3 +- .../src/util.nr | 29 ++-- .../crates/types/src/address/aztec_address.nr | 21 +-- .../types/src/address/public_keys_hash.nr | 56 +++---- .../accounts/src/defaults/account_contract.ts | 5 +- .../src/defaults/account_interface.ts | 5 - .../accounts/src/testing/configuration.ts | 4 +- yarn-project/aztec.js/src/account/contract.ts | 4 +- .../aztec.js/src/account/interface.ts | 3 - .../aztec.js/src/account_manager/index.ts | 2 +- yarn-project/aztec.js/src/utils/account.ts | 2 +- .../aztec.js/src/wallet/account_wallet.ts | 4 - .../aztec.js/src/wallet/base_wallet.ts | 5 - yarn-project/aztec.js/src/wallet/index.ts | 6 +- yarn-project/aztec/src/cli/util.ts | 9 +- .../circuit-types/src/interfaces/pxe.ts | 14 +- .../circuit-types/src/keys/key_store.ts | 27 +--- .../contract_address.test.ts.snap | 2 - .../src/contract/contract_address.test.ts | 23 --- .../src/contract/contract_address.ts | 21 +-- .../src/keys/__snapshots__/index.test.ts.snap | 3 + .../circuits.js/src/keys/index.test.ts | 44 ++++++ yarn-project/circuits.js/src/keys/index.ts | 7 +- .../src/structs/complete_address.test.ts | 25 ++- .../src/structs/complete_address.ts | 146 ++++++++++++------ .../end-to-end/src/benchmarks/utils.ts | 5 +- .../src/e2e_account_contracts.test.ts | 6 +- .../end-to-end/src/e2e_card_game.test.ts | 2 +- .../src/e2e_deploy_contract/deploy_test.ts | 9 +- .../src/e2e_deploy_contract/legacy.test.ts | 2 +- .../end-to-end/src/e2e_key_registry.test.ts | 76 +++------ 
.../e2e_multiple_accounts_1_enc_key.test.ts | 2 +- .../key-store/src/test_key_store.test.ts | 2 +- yarn-project/key-store/src/test_key_store.ts | 40 ++--- .../pxe/src/database/kv_pxe_database.ts | 2 +- .../src/database/pxe_database_test_suite.ts | 26 +++- .../pxe/src/pxe_service/pxe_service.ts | 54 +++---- .../src/pxe_service/test/pxe_test_suite.ts | 9 +- .../pxe/src/simulator_oracle/index.ts | 12 -- .../pxe/src/synchronizer/synchronizer.test.ts | 9 +- .../pxe/src/synchronizer/synchronizer.ts | 3 +- .../simulator/src/acvm/oracle/oracle.ts | 43 ++---- .../simulator/src/acvm/oracle/typed_oracle.ts | 10 +- .../simulator/src/client/db_oracle.ts | 10 +- .../src/client/private_execution.test.ts | 2 +- .../simulator/src/client/view_data_oracle.ts | 10 -- 49 files changed, 395 insertions(+), 500 deletions(-) create mode 100644 yarn-project/circuits.js/src/keys/__snapshots__/index.test.ts.snap create mode 100644 yarn-project/circuits.js/src/keys/index.test.ts diff --git a/noir-projects/aztec-nr/aztec/src/keys/getters.nr b/noir-projects/aztec-nr/aztec/src/keys/getters.nr index b6fc2759fb7..0e531da1028 100644 --- a/noir-projects/aztec-nr/aztec/src/keys/getters.nr +++ b/noir-projects/aztec-nr/aztec/src/keys/getters.nr @@ -1,4 +1,7 @@ -use dep::protocol_types::{address::AztecAddress, constants::CANONICAL_KEY_REGISTRY_ADDRESS, grumpkin_point::GrumpkinPoint}; +use dep::protocol_types::{ + address::{AztecAddress, PublicKeysHash}, constants::CANONICAL_KEY_REGISTRY_ADDRESS, + grumpkin_point::GrumpkinPoint +}; use crate::{ context::PrivateContext, oracle::keys::get_public_keys_and_partial_address, state_vars::{ @@ -80,20 +83,15 @@ fn fetch_key_from_registry( fn fetch_and_constrain_keys(address: AztecAddress) -> [GrumpkinPoint; 4] { let (public_keys, partial_address) = get_public_keys_and_partial_address(address); - let nullifier_pub_key = public_keys[0]; - let incoming_pub_key = public_keys[1]; - let outgoing_pub_key = public_keys[2]; - let tagging_pub_key = public_keys[3]; + let 
npk_m = public_keys[0]; + let ivpk_m = public_keys[1]; + let ovpk_m = public_keys[2]; + let tpk_m = public_keys[3]; - let computed_address = AztecAddress::compute_from_public_keys_and_partial_address( - nullifier_pub_key, - incoming_pub_key, - outgoing_pub_key, - tagging_pub_key, - partial_address - ); + let public_keys_hash = PublicKeysHash::compute(npk_m, ivpk_m, ovpk_m, tpk_m); + let computed_address = AztecAddress::compute(public_keys_hash, partial_address); assert(computed_address.eq(address)); - [nullifier_pub_key, incoming_pub_key, outgoing_pub_key, tagging_pub_key] + [npk_m, ivpk_m, ovpk_m, tpk_m] } diff --git a/noir-projects/aztec-nr/aztec/src/oracle/get_public_key.nr b/noir-projects/aztec-nr/aztec/src/oracle/get_public_key.nr index a509e8c1b54..49e97cf5a28 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/get_public_key.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/get_public_key.nr @@ -1,20 +1,8 @@ -use dep::protocol_types::{address::{AztecAddress, PartialAddress, PublicKeysHash}, grumpkin_point::GrumpkinPoint}; - -#[oracle(getPublicKeyAndPartialAddress)] -fn get_public_key_and_partial_address_oracle(_address: AztecAddress) -> [Field; 3] {} - -unconstrained fn get_public_key_and_partial_address_internal(address: AztecAddress) -> [Field; 3] { - get_public_key_and_partial_address_oracle(address) -} +use dep::protocol_types::{address::AztecAddress, grumpkin_point::GrumpkinPoint}; +use crate::oracle::keys::get_public_keys_and_partial_address; +// To be nuked in my next PR: https://github.com/AztecProtocol/aztec-packages/pull/6219 pub fn get_public_key(address: AztecAddress) -> GrumpkinPoint { - let result = get_public_key_and_partial_address_internal(address); - let pub_key = GrumpkinPoint::new(result[0], result[1]); - let partial_address = PartialAddress::from_field(result[2]); - - // TODO(#5830): disabling the following constraint until we update the oracle according to the new key scheme - // let calculated_address = 
AztecAddress::compute(PublicKeysHash::compute(pub_key), partial_address); - // assert(calculated_address.eq(address)); - - pub_key + let result = get_public_keys_and_partial_address(address); + result.0[1] } diff --git a/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr b/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr index b985c829d26..ca63a68aba3 100644 --- a/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr @@ -3,7 +3,7 @@ contract KeyRegistry { use dep::aztec::{ state_vars::{SharedMutable, Map}, - protocol_types::{grumpkin_point::GrumpkinPoint, address::{AztecAddress, PartialAddress}} + protocol_types::{grumpkin_point::GrumpkinPoint, address::{AztecAddress, PartialAddress, PublicKeysHash}} }; global KEY_ROTATION_DELAY = 5; @@ -27,11 +27,7 @@ contract KeyRegistry { } #[aztec(public)] - fn rotate_nullifier_public_key( - address: AztecAddress, - new_nullifier_public_key: GrumpkinPoint, - nonce: Field - ) { + fn rotate_npk_m(address: AztecAddress, new_npk_m: GrumpkinPoint, nonce: Field) { // TODO: (#6137) if (!address.eq(context.msg_sender())) { assert_current_call_valid_authwit_public(&mut context, address); @@ -41,26 +37,21 @@ contract KeyRegistry { let npk_m_x_registry = storage.npk_m_x_registry.at(address); let npk_m_y_registry = storage.npk_m_y_registry.at(address); - npk_m_x_registry.schedule_value_change(new_nullifier_public_key.x); - npk_m_y_registry.schedule_value_change(new_nullifier_public_key.y); + npk_m_x_registry.schedule_value_change(new_npk_m.x); + npk_m_y_registry.schedule_value_change(new_npk_m.y); } #[aztec(public)] fn register( address: AztecAddress, partial_address: PartialAddress, - nullifier_public_key: GrumpkinPoint, - incoming_public_key: GrumpkinPoint, - outgoing_public_key: GrumpkinPoint, - tagging_public_key: GrumpkinPoint + npk_m: GrumpkinPoint, + ivpk_m: GrumpkinPoint, + ovpk_m: 
GrumpkinPoint, + tpk_m: GrumpkinPoint ) { - let computed_address = AztecAddress::compute_from_public_keys_and_partial_address( - nullifier_public_key, - incoming_public_key, - outgoing_public_key, - tagging_public_key, - partial_address - ); + let public_keys_hash = PublicKeysHash::compute(npk_m, ivpk_m, ovpk_m, tpk_m); + let computed_address = AztecAddress::compute(public_keys_hash, partial_address); assert(computed_address.eq(address), "Computed address does not match supplied address"); @@ -73,14 +64,14 @@ contract KeyRegistry { // let tpk_m_x_registry = storage.tpk_m_x_registry.at(address); // let tpk_m_y_registry = storage.tpk_m_y_registry.at(address); - npk_m_x_registry.schedule_value_change(nullifier_public_key.x); - npk_m_y_registry.schedule_value_change(nullifier_public_key.y); - ivpk_m_x_registry.schedule_value_change(incoming_public_key.x); - ivpk_m_y_registry.schedule_value_change(incoming_public_key.y); + npk_m_x_registry.schedule_value_change(npk_m.x); + npk_m_y_registry.schedule_value_change(npk_m.y); + ivpk_m_x_registry.schedule_value_change(ivpk_m.x); + ivpk_m_y_registry.schedule_value_change(ivpk_m.y); // Commented out as we hit the max enqueued public calls limit when not done so - // ovpk_m_x_registry.schedule_value_change(outgoing_public_key.x); - // ovpk_m_y_registry.schedule_value_change(outgoing_public_key.y); - // tpk_m_x_registry.schedule_value_change(tagging_public_key.x); - // tpk_m_y_registry.schedule_value_change(tagging_public_key.y); + // ovpk_m_x_registry.schedule_value_change(ovpk_m.x); + // ovpk_m_y_registry.schedule_value_change(ovpk_m.y); + // tpk_m_x_registry.schedule_value_change(tpk_m.x); + // tpk_m_y_registry.schedule_value_change(tpk_m.y); } } diff --git a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/main.nr index bb6aad4b787..5c75c095d2f 100644 --- 
a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/main.nr @@ -6,7 +6,8 @@ contract SchnorrSingleKeyAccount { use dep::authwit::{entrypoint::{app::AppPayload, fee::FeePayload}, account::AccountActions}; - use crate::{util::recover_address, auth_oracle::get_auth_witness}; + // use crate::{util::recover_address, auth_oracle::get_auth_witness}; + use crate::auth_oracle::get_auth_witness; global ACCOUNT_ACTIONS_STORAGE_SLOT = 1; diff --git a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr index f337d688bbd..89f7e2e9b4d 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr @@ -3,18 +3,19 @@ use dep::aztec::protocol_types::address::PublicKeysHash; use dep::std::{schnorr::verify_signature_slice}; use crate::auth_oracle::AuthWitness; -pub fn recover_address(message_hash: Field, witness: AuthWitness) -> AztecAddress { - let message_bytes = message_hash.to_be_bytes(32); - let verification = verify_signature_slice( - witness.owner.x, - witness.owner.y, - witness.signature, - message_bytes - ); - assert(verification == true); +// TODO(#5830): the following is currently broken because we are no longer able to compute public keys hash +// pub fn recover_address(message_hash: Field, witness: AuthWitness) -> AztecAddress { +// let message_bytes = message_hash.to_be_bytes(32); +// let verification = verify_signature_slice( +// witness.owner.x, +// witness.owner.y, +// witness.signature, +// message_bytes +// ); +// assert(verification == true); - AztecAddress::compute( - PublicKeysHash::compute(witness.owner), - witness.partial_address - ) -} +// AztecAddress::compute( +// 
PublicKeysHash::compute(witness.owner), +// witness.partial_address +// ) +// } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr b/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr index 6413bedf15e..6c91a609990 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr @@ -59,25 +59,6 @@ impl AztecAddress { ) } - pub fn compute_from_public_keys_and_partial_address( - nullifier_public_key: GrumpkinPoint, - incoming_public_key: GrumpkinPoint, - outgoing_public_key: GrumpkinPoint, - tagging_public_key: GrumpkinPoint, - partial_address: PartialAddress - ) -> AztecAddress { - let public_keys_hash = PublicKeysHash::compute_new( - nullifier_public_key, - incoming_public_key, - outgoing_public_key, - tagging_public_key - ); - - let computed_address = AztecAddress::compute(public_keys_hash, partial_address); - - computed_address - } - pub fn is_zero(self) -> bool { self.inner == 0 } @@ -93,7 +74,7 @@ impl AztecAddress { } #[test] -fn compute_address_from_partial_and_pubkey() { +fn compute_address_from_partial_and_pub_keys_hash() { let pub_keys_hash = PublicKeysHash::from_field(1); let partial_address = PartialAddress::from_field(2); diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr index f91d1383a19..09ad9ba1a15 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr @@ -38,37 +38,18 @@ impl PublicKeysHash { Self { inner: field } } - // TODO(#5830): When we do this refactor, rename compute_new -> compute - pub fn compute(public_key: GrumpkinPoint) -> Self { - PublicKeysHash::from_field( - pedersen_hash( - [ - public_key.x, - 
public_key.y - ], - GENERATOR_INDEX__PARTIAL_ADDRESS - ) - ) - } - - // TODO(#5830): When we do this refactor, rename compute_new -> compute - pub fn compute_new( - nullifier_public_key: GrumpkinPoint, - incoming_public_key: GrumpkinPoint, - outgoing_public_key: GrumpkinPoint, - tagging_public_key: GrumpkinPoint - ) -> Self { + pub fn compute(npk_m: GrumpkinPoint, ivpk_m: GrumpkinPoint, ovpk_m: GrumpkinPoint, tpk_m: GrumpkinPoint) -> Self { PublicKeysHash::from_field( poseidon2_hash( [ - nullifier_public_key.x, - nullifier_public_key.y, - incoming_public_key.x, - incoming_public_key.y, - outgoing_public_key.x, - outgoing_public_key.y, - tagging_public_key.x, - tagging_public_key.y, + npk_m.x, + npk_m.y, + ivpk_m.x, + ivpk_m.y, + ovpk_m.x, + ovpk_m.y, + tpk_m.x, + tpk_m.y, GENERATOR_INDEX__PUBLIC_KEYS_HASH ] ) @@ -84,11 +65,14 @@ impl PublicKeysHash { } } -// TODO(#5830): re-enable this test once the compute function is updated -// #[test] -// fn compute_public_keys_hash() { -// let point = GrumpkinPoint { x: 1, y: 2 }; -// let actual = PublicKeysHash::compute(point); -// let expected_public_keys_hash = 0x22d83a089d7650514c2de24cd30185a414d943eaa19817c67bffe2c3183006a3; -// assert(actual.to_field() == expected_public_keys_hash); -// } +#[test] +fn compute_public_keys_hash() { + let npk_m = GrumpkinPoint { x: 1, y: 2 }; + let ivpk_m = GrumpkinPoint { x: 3, y: 4 }; + let ovpk_m = GrumpkinPoint { x: 5, y: 6 }; + let tpk_m = GrumpkinPoint { x: 7, y: 8 }; + + let actual = PublicKeysHash::compute(npk_m, ivpk_m, ovpk_m, tpk_m); + let expected_public_keys_hash = 0x1936abe4f6a920d16a9f6917f10a679507687e2cd935dd1f1cdcb1e908c027f3; + assert(actual.to_field() == expected_public_keys_hash); +} diff --git a/yarn-project/accounts/src/defaults/account_contract.ts b/yarn-project/accounts/src/defaults/account_contract.ts index dc3b2330059..f2842c9ac0f 100644 --- a/yarn-project/accounts/src/defaults/account_contract.ts +++ b/yarn-project/accounts/src/defaults/account_contract.ts @@ 
-1,6 +1,5 @@ import { type AccountContract, type AccountInterface, type AuthWitnessProvider } from '@aztec/aztec.js/account'; import { type CompleteAddress } from '@aztec/circuit-types'; -import { type Fr } from '@aztec/circuits.js'; import { type ContractArtifact } from '@aztec/foundation/abi'; import { type NodeInfo } from '@aztec/types/interfaces'; @@ -20,7 +19,7 @@ export abstract class DefaultAccountContract implements AccountContract { return this.artifact; } - getInterface(address: CompleteAddress, publicKeysHash: Fr, nodeInfo: NodeInfo): AccountInterface { - return new DefaultAccountInterface(this.getAuthWitnessProvider(address), address, publicKeysHash, nodeInfo); + getInterface(address: CompleteAddress, nodeInfo: NodeInfo): AccountInterface { + return new DefaultAccountInterface(this.getAuthWitnessProvider(address), address, nodeInfo); } } diff --git a/yarn-project/accounts/src/defaults/account_interface.ts b/yarn-project/accounts/src/defaults/account_interface.ts index f32e96aa208..5d7fa311c6e 100644 --- a/yarn-project/accounts/src/defaults/account_interface.ts +++ b/yarn-project/accounts/src/defaults/account_interface.ts @@ -17,7 +17,6 @@ export class DefaultAccountInterface implements AccountInterface { constructor( private authWitnessProvider: AuthWitnessProvider, private address: CompleteAddress, - private publicKeysHash: Fr, nodeInfo: Pick, ) { this.entrypoint = new DefaultAccountEntrypoint( @@ -38,10 +37,6 @@ export class DefaultAccountInterface implements AccountInterface { return this.authWitnessProvider.createAuthWit(messageHash); } - getPublicKeysHash(): Fr { - return this.publicKeysHash; - } - getCompleteAddress(): CompleteAddress { return this.address; } diff --git a/yarn-project/accounts/src/testing/configuration.ts b/yarn-project/accounts/src/testing/configuration.ts index 7fc376ddd70..cc37380d93f 100644 --- a/yarn-project/accounts/src/testing/configuration.ts +++ b/yarn-project/accounts/src/testing/configuration.ts @@ -45,7 +45,9 @@ export 
async function getDeployedTestAccountsWallets(pxe: PXE): Promise { const initialEncryptionKey = sha512ToGrumpkinScalar([initialSecretKey, GeneratorIndex.IVSK_M]); const publicKey = generatePublicKey(initialEncryptionKey); - return registeredAccounts.find(registered => registered.publicKey.equals(publicKey)) != undefined; + return ( + registeredAccounts.find(registered => registered.masterIncomingViewingPublicKey.equals(publicKey)) != undefined + ); }).map(secretKey => { const signingKey = sha512ToGrumpkinScalar([secretKey, GeneratorIndex.IVSK_M]); // TODO(#5726): use actual salt here instead of hardcoding Fr.ZERO diff --git a/yarn-project/aztec.js/src/account/contract.ts b/yarn-project/aztec.js/src/account/contract.ts index 6ae607d386b..6c49a3b5cf0 100644 --- a/yarn-project/aztec.js/src/account/contract.ts +++ b/yarn-project/aztec.js/src/account/contract.ts @@ -1,5 +1,4 @@ import { type CompleteAddress } from '@aztec/circuit-types'; -import { type Fr } from '@aztec/circuits.js'; import { type ContractArtifact } from '@aztec/foundation/abi'; import { type NodeInfo } from '@aztec/types/interfaces'; @@ -26,11 +25,10 @@ export interface AccountContract { * The account interface is responsible for assembling tx requests given requested function calls, and * for creating signed auth witnesses given action identifiers (message hashes). * @param address - Address where this account contract is deployed. - * @param publicKeysHash - Hash of the public keys used to authorize actions. * @param nodeInfo - Info on the chain where it is deployed. * @returns An account interface instance for creating tx requests and authorizing actions. */ - getInterface(address: CompleteAddress, publicKeysHash: Fr, nodeInfo: NodeInfo): AccountInterface; + getInterface(address: CompleteAddress, nodeInfo: NodeInfo): AccountInterface; /** * Returns the auth witness provider for the given address. 
diff --git a/yarn-project/aztec.js/src/account/interface.ts b/yarn-project/aztec.js/src/account/interface.ts index 555fce8cbbc..5a5ab2cf28e 100644 --- a/yarn-project/aztec.js/src/account/interface.ts +++ b/yarn-project/aztec.js/src/account/interface.ts @@ -42,9 +42,6 @@ export interface AccountInterface extends AuthWitnessProvider, EntrypointInterfa /** Returns the complete address for this account. */ getCompleteAddress(): CompleteAddress; - /** Returns the public keys hash for this account. */ - getPublicKeysHash(): Fr; - /** Returns the address for this account. */ getAddress(): AztecAddress; diff --git a/yarn-project/aztec.js/src/account_manager/index.ts b/yarn-project/aztec.js/src/account_manager/index.ts index 549855a4d97..842236286a1 100644 --- a/yarn-project/aztec.js/src/account_manager/index.ts +++ b/yarn-project/aztec.js/src/account_manager/index.ts @@ -51,7 +51,7 @@ export class AccountManager { public async getAccount(): Promise { const nodeInfo = await this.pxe.getNodeInfo(); const completeAddress = this.getCompleteAddress(); - return this.accountContract.getInterface(completeAddress, this.getPublicKeysHash(), nodeInfo); + return this.accountContract.getInterface(completeAddress, nodeInfo); } /** diff --git a/yarn-project/aztec.js/src/utils/account.ts b/yarn-project/aztec.js/src/utils/account.ts index c128d8e227e..b9cc606b9b6 100644 --- a/yarn-project/aztec.js/src/utils/account.ts +++ b/yarn-project/aztec.js/src/utils/account.ts @@ -14,7 +14,7 @@ export async function waitForAccountSynch( address: CompleteAddress, { interval, timeout }: WaitOpts = DefaultWaitOpts, ): Promise { - const publicKey = address.publicKey.toString(); + const publicKey = address.masterIncomingViewingPublicKey.toString(); await retryUntil( async () => { const status = await pxe.getSyncStatus(); diff --git a/yarn-project/aztec.js/src/wallet/account_wallet.ts b/yarn-project/aztec.js/src/wallet/account_wallet.ts index a1f7cea1848..803d07010eb 100644 --- 
a/yarn-project/aztec.js/src/wallet/account_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/account_wallet.ts @@ -16,10 +16,6 @@ export class AccountWallet extends BaseWallet { super(pxe); } - getPublicKeysHash(): Fr { - return this.account.getPublicKeysHash(); - } - createTxExecutionRequest(exec: ExecutionRequestInit): Promise { return this.account.createTxExecutionRequest(exec); } diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts index eeacdb4f23a..200ad930dee 100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -32,8 +32,6 @@ export abstract class BaseWallet implements Wallet { abstract getCompleteAddress(): CompleteAddress; - abstract getPublicKeysHash(): Fr; - abstract getChainId(): Fr; abstract getVersion(): Fr; @@ -80,9 +78,6 @@ export abstract class BaseWallet implements Wallet { getRegisteredAccount(address: AztecAddress): Promise { return this.pxe.getRegisteredAccount(address); } - getRegisteredAccountPublicKeysHash(address: AztecAddress): Promise { - return this.pxe.getRegisteredAccountPublicKeysHash(address); - } getRecipients(): Promise { return this.pxe.getRecipients(); } diff --git a/yarn-project/aztec.js/src/wallet/index.ts b/yarn-project/aztec.js/src/wallet/index.ts index ad92b67fdd0..08e8cb27c41 100644 --- a/yarn-project/aztec.js/src/wallet/index.ts +++ b/yarn-project/aztec.js/src/wallet/index.ts @@ -25,11 +25,7 @@ export async function getWallet( if (!completeAddress) { throw new Error(`Account ${address} not found`); } - const publicKeysHash = await pxe.getRegisteredAccountPublicKeysHash(address); - if (!publicKeysHash) { - throw new Error(`Public keys hash for account ${address} not found`); - } const nodeInfo = await pxe.getNodeInfo(); - const entrypoint = accountContract.getInterface(completeAddress, publicKeysHash, nodeInfo); + const entrypoint = accountContract.getInterface(completeAddress, nodeInfo); return new 
AccountWallet(pxe, entrypoint); } diff --git a/yarn-project/aztec/src/cli/util.ts b/yarn-project/aztec/src/cli/util.ts index 769e3b1aba1..610aa727288 100644 --- a/yarn-project/aztec/src/cli/util.ts +++ b/yarn-project/aztec/src/cli/util.ts @@ -126,7 +126,14 @@ export async function createAccountLogs( accountLogStrings.push(` Address: ${completeAddress.address.toString()}\n`); accountLogStrings.push(` Partial Address: ${completeAddress.partialAddress.toString()}\n`); accountLogStrings.push(` Secret Key: ${account.secretKey.toString()}\n`); - accountLogStrings.push(` Public Key: ${completeAddress.publicKey.toString()}\n\n`); + accountLogStrings.push(` Master nullifier public key: ${completeAddress.masterNullifierPublicKey.toString()}\n`); + accountLogStrings.push( + ` Master incoming viewing public key: ${completeAddress.masterIncomingViewingPublicKey.toString()}\n\n`, + ); + accountLogStrings.push( + ` Master outgoing viewing public key: ${completeAddress.masterOutgoingViewingPublicKey.toString()}\n\n`, + ); + accountLogStrings.push(` Master tagging public key: ${completeAddress.masterTaggingPublicKey.toString()}\n\n`); } } return accountLogStrings; diff --git a/yarn-project/circuit-types/src/interfaces/pxe.ts b/yarn-project/circuit-types/src/interfaces/pxe.ts index 0e95d024727..9e01820e4f7 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.ts @@ -1,4 +1,4 @@ -import { type AztecAddress, type CompleteAddress, type Fr, type PartialAddress, type Point } from '@aztec/circuits.js'; +import { type AztecAddress, type CompleteAddress, type Fr, type PartialAddress } from '@aztec/circuits.js'; import { type ContractArtifact } from '@aztec/foundation/abi'; import { type ContractClassWithId, type ContractInstanceWithAddress } from '@aztec/types/contracts'; import { type NodeInfo } from '@aztec/types/interfaces'; @@ -73,8 +73,7 @@ export interface PXE { * the recipient's notes. 
We can send notes to this account because we can encrypt them with the recipient's * public key. */ - // TODO: #5834: Nuke publicKeys optional parameter after `CompleteAddress` refactor. - registerRecipient(recipient: CompleteAddress, publicKeys?: Point[]): Promise; + registerRecipient(recipient: CompleteAddress): Promise; /** * Retrieves the user accounts registered on this PXE Service. @@ -91,15 +90,6 @@ export interface PXE { */ getRegisteredAccount(address: AztecAddress): Promise; - /** - * Retrieves the public keys hash of the account corresponding to the provided aztec address. - * - * @param address - The address of account. - * @returns The public keys hash of the requested account if found. - * TODO(#5834): refactor complete address and merge with getRegisteredAccount? - */ - getRegisteredAccountPublicKeysHash(address: AztecAddress): Promise; - /** * Retrieves the recipients added to this PXE Service. * @returns An array of recipients registered on this PXE Service. diff --git a/yarn-project/circuit-types/src/keys/key_store.ts b/yarn-project/circuit-types/src/keys/key_store.ts index 168ec8d5f04..b4a0d7ce300 100644 --- a/yarn-project/circuit-types/src/keys/key_store.ts +++ b/yarn-project/circuit-types/src/keys/key_store.ts @@ -1,9 +1,9 @@ import { type AztecAddress, + type CompleteAddress, type Fr, type GrumpkinPrivateKey, type PartialAddress, - type Point, type PublicKey, } from '@aztec/circuits.js'; @@ -13,17 +13,17 @@ import { export interface KeyStore { /** * Creates a new account from a randomly generated secret key. - * @returns A promise that resolves to the newly created account's AztecAddress. + * @returns A promise that resolves to the newly created account's CompleteAddress. */ - createAccount(): Promise; + createAccount(): Promise; /** * Adds an account to the key store from the provided secret key. * @param sk - The secret key of the account. * @param partialAddress - The partial address of the account. - * @returns The account's address. 
+ * @returns The account's complete address. */ - addAccount(sk: Fr, partialAddress: PartialAddress): Promise; + addAccount(sk: Fr, partialAddress: PartialAddress): Promise; /** * Retrieves addresses of accounts stored in the key store. @@ -117,21 +117,4 @@ export interface KeyStore { * @returns A Promise that resolves to the public keys hash. */ getPublicKeysHash(account: AztecAddress): Promise; - - /** - * This is used to register a recipient / for storing public keys of an address - * @param accountAddress - The account address to store keys for. - * @param masterNullifierPublicKey - The stored master nullifier public key - * @param masterIncomingViewingPublicKey - The stored incoming viewing public key - * @param masterOutgoingViewingPublicKey - The stored outgoing viewing public key - * @param masterTaggingPublicKey - The stored master tagging public key - */ - // TODO(#5834): Move this function out of here. Key store should only be used for accounts, not recipients - addPublicKeysForAccount( - accountAddress: AztecAddress, - masterNullifierPublicKey: Point, - masterIncomingViewingPublicKey: Point, - masterOutgoingViewingPublicKey: Point, - masterTaggingPublicKey: Point, - ): Promise; } diff --git a/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap b/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap index dc6ef757820..37d75fc64af 100644 --- a/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap +++ b/yarn-project/circuits.js/src/contract/__snapshots__/contract_address.test.ts.snap @@ -1,7 +1,5 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`ContractAddress Address from partial matches Noir 1`] = `"0x1b6ead051e7b42665064ca6cf1ec77da0a36d86e00d1ff6e44077966c0c3a9fa"`; - exports[`ContractAddress Public key hash matches Noir 1`] = `"0x22d83a089d7650514c2de24cd30185a414d943eaa19817c67bffe2c3183006a3"`; exports[`ContractAddress computeContractAddressFromInstance 1`] 
= `"0x0bed63221d281713007bfb0c063e1f61d0646404fb3701b99bb92f41b6390604"`; diff --git a/yarn-project/circuits.js/src/contract/contract_address.test.ts b/yarn-project/circuits.js/src/contract/contract_address.test.ts index 6199e69a25d..a2c84f657a1 100644 --- a/yarn-project/circuits.js/src/contract/contract_address.test.ts +++ b/yarn-project/circuits.js/src/contract/contract_address.test.ts @@ -5,7 +5,6 @@ import { setupCustomSnapshotSerializers, updateInlineTestData } from '@aztec/fou import { AztecAddress, deriveKeys } from '../index.js'; import { computeContractAddressFromInstance, - computeContractAddressFromPartial, computeInitializationHash, computePartialAddress, computeSaltedInitializationHash, @@ -69,14 +68,6 @@ describe('ContractAddress', () => { }).toString(); expect(address).toMatchSnapshot(); - - // TODO(#5834): the following was removed from aztec_address.nr, should it be re-introduced? - // // Run with AZTEC_GENERATE_TEST_DATA=1 to update noir test data - // updateInlineTestData( - // 'noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr', - // 'expected_computed_address_from_preimage', - // address.toString(), - // ); }); it('Public key hash matches Noir', () => { @@ -91,18 +82,4 @@ describe('ContractAddress', () => { hash.toString(), ); }); - - it('Address from partial matches Noir', () => { - const publicKeysHash = new Fr(1n); - const partialAddress = new Fr(2n); - const address = computeContractAddressFromPartial({ publicKeysHash, partialAddress }).toString(); - expect(address).toMatchSnapshot(); - - // Run with AZTEC_GENERATE_TEST_DATA=1 to update noir test data - updateInlineTestData( - 'noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr', - 'expected_computed_address_from_partial_and_pubkey', - address.toString(), - ); - }); }); diff --git a/yarn-project/circuits.js/src/contract/contract_address.ts b/yarn-project/circuits.js/src/contract/contract_address.ts index 11c4dade226..353e3737d90 
100644 --- a/yarn-project/circuits.js/src/contract/contract_address.ts +++ b/yarn-project/circuits.js/src/contract/contract_address.ts @@ -1,12 +1,12 @@ import { type FunctionAbi, FunctionSelector, encodeArguments } from '@aztec/foundation/abi'; -import { AztecAddress } from '@aztec/foundation/aztec-address'; -import { pedersenHash, poseidon2Hash } from '@aztec/foundation/crypto'; +import { type AztecAddress } from '@aztec/foundation/aztec-address'; +import { pedersenHash } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; import { type ContractInstance } from '@aztec/types/contracts'; import { GeneratorIndex } from '../constants.gen.js'; import { computeVarArgsHash } from '../hash/hash.js'; -import { deriveKeys } from '../keys/index.js'; +import { computeAddress } from '../keys/index.js'; // TODO(@spalladino): Review all generator indices in this file @@ -26,7 +26,7 @@ export function computeContractAddressFromInstance( ): AztecAddress { const partialAddress = computePartialAddress(instance); const publicKeysHash = instance.publicKeysHash; - return computeContractAddressFromPartial({ partialAddress, publicKeysHash }); + return computeAddress(publicKeysHash, partialAddress); } /** @@ -56,19 +56,6 @@ export function computeSaltedInitializationHash( return pedersenHash([instance.salt, instance.initializationHash, instance.deployer], GeneratorIndex.PARTIAL_ADDRESS); } -/** - * Computes a contract address from its partial address and public keys hash. - * @param args - The hash of the public keys or the plain public key to be hashed, along with the partial address. - * @returns The contract address. - */ -export function computeContractAddressFromPartial( - args: ({ publicKeysHash: Fr } | { secretKey: Fr }) & { partialAddress: Fr }, -): AztecAddress { - const publicKeysHash = 'secretKey' in args ? 
deriveKeys(args.secretKey).publicKeysHash : args.publicKeysHash; - const result = poseidon2Hash([publicKeysHash, args.partialAddress, GeneratorIndex.CONTRACT_ADDRESS_V1]); - return AztecAddress.fromField(result); -} - /** * Computes the initialization hash for an instance given its constructor function and arguments. * @param initFn - Constructor function or empty if no initialization is expected. diff --git a/yarn-project/circuits.js/src/keys/__snapshots__/index.test.ts.snap b/yarn-project/circuits.js/src/keys/__snapshots__/index.test.ts.snap new file mode 100644 index 00000000000..1ec1734aed8 --- /dev/null +++ b/yarn-project/circuits.js/src/keys/__snapshots__/index.test.ts.snap @@ -0,0 +1,3 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`🔑 Address from partial matches Noir 1`] = `"0x1b6ead051e7b42665064ca6cf1ec77da0a36d86e00d1ff6e44077966c0c3a9fa"`; diff --git a/yarn-project/circuits.js/src/keys/index.test.ts b/yarn-project/circuits.js/src/keys/index.test.ts new file mode 100644 index 00000000000..13d54bbbab1 --- /dev/null +++ b/yarn-project/circuits.js/src/keys/index.test.ts @@ -0,0 +1,44 @@ +import { Fr, Point } from '@aztec/foundation/fields'; +import { updateInlineTestData } from '@aztec/foundation/testing'; + +import { computeAddress, computePublicKeysHash } from './index.js'; + +describe('🔑', () => { + it('computing public keys hash matches Noir', () => { + const masterNullifierPublicKey = new Point(new Fr(1), new Fr(2)); + const masterIncomingViewingPublicKey = new Point(new Fr(3), new Fr(4)); + const masterOutgoingViewingPublicKey = new Point(new Fr(5), new Fr(6)); + const masterTaggingPublicKey = new Point(new Fr(7), new Fr(8)); + + const expected = Fr.fromString('0x1936abe4f6a920d16a9f6917f10a679507687e2cd935dd1f1cdcb1e908c027f3'); + expect( + computePublicKeysHash( + masterNullifierPublicKey, + masterIncomingViewingPublicKey, + masterOutgoingViewingPublicKey, + masterTaggingPublicKey, + ), + ).toEqual(expected); + + // Run with 
AZTEC_GENERATE_TEST_DATA=1 to update noir test data + updateInlineTestData( + 'noir-projects/noir-protocol-circuits/crates/types/src/address/public_keys_hash.nr', + 'expected_public_keys_hash', + expected.toString(), + ); + }); + + it('Address from partial matches Noir', () => { + const publicKeysHash = new Fr(1n); + const partialAddress = new Fr(2n); + const address = computeAddress(publicKeysHash, partialAddress).toString(); + expect(address).toMatchSnapshot(); + + // Run with AZTEC_GENERATE_TEST_DATA=1 to update noir test data + updateInlineTestData( + 'noir-projects/noir-protocol-circuits/crates/types/src/address/aztec_address.nr', + 'expected_computed_address_from_partial_and_pubkey', + address.toString(), + ); + }); +}); diff --git a/yarn-project/circuits.js/src/keys/index.ts b/yarn-project/circuits.js/src/keys/index.ts index f8da77fcba5..11fd962e75e 100644 --- a/yarn-project/circuits.js/src/keys/index.ts +++ b/yarn-project/circuits.js/src/keys/index.ts @@ -1,4 +1,4 @@ -import { type AztecAddress } from '@aztec/foundation/aztec-address'; +import { AztecAddress } from '@aztec/foundation/aztec-address'; import { poseidon2Hash, sha512ToGrumpkinScalar } from '@aztec/foundation/crypto'; import { type Fr, type GrumpkinScalar } from '@aztec/foundation/fields'; @@ -39,6 +39,11 @@ export function computePublicKeysHash( ]); } +export function computeAddress(publicKeysHash: Fr, partialAddress: Fr) { + const addressFr = poseidon2Hash([publicKeysHash, partialAddress, GeneratorIndex.CONTRACT_ADDRESS_V1]); + return AztecAddress.fromField(addressFr); +} + /** * Computes secret and public keys and public keys hash from a secret key. * @param secretKey - The secret key to derive keys from. 
diff --git a/yarn-project/circuits.js/src/structs/complete_address.test.ts b/yarn-project/circuits.js/src/structs/complete_address.test.ts index e8ce620e5e4..70c006ed2b2 100644 --- a/yarn-project/circuits.js/src/structs/complete_address.test.ts +++ b/yarn-project/circuits.js/src/structs/complete_address.test.ts @@ -4,16 +4,29 @@ import { Fr, Point } from '@aztec/foundation/fields'; import { CompleteAddress } from './complete_address.js'; describe('CompleteAddress', () => { - // TODO(#5834): re-enable or remove this test - it.skip('refuses to add an account with incorrect address for given partial address and pubkey', () => { - expect(() => CompleteAddress.create(AztecAddress.random(), Point.random(), Fr.random())).toThrow( - /cannot be derived/, - ); + it('refuses to add an account with incorrect address for given partial address and pubkey', () => { + expect(() => + CompleteAddress.create( + AztecAddress.random(), + Point.random(), + Point.random(), + Point.random(), + Point.random(), + Fr.random(), + ), + ).toThrow(/cannot be derived/); }); it('equals returns true when 2 instances are equal', () => { const address1 = CompleteAddress.random(); - const address2 = CompleteAddress.create(address1.address, address1.publicKey, address1.partialAddress); + const address2 = CompleteAddress.create( + address1.address, + address1.masterNullifierPublicKey, + address1.masterIncomingViewingPublicKey, + address1.masterOutgoingViewingPublicKey, + address1.masterTaggingPublicKey, + address1.partialAddress, + ); expect(address1.equals(address2)).toBe(true); }); diff --git a/yarn-project/circuits.js/src/structs/complete_address.ts b/yarn-project/circuits.js/src/structs/complete_address.ts index f4465685ca0..2e57265516a 100644 --- a/yarn-project/circuits.js/src/structs/complete_address.ts +++ b/yarn-project/circuits.js/src/structs/complete_address.ts @@ -2,8 +2,8 @@ import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr, Point } from '@aztec/foundation/fields'; 
import { BufferReader } from '@aztec/foundation/serialize'; -import { computeContractAddressFromPartial, computePartialAddress } from '../contract/contract_address.js'; -import { deriveKeys } from '../keys/index.js'; +import { computePartialAddress } from '../contract/contract_address.js'; +import { computeAddress, computePublicKeysHash, deriveKeys } from '../keys/index.js'; import { type PartialAddress } from '../types/partial_address.js'; import { type PublicKey } from '../types/public_key.js'; @@ -22,8 +22,14 @@ export class CompleteAddress { public constructor( /** Contract address (typically of an account contract) */ public address: AztecAddress, - /** Public key corresponding to the address (used during note encryption). */ - public publicKey: PublicKey, + /** Master nullifier public key */ + public masterNullifierPublicKey: PublicKey, + /** Master incoming viewing public key */ + public masterIncomingViewingPublicKey: PublicKey, + /** Master outgoing viewing public key */ + public masterOutgoingViewingPublicKey: PublicKey, + /** Master tagging viewing public key */ + public masterTaggingPublicKey: PublicKey, /** Partial key corresponding to the public key to the address. 
*/ public partialAddress: PartialAddress, ) {} @@ -31,32 +37,47 @@ export class CompleteAddress { /** Size in bytes of an instance */ static readonly SIZE_IN_BYTES = 32 * 4; - static create(address: AztecAddress, publicKey: PublicKey, partialAddress: PartialAddress) { - const completeAddress = new CompleteAddress(address, publicKey, partialAddress); - // TODO(#5834): re-enable validation - // completeAddress.validate(); + static create( + address: AztecAddress, + masterNullifierPublicKey: PublicKey, + masterIncomingViewingPublicKey: PublicKey, + masterOutgoingViewingPublicKey: PublicKey, + masterTaggingPublicKey: PublicKey, + partialAddress: PartialAddress, + ): CompleteAddress { + const completeAddress = new CompleteAddress( + address, + masterNullifierPublicKey, + masterIncomingViewingPublicKey, + masterOutgoingViewingPublicKey, + masterTaggingPublicKey, + partialAddress, + ); + completeAddress.validate(); return completeAddress; } - static random() { - // TODO(#5834): the following should be cleaned up - const secretKey = Fr.random(); - const partialAddress = Fr.random(); - const address = computeContractAddressFromPartial({ secretKey, partialAddress }); - const publicKey = deriveKeys(secretKey).masterIncomingViewingPublicKey; - return new CompleteAddress(address, publicKey, partialAddress); - } - - static fromRandomSecretKey() { - const secretKey = Fr.random(); - const partialAddress = Fr.random(); - return { secretKey, completeAddress: CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, partialAddress) }; + static random(): CompleteAddress { + return this.fromSecretKeyAndPartialAddress(Fr.random(), Fr.random()); } static fromSecretKeyAndPartialAddress(secretKey: Fr, partialAddress: Fr): CompleteAddress { - const address = computeContractAddressFromPartial({ secretKey, partialAddress }); - const publicKey = deriveKeys(secretKey).masterIncomingViewingPublicKey; - return new CompleteAddress(address, publicKey, partialAddress); + const { + 
masterNullifierPublicKey, + masterIncomingViewingPublicKey, + masterOutgoingViewingPublicKey, + masterTaggingPublicKey, + publicKeysHash, + } = deriveKeys(secretKey); + const address = computeAddress(publicKeysHash, partialAddress); + return new CompleteAddress( + address, + masterNullifierPublicKey, + masterIncomingViewingPublicKey, + masterOutgoingViewingPublicKey, + masterTaggingPublicKey, + partialAddress, + ); } static fromSecretKeyAndInstance( @@ -64,29 +85,31 @@ export class CompleteAddress { instance: Parameters[0], ): CompleteAddress { const partialAddress = computePartialAddress(instance); - const address = computeContractAddressFromPartial({ secretKey, partialAddress }); - const publicKey = deriveKeys(secretKey).masterIncomingViewingPublicKey; - return new CompleteAddress(address, publicKey, partialAddress); + return CompleteAddress.fromSecretKeyAndPartialAddress(secretKey, partialAddress); } - // TODO(#5834): re-enable validation - // /** Throws if the address is not correctly derived from the public key and partial address.*/ - // public validate() { - // const expectedAddress = computeContractAddressFromPartial(this); - // const address = this.address; - // if (!expectedAddress.equals(address)) { - // throw new Error( - // `Address cannot be derived from pubkey and partial address (received ${address.toString()}, derived ${expectedAddress.toString()})`, - // ); - // } - // } + /** Throws if the address is not correctly derived from the public key and partial address.*/ + public validate() { + const publicKeysHash = computePublicKeysHash( + this.masterNullifierPublicKey, + this.masterIncomingViewingPublicKey, + this.masterOutgoingViewingPublicKey, + this.masterTaggingPublicKey, + ); + const expectedAddress = computeAddress(publicKeysHash, this.partialAddress); + if (!expectedAddress.equals(this.address)) { + throw new Error( + `Address cannot be derived from public keys and partial address (received ${this.address.toString()}, derived 
${expectedAddress.toString()})`, + ); + } + } /** - * Gets a readable string representation of a the complete address. + * Gets a readable string representation of the complete address. * @returns A readable string representation of the complete address. */ public toReadableString(): string { - return ` Address: ${this.address.toString()}\n Public Key: ${this.publicKey.toString()}\n Partial Address: ${this.partialAddress.toString()}\n`; + return `Address: ${this.address.toString()}\nMaster Nullifier Public Key: ${this.masterNullifierPublicKey.toString()}\nMaster Incoming Viewing Public Key: ${this.masterIncomingViewingPublicKey.toString()}\nMaster Outgoing Viewing Public Key: ${this.masterOutgoingViewingPublicKey.toString()}\nMaster Tagging Public Key: ${this.masterTaggingPublicKey.toString()}\nPartial Address: ${this.partialAddress.toString()}\n`; } /** @@ -96,10 +119,13 @@ export class CompleteAddress { * @param other - The CompleteAddress instance to compare against. * @returns True if the buffers of both instances are equal, false otherwise. */ - equals(other: CompleteAddress) { + equals(other: CompleteAddress): boolean { return ( this.address.equals(other.address) && - this.publicKey.equals(other.publicKey) && + this.masterNullifierPublicKey.equals(other.masterNullifierPublicKey) && + this.masterIncomingViewingPublicKey.equals(other.masterIncomingViewingPublicKey) && + this.masterOutgoingViewingPublicKey.equals(other.masterOutgoingViewingPublicKey) && + this.masterTaggingPublicKey.equals(other.masterTaggingPublicKey) && this.partialAddress.equals(other.partialAddress) ); } @@ -110,8 +136,15 @@ export class CompleteAddress { * * @returns A Buffer representation of the CompleteAddress instance. 
*/ - toBuffer() { - return Buffer.concat([this.address.toBuffer(), this.publicKey.toBuffer(), this.partialAddress.toBuffer()]); + toBuffer(): Buffer { + return Buffer.concat([ + this.address.toBuffer(), + this.masterNullifierPublicKey.toBuffer(), + this.masterIncomingViewingPublicKey.toBuffer(), + this.masterOutgoingViewingPublicKey.toBuffer(), + this.masterTaggingPublicKey.toBuffer(), + this.partialAddress.toBuffer(), + ]); } /** @@ -122,12 +155,22 @@ export class CompleteAddress { * @param buffer - The input buffer or BufferReader containing the address data. * @returns - A new CompleteAddress instance with the extracted address data. */ - static fromBuffer(buffer: Buffer | BufferReader) { + static fromBuffer(buffer: Buffer | BufferReader): CompleteAddress { const reader = BufferReader.asReader(buffer); const address = reader.readObject(AztecAddress); - const publicKey = reader.readObject(Point); + const masterNullifierPublicKey = reader.readObject(Point); + const masterIncomingViewingPublicKey = reader.readObject(Point); + const masterOutgoingViewingPublicKey = reader.readObject(Point); + const masterTaggingPublicKey = reader.readObject(Point); const partialAddress = reader.readObject(Fr); - return new this(address, publicKey, partialAddress); + return new CompleteAddress( + address, + masterNullifierPublicKey, + masterIncomingViewingPublicKey, + masterOutgoingViewingPublicKey, + masterTaggingPublicKey, + partialAddress, + ); } /** @@ -151,4 +194,13 @@ export class CompleteAddress { toString(): string { return `0x${this.toBuffer().toString('hex')}`; } + + get publicKeysHash(): Fr { + return computePublicKeysHash( + this.masterNullifierPublicKey, + this.masterIncomingViewingPublicKey, + this.masterOutgoingViewingPublicKey, + this.masterTaggingPublicKey, + ); + } } diff --git a/yarn-project/end-to-end/src/benchmarks/utils.ts b/yarn-project/end-to-end/src/benchmarks/utils.ts index 0dbbe2d6162..1072040b1ce 100644 --- a/yarn-project/end-to-end/src/benchmarks/utils.ts 
+++ b/yarn-project/end-to-end/src/benchmarks/utils.ts @@ -127,7 +127,8 @@ export async function waitNewPXESynced( */ export async function waitRegisteredAccountSynced(pxe: PXE, secretKey: Fr, partialAddress: PartialAddress) { const l2Block = await pxe.getBlockNumber(); - const { publicKey } = await pxe.registerAccount(secretKey, partialAddress); - const isAccountSynced = async () => (await pxe.getSyncStatus()).notes[publicKey.toString()] === l2Block; + const { masterIncomingViewingPublicKey } = await pxe.registerAccount(secretKey, partialAddress); + const isAccountSynced = async () => + (await pxe.getSyncStatus()).notes[masterIncomingViewingPublicKey.toString()] === l2Block; await retryUntil(isAccountSynced, 'pxe-notes-sync'); } diff --git a/yarn-project/end-to-end/src/e2e_account_contracts.test.ts b/yarn-project/end-to-end/src/e2e_account_contracts.test.ts index e5f8f0743a5..68ac9c89e28 100644 --- a/yarn-project/end-to-end/src/e2e_account_contracts.test.ts +++ b/yarn-project/end-to-end/src/e2e_account_contracts.test.ts @@ -74,11 +74,7 @@ describe('e2e_account_contracts', () => { const walletAt = async (pxe: PXE, accountContract: AccountContract, address: CompleteAddress) => { const nodeInfo = await pxe.getNodeInfo(); - const publicKeysHash = await pxe.getRegisteredAccountPublicKeysHash(address.address); - if (!publicKeysHash) { - throw new Error(`Public keys hash for account ${address.address} not found`); - } - const entrypoint = accountContract.getInterface(address, publicKeysHash, nodeInfo); + const entrypoint = accountContract.getInterface(address, nodeInfo); return new AccountWallet(pxe, entrypoint); }; diff --git a/yarn-project/end-to-end/src/e2e_card_game.test.ts b/yarn-project/end-to-end/src/e2e_card_game.test.ts index f0949b16663..743ff3a38b3 100644 --- a/yarn-project/end-to-end/src/e2e_card_game.test.ts +++ b/yarn-project/end-to-end/src/e2e_card_game.test.ts @@ -105,7 +105,7 @@ describe('e2e_card_game', () => { const publicKey = 
deriveKeys(key).masterIncomingViewingPublicKey; return ( preRegisteredAccounts.find(preRegisteredAccount => { - return preRegisteredAccount.publicKey.equals(publicKey); + return preRegisteredAccount.masterIncomingViewingPublicKey.equals(publicKey); }) == undefined ); }); diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_test.ts index 05b31422828..ffafbb038e7 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_test.ts @@ -3,11 +3,10 @@ import { type AccountWallet, type AztecAddress, type AztecNode, - CompleteAddress, type ContractArtifact, type ContractBase, type DebugLogger, - type Fr, + Fr, type PXE, type Wallet, createDebugLogger, @@ -81,10 +80,8 @@ export class DeployTest { } async registerRandomAccount(): Promise { - const pxe = this.pxe; - const { completeAddress: owner, secretKey } = CompleteAddress.fromRandomSecretKey(); - await pxe.registerAccount(secretKey, owner.partialAddress); - return owner.address; + const completeAddress = await this.pxe.registerAccount(Fr.random(), Fr.random()); + return completeAddress.address; } } diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/legacy.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/legacy.test.ts index 403da38154c..8d50e4e91a1 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/legacy.test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/legacy.test.ts @@ -33,7 +33,7 @@ describe('e2e_deploy_contract legacy', () => { */ it('should deploy a test contract', async () => { const salt = Fr.random(); - const publicKeysHash = wallet.getPublicKeysHash(); + const publicKeysHash = wallet.getCompleteAddress().publicKeysHash; const deploymentData = getContractInstanceFromDeployParams(TestContractArtifact, { salt, publicKeysHash, diff --git a/yarn-project/end-to-end/src/e2e_key_registry.test.ts 
b/yarn-project/end-to-end/src/e2e_key_registry.test.ts index c770ceaf9dd..88d65a5037a 100644 --- a/yarn-project/end-to-end/src/e2e_key_registry.test.ts +++ b/yarn-project/end-to-end/src/e2e_key_registry.test.ts @@ -1,6 +1,5 @@ import { type AccountWallet, AztecAddress, Fr, type PXE } from '@aztec/aztec.js'; -import { CompleteAddress, GeneratorIndex, type PartialAddress, Point, deriveKeys } from '@aztec/circuits.js'; -import { poseidon2Hash } from '@aztec/foundation/crypto'; +import { CompleteAddress, Point } from '@aztec/circuits.js'; import { KeyRegistryContract, TestContract } from '@aztec/noir-contracts.js'; import { getCanonicalKeyRegistryAddress } from '@aztec/protocol-contracts/key-registry'; @@ -23,16 +22,7 @@ describe('Key Registry', () => { let teardown: () => Promise; - // TODO(#5834): use AztecAddress.compute or smt - const { - masterNullifierPublicKey, - masterIncomingViewingPublicKey, - masterOutgoingViewingPublicKey, - masterTaggingPublicKey, - publicKeysHash, - } = deriveKeys(Fr.random()); - const partialAddress: PartialAddress = Fr.random(); - let account: AztecAddress; + const account = CompleteAddress.random(); beforeAll(async () => { ({ teardown, pxe, wallets } = await setup(3)); @@ -41,11 +31,6 @@ describe('Key Registry', () => { testContract = await TestContract.deploy(wallets[0]).send().deployed(); await publicDeployAccounts(wallets[0], wallets.slice(0, 2)); - - // TODO(#5834): use AztecAddress.compute or smt - account = AztecAddress.fromField( - poseidon2Hash([publicKeysHash, partialAddress, GeneratorIndex.CONTRACT_ADDRESS_V1]), - ); }); const crossDelay = async () => { @@ -60,10 +45,10 @@ describe('Key Registry', () => { describe('failure cases', () => { it('throws when address preimage check fails', async () => { const keys = [ - masterNullifierPublicKey, - masterIncomingViewingPublicKey, - masterOutgoingViewingPublicKey, - masterTaggingPublicKey, + account.masterNullifierPublicKey, + account.masterIncomingViewingPublicKey, + 
account.masterOutgoingViewingPublicKey, + account.masterTaggingPublicKey, ]; // We randomly invalidate some of the keys @@ -72,7 +57,7 @@ describe('Key Registry', () => { await expect( keyRegistry .withWallet(wallets[0]) - .methods.register(AztecAddress.fromField(account), partialAddress, keys[0], keys[1], keys[2], keys[3]) + .methods.register(account, account.partialAddress, keys[0], keys[1], keys[2], keys[3]) .send() .wait(), ).rejects.toThrow('Computed address does not match supplied address'); @@ -82,7 +67,7 @@ describe('Key Registry', () => { await expect( keyRegistry .withWallet(wallets[0]) - .methods.rotate_nullifier_public_key(wallets[1].getAddress(), Point.random(), Fr.ZERO) + .methods.rotate_npk_m(wallets[1].getAddress(), Point.random(), Fr.ZERO) .send() .wait(), ).rejects.toThrow('Assertion failed: Message not authorized by account'); @@ -96,33 +81,20 @@ describe('Key Registry', () => { await expect( testContract.methods.test_nullifier_key_freshness(randomAddress, randomMasterNullifierPublicKey).send().wait(), - ).rejects.toThrow(`Cannot satisfy constraint 'computed_address.eq(address)'`); + ).rejects.toThrow(/No public key registered for address/); }); }); it('fresh key lib succeeds for non-registered account available in PXE', async () => { - // TODO(#5834): Make this not disgusting - const newAccountKeys = deriveKeys(Fr.random()); - const newAccountPartialAddress = Fr.random(); - const newAccount = AztecAddress.fromField( - poseidon2Hash([newAccountKeys.publicKeysHash, newAccountPartialAddress, GeneratorIndex.CONTRACT_ADDRESS_V1]), - ); - const newAccountCompleteAddress = CompleteAddress.create( - newAccount, - newAccountKeys.masterIncomingViewingPublicKey, - newAccountPartialAddress, - ); - - await pxe.registerRecipient(newAccountCompleteAddress, [ - newAccountKeys.masterNullifierPublicKey, - newAccountKeys.masterIncomingViewingPublicKey, - newAccountKeys.masterOutgoingViewingPublicKey, - newAccountKeys.masterTaggingPublicKey, - ]); + const 
newAccountCompleteAddress = CompleteAddress.random(); + await pxe.registerRecipient(newAccountCompleteAddress); // Should succeed as the account is now registered as a recipient in PXE await testContract.methods - .test_nullifier_key_freshness(newAccount, newAccountKeys.masterNullifierPublicKey) + .test_nullifier_key_freshness( + newAccountCompleteAddress.address, + newAccountCompleteAddress.masterNullifierPublicKey, + ) .send() .wait(); }); @@ -133,11 +105,11 @@ describe('Key Registry', () => { .withWallet(wallets[0]) .methods.register( account, - partialAddress, - masterNullifierPublicKey, - masterIncomingViewingPublicKey, - masterOutgoingViewingPublicKey, - masterTaggingPublicKey, + account.partialAddress, + account.masterNullifierPublicKey, + account.masterIncomingViewingPublicKey, + account.masterOutgoingViewingPublicKey, + account.masterTaggingPublicKey, ) .send() .wait(); @@ -157,13 +129,13 @@ describe('Key Registry', () => { .test_shared_mutable_private_getter_for_registry_contract(1, account) .simulate(); - expect(new Fr(nullifierPublicKeyX)).toEqual(masterNullifierPublicKey.x); + expect(new Fr(nullifierPublicKeyX)).toEqual(account.masterNullifierPublicKey.x); }); // Note: This test case is dependent on state from the previous one it('key lib succeeds for registered account', async () => { // Should succeed as the account is registered in key registry from tests before - await testContract.methods.test_nullifier_key_freshness(account, masterNullifierPublicKey).send().wait(); + await testContract.methods.test_nullifier_key_freshness(account, account.masterNullifierPublicKey).send().wait(); }); }); @@ -174,7 +146,7 @@ describe('Key Registry', () => { it('rotates npk_m', async () => { await keyRegistry .withWallet(wallets[0]) - .methods.rotate_nullifier_public_key(wallets[0].getAddress(), firstNewMasterNullifierPublicKey, Fr.ZERO) + .methods.rotate_npk_m(wallets[0].getAddress(), firstNewMasterNullifierPublicKey, Fr.ZERO) .send() .wait(); @@ -199,7 +171,7 @@ 
describe('Key Registry', () => { it(`rotates npk_m with authwit`, async () => { const action = keyRegistry .withWallet(wallets[1]) - .methods.rotate_nullifier_public_key(wallets[0].getAddress(), secondNewMasterNullifierPublicKey, Fr.ZERO); + .methods.rotate_npk_m(wallets[0].getAddress(), secondNewMasterNullifierPublicKey, Fr.ZERO); await wallets[0] .setPublicAuthWit({ caller: wallets[1].getCompleteAddress().address, action }, true) diff --git a/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts b/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts index a8b0ed53439..6aaae7545f7 100644 --- a/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts +++ b/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts @@ -50,7 +50,7 @@ describe('e2e_multiple_accounts_1_enc_key', () => { const encryptionPublicKey = deriveKeys(encryptionPrivateKey).masterIncomingViewingPublicKey; for (const account of accounts) { - expect(account.publicKey).toEqual(encryptionPublicKey); + expect(account.masterIncomingViewingPublicKey).toEqual(encryptionPublicKey); } logger.info(`Deploying Token...`); diff --git a/yarn-project/key-store/src/test_key_store.test.ts b/yarn-project/key-store/src/test_key_store.test.ts index 61647e0097c..2395dbf1472 100644 --- a/yarn-project/key-store/src/test_key_store.test.ts +++ b/yarn-project/key-store/src/test_key_store.test.ts @@ -11,7 +11,7 @@ describe('TestKeyStore', () => { const sk = new Fr(8923n); const partialAddress = new Fr(243523n); - const accountAddress = await keyStore.addAccount(sk, partialAddress); + const { address: accountAddress } = await keyStore.addAccount(sk, partialAddress); expect(accountAddress.toString()).toMatchInlineSnapshot( `"0x1a8a9a1d91cbb353d8df4f1bbfd0283f7fc63766f671edd9443a1270a7b2a954"`, ); diff --git a/yarn-project/key-store/src/test_key_store.ts b/yarn-project/key-store/src/test_key_store.ts index a3c0d4b239b..a21763ebf83 100644 --- 
a/yarn-project/key-store/src/test_key_store.ts +++ b/yarn-project/key-store/src/test_key_store.ts @@ -1,12 +1,14 @@ import { type KeyStore, type PublicKey } from '@aztec/circuit-types'; import { AztecAddress, + CompleteAddress, Fr, GeneratorIndex, type GrumpkinPrivateKey, GrumpkinScalar, type PartialAddress, Point, + computeAddress, computeAppNullifierSecretKey, deriveKeys, } from '@aztec/circuits.js'; @@ -26,9 +28,9 @@ export class TestKeyStore implements KeyStore { /** * Creates a new account from a randomly generated secret key. - * @returns A promise that resolves to the newly created account's AztecAddress. + * @returns A promise that resolves to the newly created account's CompleteAddress. */ - public createAccount(): Promise { + public createAccount(): Promise { const sk = Fr.random(); const partialAddress = Fr.random(); return this.addAccount(sk, partialAddress); @@ -38,9 +40,9 @@ export class TestKeyStore implements KeyStore { * Adds an account to the key store from the provided secret key. * @param sk - The secret key of the account. * @param partialAddress - The partial address of the account. - * @returns The account's address. + * @returns The account's complete address. */ - public async addAccount(sk: Fr, partialAddress: PartialAddress): Promise { + public async addAccount(sk: Fr, partialAddress: PartialAddress): Promise { const { publicKeysHash, masterNullifierSecretKey, @@ -53,10 +55,7 @@ export class TestKeyStore implements KeyStore { masterTaggingPublicKey, } = deriveKeys(sk); - // We hash the partial address and the public keys hash to get the account address - // TODO(#5726): Move the following line to AztecAddress class? 
- const accountAddressFr = poseidon2Hash([publicKeysHash, partialAddress, GeneratorIndex.CONTRACT_ADDRESS_V1]); - const accountAddress = AztecAddress.fromField(accountAddressFr); + const accountAddress = computeAddress(publicKeysHash, partialAddress); // We save the keys to db await this.#keys.set(`${accountAddress.toString()}-public_keys_hash`, publicKeysHash.toBuffer()); @@ -72,7 +71,16 @@ export class TestKeyStore implements KeyStore { await this.#keys.set(`${accountAddress.toString()}-tpk_m`, masterTaggingPublicKey.toBuffer()); // At last, we return the newly derived account address - return Promise.resolve(accountAddress); + return Promise.resolve( + CompleteAddress.create( + accountAddress, + masterNullifierPublicKey, + masterIncomingViewingPublicKey, + masterOutgoingViewingPublicKey, + masterTaggingPublicKey, + partialAddress, + ), + ); } /** @@ -292,18 +300,4 @@ export class TestKeyStore implements KeyStore { } return Promise.resolve(Fr.fromBuffer(publicKeysHashBuffer)); } - - // TODO(#5834): Re-add separation between recipients and accounts in keystore. 
- public async addPublicKeysForAccount( - accountAddress: AztecAddress, - masterNullifierPublicKey: Point, - masterIncomingViewingPublicKey: Point, - masterOutgoingViewingPublicKey: Point, - masterTaggingPublicKey: Point, - ): Promise { - await this.#keys.set(`${accountAddress.toString()}-npk_m`, masterNullifierPublicKey.toBuffer()); - await this.#keys.set(`${accountAddress.toString()}-ivpk_m`, masterIncomingViewingPublicKey.toBuffer()); - await this.#keys.set(`${accountAddress.toString()}-ovpk_m`, masterOutgoingViewingPublicKey.toBuffer()); - await this.#keys.set(`${accountAddress.toString()}-tpk_m`, masterTaggingPublicKey.toBuffer()); - } } diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index c07a29219de..7452f98f5b9 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -209,7 +209,7 @@ export class KVPxeDatabase implements PxeDatabase { #getNotes(filter: NoteFilter): NoteDao[] { const publicKey: PublicKey | undefined = filter.owner - ? this.#getCompleteAddress(filter.owner)?.publicKey + ? this.#getCompleteAddress(filter.owner)?.masterIncomingViewingPublicKey : undefined; filter.status = filter.status ?? 
NoteStatus.ACTIVE; diff --git a/yarn-project/pxe/src/database/pxe_database_test_suite.ts b/yarn-project/pxe/src/database/pxe_database_test_suite.ts index 9a1fbbe46f8..440df3db400 100644 --- a/yarn-project/pxe/src/database/pxe_database_test_suite.ts +++ b/yarn-project/pxe/src/database/pxe_database_test_suite.ts @@ -92,7 +92,10 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { [() => ({ txHash: notes[0].txHash }), () => [notes[0]]], [() => ({ txHash: randomTxHash() }), () => []], - [() => ({ owner: owners[0].address }), () => notes.filter(note => note.publicKey.equals(owners[0].publicKey))], + [ + () => ({ owner: owners[0].address }), + () => notes.filter(note => note.publicKey.equals(owners[0].masterIncomingViewingPublicKey)), + ], [ () => ({ contractAddress: contractAddresses[0], storageSlot: storageSlots[0] }), @@ -113,7 +116,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { randomNoteDao({ contractAddress: contractAddresses[i % contractAddresses.length], storageSlot: storageSlots[i % storageSlots.length], - publicKey: owners[i % owners.length].publicKey, + publicKey: owners[i % owners.length].masterIncomingViewingPublicKey, index: BigInt(i), }), ); @@ -142,9 +145,11 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { // Nullify all notes and use the same filter as other test cases for (const owner of owners) { - const notesToNullify = notes.filter(note => note.publicKey.equals(owner.publicKey)); + const notesToNullify = notes.filter(note => note.publicKey.equals(owner.masterIncomingViewingPublicKey)); const nullifiers = notesToNullify.map(note => note.siloedNullifier); - await expect(database.removeNullifiedNotes(nullifiers, owner.publicKey)).resolves.toEqual(notesToNullify); + await expect( + database.removeNullifiedNotes(nullifiers, owner.masterIncomingViewingPublicKey), + ).resolves.toEqual(notesToNullify); } await expect(database.getNotes({ ...getFilter(), status: 
NoteStatus.ACTIVE_OR_NULLIFIED })).resolves.toEqual( @@ -155,7 +160,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { it('skips nullified notes by default or when requesting active', async () => { await database.addNotes(notes); - const notesToNullify = notes.filter(note => note.publicKey.equals(owners[0].publicKey)); + const notesToNullify = notes.filter(note => note.publicKey.equals(owners[0].masterIncomingViewingPublicKey)); const nullifiers = notesToNullify.map(note => note.siloedNullifier); await expect(database.removeNullifiedNotes(nullifiers, notesToNullify[0].publicKey)).resolves.toEqual( notesToNullify, @@ -171,7 +176,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { it('returns active and nullified notes when requesting either', async () => { await database.addNotes(notes); - const notesToNullify = notes.filter(note => note.publicKey.equals(owners[0].publicKey)); + const notesToNullify = notes.filter(note => note.publicKey.equals(owners[0].masterIncomingViewingPublicKey)); const nullifiers = notesToNullify.map(note => note.siloedNullifier); await expect(database.removeNullifiedNotes(nullifiers, notesToNullify[0].publicKey)).resolves.toEqual( notesToNullify, @@ -215,7 +220,14 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { it.skip('refuses to overwrite an address with a different public key', async () => { const address = CompleteAddress.random(); - const otherAddress = new CompleteAddress(address.address, Point.random(), address.partialAddress); + const otherAddress = new CompleteAddress( + address.address, + Point.random(), + Point.random(), + Point.random(), + Point.random(), + address.partialAddress, + ); await database.addCompleteAddress(address); await expect(database.addCompleteAddress(otherAddress)).rejects.toThrow(); diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 3ed4fa30cf4..c918ffaafa9 100644 --- 
a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -25,7 +25,7 @@ import { type TxPXEProcessingStats } from '@aztec/circuit-types/stats'; import { AztecAddress, CallRequest, - CompleteAddress, + type CompleteAddress, FunctionData, MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, type PartialAddress, @@ -37,7 +37,7 @@ import { import { computeNoteHashNonce, siloNullifier } from '@aztec/circuits.js/hash'; import { type ContractArtifact, type DecodedReturn, FunctionSelector, encodeArguments } from '@aztec/foundation/abi'; import { arrayNonEmptyLength, padArrayEnd } from '@aztec/foundation/collection'; -import { Fr, type Point } from '@aztec/foundation/fields'; +import { Fr } from '@aztec/foundation/fields'; import { SerialQueue } from '@aztec/foundation/fifo'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; @@ -115,12 +115,12 @@ export class PXEService implements PXE { let count = 0; for (const address of registeredAddresses) { - if (!publicKeysSet.has(address.publicKey.toString())) { + if (!publicKeysSet.has(address.masterIncomingViewingPublicKey.toString())) { continue; } count++; - this.synchronizer.addAccount(address.publicKey, this.keyStore, this.config.l2StartingBlock); + this.synchronizer.addAccount(address.masterIncomingViewingPublicKey, this.keyStore, this.config.l2StartingBlock); } if (count > 0) { @@ -170,24 +170,21 @@ export class PXEService implements PXE { public async registerAccount(secretKey: Fr, partialAddress: PartialAddress): Promise { const accounts = await this.keyStore.getAccounts(); - const account = await this.keyStore.addAccount(secretKey, partialAddress); - const completeAddress = new CompleteAddress( - account, - await this.keyStore.getMasterIncomingViewingPublicKey(account), - partialAddress, - ); - if (accounts.includes(account)) { - this.log.info(`Account:\n "${completeAddress.address.toString()}"\n already 
registered.`); - return completeAddress; + const accountCompleteAddress = await this.keyStore.addAccount(secretKey, partialAddress); + if (accounts.includes(accountCompleteAddress.address)) { + this.log.info(`Account:\n "${accountCompleteAddress.address.toString()}"\n already registered.`); + return accountCompleteAddress; } else { - const masterIncomingViewingPublicKey = await this.keyStore.getMasterIncomingViewingPublicKey(account); + const masterIncomingViewingPublicKey = await this.keyStore.getMasterIncomingViewingPublicKey( + accountCompleteAddress.address, + ); this.synchronizer.addAccount(masterIncomingViewingPublicKey, this.keyStore, this.config.l2StartingBlock); - this.log.info(`Registered account ${completeAddress.address.toString()}`); - this.log.debug(`Registered account\n ${completeAddress.toReadableString()}`); + this.log.info(`Registered account ${accountCompleteAddress.address.toString()}`); + this.log.debug(`Registered account\n ${accountCompleteAddress.toReadableString()}`); } - await this.db.addCompleteAddress(completeAddress); - return completeAddress; + await this.db.addCompleteAddress(accountCompleteAddress); + return accountCompleteAddress; } public async getRegisteredAccounts(): Promise { @@ -214,20 +211,9 @@ export class PXEService implements PXE { return this.keyStore.getPublicKeysHash(address); } - public async registerRecipient(recipient: CompleteAddress, publicKeys: Point[] = []): Promise { + public async registerRecipient(recipient: CompleteAddress): Promise { const wasAdded = await this.db.addCompleteAddress(recipient); - // TODO #5834: This should be refactored to be okay with only adding complete address - if (publicKeys.length !== 0) { - await this.keyStore.addPublicKeysForAccount( - recipient.address, - publicKeys[0], - publicKeys[1], - publicKeys[2], - publicKeys[3], - ); - } - if (wasAdded) { this.log.info(`Added recipient:\n ${recipient.toReadableString()}`); } else { @@ -306,7 +292,7 @@ export class PXEService implements PXE { 
let owner = filter.owner; if (owner === undefined) { const completeAddresses = (await this.db.getCompleteAddresses()).find(address => - address.publicKey.equals(dao.publicKey), + address.masterIncomingViewingPublicKey.equals(dao.publicKey), ); if (completeAddresses === undefined) { throw new Error(`Cannot find complete address for public key ${dao.publicKey.toString()}`); @@ -319,8 +305,8 @@ export class PXEService implements PXE { } public async addNote(note: ExtendedNote) { - const { publicKey } = (await this.db.getCompleteAddress(note.owner)) ?? {}; - if (!publicKey) { + const { masterIncomingViewingPublicKey } = (await this.db.getCompleteAddress(note.owner)) ?? {}; + if (!masterIncomingViewingPublicKey) { throw new Error('Unknown account.'); } @@ -360,7 +346,7 @@ export class PXEService implements PXE { innerNoteHash, siloedNullifier, index, - publicKey, + masterIncomingViewingPublicKey, ), ); } diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts b/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts index 75d2f11b142..cf9e7d6c4c4 100644 --- a/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts +++ b/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts @@ -70,7 +70,14 @@ export const pxeTestSuite = (testName: string, pxeSetup: () => Promise) => it('cannot register a recipient with the same aztec address but different pub key or partial address', async () => { const recipient1 = CompleteAddress.random(); - const recipient2 = new CompleteAddress(recipient1.address, Point.random(), Fr.random()); + const recipient2 = new CompleteAddress( + recipient1.address, + Point.random(), + Point.random(), + Point.random(), + Point.random(), + Fr.random(), + ); await pxe.registerRecipient(recipient1); await expect(() => pxe.registerRecipient(recipient2)).rejects.toThrow( diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 12e540148b7..ac9ee3566ef 100644 --- 
a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -15,7 +15,6 @@ import { type FunctionSelector, type Header, type L1_TO_L2_MSG_TREE_HEIGHT, - type Point, } from '@aztec/circuits.js'; import { computeL1ToL2MessageNullifier } from '@aztec/circuits.js/hash'; import { type FunctionArtifact, getFunctionArtifact } from '@aztec/foundation/abi'; @@ -44,7 +43,6 @@ export class SimulatorOracle implements DBOracle { return { masterNullifierPublicKey, appNullifierSecretKey }; } - // TODO: #5834 async getCompleteAddress(address: AztecAddress): Promise { const completeAddress = await this.db.getCompleteAddress(address); if (!completeAddress) { @@ -79,16 +77,6 @@ export class SimulatorOracle implements DBOracle { return capsule; } - // TODO: #5834 - async getPublicKeysForAddress(address: AztecAddress): Promise { - const nullifierPublicKey = await this.keyStore.getMasterNullifierPublicKey(address); - const incomingViewingPublicKey = await this.keyStore.getMasterIncomingViewingPublicKey(address); - const outgoingViewingPublicKey = await this.keyStore.getMasterOutgoingViewingPublicKey(address); - const taggingPublicKey = await this.keyStore.getMasterTaggingPublicKey(address); - - return [nullifierPublicKey, incomingViewingPublicKey, outgoingViewingPublicKey, taggingPublicKey]; - } - async getNotes(contractAddress: AztecAddress, storageSlot: Fr, status: NoteStatus) { const noteDaos = await this.db.getNotes({ contractAddress, diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts index f8deb8b8ca3..1c145eb3302 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts @@ -1,5 +1,5 @@ import { type AztecNode, L2Block } from '@aztec/circuit-types'; -import { CompleteAddress, Fr, type Header, INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js'; +import { Fr, type Header, INITIAL_L2_BLOCK_NUM } from 
'@aztec/circuits.js'; import { makeHeader } from '@aztec/circuits.js/testing'; import { randomInt } from '@aztec/foundation/crypto'; import { SerialQueue } from '@aztec/foundation/fifo'; @@ -130,12 +130,9 @@ describe('Synchronizer', () => { const addAddress = async (startingBlockNum: number) => { const secretKey = Fr.random(); const partialAddress = Fr.random(); - const accountAddress = await keyStore.addAccount(secretKey, partialAddress); - const masterIncomingViewingPublicKey = await keyStore.getMasterIncomingViewingPublicKey(accountAddress); - - const completeAddress = new CompleteAddress(accountAddress, masterIncomingViewingPublicKey, partialAddress); + const completeAddress = await keyStore.addAccount(secretKey, partialAddress); await database.addCompleteAddress(completeAddress); - synchronizer.addAccount(completeAddress.publicKey, keyStore, startingBlockNum); + synchronizer.addAccount(completeAddress.masterIncomingViewingPublicKey, keyStore, startingBlockNum); return completeAddress; }; diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts b/yarn-project/pxe/src/synchronizer/synchronizer.ts index dc7f1890877..d7da26c991e 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts @@ -285,7 +285,8 @@ export class Synchronizer { if (!completeAddress) { throw new Error(`Checking if account is synched is not possible for ${account} because it is not registered.`); } - const findByPublicKey = (x: NoteProcessor) => x.masterIncomingViewingPublicKey.equals(completeAddress.publicKey); + const findByPublicKey = (x: NoteProcessor) => + x.masterIncomingViewingPublicKey.equals(completeAddress.masterIncomingViewingPublicKey); const processor = this.noteProcessors.find(findByPublicKey) ?? 
this.noteProcessorsToCatchUp.find(findByPublicKey); if (!processor) { throw new Error( diff --git a/yarn-project/simulator/src/acvm/oracle/oracle.ts b/yarn-project/simulator/src/acvm/oracle/oracle.ts index 415f8c3e84e..7df1704f427 100644 --- a/yarn-project/simulator/src/acvm/oracle/oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/oracle.ts @@ -1,5 +1,5 @@ import { MerkleTreeId, UnencryptedL2Log } from '@aztec/circuit-types'; -import { type PartialAddress, acvmFieldMessageToString, oracleDebugCallToFormattedStr } from '@aztec/circuits.js'; +import { acvmFieldMessageToString, oracleDebugCallToFormattedStr } from '@aztec/circuits.js'; import { EventSelector, FunctionSelector } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr, Point } from '@aztec/foundation/fields'; @@ -53,14 +53,6 @@ export class Oracle { ]; } - // TODO: #5834 Nuke this - async getPublicKeyAndPartialAddress([address]: ACVMField[]) { - const { publicKey, partialAddress } = await this.typedOracle.getCompleteAddress( - AztecAddress.fromField(fromACVMField(address)), - ); - return [publicKey.x, publicKey.y, partialAddress].map(toACVMField); - } - async getContractInstance([address]: ACVMField[]) { const instance = await this.typedOracle.getContractInstance(AztecAddress.fromField(fromACVMField(address))); @@ -173,25 +165,22 @@ export class Oracle { } async getPublicKeysAndPartialAddress([address]: ACVMField[]): Promise { - let publicKeys: Point[] | undefined; - let partialAddress: PartialAddress; - - // TODO #5834: This should be reworked to return the public keys as well - try { - ({ partialAddress } = await this.typedOracle.getCompleteAddress(AztecAddress.fromField(fromACVMField(address)))); - } catch (err) { - partialAddress = Fr.ZERO; - } + const parsedAddress = AztecAddress.fromField(fromACVMField(address)); + const { + masterNullifierPublicKey, + masterIncomingViewingPublicKey, + masterOutgoingViewingPublicKey, + masterTaggingPublicKey, 
+ partialAddress, + } = await this.typedOracle.getCompleteAddress(parsedAddress); - try { - publicKeys = await this.typedOracle.getPublicKeysForAddress(AztecAddress.fromField(fromACVMField(address))); - } catch (err) { - publicKeys = Array(4).fill(Point.ZERO); - } - - const acvmPublicKeys = publicKeys.flatMap(key => key.toFields()); - - return [...acvmPublicKeys, partialAddress].map(toACVMField); + return [ + ...masterNullifierPublicKey.toFields(), + ...masterIncomingViewingPublicKey.toFields(), + ...masterOutgoingViewingPublicKey.toFields(), + ...masterTaggingPublicKey.toFields(), + partialAddress, + ].map(toACVMField); } async getNotes( diff --git a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts index 171ccb4d757..231d8cd99d1 100644 --- a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts @@ -17,7 +17,7 @@ import { } from '@aztec/circuits.js'; import { type FunctionSelector } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; -import { Fr, type Point } from '@aztec/foundation/fields'; +import { Fr } from '@aztec/foundation/fields'; import { type ContractInstance } from '@aztec/types/contracts'; /** Nullifier keys which both correspond to the same master nullifier secret key. 
*/ @@ -93,10 +93,6 @@ export abstract class TypedOracle { throw new OracleMethodNotAvailableError('getNullifierKeys'); } - getPublicKeyAndPartialAddress(_address: AztecAddress): Promise { - throw new OracleMethodNotAvailableError('getPublicKeyAndPartialAddress'); - } - getContractInstance(_address: AztecAddress): Promise { throw new OracleMethodNotAvailableError('getContractInstance'); } @@ -140,10 +136,6 @@ export abstract class TypedOracle { throw new OracleMethodNotAvailableError('popCapsule'); } - getPublicKeysForAddress(_address: AztecAddress): Promise { - throw new OracleMethodNotAvailableError('getPublicKeysForAddress'); - } - getNotes( _storageSlot: Fr, _numSelects: number, diff --git a/yarn-project/simulator/src/client/db_oracle.ts b/yarn-project/simulator/src/client/db_oracle.ts index a7e78619eb1..0fbdbd64364 100644 --- a/yarn-project/simulator/src/client/db_oracle.ts +++ b/yarn-project/simulator/src/client/db_oracle.ts @@ -8,7 +8,7 @@ import { import { type CompleteAddress, type Header } from '@aztec/circuits.js'; import { type FunctionArtifact, type FunctionSelector } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; -import { type Fr, type Point } from '@aztec/foundation/fields'; +import { type Fr } from '@aztec/foundation/fields'; import { type ContractInstance } from '@aztec/types/contracts'; import { type NoteData, type NullifierKeys } from '../acvm/index.js'; @@ -64,14 +64,6 @@ export interface DBOracle extends CommitmentsDB { */ popCapsule(): Promise; - /** - * Gets public keys for an address. - * @param The address to look up - * @returns The public keys for a specific address - * TODO(#5834): Replace with `getCompleteAddress`. - */ - getPublicKeysForAddress(address: AztecAddress): Promise; - /** * Retrieve nullifier keys associated with a specific account and app/contract address. 
* diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index 1037f15109e..8bfb5019bc9 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -1053,7 +1053,7 @@ describe('Private Execution test suite', () => { // Generate a partial address, pubkey, and resulting address const completeAddress = CompleteAddress.random(); const args = [completeAddress.address]; - const pubKey = completeAddress.publicKey; + const pubKey = completeAddress.masterIncomingViewingPublicKey; oracle.getCompleteAddress.mockResolvedValue(completeAddress); const result = await runSimulator({ artifact, args }); diff --git a/yarn-project/simulator/src/client/view_data_oracle.ts b/yarn-project/simulator/src/client/view_data_oracle.ts index 50dc2552c25..b4c02039175 100644 --- a/yarn-project/simulator/src/client/view_data_oracle.ts +++ b/yarn-project/simulator/src/client/view_data_oracle.ts @@ -166,16 +166,6 @@ export class ViewDataOracle extends TypedOracle { return this.db.popCapsule(); } - /** - * Gets public keys for an address. - * @param The address to look up - * @returns The public keys for a specific address - * TODO(#5834): Replace with `getCompleteAddress`. - */ - public override getPublicKeysForAddress(address: AztecAddress) { - return this.db.getPublicKeysForAddress(address); - } - /** * Gets some notes for a contract address and storage slot. * Returns a flattened array containing filtered notes. From 3cda21a9e2ff598232fe0119a235e98463ec718b Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Fri, 10 May 2024 10:14:51 -0400 Subject: [PATCH 43/43] feat: Sync from noir (#6332) Automated pull of development from the [noir](https://github.com/noir-lang/noir) programming language, a dependency of Aztec. 
BEGIN_COMMIT_OVERRIDE fix: Ignore no_predicates in brillig functions (https://github.com/noir-lang/noir/pull/5012) chore(experimental): Add compiler option to enable the Elaborator (https://github.com/noir-lang/noir/pull/5003) chore(experimental): Add Elaborator pass (https://github.com/noir-lang/noir/pull/4992) END_COMMIT_OVERRIDE --------- Co-authored-by: sirasistant --- .noir-sync-commit | 2 +- noir/noir-repo/acvm-repo/acvm_js/build.sh | 2 +- .../compiler/noirc_driver/src/lib.rs | 25 +- .../noirc_driver/tests/stdlib_warnings.rs | 3 +- .../noirc_evaluator/src/ssa/opt/inlining.rs | 7 +- .../noirc_frontend/src/ast/function.rs | 9 + .../noirc_frontend/src/ast/statement.rs | 5 +- .../src/elaborator/expressions.rs | 604 +++++++ .../noirc_frontend/src/elaborator/mod.rs | 782 +++++++++ .../noirc_frontend/src/elaborator/patterns.rs | 465 ++++++ .../noirc_frontend/src/elaborator/scope.rs | 200 +++ .../src/elaborator/statements.rs | 409 +++++ .../noirc_frontend/src/elaborator/types.rs | 1438 +++++++++++++++++ .../src/hir/def_collector/dc_crate.rs | 104 +- .../src/hir/def_collector/dc_mod.rs | 16 +- .../noirc_frontend/src/hir/def_map/mod.rs | 10 +- .../src/hir/resolution/import.rs | 9 + .../src/hir/resolution/resolver.rs | 12 +- .../noirc_frontend/src/hir/type_check/expr.rs | 4 +- .../noirc_frontend/src/hir/type_check/mod.rs | 2 +- .../noirc_frontend/src/hir_def/expr.rs | 11 +- .../noirc_frontend/src/hir_def/function.rs | 5 +- .../noirc_frontend/src/hir_def/types.rs | 8 +- .../compiler/noirc_frontend/src/lib.rs | 1 + .../noirc_frontend/src/node_interner.rs | 6 +- .../compiler/noirc_frontend/src/tests.rs | 1 + .../no_predicates_brillig/Nargo.toml | 7 + .../no_predicates_brillig/Prover.toml | 2 + .../no_predicates_brillig/src/main.nr | 12 + noir/noir-repo/tooling/lsp/src/lib.rs | 2 +- .../tooling/lsp/src/notifications/mod.rs | 4 +- .../lsp/src/requests/code_lens_request.rs | 2 +- .../lsp/src/requests/goto_declaration.rs | 2 +- .../lsp/src/requests/goto_definition.rs | 2 +- 
.../tooling/lsp/src/requests/test_run.rs | 2 +- .../tooling/lsp/src/requests/tests.rs | 2 +- .../tooling/nargo_cli/src/cli/check_cmd.rs | 4 +- .../tooling/nargo_cli/src/cli/export_cmd.rs | 1 + .../tooling/nargo_cli/src/cli/test_cmd.rs | 2 + .../tooling/nargo_cli/tests/stdlib-tests.rs | 17 +- .../tooling/noir_js/test/node/execute.test.ts | 48 + 41 files changed, 4145 insertions(+), 104 deletions(-) create mode 100644 noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs create mode 100644 noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs create mode 100644 noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs create mode 100644 noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs create mode 100644 noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs create mode 100644 noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs create mode 100644 noir/noir-repo/test_programs/execution_success/no_predicates_brillig/Nargo.toml create mode 100644 noir/noir-repo/test_programs/execution_success/no_predicates_brillig/Prover.toml create mode 100644 noir/noir-repo/test_programs/execution_success/no_predicates_brillig/src/main.nr diff --git a/.noir-sync-commit b/.noir-sync-commit index 5fe0fbedd16..e7c73939ac6 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -95d4d133d1eb5e0eb44cd928d8183d890e970a13 +b541e793e20fa3c991e0328ec2ff7926bdcdfd45 diff --git a/noir/noir-repo/acvm-repo/acvm_js/build.sh b/noir/noir-repo/acvm-repo/acvm_js/build.sh index c07d2d8a4c1..ee93413ab85 100755 --- a/noir/noir-repo/acvm-repo/acvm_js/build.sh +++ b/noir/noir-repo/acvm-repo/acvm_js/build.sh @@ -25,7 +25,7 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -#require_command wasm-opt +# require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs 
b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index 5f1985b0553..801c0b685a9 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -103,6 +103,10 @@ pub struct CompileOptions { /// Force Brillig output (for step debugging) #[arg(long, hide = true)] pub force_brillig: bool, + + /// Enable the experimental elaborator pass + #[arg(long, hide = true)] + pub use_elaborator: bool, } fn parse_expression_width(input: &str) -> Result { @@ -245,12 +249,13 @@ pub fn check_crate( crate_id: CrateId, deny_warnings: bool, disable_macros: bool, + use_elaborator: bool, ) -> CompilationResult<()> { let macros: &[&dyn MacroProcessor] = if disable_macros { &[] } else { &[&aztec_macros::AztecMacro as &dyn MacroProcessor] }; let mut errors = vec![]; - let diagnostics = CrateDefMap::collect_defs(crate_id, context, macros); + let diagnostics = CrateDefMap::collect_defs(crate_id, context, use_elaborator, macros); errors.extend(diagnostics.into_iter().map(|(error, file_id)| { let diagnostic = CustomDiagnostic::from(&error); diagnostic.in_file(file_id) @@ -282,8 +287,13 @@ pub fn compile_main( options: &CompileOptions, cached_program: Option, ) -> CompilationResult { - let (_, mut warnings) = - check_crate(context, crate_id, options.deny_warnings, options.disable_macros)?; + let (_, mut warnings) = check_crate( + context, + crate_id, + options.deny_warnings, + options.disable_macros, + options.use_elaborator, + )?; let main = context.get_main_function(&crate_id).ok_or_else(|| { // TODO(#2155): This error might be a better to exist in Nargo @@ -318,8 +328,13 @@ pub fn compile_contract( crate_id: CrateId, options: &CompileOptions, ) -> CompilationResult { - let (_, warnings) = - check_crate(context, crate_id, options.deny_warnings, options.disable_macros)?; + let (_, warnings) = check_crate( + context, + crate_id, + options.deny_warnings, + options.disable_macros, + options.use_elaborator, + )?; // TODO: We probably want to 
error if contracts is empty let contracts = context.get_all_contracts(&crate_id); diff --git a/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs b/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs index 6f437621123..327c8daad06 100644 --- a/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs +++ b/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs @@ -24,7 +24,8 @@ fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings> let mut context = Context::new(file_manager, parsed_files); let root_crate_id = prepare_crate(&mut context, file_name); - let ((), warnings) = noirc_driver::check_crate(&mut context, root_crate_id, false, false)?; + let ((), warnings) = + noirc_driver::check_crate(&mut context, root_crate_id, false, false, false)?; assert_eq!(warnings, Vec::new(), "stdlib is producing warnings"); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index bddfb25f26c..77b9e545e03 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -11,7 +11,7 @@ use crate::ssa::{ ir::{ basic_block::BasicBlockId, dfg::{CallStack, InsertInstructionResult}, - function::{Function, FunctionId}, + function::{Function, FunctionId, RuntimeType}, instruction::{Instruction, InstructionId, TerminatorInstruction}, value::{Value, ValueId}, }, @@ -392,10 +392,11 @@ impl<'function> PerFunctionContext<'function> { Some(func_id) => { let function = &ssa.functions[&func_id]; // If we have not already finished the flattening pass, functions marked - // to not have predicates should be marked as entry points. + // to not have predicates should be marked as entry points unless we are inlining into brillig. 
let no_predicates_is_entry_point = self.context.no_predicates_is_entry_point - && function.is_no_predicates(); + && function.is_no_predicates() + && !matches!(self.source_function.runtime(), RuntimeType::Brillig); if function.runtime().is_entry_point() || no_predicates_is_entry_point { self.push_instruction(*id); } else { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/function.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/function.rs index dc426a4642a..8acc068d86a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/function.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/function.rs @@ -32,6 +32,15 @@ pub enum FunctionKind { Recursive, } +impl FunctionKind { + pub fn can_ignore_return_type(self) -> bool { + match self { + FunctionKind::LowLevel | FunctionKind::Builtin | FunctionKind::Oracle => true, + FunctionKind::Normal | FunctionKind::Recursive => false, + } + } +} + impl NoirFunction { pub fn normal(def: FunctionDefinition) -> NoirFunction { NoirFunction { kind: FunctionKind::Normal, def } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs index 0da39edfd85..94b5841e52c 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs @@ -565,7 +565,7 @@ impl ForRange { identifier: Ident, block: Expression, for_loop_span: Span, - ) -> StatementKind { + ) -> Statement { /// Counter used to generate unique names when desugaring /// code in the parser requires the creation of fresh variables. /// The parser is stateless so this is a static global instead. 
@@ -662,7 +662,8 @@ impl ForRange { let block = ExpressionKind::Block(BlockExpression { statements: vec![let_array, for_loop], }); - StatementKind::Expression(Expression::new(block, for_loop_span)) + let kind = StatementKind::Expression(Expression::new(block, for_loop_span)); + Statement { kind, span: for_loop_span } } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs new file mode 100644 index 00000000000..ed8ed5305d1 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -0,0 +1,604 @@ +use iter_extended::vecmap; +use noirc_errors::{Location, Span}; +use regex::Regex; +use rustc_hash::FxHashSet as HashSet; + +use crate::{ + ast::{ + ArrayLiteral, ConstructorExpression, IfExpression, InfixExpression, Lambda, + UnresolvedTypeExpression, + }, + hir::{ + resolution::{errors::ResolverError, resolver::LambdaContext}, + type_check::TypeCheckError, + }, + hir_def::{ + expr::{ + HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCastExpression, + HirConstructorExpression, HirIdent, HirIfExpression, HirIndexExpression, + HirInfixExpression, HirLambda, HirMemberAccess, HirMethodCallExpression, + HirMethodReference, HirPrefixExpression, + }, + traits::TraitConstraint, + }, + macros_api::{ + BlockExpression, CallExpression, CastExpression, Expression, ExpressionKind, HirExpression, + HirLiteral, HirStatement, Ident, IndexExpression, Literal, MemberAccessExpression, + MethodCallExpression, PrefixExpression, + }, + node_interner::{DefinitionKind, ExprId, FuncId}, + Shared, StructType, Type, +}; + +use super::Elaborator; + +impl<'context> Elaborator<'context> { + pub(super) fn elaborate_expression(&mut self, expr: Expression) -> (ExprId, Type) { + let (hir_expr, typ) = match expr.kind { + ExpressionKind::Literal(literal) => self.elaborate_literal(literal, expr.span), + ExpressionKind::Block(block) => 
self.elaborate_block(block), + ExpressionKind::Prefix(prefix) => self.elaborate_prefix(*prefix), + ExpressionKind::Index(index) => self.elaborate_index(*index), + ExpressionKind::Call(call) => self.elaborate_call(*call, expr.span), + ExpressionKind::MethodCall(call) => self.elaborate_method_call(*call, expr.span), + ExpressionKind::Constructor(constructor) => self.elaborate_constructor(*constructor), + ExpressionKind::MemberAccess(access) => { + return self.elaborate_member_access(*access, expr.span) + } + ExpressionKind::Cast(cast) => self.elaborate_cast(*cast, expr.span), + ExpressionKind::Infix(infix) => return self.elaborate_infix(*infix, expr.span), + ExpressionKind::If(if_) => self.elaborate_if(*if_), + ExpressionKind::Variable(variable) => return self.elaborate_variable(variable), + ExpressionKind::Tuple(tuple) => self.elaborate_tuple(tuple), + ExpressionKind::Lambda(lambda) => self.elaborate_lambda(*lambda), + ExpressionKind::Parenthesized(expr) => return self.elaborate_expression(*expr), + ExpressionKind::Quote(quote) => self.elaborate_quote(quote), + ExpressionKind::Comptime(comptime) => self.elaborate_comptime_block(comptime), + ExpressionKind::Error => (HirExpression::Error, Type::Error), + }; + let id = self.interner.push_expr(hir_expr); + self.interner.push_expr_location(id, expr.span, self.file); + self.interner.push_expr_type(id, typ.clone()); + (id, typ) + } + + pub(super) fn elaborate_block(&mut self, block: BlockExpression) -> (HirExpression, Type) { + self.push_scope(); + let mut block_type = Type::Unit; + let mut statements = Vec::with_capacity(block.statements.len()); + + for (i, statement) in block.statements.into_iter().enumerate() { + let (id, stmt_type) = self.elaborate_statement(statement); + statements.push(id); + + if let HirStatement::Semi(expr) = self.interner.statement(&id) { + let inner_expr_type = self.interner.id_type(expr); + let span = self.interner.expr_span(&expr); + + self.unify(&inner_expr_type, &Type::Unit, || 
TypeCheckError::UnusedResultError { + expr_type: inner_expr_type.clone(), + expr_span: span, + }); + + if i + 1 == statements.len() { + block_type = stmt_type; + } + } + } + + self.pop_scope(); + (HirExpression::Block(HirBlockExpression { statements }), block_type) + } + + fn elaborate_literal(&mut self, literal: Literal, span: Span) -> (HirExpression, Type) { + use HirExpression::Literal as Lit; + match literal { + Literal::Unit => (Lit(HirLiteral::Unit), Type::Unit), + Literal::Bool(b) => (Lit(HirLiteral::Bool(b)), Type::Bool), + Literal::Integer(integer, sign) => { + let int = HirLiteral::Integer(integer, sign); + (Lit(int), self.polymorphic_integer_or_field()) + } + Literal::Str(str) | Literal::RawStr(str, _) => { + let len = Type::Constant(str.len() as u64); + (Lit(HirLiteral::Str(str)), Type::String(Box::new(len))) + } + Literal::FmtStr(str) => self.elaborate_fmt_string(str, span), + Literal::Array(array_literal) => { + self.elaborate_array_literal(array_literal, span, true) + } + Literal::Slice(array_literal) => { + self.elaborate_array_literal(array_literal, span, false) + } + } + } + + fn elaborate_array_literal( + &mut self, + array_literal: ArrayLiteral, + span: Span, + is_array: bool, + ) -> (HirExpression, Type) { + let (expr, elem_type, length) = match array_literal { + ArrayLiteral::Standard(elements) => { + let first_elem_type = self.interner.next_type_variable(); + let first_span = elements.first().map(|elem| elem.span).unwrap_or(span); + + let elements = vecmap(elements.into_iter().enumerate(), |(i, elem)| { + let span = elem.span; + let (elem_id, elem_type) = self.elaborate_expression(elem); + + self.unify(&elem_type, &first_elem_type, || { + TypeCheckError::NonHomogeneousArray { + first_span, + first_type: first_elem_type.to_string(), + first_index: 0, + second_span: span, + second_type: elem_type.to_string(), + second_index: i, + } + .add_context("elements in an array must have the same type") + }); + elem_id + }); + + let length = 
Type::Constant(elements.len() as u64); + (HirArrayLiteral::Standard(elements), first_elem_type, length) + } + ArrayLiteral::Repeated { repeated_element, length } => { + let span = length.span; + let length = + UnresolvedTypeExpression::from_expr(*length, span).unwrap_or_else(|error| { + self.push_err(ResolverError::ParserError(Box::new(error))); + UnresolvedTypeExpression::Constant(0, span) + }); + + let length = self.convert_expression_type(length); + let (repeated_element, elem_type) = self.elaborate_expression(*repeated_element); + + let length_clone = length.clone(); + (HirArrayLiteral::Repeated { repeated_element, length }, elem_type, length_clone) + } + }; + let constructor = if is_array { HirLiteral::Array } else { HirLiteral::Slice }; + let elem_type = Box::new(elem_type); + let typ = if is_array { + Type::Array(Box::new(length), elem_type) + } else { + Type::Slice(elem_type) + }; + (HirExpression::Literal(constructor(expr)), typ) + } + + fn elaborate_fmt_string(&mut self, str: String, call_expr_span: Span) -> (HirExpression, Type) { + let re = Regex::new(r"\{([a-zA-Z0-9_]+)\}") + .expect("ICE: an invalid regex pattern was used for checking format strings"); + + let mut fmt_str_idents = Vec::new(); + let mut capture_types = Vec::new(); + + for field in re.find_iter(&str) { + let matched_str = field.as_str(); + let ident_name = &matched_str[1..(matched_str.len() - 1)]; + + let scope_tree = self.scopes.current_scope_tree(); + let variable = scope_tree.find(ident_name); + if let Some((old_value, _)) = variable { + old_value.num_times_used += 1; + let ident = HirExpression::Ident(old_value.ident.clone()); + let expr_id = self.interner.push_expr(ident); + self.interner.push_expr_location(expr_id, call_expr_span, self.file); + let ident = old_value.ident.clone(); + let typ = self.type_check_variable(ident, expr_id); + self.interner.push_expr_type(expr_id, typ.clone()); + capture_types.push(typ); + fmt_str_idents.push(expr_id); + } else if 
ident_name.parse::().is_ok() { + self.push_err(ResolverError::NumericConstantInFormatString { + name: ident_name.to_owned(), + span: call_expr_span, + }); + } else { + self.push_err(ResolverError::VariableNotDeclared { + name: ident_name.to_owned(), + span: call_expr_span, + }); + } + } + + let len = Type::Constant(str.len() as u64); + let typ = Type::FmtString(Box::new(len), Box::new(Type::Tuple(capture_types))); + (HirExpression::Literal(HirLiteral::FmtStr(str, fmt_str_idents)), typ) + } + + fn elaborate_prefix(&mut self, prefix: PrefixExpression) -> (HirExpression, Type) { + let span = prefix.rhs.span; + let (rhs, rhs_type) = self.elaborate_expression(prefix.rhs); + let ret_type = self.type_check_prefix_operand(&prefix.operator, &rhs_type, span); + (HirExpression::Prefix(HirPrefixExpression { operator: prefix.operator, rhs }), ret_type) + } + + fn elaborate_index(&mut self, index_expr: IndexExpression) -> (HirExpression, Type) { + let span = index_expr.index.span; + let (index, index_type) = self.elaborate_expression(index_expr.index); + + let expected = self.polymorphic_integer_or_field(); + self.unify(&index_type, &expected, || TypeCheckError::TypeMismatch { + expected_typ: "an integer".to_owned(), + expr_typ: index_type.to_string(), + expr_span: span, + }); + + // When writing `a[i]`, if `a : &mut ...` then automatically dereference `a` as many + // times as needed to get the underlying array. 
+ let lhs_span = index_expr.collection.span; + let (lhs, lhs_type) = self.elaborate_expression(index_expr.collection); + let (collection, lhs_type) = self.insert_auto_dereferences(lhs, lhs_type); + + let typ = match lhs_type.follow_bindings() { + // XXX: We can check the array bounds here also, but it may be better to constant fold first + // and have ConstId instead of ExprId for constants + Type::Array(_, base_type) => *base_type, + Type::Slice(base_type) => *base_type, + Type::Error => Type::Error, + typ => { + self.push_err(TypeCheckError::TypeMismatch { + expected_typ: "Array".to_owned(), + expr_typ: typ.to_string(), + expr_span: lhs_span, + }); + Type::Error + } + }; + + let expr = HirExpression::Index(HirIndexExpression { collection, index }); + (expr, typ) + } + + fn elaborate_call(&mut self, call: CallExpression, span: Span) -> (HirExpression, Type) { + let (func, func_type) = self.elaborate_expression(*call.func); + + let mut arguments = Vec::with_capacity(call.arguments.len()); + let args = vecmap(call.arguments, |arg| { + let span = arg.span; + let (arg, typ) = self.elaborate_expression(arg); + arguments.push(arg); + (typ, arg, span) + }); + + let location = Location::new(span, self.file); + let call = HirCallExpression { func, arguments, location }; + let typ = self.type_check_call(&call, func_type, args, span); + (HirExpression::Call(call), typ) + } + + fn elaborate_method_call( + &mut self, + method_call: MethodCallExpression, + span: Span, + ) -> (HirExpression, Type) { + let object_span = method_call.object.span; + let (mut object, mut object_type) = self.elaborate_expression(method_call.object); + object_type = object_type.follow_bindings(); + + let method_name = method_call.method_name.0.contents.as_str(); + match self.lookup_method(&object_type, method_name, span) { + Some(method_ref) => { + // Automatically add `&mut` if the method expects a mutable reference and + // the object is not already one. 
+ if let HirMethodReference::FuncId(func_id) = &method_ref { + if *func_id != FuncId::dummy_id() { + let function_type = self.interner.function_meta(func_id).typ.clone(); + + self.try_add_mutable_reference_to_object( + &function_type, + &mut object_type, + &mut object, + ); + } + } + + // These arguments will be given to the desugared function call. + // Compared to the method arguments, they also contain the object. + let mut function_args = Vec::with_capacity(method_call.arguments.len() + 1); + let mut arguments = Vec::with_capacity(method_call.arguments.len()); + + function_args.push((object_type.clone(), object, object_span)); + + for arg in method_call.arguments { + let span = arg.span; + let (arg, typ) = self.elaborate_expression(arg); + arguments.push(arg); + function_args.push((typ, arg, span)); + } + + let location = Location::new(span, self.file); + let method = method_call.method_name; + let method_call = HirMethodCallExpression { method, object, arguments, location }; + + // Desugar the method call into a normal, resolved function call + // so that the backend doesn't need to worry about methods + // TODO: update object_type here? + let ((function_id, function_name), function_call) = method_call.into_function_call( + &method_ref, + object_type, + location, + self.interner, + ); + + let func_type = self.type_check_variable(function_name, function_id); + + // Type check the new call now that it has been changed from a method call + // to a function call. This way we avoid duplicating code. 
+ let typ = self.type_check_call(&function_call, func_type, function_args, span); + (HirExpression::Call(function_call), typ) + } + None => (HirExpression::Error, Type::Error), + } + } + + fn elaborate_constructor( + &mut self, + constructor: ConstructorExpression, + ) -> (HirExpression, Type) { + let span = constructor.type_name.span(); + + match self.lookup_type_or_error(constructor.type_name) { + Some(Type::Struct(r#type, struct_generics)) => { + let struct_type = r#type.clone(); + let generics = struct_generics.clone(); + + let fields = constructor.fields; + let field_types = r#type.borrow().get_fields(&struct_generics); + let fields = self.resolve_constructor_expr_fields( + struct_type.clone(), + field_types, + fields, + span, + ); + let expr = HirExpression::Constructor(HirConstructorExpression { + fields, + r#type, + struct_generics, + }); + (expr, Type::Struct(struct_type, generics)) + } + Some(typ) => { + self.push_err(ResolverError::NonStructUsedInConstructor { typ, span }); + (HirExpression::Error, Type::Error) + } + None => (HirExpression::Error, Type::Error), + } + } + + /// Resolve all the fields of a struct constructor expression. + /// Ensures all fields are present, none are repeated, and all + /// are part of the struct. 
+ fn resolve_constructor_expr_fields( + &mut self, + struct_type: Shared, + field_types: Vec<(String, Type)>, + fields: Vec<(Ident, Expression)>, + span: Span, + ) -> Vec<(Ident, ExprId)> { + let mut ret = Vec::with_capacity(fields.len()); + let mut seen_fields = HashSet::default(); + let mut unseen_fields = struct_type.borrow().field_names(); + + for (field_name, field) in fields { + let expected_type = field_types.iter().find(|(name, _)| name == &field_name.0.contents); + let expected_type = expected_type.map(|(_, typ)| typ).unwrap_or(&Type::Error); + + let field_span = field.span; + let (resolved, field_type) = self.elaborate_expression(field); + + if unseen_fields.contains(&field_name) { + unseen_fields.remove(&field_name); + seen_fields.insert(field_name.clone()); + + self.unify_with_coercions(&field_type, expected_type, resolved, || { + TypeCheckError::TypeMismatch { + expected_typ: expected_type.to_string(), + expr_typ: field_type.to_string(), + expr_span: field_span, + } + }); + } else if seen_fields.contains(&field_name) { + // duplicate field + self.push_err(ResolverError::DuplicateField { field: field_name.clone() }); + } else { + // field not required by struct + self.push_err(ResolverError::NoSuchField { + field: field_name.clone(), + struct_definition: struct_type.borrow().name.clone(), + }); + } + + ret.push((field_name, resolved)); + } + + if !unseen_fields.is_empty() { + self.push_err(ResolverError::MissingFields { + span, + missing_fields: unseen_fields.into_iter().map(|field| field.to_string()).collect(), + struct_definition: struct_type.borrow().name.clone(), + }); + } + + ret + } + + fn elaborate_member_access( + &mut self, + access: MemberAccessExpression, + span: Span, + ) -> (ExprId, Type) { + let (lhs, lhs_type) = self.elaborate_expression(access.lhs); + let rhs = access.rhs; + // `is_offset` is only used when lhs is a reference and we want to return a reference to rhs + let access = HirMemberAccess { lhs, rhs, is_offset: false }; + let 
expr_id = self.intern_expr(HirExpression::MemberAccess(access.clone()), span); + let typ = self.type_check_member_access(access, expr_id, lhs_type, span); + self.interner.push_expr_type(expr_id, typ.clone()); + (expr_id, typ) + } + + pub fn intern_expr(&mut self, expr: HirExpression, span: Span) -> ExprId { + let id = self.interner.push_expr(expr); + self.interner.push_expr_location(id, span, self.file); + id + } + + fn elaborate_cast(&mut self, cast: CastExpression, span: Span) -> (HirExpression, Type) { + let (lhs, lhs_type) = self.elaborate_expression(cast.lhs); + let r#type = self.resolve_type(cast.r#type); + let result = self.check_cast(lhs_type, &r#type, span); + let expr = HirExpression::Cast(HirCastExpression { lhs, r#type }); + (expr, result) + } + + fn elaborate_infix(&mut self, infix: InfixExpression, span: Span) -> (ExprId, Type) { + let (lhs, lhs_type) = self.elaborate_expression(infix.lhs); + let (rhs, rhs_type) = self.elaborate_expression(infix.rhs); + let trait_id = self.interner.get_operator_trait_method(infix.operator.contents); + + let operator = HirBinaryOp::new(infix.operator, self.file); + let expr = HirExpression::Infix(HirInfixExpression { + lhs, + operator, + trait_method_id: trait_id, + rhs, + }); + + let expr_id = self.interner.push_expr(expr); + self.interner.push_expr_location(expr_id, span, self.file); + + let typ = match self.infix_operand_type_rules(&lhs_type, &operator, &rhs_type, span) { + Ok((typ, use_impl)) => { + if use_impl { + // Delay checking the trait constraint until the end of the function. + // Checking it now could bind an unbound type variable to any type + // that implements the trait. 
+ let constraint = TraitConstraint { + typ: lhs_type.clone(), + trait_id: trait_id.trait_id, + trait_generics: Vec::new(), + }; + self.trait_constraints.push((constraint, expr_id)); + self.type_check_operator_method(expr_id, trait_id, &lhs_type, span); + } + typ + } + Err(error) => { + self.push_err(error); + Type::Error + } + }; + + self.interner.push_expr_type(expr_id, typ.clone()); + (expr_id, typ) + } + + fn elaborate_if(&mut self, if_expr: IfExpression) -> (HirExpression, Type) { + let expr_span = if_expr.condition.span; + let (condition, cond_type) = self.elaborate_expression(if_expr.condition); + let (consequence, mut ret_type) = self.elaborate_expression(if_expr.consequence); + + self.unify(&cond_type, &Type::Bool, || TypeCheckError::TypeMismatch { + expected_typ: Type::Bool.to_string(), + expr_typ: cond_type.to_string(), + expr_span, + }); + + let alternative = if_expr.alternative.map(|alternative| { + let expr_span = alternative.span; + let (else_, else_type) = self.elaborate_expression(alternative); + + self.unify(&ret_type, &else_type, || { + let err = TypeCheckError::TypeMismatch { + expected_typ: ret_type.to_string(), + expr_typ: else_type.to_string(), + expr_span, + }; + + let context = if ret_type == Type::Unit { + "Are you missing a semicolon at the end of your 'else' branch?" + } else if else_type == Type::Unit { + "Are you missing a semicolon at the end of the first block of this 'if'?" 
+ } else { + "Expected the types of both if branches to be equal" + }; + + err.add_context(context) + }); + else_ + }); + + if alternative.is_none() { + ret_type = Type::Unit; + } + + let if_expr = HirIfExpression { condition, consequence, alternative }; + (HirExpression::If(if_expr), ret_type) + } + + fn elaborate_tuple(&mut self, tuple: Vec) -> (HirExpression, Type) { + let mut element_ids = Vec::with_capacity(tuple.len()); + let mut element_types = Vec::with_capacity(tuple.len()); + + for element in tuple { + let (id, typ) = self.elaborate_expression(element); + element_ids.push(id); + element_types.push(typ); + } + + (HirExpression::Tuple(element_ids), Type::Tuple(element_types)) + } + + fn elaborate_lambda(&mut self, lambda: Lambda) -> (HirExpression, Type) { + self.push_scope(); + let scope_index = self.scopes.current_scope_index(); + + self.lambda_stack.push(LambdaContext { captures: Vec::new(), scope_index }); + + let mut arg_types = Vec::with_capacity(lambda.parameters.len()); + let parameters = vecmap(lambda.parameters, |(pattern, typ)| { + let parameter = DefinitionKind::Local(None); + let typ = self.resolve_inferred_type(typ); + arg_types.push(typ.clone()); + (self.elaborate_pattern(pattern, typ.clone(), parameter), typ) + }); + + let return_type = self.resolve_inferred_type(lambda.return_type); + let body_span = lambda.body.span; + let (body, body_type) = self.elaborate_expression(lambda.body); + + let lambda_context = self.lambda_stack.pop().unwrap(); + self.pop_scope(); + + self.unify(&body_type, &return_type, || TypeCheckError::TypeMismatch { + expected_typ: return_type.to_string(), + expr_typ: body_type.to_string(), + expr_span: body_span, + }); + + let captured_vars = vecmap(&lambda_context.captures, |capture| { + self.interner.definition_type(capture.ident.id) + }); + + let env_type = + if captured_vars.is_empty() { Type::Unit } else { Type::Tuple(captured_vars) }; + + let captures = lambda_context.captures; + let expr = 
HirExpression::Lambda(HirLambda { parameters, return_type, body, captures }); + (expr, Type::Function(arg_types, Box::new(body_type), Box::new(env_type))) + } + + fn elaborate_quote(&mut self, block: BlockExpression) -> (HirExpression, Type) { + (HirExpression::Quote(block), Type::Code) + } + + fn elaborate_comptime_block(&mut self, _comptime: BlockExpression) -> (HirExpression, Type) { + todo!("Elaborate comptime block") + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs new file mode 100644 index 00000000000..446e5b62ead --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -0,0 +1,782 @@ +#![allow(unused)] +use std::{ + collections::{BTreeMap, BTreeSet}, + rc::Rc, +}; + +use crate::hir::def_map::CrateDefMap; +use crate::{ + ast::{ + ArrayLiteral, ConstructorExpression, FunctionKind, IfExpression, InfixExpression, Lambda, + UnresolvedTraitConstraint, UnresolvedTypeExpression, + }, + hir::{ + def_collector::dc_crate::CompilationError, + resolution::{errors::ResolverError, path_resolver::PathResolver, resolver::LambdaContext}, + scope::ScopeForest as GenericScopeForest, + type_check::TypeCheckError, + }, + hir_def::{ + expr::{ + HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCastExpression, + HirConstructorExpression, HirIdent, HirIfExpression, HirIndexExpression, + HirInfixExpression, HirLambda, HirMemberAccess, HirMethodCallExpression, + HirMethodReference, HirPrefixExpression, + }, + traits::TraitConstraint, + }, + macros_api::{ + BlockExpression, CallExpression, CastExpression, Expression, ExpressionKind, HirExpression, + HirLiteral, HirStatement, Ident, IndexExpression, Literal, MemberAccessExpression, + MethodCallExpression, NodeInterner, NoirFunction, PrefixExpression, Statement, + StatementKind, StructId, + }, + node_interner::{DefinitionKind, DependencyId, ExprId, FuncId, StmtId, TraitId}, + Shared, StructType, 
Type, TypeVariable, +}; +use crate::{ + ast::{TraitBound, UnresolvedGenerics}, + graph::CrateId, + hir::{ + def_collector::{ + dc_crate::{CollectedItems, DefCollector}, + errors::DefCollectorErrorKind, + }, + def_map::{LocalModuleId, ModuleDefId, ModuleId, MAIN_FUNCTION}, + resolution::{ + errors::PubPosition, + import::{PathResolution, PathResolutionError}, + path_resolver::StandardPathResolver, + }, + Context, + }, + hir_def::function::{FuncMeta, HirFunction}, + macros_api::{Param, Path, UnresolvedType, UnresolvedTypeData, Visibility}, + node_interner::TraitImplId, + token::FunctionAttribute, + Generics, +}; + +mod expressions; +mod patterns; +mod scope; +mod statements; +mod types; + +use fm::FileId; +use iter_extended::vecmap; +use noirc_errors::{Location, Span}; +use regex::Regex; +use rustc_hash::FxHashSet as HashSet; + +/// ResolverMetas are tagged onto each definition to track how many times they are used +#[derive(Debug, PartialEq, Eq)] +pub struct ResolverMeta { + num_times_used: usize, + ident: HirIdent, + warn_if_unused: bool, +} + +type ScopeForest = GenericScopeForest; + +pub struct Elaborator<'context> { + scopes: ScopeForest, + + errors: Vec<(CompilationError, FileId)>, + + interner: &'context mut NodeInterner, + + def_maps: &'context BTreeMap, + + file: FileId, + + in_unconstrained_fn: bool, + nested_loops: usize, + + /// True if the current module is a contract. + /// This is usually determined by self.path_resolver.module_id(), but it can + /// be overridden for impls. Impls are an odd case since the methods within resolve + /// as if they're in the parent module, but should be placed in a child module. + /// Since they should be within a child module, in_contract is manually set to false + /// for these so we can still resolve them in the parent module without them being in a contract. + in_contract: bool, + + /// Contains a mapping of the current struct or functions's generics to + /// unique type variables if we're resolving a struct. 
 Empty otherwise.
+    /// This is a Vec rather than a map to preserve the order a function's generics
+    /// were declared in.
+    generics: Vec<(Rc, TypeVariable, Span)>,
+
+    /// When resolving lambda expressions, we need to keep track of the variables
+    /// that are captured. We do this in order to create the hidden environment
+    /// parameter for the lambda function.
+    lambda_stack: Vec,
+
+    /// Set to the current type if we're resolving an impl
+    self_type: Option,
+
+    /// The current dependency item we're resolving.
+    /// Used to link items to their dependencies in the dependency graph
+    current_item: Option,
+
+    /// If we're currently resolving methods within a trait impl, this will be set
+    /// to the corresponding trait impl ID.
+    current_trait_impl: Option,
+
+    trait_id: Option,
+
+    /// In-resolution names
+    ///
+    /// This needs to be a set because we can have multiple in-resolution
+    /// names when resolving structs that are declared in reverse order of their
+    /// dependencies, such as in the following case:
+    ///
+    /// ```
+    /// struct Wrapper {
+    ///     value: Wrapped
+    /// }
+    /// struct Wrapped {
+    /// }
+    /// ```
+    resolving_ids: BTreeSet,
+
+    trait_bounds: Vec,
+
+    current_function: Option,
+
+    /// All type variables created in the current function.
+    /// This list is used to default any integer type variables at the end of
+    /// a function (before checking trait constraints) if a type wasn't already chosen.
+    type_variables: Vec,
+
+    /// Trait constraints are collected during type checking until they are
+    /// verified at the end of a function. This is because constraints arise
+    /// on each variable, but it is often not until function calls that the types
+    /// needed for the trait constraint become known.
+    trait_constraints: Vec<(TraitConstraint, ExprId)>,
+
+    /// The current module this elaborator is in.
+    /// Initially empty, it is set whenever a new top-level item is resolved.
+ local_module: LocalModuleId, + + crate_id: CrateId, +} + +impl<'context> Elaborator<'context> { + pub fn new(context: &'context mut Context, crate_id: CrateId) -> Self { + Self { + scopes: ScopeForest::default(), + errors: Vec::new(), + interner: &mut context.def_interner, + def_maps: &context.def_maps, + file: FileId::dummy(), + in_unconstrained_fn: false, + nested_loops: 0, + in_contract: false, + generics: Vec::new(), + lambda_stack: Vec::new(), + self_type: None, + current_item: None, + trait_id: None, + local_module: LocalModuleId::dummy_id(), + crate_id, + resolving_ids: BTreeSet::new(), + trait_bounds: Vec::new(), + current_function: None, + type_variables: Vec::new(), + trait_constraints: Vec::new(), + current_trait_impl: None, + } + } + + pub fn elaborate( + context: &'context mut Context, + crate_id: CrateId, + items: CollectedItems, + ) -> Vec<(CompilationError, FileId)> { + let mut this = Self::new(context, crate_id); + + // the resolver filters literal globals first + for global in items.globals {} + + for alias in items.type_aliases {} + + for trait_ in items.traits {} + + for struct_ in items.types {} + + for trait_impl in &items.trait_impls { + // only collect now + } + + for impl_ in &items.impls { + // only collect now + } + + // resolver resolves non-literal globals here + + for functions in items.functions { + this.file = functions.file_id; + this.trait_id = functions.trait_id; // TODO: Resolve? 
+ for (local_module, id, func) in functions.functions { + this.local_module = local_module; + this.elaborate_function(func, id); + } + } + + for impl_ in items.impls {} + + for trait_impl in items.trait_impls {} + + let cycle_errors = this.interner.check_for_dependency_cycles(); + this.errors.extend(cycle_errors); + + this.errors + } + + fn elaborate_function(&mut self, mut function: NoirFunction, id: FuncId) { + self.current_function = Some(id); + self.resolve_where_clause(&mut function.def.where_clause); + + // Without this, impl methods can accidentally be placed in contracts. See #3254 + if self.self_type.is_some() { + self.in_contract = false; + } + + self.scopes.start_function(); + self.current_item = Some(DependencyId::Function(id)); + + // Check whether the function has globals in the local module and add them to the scope + self.resolve_local_globals(); + self.add_generics(&function.def.generics); + + self.desugar_impl_trait_args(&mut function, id); + self.trait_bounds = function.def.where_clause.clone(); + + let is_low_level_or_oracle = function + .attributes() + .function + .as_ref() + .map_or(false, |func| func.is_low_level() || func.is_oracle()); + + if function.def.is_unconstrained { + self.in_unconstrained_fn = true; + } + + let func_meta = self.extract_meta(&function, id); + + self.add_trait_constraints_to_scope(&func_meta); + + let (hir_func, body_type) = match function.kind { + FunctionKind::Builtin | FunctionKind::LowLevel | FunctionKind::Oracle => { + (HirFunction::empty(), Type::Error) + } + FunctionKind::Normal | FunctionKind::Recursive => { + let block_span = function.def.span; + let (block, body_type) = self.elaborate_block(function.def.body); + let expr_id = self.intern_expr(block, block_span); + self.interner.push_expr_type(expr_id, body_type.clone()); + (HirFunction::unchecked_from_expr(expr_id), body_type) + } + }; + + if !func_meta.can_ignore_return_type() { + self.type_check_function_body(body_type, &func_meta, hir_func.as_expr()); + } 
+ + // Default any type variables that still need defaulting. + // This is done before trait impl search since leaving them bindable can lead to errors + // when multiple impls are available. Instead we default first to choose the Field or u64 impl. + for typ in &self.type_variables { + if let Type::TypeVariable(variable, kind) = typ.follow_bindings() { + let msg = "TypeChecker should only track defaultable type vars"; + variable.bind(kind.default_type().expect(msg)); + } + } + + // Verify any remaining trait constraints arising from the function body + for (constraint, expr_id) in std::mem::take(&mut self.trait_constraints) { + let span = self.interner.expr_span(&expr_id); + self.verify_trait_constraint( + &constraint.typ, + constraint.trait_id, + &constraint.trait_generics, + expr_id, + span, + ); + } + + // Now remove all the `where` clause constraints we added + for constraint in &func_meta.trait_constraints { + self.interner.remove_assumed_trait_implementations_for_trait(constraint.trait_id); + } + + let func_scope_tree = self.scopes.end_function(); + + // The arguments to low-level and oracle functions are always unused so we do not produce warnings for them. 
+ if !is_low_level_or_oracle { + self.check_for_unused_variables_in_scope_tree(func_scope_tree); + } + + self.trait_bounds.clear(); + + self.interner.push_fn_meta(func_meta, id); + self.interner.update_fn(id, hir_func); + self.current_function = None; + } + + /// This turns function parameters of the form: + /// fn foo(x: impl Bar) + /// + /// into + /// fn foo(x: T0_impl_Bar) where T0_impl_Bar: Bar + fn desugar_impl_trait_args(&mut self, func: &mut NoirFunction, func_id: FuncId) { + let mut impl_trait_generics = HashSet::default(); + let mut counter: usize = 0; + for parameter in func.def.parameters.iter_mut() { + if let UnresolvedTypeData::TraitAsType(path, args) = ¶meter.typ.typ { + let mut new_generic_ident: Ident = + format!("T{}_impl_{}", func_id, path.as_string()).into(); + let mut new_generic_path = Path::from_ident(new_generic_ident.clone()); + while impl_trait_generics.contains(&new_generic_ident) + || self.lookup_generic_or_global_type(&new_generic_path).is_some() + { + new_generic_ident = + format!("T{}_impl_{}_{}", func_id, path.as_string(), counter).into(); + new_generic_path = Path::from_ident(new_generic_ident.clone()); + counter += 1; + } + impl_trait_generics.insert(new_generic_ident.clone()); + + let is_synthesized = true; + let new_generic_type_data = + UnresolvedTypeData::Named(new_generic_path, vec![], is_synthesized); + let new_generic_type = + UnresolvedType { typ: new_generic_type_data.clone(), span: None }; + let new_trait_bound = TraitBound { + trait_path: path.clone(), + trait_id: None, + trait_generics: args.to_vec(), + }; + let new_trait_constraint = UnresolvedTraitConstraint { + typ: new_generic_type, + trait_bound: new_trait_bound, + }; + + parameter.typ.typ = new_generic_type_data; + func.def.generics.push(new_generic_ident); + func.def.where_clause.push(new_trait_constraint); + } + } + self.add_generics(&impl_trait_generics.into_iter().collect()); + } + + /// Add the given generics to scope. 
+ /// Each generic will have a fresh Shared associated with it. + pub fn add_generics(&mut self, generics: &UnresolvedGenerics) -> Generics { + vecmap(generics, |generic| { + // Map the generic to a fresh type variable + let id = self.interner.next_type_variable_id(); + let typevar = TypeVariable::unbound(id); + let span = generic.0.span(); + + // Check for name collisions of this generic + let name = Rc::new(generic.0.contents.clone()); + + if let Some((_, _, first_span)) = self.find_generic(&name) { + self.push_err(ResolverError::DuplicateDefinition { + name: generic.0.contents.clone(), + first_span: *first_span, + second_span: span, + }); + } else { + self.generics.push((name, typevar.clone(), span)); + } + + typevar + }) + } + + fn push_err(&mut self, error: impl Into) { + self.errors.push((error.into(), self.file)); + } + + fn resolve_where_clause(&mut self, clause: &mut [UnresolvedTraitConstraint]) { + for bound in clause { + if let Some(trait_id) = self.resolve_trait_by_path(bound.trait_bound.trait_path.clone()) + { + bound.trait_bound.trait_id = Some(trait_id); + } + } + } + + fn resolve_trait_by_path(&mut self, path: Path) -> Option { + let path_resolver = StandardPathResolver::new(self.module_id()); + + let error = match path_resolver.resolve(self.def_maps, path.clone()) { + Ok(PathResolution { module_def_id: ModuleDefId::TraitId(trait_id), error }) => { + if let Some(error) = error { + self.push_err(error); + } + return Some(trait_id); + } + Ok(_) => DefCollectorErrorKind::NotATrait { not_a_trait_name: path }, + Err(_) => DefCollectorErrorKind::TraitNotFound { trait_path: path }, + }; + self.push_err(error); + None + } + + fn resolve_local_globals(&mut self) { + let globals = vecmap(self.interner.get_all_globals(), |global| { + (global.id, global.local_id, global.ident.clone()) + }); + for (id, local_module_id, name) in globals { + if local_module_id == self.local_module { + let definition = DefinitionKind::Global(id); + 
self.add_global_variable_decl(name, definition); + } + } + } + + /// TODO: This is currently only respected for generic free functions + /// there's a bunch of other places where trait constraints can pop up + fn resolve_trait_constraints( + &mut self, + where_clause: &[UnresolvedTraitConstraint], + ) -> Vec { + where_clause + .iter() + .cloned() + .filter_map(|constraint| self.resolve_trait_constraint(constraint)) + .collect() + } + + pub fn resolve_trait_constraint( + &mut self, + constraint: UnresolvedTraitConstraint, + ) -> Option { + let typ = self.resolve_type(constraint.typ); + let trait_generics = + vecmap(constraint.trait_bound.trait_generics, |typ| self.resolve_type(typ)); + + let span = constraint.trait_bound.trait_path.span(); + let the_trait = self.lookup_trait_or_error(constraint.trait_bound.trait_path)?; + let trait_id = the_trait.id; + + let expected_generics = the_trait.generics.len(); + let actual_generics = trait_generics.len(); + + if actual_generics != expected_generics { + let item_name = the_trait.name.to_string(); + self.push_err(ResolverError::IncorrectGenericCount { + span, + item_name, + actual: actual_generics, + expected: expected_generics, + }); + } + + Some(TraitConstraint { typ, trait_id, trait_generics }) + } + + /// Extract metadata from a NoirFunction + /// to be used in analysis and intern the function parameters + /// Prerequisite: self.add_generics() has already been called with the given + /// function's generics, including any generics from the impl, if any. 
+ fn extract_meta(&mut self, func: &NoirFunction, func_id: FuncId) -> FuncMeta { + let location = Location::new(func.name_ident().span(), self.file); + let id = self.interner.function_definition_id(func_id); + let name_ident = HirIdent::non_trait_method(id, location); + + let attributes = func.attributes().clone(); + let has_no_predicates_attribute = attributes.is_no_predicates(); + let should_fold = attributes.is_foldable(); + if !self.inline_attribute_allowed(func) { + if has_no_predicates_attribute { + self.push_err(ResolverError::NoPredicatesAttributeOnUnconstrained { + ident: func.name_ident().clone(), + }); + } else if should_fold { + self.push_err(ResolverError::FoldAttributeOnUnconstrained { + ident: func.name_ident().clone(), + }); + } + } + // Both the #[fold] and #[no_predicates] alter a function's inline type and code generation in similar ways. + // In certain cases such as type checking (for which the following flag will be used) both attributes + // indicate we should code generate in the same way. Thus, we unify the attributes into one flag here. 
+ let has_inline_attribute = has_no_predicates_attribute || should_fold; + let is_entry_point = self.is_entry_point_function(func); + + let mut generics = vecmap(&self.generics, |(_, typevar, _)| typevar.clone()); + let mut parameters = vec![]; + let mut parameter_types = vec![]; + + for Param { visibility, pattern, typ, span: _ } in func.parameters().iter().cloned() { + if visibility == Visibility::Public && !self.pub_allowed(func) { + self.push_err(ResolverError::UnnecessaryPub { + ident: func.name_ident().clone(), + position: PubPosition::Parameter, + }); + } + + let type_span = typ.span.unwrap_or_else(|| pattern.span()); + let typ = self.resolve_type_inner(typ, &mut generics); + self.check_if_type_is_valid_for_program_input( + &typ, + is_entry_point, + has_inline_attribute, + type_span, + ); + let pattern = self.elaborate_pattern(pattern, typ.clone(), DefinitionKind::Local(None)); + + parameters.push((pattern, typ.clone(), visibility)); + parameter_types.push(typ); + } + + let return_type = Box::new(self.resolve_type(func.return_type())); + + self.declare_numeric_generics(¶meter_types, &return_type); + + if !self.pub_allowed(func) && func.def.return_visibility == Visibility::Public { + self.push_err(ResolverError::UnnecessaryPub { + ident: func.name_ident().clone(), + position: PubPosition::ReturnType, + }); + } + + let is_low_level_function = + attributes.function.as_ref().map_or(false, |func| func.is_low_level()); + + if !self.crate_id.is_stdlib() && is_low_level_function { + let error = + ResolverError::LowLevelFunctionOutsideOfStdlib { ident: func.name_ident().clone() }; + self.push_err(error); + } + + // 'pub' is required on return types for entry point functions + if is_entry_point + && return_type.as_ref() != &Type::Unit + && func.def.return_visibility == Visibility::Private + { + self.push_err(ResolverError::NecessaryPub { ident: func.name_ident().clone() }); + } + // '#[recursive]' attribute is only allowed for entry point functions + if 
!is_entry_point && func.kind == FunctionKind::Recursive { + self.push_err(ResolverError::MisplacedRecursiveAttribute { + ident: func.name_ident().clone(), + }); + } + + if matches!(attributes.function, Some(FunctionAttribute::Test { .. })) + && !parameters.is_empty() + { + self.push_err(ResolverError::TestFunctionHasParameters { + span: func.name_ident().span(), + }); + } + + let mut typ = Type::Function(parameter_types, return_type, Box::new(Type::Unit)); + + if !generics.is_empty() { + typ = Type::Forall(generics, Box::new(typ)); + } + + self.interner.push_definition_type(name_ident.id, typ.clone()); + + let direct_generics = func.def.generics.iter(); + let direct_generics = direct_generics + .filter_map(|generic| self.find_generic(&generic.0.contents)) + .map(|(name, typevar, _span)| (name.clone(), typevar.clone())) + .collect(); + + FuncMeta { + name: name_ident, + kind: func.kind, + location, + typ, + direct_generics, + trait_impl: self.current_trait_impl, + parameters: parameters.into(), + return_type: func.def.return_type.clone(), + return_visibility: func.def.return_visibility, + has_body: !func.def.body.is_empty(), + trait_constraints: self.resolve_trait_constraints(&func.def.where_clause), + is_entry_point, + has_inline_attribute, + } + } + + /// Only sized types are valid to be used as main's parameters or the parameters to a contract + /// function. If the given type is not sized (e.g. contains a slice or NamedGeneric type), an + /// error is issued. 
+ fn check_if_type_is_valid_for_program_input( + &mut self, + typ: &Type, + is_entry_point: bool, + has_inline_attribute: bool, + span: Span, + ) { + if (is_entry_point && !typ.is_valid_for_program_input()) + || (has_inline_attribute && !typ.is_valid_non_inlined_function_input()) + { + self.push_err(TypeCheckError::InvalidTypeForEntryPoint { span }); + } + } + + fn inline_attribute_allowed(&self, func: &NoirFunction) -> bool { + // Inline attributes are only relevant for constrained functions + // as all unconstrained functions are not inlined + !func.def.is_unconstrained + } + + /// True if the 'pub' keyword is allowed on parameters in this function + /// 'pub' on function parameters is only allowed for entry point functions + fn pub_allowed(&self, func: &NoirFunction) -> bool { + self.is_entry_point_function(func) || func.attributes().is_foldable() + } + + fn is_entry_point_function(&self, func: &NoirFunction) -> bool { + if self.in_contract { + func.attributes().is_contract_entry_point() + } else { + func.name() == MAIN_FUNCTION + } + } + + fn declare_numeric_generics(&mut self, params: &[Type], return_type: &Type) { + if self.generics.is_empty() { + return; + } + + for (name_to_find, type_variable) in Self::find_numeric_generics(params, return_type) { + // Declare any generics to let users use numeric generics in scope. + // Don't issue a warning if these are unused + // + // We can fail to find the generic in self.generics if it is an implicit one created + // by the compiler. This can happen when, e.g. eliding array lengths using the slice + // syntax [T]. 
+ if let Some((name, _, span)) = + self.generics.iter().find(|(name, _, _)| name.as_ref() == &name_to_find) + { + let ident = Ident::new(name.to_string(), *span); + let definition = DefinitionKind::GenericType(type_variable); + self.add_variable_decl_inner(ident, false, false, false, definition); + } + } + } + + fn find_numeric_generics( + parameters: &[Type], + return_type: &Type, + ) -> Vec<(String, TypeVariable)> { + let mut found = BTreeMap::new(); + for parameter in parameters { + Self::find_numeric_generics_in_type(parameter, &mut found); + } + Self::find_numeric_generics_in_type(return_type, &mut found); + found.into_iter().collect() + } + + fn find_numeric_generics_in_type(typ: &Type, found: &mut BTreeMap) { + match typ { + Type::FieldElement + | Type::Integer(_, _) + | Type::Bool + | Type::Unit + | Type::Error + | Type::TypeVariable(_, _) + | Type::Constant(_) + | Type::NamedGeneric(_, _) + | Type::Code + | Type::Forall(_, _) => (), + + Type::TraitAsType(_, _, args) => { + for arg in args { + Self::find_numeric_generics_in_type(arg, found); + } + } + + Type::Array(length, element_type) => { + if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + found.insert(name.to_string(), type_variable.clone()); + } + Self::find_numeric_generics_in_type(element_type, found); + } + + Type::Slice(element_type) => { + Self::find_numeric_generics_in_type(element_type, found); + } + + Type::Tuple(fields) => { + for field in fields { + Self::find_numeric_generics_in_type(field, found); + } + } + + Type::Function(parameters, return_type, _env) => { + for parameter in parameters { + Self::find_numeric_generics_in_type(parameter, found); + } + Self::find_numeric_generics_in_type(return_type, found); + } + + Type::Struct(struct_type, generics) => { + for (i, generic) in generics.iter().enumerate() { + if let Type::NamedGeneric(type_variable, name) = generic { + if struct_type.borrow().generic_is_numeric(i) { + found.insert(name.to_string(), type_variable.clone()); 
+ } + } else { + Self::find_numeric_generics_in_type(generic, found); + } + } + } + Type::Alias(alias, generics) => { + for (i, generic) in generics.iter().enumerate() { + if let Type::NamedGeneric(type_variable, name) = generic { + if alias.borrow().generic_is_numeric(i) { + found.insert(name.to_string(), type_variable.clone()); + } + } else { + Self::find_numeric_generics_in_type(generic, found); + } + } + } + Type::MutableReference(element) => Self::find_numeric_generics_in_type(element, found), + Type::String(length) => { + if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + found.insert(name.to_string(), type_variable.clone()); + } + } + Type::FmtString(length, fields) => { + if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + found.insert(name.to_string(), type_variable.clone()); + } + Self::find_numeric_generics_in_type(fields, found); + } + } + } + + fn add_trait_constraints_to_scope(&mut self, func_meta: &FuncMeta) { + for constraint in &func_meta.trait_constraints { + let object = constraint.typ.clone(); + let trait_id = constraint.trait_id; + let generics = constraint.trait_generics.clone(); + + if !self.interner.add_assumed_trait_implementation(object, trait_id, generics) { + if let Some(the_trait) = self.interner.try_get_trait(trait_id) { + let trait_name = the_trait.name.to_string(); + let typ = constraint.typ.clone(); + let span = func_meta.location.span; + self.push_err(TypeCheckError::UnneededTraitConstraint { + trait_name, + typ, + span, + }); + } + } + } + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs new file mode 100644 index 00000000000..195d37878f1 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -0,0 +1,465 @@ +use iter_extended::vecmap; +use noirc_errors::{Location, Span}; +use rustc_hash::FxHashSet as HashSet; + +use crate::{ + ast::ERROR_IDENT, + hir::{ + 
resolution::errors::ResolverError, + type_check::{Source, TypeCheckError}, + }, + hir_def::{ + expr::{HirIdent, ImplKind}, + stmt::HirPattern, + }, + macros_api::{HirExpression, Ident, Path, Pattern}, + node_interner::{DefinitionId, DefinitionKind, ExprId, TraitImplKind}, + Shared, StructType, Type, TypeBindings, +}; + +use super::{Elaborator, ResolverMeta}; + +impl<'context> Elaborator<'context> { + pub(super) fn elaborate_pattern( + &mut self, + pattern: Pattern, + expected_type: Type, + definition_kind: DefinitionKind, + ) -> HirPattern { + self.elaborate_pattern_mut(pattern, expected_type, definition_kind, None) + } + + fn elaborate_pattern_mut( + &mut self, + pattern: Pattern, + expected_type: Type, + definition: DefinitionKind, + mutable: Option, + ) -> HirPattern { + match pattern { + Pattern::Identifier(name) => { + // If this definition is mutable, do not store the rhs because it will + // not always refer to the correct value of the variable + let definition = match (mutable, definition) { + (Some(_), DefinitionKind::Local(_)) => DefinitionKind::Local(None), + (_, other) => other, + }; + let ident = self.add_variable_decl(name, mutable.is_some(), true, definition); + self.interner.push_definition_type(ident.id, expected_type); + HirPattern::Identifier(ident) + } + Pattern::Mutable(pattern, span, _) => { + if let Some(first_mut) = mutable { + self.push_err(ResolverError::UnnecessaryMut { first_mut, second_mut: span }); + } + + let pattern = + self.elaborate_pattern_mut(*pattern, expected_type, definition, Some(span)); + let location = Location::new(span, self.file); + HirPattern::Mutable(Box::new(pattern), location) + } + Pattern::Tuple(fields, span) => { + let field_types = match expected_type { + Type::Tuple(fields) => fields, + Type::Error => Vec::new(), + expected_type => { + let tuple = + Type::Tuple(vecmap(&fields, |_| self.interner.next_type_variable())); + + self.push_err(TypeCheckError::TypeMismatchWithSource { + expected: expected_type, + actual: 
tuple, + span, + source: Source::Assignment, + }); + Vec::new() + } + }; + + let fields = vecmap(fields.into_iter().enumerate(), |(i, field)| { + let field_type = field_types.get(i).cloned().unwrap_or(Type::Error); + self.elaborate_pattern_mut(field, field_type, definition.clone(), mutable) + }); + let location = Location::new(span, self.file); + HirPattern::Tuple(fields, location) + } + Pattern::Struct(name, fields, span) => self.elaborate_struct_pattern( + name, + fields, + span, + expected_type, + definition, + mutable, + ), + } + } + + fn elaborate_struct_pattern( + &mut self, + name: Path, + fields: Vec<(Ident, Pattern)>, + span: Span, + expected_type: Type, + definition: DefinitionKind, + mutable: Option, + ) -> HirPattern { + let error_identifier = |this: &mut Self| { + // Must create a name here to return a HirPattern::Identifier. Allowing + // shadowing here lets us avoid further errors if we define ERROR_IDENT + // multiple times. + let name = ERROR_IDENT.into(); + let identifier = this.add_variable_decl(name, false, true, definition.clone()); + HirPattern::Identifier(identifier) + }; + + let (struct_type, generics) = match self.lookup_type_or_error(name) { + Some(Type::Struct(struct_type, generics)) => (struct_type, generics), + None => return error_identifier(self), + Some(typ) => { + self.push_err(ResolverError::NonStructUsedInConstructor { typ, span }); + return error_identifier(self); + } + }; + + let actual_type = Type::Struct(struct_type.clone(), generics); + let location = Location::new(span, self.file); + + self.unify(&actual_type, &expected_type, || TypeCheckError::TypeMismatchWithSource { + expected: expected_type.clone(), + actual: actual_type.clone(), + span: location.span, + source: Source::Assignment, + }); + + let typ = struct_type.clone(); + let fields = self.resolve_constructor_pattern_fields( + typ, + fields, + span, + expected_type.clone(), + definition, + mutable, + ); + + HirPattern::Struct(expected_type, fields, location) + } + + 
/// Resolve all the fields of a struct constructor expression. + /// Ensures all fields are present, none are repeated, and all + /// are part of the struct. + fn resolve_constructor_pattern_fields( + &mut self, + struct_type: Shared, + fields: Vec<(Ident, Pattern)>, + span: Span, + expected_type: Type, + definition: DefinitionKind, + mutable: Option, + ) -> Vec<(Ident, HirPattern)> { + let mut ret = Vec::with_capacity(fields.len()); + let mut seen_fields = HashSet::default(); + let mut unseen_fields = struct_type.borrow().field_names(); + + for (field, pattern) in fields { + let field_type = expected_type.get_field_type(&field.0.contents).unwrap_or(Type::Error); + let resolved = + self.elaborate_pattern_mut(pattern, field_type, definition.clone(), mutable); + + if unseen_fields.contains(&field) { + unseen_fields.remove(&field); + seen_fields.insert(field.clone()); + } else if seen_fields.contains(&field) { + // duplicate field + self.push_err(ResolverError::DuplicateField { field: field.clone() }); + } else { + // field not required by struct + self.push_err(ResolverError::NoSuchField { + field: field.clone(), + struct_definition: struct_type.borrow().name.clone(), + }); + } + + ret.push((field, resolved)); + } + + if !unseen_fields.is_empty() { + self.push_err(ResolverError::MissingFields { + span, + missing_fields: unseen_fields.into_iter().map(|field| field.to_string()).collect(), + struct_definition: struct_type.borrow().name.clone(), + }); + } + + ret + } + + pub(super) fn add_variable_decl( + &mut self, + name: Ident, + mutable: bool, + allow_shadowing: bool, + definition: DefinitionKind, + ) -> HirIdent { + self.add_variable_decl_inner(name, mutable, allow_shadowing, true, definition) + } + + pub fn add_variable_decl_inner( + &mut self, + name: Ident, + mutable: bool, + allow_shadowing: bool, + warn_if_unused: bool, + definition: DefinitionKind, + ) -> HirIdent { + if definition.is_global() { + return self.add_global_variable_decl(name, definition); + } + + 
let location = Location::new(name.span(), self.file); + let id = + self.interner.push_definition(name.0.contents.clone(), mutable, definition, location); + let ident = HirIdent::non_trait_method(id, location); + let resolver_meta = + ResolverMeta { num_times_used: 0, ident: ident.clone(), warn_if_unused }; + + let scope = self.scopes.get_mut_scope(); + let old_value = scope.add_key_value(name.0.contents.clone(), resolver_meta); + + if !allow_shadowing { + if let Some(old_value) = old_value { + self.push_err(ResolverError::DuplicateDefinition { + name: name.0.contents, + first_span: old_value.ident.location.span, + second_span: location.span, + }); + } + } + + ident + } + + pub fn add_global_variable_decl( + &mut self, + name: Ident, + definition: DefinitionKind, + ) -> HirIdent { + let scope = self.scopes.get_mut_scope(); + + // This check is necessary to maintain the same definition ids in the interner. Currently, each function uses a new resolver that has its own ScopeForest and thus global scope. + // We must first check whether an existing definition ID has been inserted as otherwise there will be multiple definitions for the same global statement. + // This leads to an error in evaluation where the wrong definition ID is selected when evaluating a statement using the global. The check below prevents this error. 
+ let mut global_id = None; + let global = self.interner.get_all_globals(); + for global_info in global { + if global_info.ident == name && global_info.local_id == self.local_module { + global_id = Some(global_info.id); + } + } + + let (ident, resolver_meta) = if let Some(id) = global_id { + let global = self.interner.get_global(id); + let hir_ident = HirIdent::non_trait_method(global.definition_id, global.location); + let ident = hir_ident.clone(); + let resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused: true }; + (hir_ident, resolver_meta) + } else { + let location = Location::new(name.span(), self.file); + let id = + self.interner.push_definition(name.0.contents.clone(), false, definition, location); + let ident = HirIdent::non_trait_method(id, location); + let resolver_meta = + ResolverMeta { num_times_used: 0, ident: ident.clone(), warn_if_unused: true }; + (ident, resolver_meta) + }; + + let old_global_value = scope.add_key_value(name.0.contents.clone(), resolver_meta); + if let Some(old_global_value) = old_global_value { + self.push_err(ResolverError::DuplicateDefinition { + name: name.0.contents.clone(), + first_span: old_global_value.ident.location.span, + second_span: name.span(), + }); + } + ident + } + + // Checks for a variable having been declared before. + // (Variable declaration and definition cannot be separate in Noir.) 
+ // Once the variable has been found, intern and link `name` to this definition, + // returning (the ident, the IdentId of `name`) + // + // If a variable is not found, then an error is logged and a dummy id + // is returned, for better error reporting UX + pub(super) fn find_variable_or_default(&mut self, name: &Ident) -> (HirIdent, usize) { + self.use_variable(name).unwrap_or_else(|error| { + self.push_err(error); + let id = DefinitionId::dummy_id(); + let location = Location::new(name.span(), self.file); + (HirIdent::non_trait_method(id, location), 0) + }) + } + + /// Lookup and use the specified variable. + /// This will increment its use counter by one and return the variable if found. + /// If the variable is not found, an error is returned. + pub(super) fn use_variable( + &mut self, + name: &Ident, + ) -> Result<(HirIdent, usize), ResolverError> { + // Find the definition for this Ident + let scope_tree = self.scopes.current_scope_tree(); + let variable = scope_tree.find(&name.0.contents); + + let location = Location::new(name.span(), self.file); + if let Some((variable_found, scope)) = variable { + variable_found.num_times_used += 1; + let id = variable_found.ident.id; + Ok((HirIdent::non_trait_method(id, location), scope)) + } else { + Err(ResolverError::VariableNotDeclared { + name: name.0.contents.clone(), + span: name.0.span(), + }) + } + } + + pub(super) fn elaborate_variable(&mut self, variable: Path) -> (ExprId, Type) { + let span = variable.span; + let expr = self.resolve_variable(variable); + let id = self.interner.push_expr(HirExpression::Ident(expr.clone())); + self.interner.push_expr_location(id, span, self.file); + let typ = self.type_check_variable(expr, id); + self.interner.push_expr_type(id, typ.clone()); + (id, typ) + } + + fn resolve_variable(&mut self, path: Path) -> HirIdent { + if let Some((method, constraint, assumed)) = self.resolve_trait_generic_path(&path) { + HirIdent { + location: Location::new(path.span, self.file), + id: 
self.interner.trait_method_id(method), + impl_kind: ImplKind::TraitMethod(method, constraint, assumed), + } + } else { + // If the Path is being used as an Expression, then it is referring to a global from a separate module + // Otherwise, then it is referring to an Identifier + // This lookup allows support of such statements: let x = foo::bar::SOME_GLOBAL + 10; + // If the expression is a singular indent, we search the resolver's current scope as normal. + let (hir_ident, var_scope_index) = self.get_ident_from_path(path); + + if hir_ident.id != DefinitionId::dummy_id() { + match self.interner.definition(hir_ident.id).kind { + DefinitionKind::Function(id) => { + if let Some(current_item) = self.current_item { + self.interner.add_function_dependency(current_item, id); + } + } + DefinitionKind::Global(global_id) => { + if let Some(current_item) = self.current_item { + self.interner.add_global_dependency(current_item, global_id); + } + } + DefinitionKind::GenericType(_) => { + // Initialize numeric generics to a polymorphic integer type in case + // they're used in expressions. We must do this here since type_check_variable + // does not check definition kinds and otherwise expects parameters to + // already be typed. + if self.interner.definition_type(hir_ident.id) == Type::Error { + let typ = Type::polymorphic_integer_or_field(self.interner); + self.interner.push_definition_type(hir_ident.id, typ); + } + } + DefinitionKind::Local(_) => { + // only local variables can be captured by closures. + self.resolve_local_variable(hir_ident.clone(), var_scope_index); + } + } + } + + hir_ident + } + } + + pub(super) fn type_check_variable(&mut self, ident: HirIdent, expr_id: ExprId) -> Type { + let mut bindings = TypeBindings::new(); + + // Add type bindings from any constraints that were used. 
+ // We need to do this first since otherwise instantiating the type below + // will replace each trait generic with a fresh type variable, rather than + // the type used in the trait constraint (if it exists). See #4088. + if let ImplKind::TraitMethod(_, constraint, _) = &ident.impl_kind { + let the_trait = self.interner.get_trait(constraint.trait_id); + assert_eq!(the_trait.generics.len(), constraint.trait_generics.len()); + + for (param, arg) in the_trait.generics.iter().zip(&constraint.trait_generics) { + // Avoid binding t = t + if !arg.occurs(param.id()) { + bindings.insert(param.id(), (param.clone(), arg.clone())); + } + } + } + + // An identifiers type may be forall-quantified in the case of generic functions. + // E.g. `fn foo(t: T, field: Field) -> T` has type `forall T. fn(T, Field) -> T`. + // We must instantiate identifiers at every call site to replace this T with a new type + // variable to handle generic functions. + let t = self.interner.id_type_substitute_trait_as_type(ident.id); + + // This instantiates a trait's generics as well which need to be set + // when the constraint below is later solved for when the function is + // finished. How to link the two? + let (typ, bindings) = t.instantiate_with_bindings(bindings, self.interner); + + // Push any trait constraints required by this definition to the context + // to be checked later when the type of this variable is further constrained. 
+ if let Some(definition) = self.interner.try_definition(ident.id) { + if let DefinitionKind::Function(function) = definition.kind { + let function = self.interner.function_meta(&function); + + for mut constraint in function.trait_constraints.clone() { + constraint.apply_bindings(&bindings); + self.trait_constraints.push((constraint, expr_id)); + } + } + } + + if let ImplKind::TraitMethod(_, mut constraint, assumed) = ident.impl_kind { + constraint.apply_bindings(&bindings); + if assumed { + let trait_impl = TraitImplKind::Assumed { + object_type: constraint.typ, + trait_generics: constraint.trait_generics, + }; + self.interner.select_impl_for_expression(expr_id, trait_impl); + } else { + // Currently only one impl can be selected per expr_id, so this + // constraint needs to be pushed after any other constraints so + // that monomorphization can resolve this trait method to the correct impl. + self.trait_constraints.push((constraint, expr_id)); + } + } + + self.interner.store_instantiation_bindings(expr_id, bindings); + typ + } + + fn get_ident_from_path(&mut self, path: Path) -> (HirIdent, usize) { + let location = Location::new(path.span(), self.file); + + let error = match path.as_ident().map(|ident| self.use_variable(ident)) { + Some(Ok(found)) => return found, + // Try to look it up as a global, but still issue the first error if we fail + Some(Err(error)) => match self.lookup_global(path) { + Ok(id) => return (HirIdent::non_trait_method(id, location), 0), + Err(_) => error, + }, + None => match self.lookup_global(path) { + Ok(id) => return (HirIdent::non_trait_method(id, location), 0), + Err(error) => error, + }, + }; + self.push_err(error); + let id = DefinitionId::dummy_id(); + (HirIdent::non_trait_method(id, location), 0) + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs new file mode 100644 index 00000000000..cf10dbbc2b2 --- /dev/null +++ 
b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs @@ -0,0 +1,200 @@ +use noirc_errors::Spanned; +use rustc_hash::FxHashMap as HashMap; + +use crate::ast::ERROR_IDENT; +use crate::hir::comptime::Value; +use crate::hir::def_map::{LocalModuleId, ModuleId}; +use crate::hir::resolution::path_resolver::{PathResolver, StandardPathResolver}; +use crate::hir::resolution::resolver::SELF_TYPE_NAME; +use crate::hir::scope::{Scope as GenericScope, ScopeTree as GenericScopeTree}; +use crate::macros_api::Ident; +use crate::{ + hir::{ + def_map::{ModuleDefId, TryFromModuleDefId}, + resolution::errors::ResolverError, + }, + hir_def::{ + expr::{HirCapturedVar, HirIdent}, + traits::Trait, + }, + macros_api::{Path, StructId}, + node_interner::{DefinitionId, TraitId, TypeAliasId}, + Shared, StructType, +}; +use crate::{Type, TypeAlias}; + +use super::{Elaborator, ResolverMeta}; + +type Scope = GenericScope; +type ScopeTree = GenericScopeTree; + +impl<'context> Elaborator<'context> { + pub(super) fn lookup(&mut self, path: Path) -> Result { + let span = path.span(); + let id = self.resolve_path(path)?; + T::try_from(id).ok_or_else(|| ResolverError::Expected { + expected: T::description(), + got: id.as_str().to_owned(), + span, + }) + } + + pub(super) fn module_id(&self) -> ModuleId { + assert_ne!(self.local_module, LocalModuleId::dummy_id(), "local_module is unset"); + ModuleId { krate: self.crate_id, local_id: self.local_module } + } + + pub(super) fn resolve_path(&mut self, path: Path) -> Result { + let resolver = StandardPathResolver::new(self.module_id()); + let path_resolution = resolver.resolve(self.def_maps, path)?; + + if let Some(error) = path_resolution.error { + self.push_err(error); + } + + Ok(path_resolution.module_def_id) + } + + pub(super) fn get_struct(&self, type_id: StructId) -> Shared { + self.interner.get_struct(type_id) + } + + pub(super) fn get_trait_mut(&mut self, trait_id: TraitId) -> &mut Trait { + self.interner.get_trait_mut(trait_id) + } + + 
pub(super) fn resolve_local_variable(&mut self, hir_ident: HirIdent, var_scope_index: usize) { + let mut transitive_capture_index: Option = None; + + for lambda_index in 0..self.lambda_stack.len() { + if self.lambda_stack[lambda_index].scope_index > var_scope_index { + // Beware: the same variable may be captured multiple times, so we check + // for its presence before adding the capture below. + let position = self.lambda_stack[lambda_index] + .captures + .iter() + .position(|capture| capture.ident.id == hir_ident.id); + + if position.is_none() { + self.lambda_stack[lambda_index].captures.push(HirCapturedVar { + ident: hir_ident.clone(), + transitive_capture_index, + }); + } + + if lambda_index + 1 < self.lambda_stack.len() { + // There is more than one closure between the current scope and + // the scope of the variable, so this is a propagated capture. + // We need to track the transitive capture index as we go up in + // the closure stack. + transitive_capture_index = Some(position.unwrap_or( + // If this was a fresh capture, we added it to the end of + // the captures vector: + self.lambda_stack[lambda_index].captures.len() - 1, + )); + } + } + } + } + + pub(super) fn lookup_global(&mut self, path: Path) -> Result { + let span = path.span(); + let id = self.resolve_path(path)?; + + if let Some(function) = TryFromModuleDefId::try_from(id) { + return Ok(self.interner.function_definition_id(function)); + } + + if let Some(global) = TryFromModuleDefId::try_from(id) { + let global = self.interner.get_global(global); + return Ok(global.definition_id); + } + + let expected = "global variable".into(); + let got = "local variable".into(); + Err(ResolverError::Expected { span, expected, got }) + } + + pub fn push_scope(&mut self) { + self.scopes.start_scope(); + } + + pub fn pop_scope(&mut self) { + let scope = self.scopes.end_scope(); + self.check_for_unused_variables_in_scope_tree(scope.into()); + } + + pub fn check_for_unused_variables_in_scope_tree(&mut self, 
scope_decls: ScopeTree) { + let mut unused_vars = Vec::new(); + for scope in scope_decls.0.into_iter() { + Self::check_for_unused_variables_in_local_scope(scope, &mut unused_vars); + } + + for unused_var in unused_vars.iter() { + if let Some(definition_info) = self.interner.try_definition(unused_var.id) { + let name = &definition_info.name; + if name != ERROR_IDENT && !definition_info.is_global() { + let ident = Ident(Spanned::from(unused_var.location.span, name.to_owned())); + self.push_err(ResolverError::UnusedVariable { ident }); + } + } + } + } + + fn check_for_unused_variables_in_local_scope(decl_map: Scope, unused_vars: &mut Vec) { + let unused_variables = decl_map.filter(|(variable_name, metadata)| { + let has_underscore_prefix = variable_name.starts_with('_'); // XXX: This is used for development mode, and will be removed + metadata.warn_if_unused && metadata.num_times_used == 0 && !has_underscore_prefix + }); + unused_vars.extend(unused_variables.map(|(_, meta)| meta.ident.clone())); + } + + /// Lookup a given trait by name/path. + pub fn lookup_trait_or_error(&mut self, path: Path) -> Option<&mut Trait> { + match self.lookup(path) { + Ok(trait_id) => Some(self.get_trait_mut(trait_id)), + Err(error) => { + self.push_err(error); + None + } + } + } + + /// Lookup a given struct type by name. + pub fn lookup_struct_or_error(&mut self, path: Path) -> Option> { + match self.lookup(path) { + Ok(struct_id) => Some(self.get_struct(struct_id)), + Err(error) => { + self.push_err(error); + None + } + } + } + + /// Looks up a given type by name. + /// This will also instantiate any struct types found. 
+ pub(super) fn lookup_type_or_error(&mut self, path: Path) -> Option { + let ident = path.as_ident(); + if ident.map_or(false, |i| i == SELF_TYPE_NAME) { + if let Some(typ) = &self.self_type { + return Some(typ.clone()); + } + } + + match self.lookup(path) { + Ok(struct_id) => { + let struct_type = self.get_struct(struct_id); + let generics = struct_type.borrow().instantiate(self.interner); + Some(Type::Struct(struct_type, generics)) + } + Err(error) => { + self.push_err(error); + None + } + } + } + + pub fn lookup_type_alias(&mut self, path: Path) -> Option> { + self.lookup(path).ok().map(|id| self.interner.get_type_alias(id)) + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs new file mode 100644 index 00000000000..a7a2df4041e --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs @@ -0,0 +1,409 @@ +use noirc_errors::{Location, Span}; + +use crate::{ + ast::{AssignStatement, ConstrainStatement, LValue}, + hir::{ + resolution::errors::ResolverError, + type_check::{Source, TypeCheckError}, + }, + hir_def::{ + expr::HirIdent, + stmt::{ + HirAssignStatement, HirConstrainStatement, HirForStatement, HirLValue, HirLetStatement, + }, + }, + macros_api::{ + ForLoopStatement, ForRange, HirStatement, LetStatement, Statement, StatementKind, + }, + node_interner::{DefinitionId, DefinitionKind, StmtId}, + Type, +}; + +use super::Elaborator; + +impl<'context> Elaborator<'context> { + fn elaborate_statement_value(&mut self, statement: Statement) -> (HirStatement, Type) { + match statement.kind { + StatementKind::Let(let_stmt) => self.elaborate_let(let_stmt), + StatementKind::Constrain(constrain) => self.elaborate_constrain(constrain), + StatementKind::Assign(assign) => self.elaborate_assign(assign), + StatementKind::For(for_stmt) => self.elaborate_for(for_stmt), + StatementKind::Break => self.elaborate_jump(true, statement.span), + 
StatementKind::Continue => self.elaborate_jump(false, statement.span), + StatementKind::Comptime(statement) => self.elaborate_comptime(*statement), + StatementKind::Expression(expr) => { + let (expr, typ) = self.elaborate_expression(expr); + (HirStatement::Expression(expr), typ) + } + StatementKind::Semi(expr) => { + let (expr, _typ) = self.elaborate_expression(expr); + (HirStatement::Semi(expr), Type::Unit) + } + StatementKind::Error => (HirStatement::Error, Type::Error), + } + } + + pub(super) fn elaborate_statement(&mut self, statement: Statement) -> (StmtId, Type) { + let span = statement.span; + let (hir_statement, typ) = self.elaborate_statement_value(statement); + let id = self.interner.push_stmt(hir_statement); + self.interner.push_stmt_location(id, span, self.file); + (id, typ) + } + + pub(super) fn elaborate_let(&mut self, let_stmt: LetStatement) -> (HirStatement, Type) { + let expr_span = let_stmt.expression.span; + let (expression, expr_type) = self.elaborate_expression(let_stmt.expression); + let definition = DefinitionKind::Local(Some(expression)); + let annotated_type = self.resolve_type(let_stmt.r#type); + + // First check if the LHS is unspecified + // If so, then we give it the same type as the expression + let r#type = if annotated_type != Type::Error { + // Now check if LHS is the same type as the RHS + // Importantly, we do not coerce any types implicitly + self.unify_with_coercions(&expr_type, &annotated_type, expression, || { + TypeCheckError::TypeMismatch { + expected_typ: annotated_type.to_string(), + expr_typ: expr_type.to_string(), + expr_span, + } + }); + if annotated_type.is_unsigned() { + self.lint_overflowing_uint(&expression, &annotated_type); + } + annotated_type + } else { + expr_type + }; + + let let_ = HirLetStatement { + pattern: self.elaborate_pattern(let_stmt.pattern, r#type.clone(), definition), + r#type, + expression, + attributes: let_stmt.attributes, + comptime: let_stmt.comptime, + }; + (HirStatement::Let(let_), 
Type::Unit) + } + + pub(super) fn elaborate_constrain(&mut self, stmt: ConstrainStatement) -> (HirStatement, Type) { + let expr_span = stmt.0.span; + let (expr_id, expr_type) = self.elaborate_expression(stmt.0); + + // Must type check the assertion message expression so that we instantiate bindings + let msg = stmt.1.map(|assert_msg_expr| self.elaborate_expression(assert_msg_expr).0); + + self.unify(&expr_type, &Type::Bool, || TypeCheckError::TypeMismatch { + expr_typ: expr_type.to_string(), + expected_typ: Type::Bool.to_string(), + expr_span, + }); + + (HirStatement::Constrain(HirConstrainStatement(expr_id, self.file, msg)), Type::Unit) + } + + pub(super) fn elaborate_assign(&mut self, assign: AssignStatement) -> (HirStatement, Type) { + let span = assign.expression.span; + let (expression, expr_type) = self.elaborate_expression(assign.expression); + let (lvalue, lvalue_type, mutable) = self.elaborate_lvalue(assign.lvalue, span); + + if !mutable { + let (name, span) = self.get_lvalue_name_and_span(&lvalue); + self.push_err(TypeCheckError::VariableMustBeMutable { name, span }); + } + + self.unify_with_coercions(&expr_type, &lvalue_type, expression, || { + TypeCheckError::TypeMismatchWithSource { + actual: expr_type.clone(), + expected: lvalue_type.clone(), + span, + source: Source::Assignment, + } + }); + + let stmt = HirAssignStatement { lvalue, expression }; + (HirStatement::Assign(stmt), Type::Unit) + } + + pub(super) fn elaborate_for(&mut self, for_loop: ForLoopStatement) -> (HirStatement, Type) { + let (start, end) = match for_loop.range { + ForRange::Range(start, end) => (start, end), + ForRange::Array(_) => { + let for_stmt = + for_loop.range.into_for(for_loop.identifier, for_loop.block, for_loop.span); + + return self.elaborate_statement_value(for_stmt); + } + }; + + let start_span = start.span; + let end_span = end.span; + + let (start_range, start_range_type) = self.elaborate_expression(start); + let (end_range, end_range_type) = 
self.elaborate_expression(end); + let (identifier, block) = (for_loop.identifier, for_loop.block); + + self.nested_loops += 1; + self.push_scope(); + + // TODO: For loop variables are currently mutable by default since we haven't + // yet implemented syntax for them to be optionally mutable. + let kind = DefinitionKind::Local(None); + let identifier = self.add_variable_decl(identifier, false, true, kind); + + // Check that start range and end range have the same types + let range_span = start_span.merge(end_span); + self.unify(&start_range_type, &end_range_type, || TypeCheckError::TypeMismatch { + expected_typ: start_range_type.to_string(), + expr_typ: end_range_type.to_string(), + expr_span: range_span, + }); + + let expected_type = self.polymorphic_integer(); + + self.unify(&start_range_type, &expected_type, || TypeCheckError::TypeCannotBeUsed { + typ: start_range_type.clone(), + place: "for loop", + span: range_span, + }); + + self.interner.push_definition_type(identifier.id, start_range_type); + + let (block, _block_type) = self.elaborate_expression(block); + + self.pop_scope(); + self.nested_loops -= 1; + + let statement = + HirStatement::For(HirForStatement { start_range, end_range, block, identifier }); + + (statement, Type::Unit) + } + + fn elaborate_jump(&mut self, is_break: bool, span: noirc_errors::Span) -> (HirStatement, Type) { + if !self.in_unconstrained_fn { + self.push_err(ResolverError::JumpInConstrainedFn { is_break, span }); + } + if self.nested_loops == 0 { + self.push_err(ResolverError::JumpOutsideLoop { is_break, span }); + } + + let expr = if is_break { HirStatement::Break } else { HirStatement::Continue }; + (expr, self.interner.next_type_variable()) + } + + fn get_lvalue_name_and_span(&self, lvalue: &HirLValue) -> (String, Span) { + match lvalue { + HirLValue::Ident(name, _) => { + let span = name.location.span; + + if let Some(definition) = self.interner.try_definition(name.id) { + (definition.name.clone(), span) + } else { + ("(undeclared 
variable)".into(), span) + } + } + HirLValue::MemberAccess { object, .. } => self.get_lvalue_name_and_span(object), + HirLValue::Index { array, .. } => self.get_lvalue_name_and_span(array), + HirLValue::Dereference { lvalue, .. } => self.get_lvalue_name_and_span(lvalue), + } + } + + fn elaborate_lvalue(&mut self, lvalue: LValue, assign_span: Span) -> (HirLValue, Type, bool) { + match lvalue { + LValue::Ident(ident) => { + let mut mutable = true; + let (ident, scope_index) = self.find_variable_or_default(&ident); + self.resolve_local_variable(ident.clone(), scope_index); + + let typ = if ident.id == DefinitionId::dummy_id() { + Type::Error + } else { + if let Some(definition) = self.interner.try_definition(ident.id) { + mutable = definition.mutable; + } + + let typ = self.interner.definition_type(ident.id).instantiate(self.interner).0; + typ.follow_bindings() + }; + + (HirLValue::Ident(ident.clone(), typ.clone()), typ, mutable) + } + LValue::MemberAccess { object, field_name, span } => { + let (object, lhs_type, mut mutable) = self.elaborate_lvalue(*object, assign_span); + let mut object = Box::new(object); + let field_name = field_name.clone(); + + let object_ref = &mut object; + let mutable_ref = &mut mutable; + let location = Location::new(span, self.file); + + let dereference_lhs = move |_: &mut Self, _, element_type| { + // We must create a temporary value first to move out of object_ref before + // we eventually reassign to it. 
+ let id = DefinitionId::dummy_id(); + let ident = HirIdent::non_trait_method(id, location); + let tmp_value = HirLValue::Ident(ident, Type::Error); + + let lvalue = std::mem::replace(object_ref, Box::new(tmp_value)); + *object_ref = + Box::new(HirLValue::Dereference { lvalue, element_type, location }); + *mutable_ref = true; + }; + + let name = &field_name.0.contents; + let (object_type, field_index) = self + .check_field_access(&lhs_type, name, field_name.span(), Some(dereference_lhs)) + .unwrap_or((Type::Error, 0)); + + let field_index = Some(field_index); + let typ = object_type.clone(); + let lvalue = + HirLValue::MemberAccess { object, field_name, field_index, typ, location }; + (lvalue, object_type, mutable) + } + LValue::Index { array, index, span } => { + let expr_span = index.span; + let (index, index_type) = self.elaborate_expression(index); + let location = Location::new(span, self.file); + + let expected = self.polymorphic_integer_or_field(); + self.unify(&index_type, &expected, || TypeCheckError::TypeMismatch { + expected_typ: "an integer".to_owned(), + expr_typ: index_type.to_string(), + expr_span, + }); + + let (mut lvalue, mut lvalue_type, mut mutable) = + self.elaborate_lvalue(*array, assign_span); + + // Before we check that the lvalue is an array, try to dereference it as many times + // as needed to unwrap any &mut wrappers. 
+ while let Type::MutableReference(element) = lvalue_type.follow_bindings() { + let element_type = element.as_ref().clone(); + lvalue = + HirLValue::Dereference { lvalue: Box::new(lvalue), element_type, location }; + lvalue_type = *element; + // We know this value to be mutable now since we found an `&mut` + mutable = true; + } + + let typ = match lvalue_type.follow_bindings() { + Type::Array(_, elem_type) => *elem_type, + Type::Slice(elem_type) => *elem_type, + Type::Error => Type::Error, + Type::String(_) => { + let (_lvalue_name, lvalue_span) = self.get_lvalue_name_and_span(&lvalue); + self.push_err(TypeCheckError::StringIndexAssign { span: lvalue_span }); + Type::Error + } + other => { + // TODO: Need a better span here + self.push_err(TypeCheckError::TypeMismatch { + expected_typ: "array".to_string(), + expr_typ: other.to_string(), + expr_span: assign_span, + }); + Type::Error + } + }; + + let array = Box::new(lvalue); + let array_type = typ.clone(); + (HirLValue::Index { array, index, typ, location }, array_type, mutable) + } + LValue::Dereference(lvalue, span) => { + let (lvalue, reference_type, _) = self.elaborate_lvalue(*lvalue, assign_span); + let lvalue = Box::new(lvalue); + let location = Location::new(span, self.file); + + let element_type = Type::type_variable(self.interner.next_type_variable_id()); + let expected_type = Type::MutableReference(Box::new(element_type.clone())); + + self.unify(&reference_type, &expected_type, || TypeCheckError::TypeMismatch { + expected_typ: expected_type.to_string(), + expr_typ: reference_type.to_string(), + expr_span: assign_span, + }); + + // Dereferences are always mutable since we already type checked against a &mut T + let typ = element_type.clone(); + let lvalue = HirLValue::Dereference { lvalue, element_type, location }; + (lvalue, typ, true) + } + } + } + + /// Type checks a field access, adding dereference operators as necessary + pub(super) fn check_field_access( + &mut self, + lhs_type: &Type, + field_name: 
&str, + span: Span, + dereference_lhs: Option, + ) -> Option<(Type, usize)> { + let lhs_type = lhs_type.follow_bindings(); + + match &lhs_type { + Type::Struct(s, args) => { + let s = s.borrow(); + if let Some((field, index)) = s.get_field(field_name, args) { + return Some((field, index)); + } + } + Type::Tuple(elements) => { + if let Ok(index) = field_name.parse::() { + let length = elements.len(); + if index < length { + return Some((elements[index].clone(), index)); + } else { + self.push_err(TypeCheckError::TupleIndexOutOfBounds { + index, + lhs_type, + length, + span, + }); + return None; + } + } + } + // If the lhs is a mutable reference we automatically transform + // lhs.field into (*lhs).field + Type::MutableReference(element) => { + if let Some(mut dereference_lhs) = dereference_lhs { + dereference_lhs(self, lhs_type.clone(), element.as_ref().clone()); + return self.check_field_access( + element, + field_name, + span, + Some(dereference_lhs), + ); + } else { + let (element, index) = + self.check_field_access(element, field_name, span, dereference_lhs)?; + return Some((Type::MutableReference(Box::new(element)), index)); + } + } + _ => (), + } + + // If we get here the type has no field named 'access.rhs'. + // Now we specialize the error message based on whether we know the object type in question yet. + if let Type::TypeVariable(..) 
= &lhs_type { + self.push_err(TypeCheckError::TypeAnnotationsNeeded { span }); + } else if lhs_type != Type::Error { + self.push_err(TypeCheckError::AccessUnknownMember { + lhs_type, + field_name: field_name.to_string(), + span, + }); + } + + None + } + + pub(super) fn elaborate_comptime(&self, _statement: Statement) -> (HirStatement, Type) { + todo!("Comptime scanning") + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs new file mode 100644 index 00000000000..4c8364b6dda --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs @@ -0,0 +1,1438 @@ +use std::rc::Rc; + +use iter_extended::vecmap; +use noirc_errors::{Location, Span}; + +use crate::{ + ast::{BinaryOpKind, IntegerBitSize, UnresolvedTraitConstraint, UnresolvedTypeExpression}, + hir::{ + def_map::ModuleDefId, + resolution::{ + errors::ResolverError, + import::PathResolution, + resolver::{verify_mutable_reference, SELF_TYPE_NAME}, + }, + type_check::{Source, TypeCheckError}, + }, + hir_def::{ + expr::{ + HirBinaryOp, HirCallExpression, HirIdent, HirMemberAccess, HirMethodReference, + HirPrefixExpression, + }, + function::FuncMeta, + traits::{Trait, TraitConstraint}, + }, + macros_api::{ + HirExpression, HirLiteral, HirStatement, Path, PathKind, SecondaryAttribute, Signedness, + UnaryOp, UnresolvedType, UnresolvedTypeData, + }, + node_interner::{DefinitionKind, ExprId, GlobalId, TraitId, TraitImplKind, TraitMethodId}, + Generics, Shared, StructType, Type, TypeAlias, TypeBinding, TypeVariable, TypeVariableKind, +}; + +use super::Elaborator; + +impl<'context> Elaborator<'context> { + /// Translates an UnresolvedType to a Type + pub(super) fn resolve_type(&mut self, typ: UnresolvedType) -> Type { + let span = typ.span; + let resolved_type = self.resolve_type_inner(typ, &mut vec![]); + if resolved_type.is_nested_slice() { + self.push_err(ResolverError::NestedSlices { span: span.unwrap() }); 
+ } + + resolved_type + } + + /// Translates an UnresolvedType into a Type and appends any + /// freshly created TypeVariables created to new_variables. + pub fn resolve_type_inner( + &mut self, + typ: UnresolvedType, + new_variables: &mut Generics, + ) -> Type { + use crate::ast::UnresolvedTypeData::*; + + let resolved_type = match typ.typ { + FieldElement => Type::FieldElement, + Array(size, elem) => { + let elem = Box::new(self.resolve_type_inner(*elem, new_variables)); + let size = self.resolve_array_size(Some(size), new_variables); + Type::Array(Box::new(size), elem) + } + Slice(elem) => { + let elem = Box::new(self.resolve_type_inner(*elem, new_variables)); + Type::Slice(elem) + } + Expression(expr) => self.convert_expression_type(expr), + Integer(sign, bits) => Type::Integer(sign, bits), + Bool => Type::Bool, + String(size) => { + let resolved_size = self.resolve_array_size(size, new_variables); + Type::String(Box::new(resolved_size)) + } + FormatString(size, fields) => { + let resolved_size = self.convert_expression_type(size); + let fields = self.resolve_type_inner(*fields, new_variables); + Type::FmtString(Box::new(resolved_size), Box::new(fields)) + } + Code => Type::Code, + Unit => Type::Unit, + Unspecified => Type::Error, + Error => Type::Error, + Named(path, args, _) => self.resolve_named_type(path, args, new_variables), + TraitAsType(path, args) => self.resolve_trait_as_type(path, args, new_variables), + + Tuple(fields) => { + Type::Tuple(vecmap(fields, |field| self.resolve_type_inner(field, new_variables))) + } + Function(args, ret, env) => { + let args = vecmap(args, |arg| self.resolve_type_inner(arg, new_variables)); + let ret = Box::new(self.resolve_type_inner(*ret, new_variables)); + + // expect() here is valid, because the only places we don't have a span are omitted types + // e.g. 
a function without return type implicitly has a spanless UnresolvedType::Unit return type + // To get an invalid env type, the user must explicitly specify the type, which will have a span + let env_span = + env.span.expect("Unexpected missing span for closure environment type"); + + let env = Box::new(self.resolve_type_inner(*env, new_variables)); + + match *env { + Type::Unit | Type::Tuple(_) | Type::NamedGeneric(_, _) => { + Type::Function(args, ret, env) + } + _ => { + self.push_err(ResolverError::InvalidClosureEnvironment { + typ: *env, + span: env_span, + }); + Type::Error + } + } + } + MutableReference(element) => { + Type::MutableReference(Box::new(self.resolve_type_inner(*element, new_variables))) + } + Parenthesized(typ) => self.resolve_type_inner(*typ, new_variables), + }; + + if let Type::Struct(_, _) = resolved_type { + if let Some(unresolved_span) = typ.span { + // Record the location of the type reference + self.interner.push_type_ref_location( + resolved_type.clone(), + Location::new(unresolved_span, self.file), + ); + } + } + resolved_type + } + + pub fn find_generic(&self, target_name: &str) -> Option<&(Rc, TypeVariable, Span)> { + self.generics.iter().find(|(name, _, _)| name.as_ref() == target_name) + } + + fn resolve_named_type( + &mut self, + path: Path, + args: Vec, + new_variables: &mut Generics, + ) -> Type { + if args.is_empty() { + if let Some(typ) = self.lookup_generic_or_global_type(&path) { + return typ; + } + } + + // Check if the path is a type variable first. We currently disallow generics on type + // variables since we do not support higher-kinded types. 
+ if path.segments.len() == 1 { + let name = &path.last_segment().0.contents; + + if name == SELF_TYPE_NAME { + if let Some(self_type) = self.self_type.clone() { + if !args.is_empty() { + self.push_err(ResolverError::GenericsOnSelfType { span: path.span() }); + } + return self_type; + } + } + } + + let span = path.span(); + let mut args = vecmap(args, |arg| self.resolve_type_inner(arg, new_variables)); + + if let Some(type_alias) = self.lookup_type_alias(path.clone()) { + let type_alias = type_alias.borrow(); + let expected_generic_count = type_alias.generics.len(); + let type_alias_string = type_alias.to_string(); + let id = type_alias.id; + + self.verify_generics_count(expected_generic_count, &mut args, span, || { + type_alias_string + }); + + if let Some(item) = self.current_item { + self.interner.add_type_alias_dependency(item, id); + } + + // Collecting Type Alias references [Location]s to be used by LSP in order + // to resolve the definition of the type alias + self.interner.add_type_alias_ref(id, Location::new(span, self.file)); + + // Because there is no ordering to when type aliases (and other globals) are resolved, + // it is possible for one to refer to an Error type and issue no error if it is set + // equal to another type alias. Fixing this fully requires an analysis to create a DFG + // of definition ordering, but for now we have an explicit check here so that we at + // least issue an error that the type was not found instead of silently passing. 
+ let alias = self.interner.get_type_alias(id); + return Type::Alias(alias, args); + } + + match self.lookup_struct_or_error(path) { + Some(struct_type) => { + if self.resolving_ids.contains(&struct_type.borrow().id) { + self.push_err(ResolverError::SelfReferentialStruct { + span: struct_type.borrow().name.span(), + }); + + return Type::Error; + } + + let expected_generic_count = struct_type.borrow().generics.len(); + if !self.in_contract + && self + .interner + .struct_attributes(&struct_type.borrow().id) + .iter() + .any(|attr| matches!(attr, SecondaryAttribute::Abi(_))) + { + self.push_err(ResolverError::AbiAttributeOutsideContract { + span: struct_type.borrow().name.span(), + }); + } + self.verify_generics_count(expected_generic_count, &mut args, span, || { + struct_type.borrow().to_string() + }); + + if let Some(current_item) = self.current_item { + let dependency_id = struct_type.borrow().id; + self.interner.add_type_dependency(current_item, dependency_id); + } + + Type::Struct(struct_type, args) + } + None => Type::Error, + } + } + + fn resolve_trait_as_type( + &mut self, + path: Path, + args: Vec, + new_variables: &mut Generics, + ) -> Type { + let args = vecmap(args, |arg| self.resolve_type_inner(arg, new_variables)); + + if let Some(t) = self.lookup_trait_or_error(path) { + Type::TraitAsType(t.id, Rc::new(t.name.to_string()), args) + } else { + Type::Error + } + } + + fn verify_generics_count( + &mut self, + expected_count: usize, + args: &mut Vec, + span: Span, + type_name: impl FnOnce() -> String, + ) { + if args.len() != expected_count { + self.push_err(ResolverError::IncorrectGenericCount { + span, + item_name: type_name(), + actual: args.len(), + expected: expected_count, + }); + + // Fix the generic count so we can continue typechecking + args.resize_with(expected_count, || Type::Error); + } + } + + pub fn lookup_generic_or_global_type(&mut self, path: &Path) -> Option { + if path.segments.len() == 1 { + let name = &path.last_segment().0.contents; + 
if let Some((name, var, _)) = self.find_generic(name) { + return Some(Type::NamedGeneric(var.clone(), name.clone())); + } + } + + // If we cannot find a local generic of the same name, try to look up a global + match self.resolve_path(path.clone()) { + Ok(ModuleDefId::GlobalId(id)) => { + if let Some(current_item) = self.current_item { + self.interner.add_global_dependency(current_item, id); + } + + Some(Type::Constant(self.eval_global_as_array_length(id, path))) + } + _ => None, + } + } + + fn resolve_array_size( + &mut self, + length: Option, + new_variables: &mut Generics, + ) -> Type { + match length { + None => { + let id = self.interner.next_type_variable_id(); + let typevar = TypeVariable::unbound(id); + new_variables.push(typevar.clone()); + + // 'Named'Generic is a bit of a misnomer here, we want a type variable that + // wont be bound over but this one has no name since we do not currently + // require users to explicitly be generic over array lengths. + Type::NamedGeneric(typevar, Rc::new("".into())) + } + Some(length) => self.convert_expression_type(length), + } + } + + pub(super) fn convert_expression_type(&mut self, length: UnresolvedTypeExpression) -> Type { + match length { + UnresolvedTypeExpression::Variable(path) => { + self.lookup_generic_or_global_type(&path).unwrap_or_else(|| { + self.push_err(ResolverError::NoSuchNumericTypeVariable { path }); + Type::Constant(0) + }) + } + UnresolvedTypeExpression::Constant(int, _) => Type::Constant(int), + UnresolvedTypeExpression::BinaryOperation(lhs, op, rhs, _) => { + let (lhs_span, rhs_span) = (lhs.span(), rhs.span()); + let lhs = self.convert_expression_type(*lhs); + let rhs = self.convert_expression_type(*rhs); + + match (lhs, rhs) { + (Type::Constant(lhs), Type::Constant(rhs)) => { + Type::Constant(op.function()(lhs, rhs)) + } + (lhs, _) => { + let span = + if !matches!(lhs, Type::Constant(_)) { lhs_span } else { rhs_span }; + self.push_err(ResolverError::InvalidArrayLengthExpr { span }); + 
Type::Constant(0) + } + } + } + } + } + + // this resolves Self::some_static_method, inside an impl block (where we don't have a concrete self_type) + // + // Returns the trait method, trait constraint, and whether the impl is assumed to exist by a where clause or not + // E.g. `t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` + fn resolve_trait_static_method_by_self( + &mut self, + path: &Path, + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { + let trait_id = self.trait_id?; + + if path.kind == PathKind::Plain && path.segments.len() == 2 { + let name = &path.segments[0].0.contents; + let method = &path.segments[1]; + + if name == SELF_TYPE_NAME { + let the_trait = self.interner.get_trait(trait_id); + let method = the_trait.find_method(method.0.contents.as_str())?; + + let constraint = TraitConstraint { + typ: self.self_type.clone()?, + trait_generics: Type::from_generics(&the_trait.generics), + trait_id, + }; + return Some((method, constraint, false)); + } + } + None + } + + // this resolves TraitName::some_static_method + // + // Returns the trait method, trait constraint, and whether the impl is assumed to exist by a where clause or not + // E.g. 
`t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` + fn resolve_trait_static_method( + &mut self, + path: &Path, + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { + if path.kind == PathKind::Plain && path.segments.len() == 2 { + let method = &path.segments[1]; + + let mut trait_path = path.clone(); + trait_path.pop(); + let trait_id = self.lookup(trait_path).ok()?; + let the_trait = self.interner.get_trait(trait_id); + + let method = the_trait.find_method(method.0.contents.as_str())?; + let constraint = TraitConstraint { + typ: Type::TypeVariable( + the_trait.self_type_typevar.clone(), + TypeVariableKind::Normal, + ), + trait_generics: Type::from_generics(&the_trait.generics), + trait_id, + }; + return Some((method, constraint, false)); + } + None + } + + // This resolves a static trait method T::trait_method by iterating over the where clause + // + // Returns the trait method, trait constraint, and whether the impl is assumed from a where + // clause. This is always true since this helper searches where clauses for a generic constraint. + // E.g. 
`t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` + fn resolve_trait_method_by_named_generic( + &mut self, + path: &Path, + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { + if path.segments.len() != 2 { + return None; + } + + for UnresolvedTraitConstraint { typ, trait_bound } in self.trait_bounds.clone() { + if let UnresolvedTypeData::Named(constraint_path, _, _) = &typ.typ { + // if `path` is `T::method_name`, we're looking for constraint of the form `T: SomeTrait` + if constraint_path.segments.len() == 1 + && path.segments[0] != constraint_path.last_segment() + { + continue; + } + + if let Ok(ModuleDefId::TraitId(trait_id)) = + self.resolve_path(trait_bound.trait_path.clone()) + { + let the_trait = self.interner.get_trait(trait_id); + if let Some(method) = + the_trait.find_method(path.segments.last().unwrap().0.contents.as_str()) + { + let constraint = TraitConstraint { + trait_id, + typ: self.resolve_type(typ.clone()), + trait_generics: vecmap(trait_bound.trait_generics, |typ| { + self.resolve_type(typ) + }), + }; + return Some((method, constraint, true)); + } + } + } + } + None + } + + // Try to resolve the given trait method path. + // + // Returns the trait method, trait constraint, and whether the impl is assumed to exist by a where clause or not + // E.g. 
`t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` + pub(super) fn resolve_trait_generic_path( + &mut self, + path: &Path, + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { + self.resolve_trait_static_method_by_self(path) + .or_else(|| self.resolve_trait_static_method(path)) + .or_else(|| self.resolve_trait_method_by_named_generic(path)) + } + + fn eval_global_as_array_length(&mut self, global: GlobalId, path: &Path) -> u64 { + let Some(stmt) = self.interner.get_global_let_statement(global) else { + let path = path.clone(); + self.push_err(ResolverError::NoSuchNumericTypeVariable { path }); + return 0; + }; + + let length = stmt.expression; + let span = self.interner.expr_span(&length); + let result = self.try_eval_array_length_id(length, span); + + match result.map(|length| length.try_into()) { + Ok(Ok(length_value)) => return length_value, + Ok(Err(_cast_err)) => self.push_err(ResolverError::IntegerTooLarge { span }), + Err(Some(error)) => self.push_err(error), + Err(None) => (), + } + 0 + } + + fn try_eval_array_length_id( + &self, + rhs: ExprId, + span: Span, + ) -> Result> { + // Arbitrary amount of recursive calls to try before giving up + let fuel = 100; + self.try_eval_array_length_id_with_fuel(rhs, span, fuel) + } + + fn try_eval_array_length_id_with_fuel( + &self, + rhs: ExprId, + span: Span, + fuel: u32, + ) -> Result> { + if fuel == 0 { + // If we reach here, it is likely from evaluating cyclic globals. We expect an error to + // be issued for them after name resolution so issue no error now. 
+ return Err(None); + } + + match self.interner.expression(&rhs) { + HirExpression::Literal(HirLiteral::Integer(int, false)) => { + int.try_into_u128().ok_or(Some(ResolverError::IntegerTooLarge { span })) + } + HirExpression::Ident(ident) => { + let definition = self.interner.definition(ident.id); + match definition.kind { + DefinitionKind::Global(global_id) => { + let let_statement = self.interner.get_global_let_statement(global_id); + if let Some(let_statement) = let_statement { + let expression = let_statement.expression; + self.try_eval_array_length_id_with_fuel(expression, span, fuel - 1) + } else { + Err(Some(ResolverError::InvalidArrayLengthExpr { span })) + } + } + _ => Err(Some(ResolverError::InvalidArrayLengthExpr { span })), + } + } + HirExpression::Infix(infix) => { + let lhs = self.try_eval_array_length_id_with_fuel(infix.lhs, span, fuel - 1)?; + let rhs = self.try_eval_array_length_id_with_fuel(infix.rhs, span, fuel - 1)?; + + match infix.operator.kind { + BinaryOpKind::Add => Ok(lhs + rhs), + BinaryOpKind::Subtract => Ok(lhs - rhs), + BinaryOpKind::Multiply => Ok(lhs * rhs), + BinaryOpKind::Divide => Ok(lhs / rhs), + BinaryOpKind::Equal => Ok((lhs == rhs) as u128), + BinaryOpKind::NotEqual => Ok((lhs != rhs) as u128), + BinaryOpKind::Less => Ok((lhs < rhs) as u128), + BinaryOpKind::LessEqual => Ok((lhs <= rhs) as u128), + BinaryOpKind::Greater => Ok((lhs > rhs) as u128), + BinaryOpKind::GreaterEqual => Ok((lhs >= rhs) as u128), + BinaryOpKind::And => Ok(lhs & rhs), + BinaryOpKind::Or => Ok(lhs | rhs), + BinaryOpKind::Xor => Ok(lhs ^ rhs), + BinaryOpKind::ShiftRight => Ok(lhs >> rhs), + BinaryOpKind::ShiftLeft => Ok(lhs << rhs), + BinaryOpKind::Modulo => Ok(lhs % rhs), + } + } + _other => Err(Some(ResolverError::InvalidArrayLengthExpr { span })), + } + } + + /// Check if an assignment is overflowing with respect to `annotated_type` + /// in a declaration statement where `annotated_type` is an unsigned integer + pub(super) fn lint_overflowing_uint(&mut 
self, rhs_expr: &ExprId, annotated_type: &Type) { + let expr = self.interner.expression(rhs_expr); + let span = self.interner.expr_span(rhs_expr); + match expr { + HirExpression::Literal(HirLiteral::Integer(value, false)) => { + let v = value.to_u128(); + if let Type::Integer(_, bit_count) = annotated_type { + let bit_count: u32 = (*bit_count).into(); + let max = 1 << bit_count; + if v >= max { + self.push_err(TypeCheckError::OverflowingAssignment { + expr: value, + ty: annotated_type.clone(), + range: format!("0..={}", max - 1), + span, + }); + }; + }; + } + HirExpression::Prefix(expr) => { + self.lint_overflowing_uint(&expr.rhs, annotated_type); + if matches!(expr.operator, UnaryOp::Minus) { + self.push_err(TypeCheckError::InvalidUnaryOp { + kind: "annotated_type".to_string(), + span, + }); + } + } + HirExpression::Infix(expr) => { + self.lint_overflowing_uint(&expr.lhs, annotated_type); + self.lint_overflowing_uint(&expr.rhs, annotated_type); + } + _ => {} + } + } + + pub(super) fn unify( + &mut self, + actual: &Type, + expected: &Type, + make_error: impl FnOnce() -> TypeCheckError, + ) { + let mut errors = Vec::new(); + actual.unify(expected, &mut errors, make_error); + self.errors.extend(errors.into_iter().map(|error| (error.into(), self.file))); + } + + /// Wrapper of Type::unify_with_coercions using self.errors + pub(super) fn unify_with_coercions( + &mut self, + actual: &Type, + expected: &Type, + expression: ExprId, + make_error: impl FnOnce() -> TypeCheckError, + ) { + let mut errors = Vec::new(); + actual.unify_with_coercions(expected, expression, self.interner, &mut errors, make_error); + self.errors.extend(errors.into_iter().map(|error| (error.into(), self.file))); + } + + /// Return a fresh integer or field type variable and log it + /// in self.type_variables to default it later. 
+ pub(super) fn polymorphic_integer_or_field(&mut self) -> Type { + let typ = Type::polymorphic_integer_or_field(self.interner); + self.type_variables.push(typ.clone()); + typ + } + + /// Return a fresh integer type variable and log it + /// in self.type_variables to default it later. + pub(super) fn polymorphic_integer(&mut self) -> Type { + let typ = Type::polymorphic_integer(self.interner); + self.type_variables.push(typ.clone()); + typ + } + + /// Translates a (possibly Unspecified) UnresolvedType to a Type. + /// Any UnresolvedType::Unspecified encountered are replaced with fresh type variables. + pub(super) fn resolve_inferred_type(&mut self, typ: UnresolvedType) -> Type { + match &typ.typ { + UnresolvedTypeData::Unspecified => self.interner.next_type_variable(), + _ => self.resolve_type_inner(typ, &mut vec![]), + } + } + + pub(super) fn type_check_prefix_operand( + &mut self, + op: &crate::ast::UnaryOp, + rhs_type: &Type, + span: Span, + ) -> Type { + let mut unify = |this: &mut Self, expected| { + this.unify(rhs_type, &expected, || TypeCheckError::TypeMismatch { + expr_typ: rhs_type.to_string(), + expected_typ: expected.to_string(), + expr_span: span, + }); + expected + }; + + match op { + crate::ast::UnaryOp::Minus => { + if rhs_type.is_unsigned() { + self.push_err(TypeCheckError::InvalidUnaryOp { + kind: rhs_type.to_string(), + span, + }); + } + let expected = self.polymorphic_integer_or_field(); + self.unify(rhs_type, &expected, || TypeCheckError::InvalidUnaryOp { + kind: rhs_type.to_string(), + span, + }); + expected + } + crate::ast::UnaryOp::Not => { + let rhs_type = rhs_type.follow_bindings(); + + // `!` can work on booleans or integers + if matches!(rhs_type, Type::Integer(..)) { + return rhs_type; + } + + unify(self, Type::Bool) + } + crate::ast::UnaryOp::MutableReference => { + Type::MutableReference(Box::new(rhs_type.follow_bindings())) + } + crate::ast::UnaryOp::Dereference { implicitly_added: _ } => { + let element_type = 
self.interner.next_type_variable(); + unify(self, Type::MutableReference(Box::new(element_type.clone()))); + element_type + } + } + } + + /// Insert as many dereference operations as necessary to automatically dereference a method + /// call object to its base value type T. + pub(super) fn insert_auto_dereferences(&mut self, object: ExprId, typ: Type) -> (ExprId, Type) { + if let Type::MutableReference(element) = typ { + let location = self.interner.id_location(object); + + let object = self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { + operator: UnaryOp::Dereference { implicitly_added: true }, + rhs: object, + })); + self.interner.push_expr_type(object, element.as_ref().clone()); + self.interner.push_expr_location(object, location.span, location.file); + + // Recursively dereference to allow for converting &mut &mut T to T + self.insert_auto_dereferences(object, *element) + } else { + (object, typ) + } + } + + /// Given a method object: `(*foo).bar` of a method call `(*foo).bar.baz()`, remove the + /// implicitly added dereference operator if one is found. + /// + /// Returns Some(new_expr_id) if a dereference was removed and None otherwise. + fn try_remove_implicit_dereference(&mut self, object: ExprId) -> Option { + match self.interner.expression(&object) { + HirExpression::MemberAccess(mut access) => { + let new_lhs = self.try_remove_implicit_dereference(access.lhs)?; + access.lhs = new_lhs; + access.is_offset = true; + + // `object` will have a different type now, which will be filled in + // later when type checking the method call as a function call. + self.interner.replace_expr(&object, HirExpression::MemberAccess(access)); + Some(object) + } + HirExpression::Prefix(prefix) => match prefix.operator { + // Found a dereference we can remove. Now just replace it with its rhs to remove it. 
+ UnaryOp::Dereference { implicitly_added: true } => Some(prefix.rhs), + _ => None, + }, + _ => None, + } + } + + fn bind_function_type_impl( + &mut self, + fn_params: &[Type], + fn_ret: &Type, + callsite_args: &[(Type, ExprId, Span)], + span: Span, + ) -> Type { + if fn_params.len() != callsite_args.len() { + self.push_err(TypeCheckError::ParameterCountMismatch { + expected: fn_params.len(), + found: callsite_args.len(), + span, + }); + return Type::Error; + } + + for (param, (arg, _, arg_span)) in fn_params.iter().zip(callsite_args) { + self.unify(arg, param, || TypeCheckError::TypeMismatch { + expected_typ: param.to_string(), + expr_typ: arg.to_string(), + expr_span: *arg_span, + }); + } + + fn_ret.clone() + } + + pub(super) fn bind_function_type( + &mut self, + function: Type, + args: Vec<(Type, ExprId, Span)>, + span: Span, + ) -> Type { + // Could do a single unification for the entire function type, but matching beforehand + // lets us issue a more precise error on the individual argument that fails to type check. + match function { + Type::TypeVariable(binding, TypeVariableKind::Normal) => { + if let TypeBinding::Bound(typ) = &*binding.borrow() { + return self.bind_function_type(typ.clone(), args, span); + } + + let ret = self.interner.next_type_variable(); + let args = vecmap(args, |(arg, _, _)| arg); + let env_type = self.interner.next_type_variable(); + let expected = Type::Function(args, Box::new(ret.clone()), Box::new(env_type)); + + if let Err(error) = binding.try_bind(expected, span) { + self.push_err(error); + } + ret + } + // The closure env is ignored on purpose: call arguments never place + // constraints on closure environments. 
+ Type::Function(parameters, ret, _env) => { + self.bind_function_type_impl(¶meters, &ret, &args, span) + } + Type::Error => Type::Error, + found => { + self.push_err(TypeCheckError::ExpectedFunction { found, span }); + Type::Error + } + } + } + + pub(super) fn check_cast(&mut self, from: Type, to: &Type, span: Span) -> Type { + match from.follow_bindings() { + Type::Integer(..) + | Type::FieldElement + | Type::TypeVariable(_, TypeVariableKind::IntegerOrField) + | Type::TypeVariable(_, TypeVariableKind::Integer) + | Type::Bool => (), + + Type::TypeVariable(_, _) => { + self.push_err(TypeCheckError::TypeAnnotationsNeeded { span }); + return Type::Error; + } + Type::Error => return Type::Error, + from => { + self.push_err(TypeCheckError::InvalidCast { from, span }); + return Type::Error; + } + } + + match to { + Type::Integer(sign, bits) => Type::Integer(*sign, *bits), + Type::FieldElement => Type::FieldElement, + Type::Bool => Type::Bool, + Type::Error => Type::Error, + _ => { + self.push_err(TypeCheckError::UnsupportedCast { span }); + Type::Error + } + } + } + + // Given a binary comparison operator and another type. This method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates the caller to use a primitive operation for this + // operator, while a true value indicates a user-provided trait impl is required. 
+ fn comparator_operand_type_rules( + &mut self, + lhs_type: &Type, + rhs_type: &Type, + op: &HirBinaryOp, + span: Span, + ) -> Result<(Type, bool), TypeCheckError> { + use Type::*; + + match (lhs_type, rhs_type) { + // Avoid reporting errors multiple times + (Error, _) | (_, Error) => Ok((Bool, false)), + (Alias(alias, args), other) | (other, Alias(alias, args)) => { + let alias = alias.borrow().get_type(args); + self.comparator_operand_type_rules(&alias, other, op, span) + } + + // Matches on TypeVariable must be first to follow any type + // bindings. + (TypeVariable(var, _), other) | (other, TypeVariable(var, _)) => { + if let TypeBinding::Bound(binding) = &*var.borrow() { + return self.comparator_operand_type_rules(other, binding, op, span); + } + + let use_impl = self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); + Ok((Bool, use_impl)) + } + (Integer(sign_x, bit_width_x), Integer(sign_y, bit_width_y)) => { + if sign_x != sign_y { + return Err(TypeCheckError::IntegerSignedness { + sign_x: *sign_x, + sign_y: *sign_y, + span, + }); + } + if bit_width_x != bit_width_y { + return Err(TypeCheckError::IntegerBitWidth { + bit_width_x: *bit_width_x, + bit_width_y: *bit_width_y, + span, + }); + } + Ok((Bool, false)) + } + (FieldElement, FieldElement) => { + if op.kind.is_valid_for_field_type() { + Ok((Bool, false)) + } else { + Err(TypeCheckError::FieldComparison { span }) + } + } + + // <= and friends are technically valid for booleans, just not very useful + (Bool, Bool) => Ok((Bool, false)), + + (lhs, rhs) => { + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((Bool, true)) + } + } + } + + /// Handles the TypeVariable case for checking binary operators. + /// Returns true if we should use the impl for the operator instead of the primitive + /// version of it. 
+ fn bind_type_variables_for_infix( + &mut self, + lhs_type: &Type, + op: &HirBinaryOp, + rhs_type: &Type, + span: Span, + ) -> bool { + self.unify(lhs_type, rhs_type, || TypeCheckError::TypeMismatchWithSource { + expected: lhs_type.clone(), + actual: rhs_type.clone(), + source: Source::Binary, + span, + }); + + let use_impl = !lhs_type.is_numeric(); + + // If this operator isn't valid for fields we have to possibly narrow + // TypeVariableKind::IntegerOrField to TypeVariableKind::Integer. + // Doing so also ensures a type error if Field is used. + // The is_numeric check is to allow impls for custom types to bypass this. + if !op.kind.is_valid_for_field_type() && lhs_type.is_numeric() { + let target = Type::polymorphic_integer(self.interner); + + use crate::ast::BinaryOpKind::*; + use TypeCheckError::*; + self.unify(lhs_type, &target, || match op.kind { + Less | LessEqual | Greater | GreaterEqual => FieldComparison { span }, + And | Or | Xor | ShiftRight | ShiftLeft => FieldBitwiseOp { span }, + Modulo => FieldModulo { span }, + other => unreachable!("Operator {other:?} should be valid for Field"), + }); + } + + use_impl + } + + // Given a binary operator and another type. This method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates the caller to use a primitive operation for this + // operator, while a true value indicates a user-provided trait impl is required. 
+ pub(super) fn infix_operand_type_rules( + &mut self, + lhs_type: &Type, + op: &HirBinaryOp, + rhs_type: &Type, + span: Span, + ) -> Result<(Type, bool), TypeCheckError> { + if op.kind.is_comparator() { + return self.comparator_operand_type_rules(lhs_type, rhs_type, op, span); + } + + use Type::*; + match (lhs_type, rhs_type) { + // An error type on either side will always return an error + (Error, _) | (_, Error) => Ok((Error, false)), + (Alias(alias, args), other) | (other, Alias(alias, args)) => { + let alias = alias.borrow().get_type(args); + self.infix_operand_type_rules(&alias, op, other, span) + } + + // Matches on TypeVariable must be first so that we follow any type + // bindings. + (TypeVariable(int, _), other) | (other, TypeVariable(int, _)) => { + if let TypeBinding::Bound(binding) = &*int.borrow() { + return self.infix_operand_type_rules(binding, op, other, span); + } + if op.kind == BinaryOpKind::ShiftLeft || op.kind == BinaryOpKind::ShiftRight { + self.unify( + rhs_type, + &Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight), + || TypeCheckError::InvalidShiftSize { span }, + ); + let use_impl = if lhs_type.is_numeric() { + let integer_type = Type::polymorphic_integer(self.interner); + self.bind_type_variables_for_infix(lhs_type, op, &integer_type, span) + } else { + true + }; + return Ok((lhs_type.clone(), use_impl)); + } + let use_impl = self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); + Ok((other.clone(), use_impl)) + } + (Integer(sign_x, bit_width_x), Integer(sign_y, bit_width_y)) => { + if op.kind == BinaryOpKind::ShiftLeft || op.kind == BinaryOpKind::ShiftRight { + if *sign_y != Signedness::Unsigned || *bit_width_y != IntegerBitSize::Eight { + return Err(TypeCheckError::InvalidShiftSize { span }); + } + return Ok((Integer(*sign_x, *bit_width_x), false)); + } + if sign_x != sign_y { + return Err(TypeCheckError::IntegerSignedness { + sign_x: *sign_x, + sign_y: *sign_y, + span, + }); + } + if bit_width_x != bit_width_y { + 
return Err(TypeCheckError::IntegerBitWidth { + bit_width_x: *bit_width_x, + bit_width_y: *bit_width_y, + span, + }); + } + Ok((Integer(*sign_x, *bit_width_x), false)) + } + // The result of two Fields is always a witness + (FieldElement, FieldElement) => { + if !op.kind.is_valid_for_field_type() { + if op.kind == BinaryOpKind::Modulo { + return Err(TypeCheckError::FieldModulo { span }); + } else { + return Err(TypeCheckError::FieldBitwiseOp { span }); + } + } + Ok((FieldElement, false)) + } + + (Bool, Bool) => Ok((Bool, false)), + + (lhs, rhs) => { + if op.kind == BinaryOpKind::ShiftLeft || op.kind == BinaryOpKind::ShiftRight { + if rhs == &Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight) { + return Ok((lhs.clone(), true)); + } + return Err(TypeCheckError::InvalidShiftSize { span }); + } + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((lhs.clone(), true)) + } + } + } + + /// Prerequisite: verify_trait_constraint of the operator's trait constraint. + /// + /// Although by this point the operator is expected to already have a trait impl, + /// we still need to match the operator's type against the method's instantiated type + /// to ensure the instantiation bindings are correct and the monomorphizer can + /// re-apply the needed bindings. + pub(super) fn type_check_operator_method( + &mut self, + expr_id: ExprId, + trait_method_id: TraitMethodId, + object_type: &Type, + span: Span, + ) { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + + let method = &the_trait.methods[trait_method_id.method_index]; + let (method_type, mut bindings) = method.typ.clone().instantiate(self.interner); + + match method_type { + Type::Function(args, _, _) => { + // We can cheat a bit and match against only the object type here since no operator + // overload uses other generic parameters or return types aside from the object type. 
+ let expected_object_type = &args[0]; + self.unify(object_type, expected_object_type, || TypeCheckError::TypeMismatch { + expected_typ: expected_object_type.to_string(), + expr_typ: object_type.to_string(), + expr_span: span, + }); + } + other => { + unreachable!("Expected operator method to have a function type, but found {other}") + } + } + + // We must also remember to apply these substitutions to the object_type + // referenced by the selected trait impl, if one has yet to be selected. + let impl_kind = self.interner.get_selected_impl_for_expression(expr_id); + if let Some(TraitImplKind::Assumed { object_type, trait_generics }) = impl_kind { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + let object_type = object_type.substitute(&bindings); + bindings.insert( + the_trait.self_type_typevar_id, + (the_trait.self_type_typevar.clone(), object_type.clone()), + ); + self.interner.select_impl_for_expression( + expr_id, + TraitImplKind::Assumed { object_type, trait_generics }, + ); + } + + self.interner.store_instantiation_bindings(expr_id, bindings); + } + + pub(super) fn type_check_member_access( + &mut self, + mut access: HirMemberAccess, + expr_id: ExprId, + lhs_type: Type, + span: Span, + ) -> Type { + let access_lhs = &mut access.lhs; + + let dereference_lhs = |this: &mut Self, lhs_type, element| { + let old_lhs = *access_lhs; + *access_lhs = this.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { + operator: crate::ast::UnaryOp::Dereference { implicitly_added: true }, + rhs: old_lhs, + })); + this.interner.push_expr_type(old_lhs, lhs_type); + this.interner.push_expr_type(*access_lhs, element); + + let old_location = this.interner.id_location(old_lhs); + this.interner.push_expr_location(*access_lhs, span, old_location.file); + }; + + // If this access is just a field offset, we want to avoid dereferencing + let dereference_lhs = (!access.is_offset).then_some(dereference_lhs); + + match self.check_field_access(&lhs_type, 
&access.rhs.0.contents, span, dereference_lhs) { + Some((element_type, index)) => { + self.interner.set_field_index(expr_id, index); + // We must update `access` in case we added any dereferences to it + self.interner.replace_expr(&expr_id, HirExpression::MemberAccess(access)); + element_type + } + None => Type::Error, + } + } + + pub(super) fn lookup_method( + &mut self, + object_type: &Type, + method_name: &str, + span: Span, + ) -> Option { + match object_type.follow_bindings() { + Type::Struct(typ, _args) => { + let id = typ.borrow().id; + match self.interner.lookup_method(object_type, id, method_name, false) { + Some(method_id) => Some(HirMethodReference::FuncId(method_id)), + None => { + self.push_err(TypeCheckError::UnresolvedMethodCall { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + None + } + } + } + // TODO: We should allow method calls on `impl Trait`s eventually. + // For now it is fine since they are only allowed on return types. + Type::TraitAsType(..) 
=> { + self.push_err(TypeCheckError::UnresolvedMethodCall { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + None + } + Type::NamedGeneric(_, _) => { + let func_meta = self.interner.function_meta( + &self.current_function.expect("unexpected method outside a function"), + ); + + for constraint in &func_meta.trait_constraints { + if *object_type == constraint.typ { + if let Some(the_trait) = self.interner.try_get_trait(constraint.trait_id) { + for (method_index, method) in the_trait.methods.iter().enumerate() { + if method.name.0.contents == method_name { + let trait_method = TraitMethodId { + trait_id: constraint.trait_id, + method_index, + }; + return Some(HirMethodReference::TraitMethodId( + trait_method, + constraint.trait_generics.clone(), + )); + } + } + } + } + } + + self.push_err(TypeCheckError::UnresolvedMethodCall { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + None + } + // Mutable references to another type should resolve to methods of their element type. + // This may be a struct or a primitive type. 
+ Type::MutableReference(element) => self + .interner + .lookup_primitive_trait_method_mut(element.as_ref(), method_name) + .map(HirMethodReference::FuncId) + .or_else(|| self.lookup_method(&element, method_name, span)), + + // If we fail to resolve the object to a struct type, we have no way of type + // checking its arguments as we can't even resolve the name of the function + Type::Error => None, + + // The type variable must be unbound at this point since follow_bindings was called + Type::TypeVariable(_, TypeVariableKind::Normal) => { + self.push_err(TypeCheckError::TypeAnnotationsNeeded { span }); + None + } + + other => match self.interner.lookup_primitive_method(&other, method_name) { + Some(method_id) => Some(HirMethodReference::FuncId(method_id)), + None => { + self.push_err(TypeCheckError::UnresolvedMethodCall { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + None + } + }, + } + } + + pub(super) fn type_check_call( + &mut self, + call: &HirCallExpression, + func_type: Type, + args: Vec<(Type, ExprId, Span)>, + span: Span, + ) -> Type { + // Need to setup these flags here as `self` is borrowed mutably to type check the rest of the call expression + // These flags are later used to type check calls to unconstrained functions from constrained functions + let func_mod = self.current_function.map(|func| self.interner.function_modifiers(&func)); + let is_current_func_constrained = + func_mod.map_or(true, |func_mod| !func_mod.is_unconstrained); + + let is_unconstrained_call = self.is_unconstrained_call(call.func); + self.check_if_deprecated(call.func); + + // Check that we are not passing a mutable reference from a constrained runtime to an unconstrained runtime + if is_current_func_constrained && is_unconstrained_call { + for (typ, _, _) in args.iter() { + if matches!(&typ.follow_bindings(), Type::MutableReference(_)) { + self.push_err(TypeCheckError::ConstrainedReferenceToUnconstrained { span }); + } + } + } + + let 
return_type = self.bind_function_type(func_type, args, span); + + // Check that we are not passing a slice from an unconstrained runtime to a constrained runtime + if is_current_func_constrained && is_unconstrained_call { + if return_type.contains_slice() { + self.push_err(TypeCheckError::UnconstrainedSliceReturnToConstrained { span }); + } else if matches!(&return_type.follow_bindings(), Type::MutableReference(_)) { + self.push_err(TypeCheckError::UnconstrainedReferenceToConstrained { span }); + } + }; + + return_type + } + + fn check_if_deprecated(&mut self, expr: ExprId) { + if let HirExpression::Ident(HirIdent { location, id, impl_kind: _ }) = + self.interner.expression(&expr) + { + if let Some(DefinitionKind::Function(func_id)) = + self.interner.try_definition(id).map(|def| &def.kind) + { + let attributes = self.interner.function_attributes(func_id); + if let Some(note) = attributes.get_deprecated_note() { + self.push_err(TypeCheckError::CallDeprecated { + name: self.interner.definition_name(id).to_string(), + note, + span: location.span, + }); + } + } + } + } + + fn is_unconstrained_call(&self, expr: ExprId) -> bool { + if let HirExpression::Ident(HirIdent { id, .. }) = self.interner.expression(&expr) { + if let Some(DefinitionKind::Function(func_id)) = + self.interner.try_definition(id).map(|def| &def.kind) + { + let modifiers = self.interner.function_modifiers(func_id); + return modifiers.is_unconstrained; + } + } + false + } + + /// Check if the given method type requires a mutable reference to the object type, and check + /// if the given object type is already a mutable reference. If not, add one. + /// This is used to automatically transform a method call: `foo.bar()` into a function + /// call: `bar(&mut foo)`. + /// + /// A notable corner case of this function is where it interacts with auto-deref of `.`. + /// If a field is being mutated e.g. 
`foo.bar.mutate_bar()` where `foo: &mut Foo`, the compiler + /// will insert a dereference before bar `(*foo).bar.mutate_bar()` which would cause us to + /// mutate a copy of bar rather than a reference to it. We must check for this corner case here + /// and remove the implicitly added dereference operator if we find one. + pub(super) fn try_add_mutable_reference_to_object( + &mut self, + function_type: &Type, + object_type: &mut Type, + object: &mut ExprId, + ) { + let expected_object_type = match function_type { + Type::Function(args, _, _) => args.first(), + Type::Forall(_, typ) => match typ.as_ref() { + Type::Function(args, _, _) => args.first(), + typ => unreachable!("Unexpected type for function: {typ}"), + }, + typ => unreachable!("Unexpected type for function: {typ}"), + }; + + if let Some(expected_object_type) = expected_object_type { + let actual_type = object_type.follow_bindings(); + + if matches!(expected_object_type.follow_bindings(), Type::MutableReference(_)) { + if !matches!(actual_type, Type::MutableReference(_)) { + if let Err(error) = verify_mutable_reference(self.interner, *object) { + self.push_err(TypeCheckError::ResolverError(error)); + } + + let new_type = Type::MutableReference(Box::new(actual_type)); + *object_type = new_type.clone(); + + // First try to remove a dereference operator that may have been implicitly + // inserted by a field access expression `foo.bar` on a mutable reference `foo`. 
+ let new_object = self.try_remove_implicit_dereference(*object); + + // If that didn't work, then wrap the whole expression in an `&mut` + *object = new_object.unwrap_or_else(|| { + let location = self.interner.id_location(*object); + + let new_object = + self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { + operator: UnaryOp::MutableReference, + rhs: *object, + })); + self.interner.push_expr_type(new_object, new_type); + self.interner.push_expr_location(new_object, location.span, location.file); + new_object + }); + } + // Otherwise if the object type is a mutable reference and the method is not, insert as + // many dereferences as needed. + } else if matches!(actual_type, Type::MutableReference(_)) { + let (new_object, new_type) = self.insert_auto_dereferences(*object, actual_type); + *object_type = new_type; + *object = new_object; + } + } + } + + pub fn type_check_function_body(&mut self, body_type: Type, meta: &FuncMeta, body_id: ExprId) { + let (expr_span, empty_function) = self.function_info(body_id); + let declared_return_type = meta.return_type(); + + let func_span = self.interner.expr_span(&body_id); // XXX: We could be more specific and return the span of the last stmt, however stmts do not have spans yet + if let Type::TraitAsType(trait_id, _, generics) = declared_return_type { + if self.interner.lookup_trait_implementation(&body_type, *trait_id, generics).is_err() { + self.push_err(TypeCheckError::TypeMismatchWithSource { + expected: declared_return_type.clone(), + actual: body_type, + span: func_span, + source: Source::Return(meta.return_type.clone(), expr_span), + }); + } + } else { + self.unify_with_coercions(&body_type, declared_return_type, body_id, || { + let mut error = TypeCheckError::TypeMismatchWithSource { + expected: declared_return_type.clone(), + actual: body_type.clone(), + span: func_span, + source: Source::Return(meta.return_type.clone(), expr_span), + }; + + if empty_function { + error = error.add_context( + 
"implicitly returns `()` as its body has no tail or `return` expression", + ); + } + error + }); + } + } + + fn function_info(&self, function_body_id: ExprId) -> (noirc_errors::Span, bool) { + let (expr_span, empty_function) = + if let HirExpression::Block(block) = self.interner.expression(&function_body_id) { + let last_stmt = block.statements().last(); + let mut span = self.interner.expr_span(&function_body_id); + + if let Some(last_stmt) = last_stmt { + if let HirStatement::Expression(expr) = self.interner.statement(last_stmt) { + span = self.interner.expr_span(&expr); + } + } + + (span, last_stmt.is_none()) + } else { + (self.interner.expr_span(&function_body_id), false) + }; + (expr_span, empty_function) + } + + pub fn verify_trait_constraint( + &mut self, + object_type: &Type, + trait_id: TraitId, + trait_generics: &[Type], + function_ident_id: ExprId, + span: Span, + ) { + match self.interner.lookup_trait_implementation(object_type, trait_id, trait_generics) { + Ok(impl_kind) => { + self.interner.select_impl_for_expression(function_ident_id, impl_kind); + } + Err(erroring_constraints) => { + if erroring_constraints.is_empty() { + self.push_err(TypeCheckError::TypeAnnotationsNeeded { span }); + } else { + // Don't show any errors where try_get_trait returns None. + // This can happen if a trait is used that was never declared. 
+ let constraints = erroring_constraints + .into_iter() + .map(|constraint| { + let r#trait = self.interner.try_get_trait(constraint.trait_id)?; + let mut name = r#trait.name.to_string(); + if !constraint.trait_generics.is_empty() { + let generics = + vecmap(&constraint.trait_generics, ToString::to_string); + name += &format!("<{}>", generics.join(", ")); + } + Some((constraint.typ, name)) + }) + .collect::>>(); + + if let Some(constraints) = constraints { + self.push_err(TypeCheckError::NoMatchingImplFound { constraints, span }); + } + } + } + } + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 2f6b101e62f..4aac0fec9c3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -1,5 +1,6 @@ use super::dc_mod::collect_defs; use super::errors::{DefCollectorErrorKind, DuplicateType}; +use crate::elaborator::Elaborator; use crate::graph::CrateId; use crate::hir::comptime::{Interpreter, InterpreterError}; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; @@ -129,14 +130,18 @@ pub struct UnresolvedGlobal { /// Given a Crate root, collect all definitions in that crate pub struct DefCollector { pub(crate) def_map: CrateDefMap, - pub(crate) collected_imports: Vec, - pub(crate) collected_functions: Vec, - pub(crate) collected_types: BTreeMap, - pub(crate) collected_type_aliases: BTreeMap, - pub(crate) collected_traits: BTreeMap, - pub(crate) collected_globals: Vec, - pub(crate) collected_impls: ImplMap, - pub(crate) collected_traits_impls: Vec, + pub(crate) imports: Vec, + pub(crate) items: CollectedItems, +} + +pub struct CollectedItems { + pub(crate) functions: Vec, + pub(crate) types: BTreeMap, + pub(crate) type_aliases: BTreeMap, + pub(crate) traits: BTreeMap, + pub(crate) globals: Vec, + pub(crate) impls: ImplMap, + pub(crate) 
trait_impls: Vec, } /// Maps the type and the module id in which the impl is defined to the functions contained in that @@ -210,14 +215,16 @@ impl DefCollector { fn new(def_map: CrateDefMap) -> DefCollector { DefCollector { def_map, - collected_imports: vec![], - collected_functions: vec![], - collected_types: BTreeMap::new(), - collected_type_aliases: BTreeMap::new(), - collected_traits: BTreeMap::new(), - collected_impls: HashMap::new(), - collected_globals: vec![], - collected_traits_impls: vec![], + imports: vec![], + items: CollectedItems { + functions: vec![], + types: BTreeMap::new(), + type_aliases: BTreeMap::new(), + traits: BTreeMap::new(), + impls: HashMap::new(), + globals: vec![], + trait_impls: vec![], + }, } } @@ -229,6 +236,7 @@ impl DefCollector { context: &mut Context, ast: SortedModule, root_file_id: FileId, + use_elaborator: bool, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; @@ -242,7 +250,12 @@ impl DefCollector { let crate_graph = &context.crate_graph[crate_id]; for dep in crate_graph.dependencies.clone() { - errors.extend(CrateDefMap::collect_defs(dep.crate_id, context, macro_processors)); + errors.extend(CrateDefMap::collect_defs( + dep.crate_id, + context, + use_elaborator, + macro_processors, + )); let dep_def_root = context.def_map(&dep.crate_id).expect("ice: def map was just created").root; @@ -275,18 +288,13 @@ impl DefCollector { // Add the current crate to the collection of DefMaps context.def_maps.insert(crate_id, def_collector.def_map); - inject_prelude(crate_id, context, crate_root, &mut def_collector.collected_imports); + inject_prelude(crate_id, context, crate_root, &mut def_collector.imports); for submodule in submodules { - inject_prelude( - crate_id, - context, - LocalModuleId(submodule), - &mut def_collector.collected_imports, - ); + inject_prelude(crate_id, context, LocalModuleId(submodule), &mut def_collector.imports); } // Resolve 
unresolved imports collected from the crate, one by one. - for collected_import in def_collector.collected_imports { + for collected_import in std::mem::take(&mut def_collector.imports) { match resolve_import(crate_id, &collected_import, &context.def_maps) { Ok(resolved_import) => { if let Some(error) = resolved_import.error { @@ -323,6 +331,12 @@ impl DefCollector { } } + if use_elaborator { + let mut more_errors = Elaborator::elaborate(context, crate_id, def_collector.items); + more_errors.append(&mut errors); + return errors; + } + let mut resolved_module = ResolvedModule { errors, ..Default::default() }; // We must first resolve and intern the globals before we can resolve any stmts inside each function. @@ -330,26 +344,25 @@ impl DefCollector { // // Additionally, we must resolve integer globals before structs since structs may refer to // the values of integer globals as numeric generics. - let (literal_globals, other_globals) = - filter_literal_globals(def_collector.collected_globals); + let (literal_globals, other_globals) = filter_literal_globals(def_collector.items.globals); resolved_module.resolve_globals(context, literal_globals, crate_id); resolved_module.errors.extend(resolve_type_aliases( context, - def_collector.collected_type_aliases, + def_collector.items.type_aliases, crate_id, )); resolved_module.errors.extend(resolve_traits( context, - def_collector.collected_traits, + def_collector.items.traits, crate_id, )); // Must resolve structs before we resolve globals. 
resolved_module.errors.extend(resolve_structs( context, - def_collector.collected_types, + def_collector.items.types, crate_id, )); @@ -358,7 +371,7 @@ impl DefCollector { resolved_module.errors.extend(collect_trait_impls( context, crate_id, - &mut def_collector.collected_traits_impls, + &mut def_collector.items.trait_impls, )); // Before we resolve any function symbols we must go through our impls and @@ -368,11 +381,7 @@ impl DefCollector { // // These are resolved after trait impls so that struct methods are chosen // over trait methods if there are name conflicts. - resolved_module.errors.extend(collect_impls( - context, - crate_id, - &def_collector.collected_impls, - )); + resolved_module.errors.extend(collect_impls(context, crate_id, &def_collector.items.impls)); // We must wait to resolve non-integer globals until after we resolve structs since struct // globals will need to reference the struct type they're initialized to to ensure they are valid. @@ -383,7 +392,7 @@ impl DefCollector { &mut context.def_interner, crate_id, &context.def_maps, - def_collector.collected_functions, + def_collector.items.functions, None, &mut resolved_module.errors, ); @@ -392,13 +401,13 @@ impl DefCollector { &mut context.def_interner, crate_id, &context.def_maps, - def_collector.collected_impls, + def_collector.items.impls, &mut resolved_module.errors, )); resolved_module.trait_impl_functions = resolve_trait_impls( context, - def_collector.collected_traits_impls, + def_collector.items.trait_impls, crate_id, &mut resolved_module.errors, ); @@ -431,15 +440,18 @@ fn inject_prelude( crate_root: LocalModuleId, collected_imports: &mut Vec, ) { - let segments: Vec<_> = "std::prelude" - .split("::") - .map(|segment| crate::ast::Ident::new(segment.into(), Span::default())) - .collect(); + if !crate_id.is_stdlib() { + let segments: Vec<_> = "std::prelude" + .split("::") + .map(|segment| crate::ast::Ident::new(segment.into(), Span::default())) + .collect(); - let path = - Path { 
segments: segments.clone(), kind: crate::ast::PathKind::Dep, span: Span::default() }; + let path = Path { + segments: segments.clone(), + kind: crate::ast::PathKind::Dep, + span: Span::default(), + }; - if !crate_id.is_stdlib() { if let Ok(PathResolution { module_def_id, error }) = path_resolver::resolve_path( &context.def_maps, ModuleId { krate: crate_id, local_id: crate_root }, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index b2ec7dbc813..e688f192d3d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -70,7 +70,7 @@ pub fn collect_defs( // Then add the imports to defCollector to resolve once all modules in the hierarchy have been resolved for import in ast.imports { - collector.def_collector.collected_imports.push(ImportDirective { + collector.def_collector.imports.push(ImportDirective { module_id: collector.module_id, path: import.path, alias: import.alias, @@ -126,7 +126,7 @@ impl<'a> ModCollector<'a> { errors.push((err.into(), self.file_id)); } - self.def_collector.collected_globals.push(UnresolvedGlobal { + self.def_collector.items.globals.push(UnresolvedGlobal { file_id: self.file_id, module_id: self.module_id, global_id, @@ -154,7 +154,7 @@ impl<'a> ModCollector<'a> { } let key = (r#impl.object_type, self.module_id); - let methods = self.def_collector.collected_impls.entry(key).or_default(); + let methods = self.def_collector.items.impls.entry(key).or_default(); methods.push((r#impl.generics, r#impl.type_span, unresolved_functions)); } } @@ -191,7 +191,7 @@ impl<'a> ModCollector<'a> { trait_generics: trait_impl.trait_generics, }; - self.def_collector.collected_traits_impls.push(unresolved_trait_impl); + self.def_collector.items.trait_impls.push(unresolved_trait_impl); } } @@ -269,7 +269,7 @@ impl<'a> ModCollector<'a> { } } - 
self.def_collector.collected_functions.push(unresolved_functions); + self.def_collector.items.functions.push(unresolved_functions); errors } @@ -316,7 +316,7 @@ impl<'a> ModCollector<'a> { } // And store the TypeId -> StructType mapping somewhere it is reachable - self.def_collector.collected_types.insert(id, unresolved); + self.def_collector.items.types.insert(id, unresolved); } definition_errors } @@ -354,7 +354,7 @@ impl<'a> ModCollector<'a> { errors.push((err.into(), self.file_id)); } - self.def_collector.collected_type_aliases.insert(type_alias_id, unresolved); + self.def_collector.items.type_aliases.insert(type_alias_id, unresolved); } errors } @@ -506,7 +506,7 @@ impl<'a> ModCollector<'a> { method_ids, fns_with_default_impl: unresolved_functions, }; - self.def_collector.collected_traits.insert(trait_id, unresolved); + self.def_collector.items.traits.insert(trait_id, unresolved); } errors } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs index 590c2e3d6b6..19e06387d43 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -73,6 +73,7 @@ impl CrateDefMap { pub fn collect_defs( crate_id: CrateId, context: &mut Context, + use_elaborator: bool, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { // Check if this Crate has already been compiled @@ -116,7 +117,14 @@ impl CrateDefMap { }; // Now we want to populate the CrateDefMap using the DefCollector - errors.extend(DefCollector::collect(def_map, context, ast, root_file_id, macro_processors)); + errors.extend(DefCollector::collect( + def_map, + context, + ast, + root_file_id, + use_elaborator, + macro_processors, + )); errors.extend( parsing_errors.iter().map(|e| (e.clone().into(), root_file_id)).collect::>(), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs 
b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs index 8850331f683..343113836ed 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -2,11 +2,14 @@ use noirc_errors::{CustomDiagnostic, Span}; use thiserror::Error; use crate::graph::CrateId; +use crate::hir::def_collector::dc_crate::CompilationError; use std::collections::BTreeMap; use crate::ast::{Ident, ItemVisibility, Path, PathKind}; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleDefId, ModuleId, PerNs}; +use super::errors::ResolverError; + #[derive(Debug, Clone)] pub struct ImportDirective { pub module_id: LocalModuleId, @@ -53,6 +56,12 @@ pub struct ResolvedImport { pub error: Option, } +impl From for CompilationError { + fn from(error: PathResolutionError) -> Self { + Self::ResolverError(ResolverError::PathResolutionError(error)) + } +} + impl<'a> From<&'a PathResolutionError> for CustomDiagnostic { fn from(error: &'a PathResolutionError) -> Self { match &error { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs index 60baaecab59..7dc307fe716 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -56,17 +56,17 @@ use crate::hir_def::{ use super::errors::{PubPosition, ResolverError}; use super::import::PathResolution; -const SELF_TYPE_NAME: &str = "Self"; +pub const SELF_TYPE_NAME: &str = "Self"; type Scope = GenericScope; type ScopeTree = GenericScopeTree; type ScopeForest = GenericScopeForest; pub struct LambdaContext { - captures: Vec, + pub captures: Vec, /// the index in the scope tree /// (sometimes being filled by ScopeTree's find method) - scope_index: usize, + pub scope_index: usize, } /// The primary jobs of the Resolver are to validate that every variable 
found refers to exactly 1 @@ -1345,7 +1345,7 @@ impl<'a> Resolver<'a> { range @ ForRange::Array(_) => { let for_stmt = range.into_for(for_loop.identifier, for_loop.block, for_loop.span); - self.resolve_stmt(for_stmt, for_loop.span) + self.resolve_stmt(for_stmt.kind, for_loop.span) } } } @@ -1361,7 +1361,7 @@ impl<'a> Resolver<'a> { StatementKind::Comptime(statement) => { let hir_statement = self.resolve_stmt(statement.kind, statement.span); let statement_id = self.interner.push_stmt(hir_statement); - self.interner.push_statement_location(statement_id, statement.span, self.file); + self.interner.push_stmt_location(statement_id, statement.span, self.file); HirStatement::Comptime(statement_id) } } @@ -1370,7 +1370,7 @@ impl<'a> Resolver<'a> { pub fn intern_stmt(&mut self, stmt: Statement) -> StmtId { let hir_stmt = self.resolve_stmt(stmt.kind, stmt.span); let id = self.interner.push_stmt(hir_stmt); - self.interner.push_statement_location(id, stmt.span, self.file); + self.interner.push_stmt_location(id, stmt.span, self.file); id } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs index 9b40c959981..48598109829 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -250,14 +250,14 @@ impl<'interner> TypeChecker<'interner> { } // TODO: update object_type here? - let function_call = method_call.into_function_call( + let (_, function_call) = method_call.into_function_call( &method_ref, object_type, location, self.interner, ); - self.interner.replace_expr(expr_id, function_call); + self.interner.replace_expr(expr_id, HirExpression::Call(function_call)); // Type check the new call now that it has been changed from a method call // to a function call. This way we avoid duplicating code. 
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs index 0f8131d6ebb..2e448858d9e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -25,7 +25,7 @@ use crate::{ Type, TypeBindings, }; -use self::errors::Source; +pub use self::errors::Source; pub struct TypeChecker<'interner> { interner: &'interner mut NodeInterner, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs index bf7d9b7b4ba..8df6785e0eb 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs @@ -200,13 +200,15 @@ pub enum HirMethodReference { impl HirMethodCallExpression { /// Converts a method call into a function call + /// + /// Returns ((func_var_id, func_var), call_expr) pub fn into_function_call( mut self, method: &HirMethodReference, object_type: Type, location: Location, interner: &mut NodeInterner, - ) -> HirExpression { + ) -> ((ExprId, HirIdent), HirCallExpression) { let mut arguments = vec![self.object]; arguments.append(&mut self.arguments); @@ -224,10 +226,11 @@ impl HirMethodCallExpression { (id, ImplKind::TraitMethod(*method_id, constraint, false)) } }; - let func = HirExpression::Ident(HirIdent { location, id, impl_kind }); - let func = interner.push_expr(func); + let func_var = HirIdent { location, id, impl_kind }; + let func = interner.push_expr(HirExpression::Ident(func_var.clone())); interner.push_expr_location(func, location.span, location.file); - HirExpression::Call(HirCallExpression { func, arguments, location }) + let expr = HirCallExpression { func, arguments, location }; + ((func, func_var), expr) } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs 
b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs index c38dd41fd3d..ceec9ad8580 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs @@ -135,10 +135,7 @@ impl FuncMeta { /// So this method tells the type checker to ignore the return /// of the empty function, which is unit pub fn can_ignore_return_type(&self) -> bool { - match self.kind { - FunctionKind::LowLevel | FunctionKind::Builtin | FunctionKind::Oracle => true, - FunctionKind::Normal | FunctionKind::Recursive => false, - } + self.kind.can_ignore_return_type() } pub fn function_signature(&self) -> FunctionSignature { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs index f3b2a24c1f0..f31aeea0552 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs @@ -1423,14 +1423,14 @@ impl Type { /// Retrieves the type of the given field name /// Panics if the type is not a struct or tuple. 
- pub fn get_field_type(&self, field_name: &str) -> Type { + pub fn get_field_type(&self, field_name: &str) -> Option { match self { - Type::Struct(def, args) => def.borrow().get_field(field_name, args).unwrap().0, + Type::Struct(def, args) => def.borrow().get_field(field_name, args).map(|(typ, _)| typ), Type::Tuple(fields) => { let mut fields = fields.iter().enumerate(); - fields.find(|(i, _)| i.to_string() == *field_name).unwrap().1.clone() + fields.find(|(i, _)| i.to_string() == *field_name).map(|(_, typ)| typ).cloned() } - other => panic!("Tried to iterate over the fields of '{other}', which has none"), + _ => None, } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lib.rs b/noir/noir-repo/compiler/noirc_frontend/src/lib.rs index 958a18ac2fb..b05c635f436 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lib.rs @@ -12,6 +12,7 @@ pub mod ast; pub mod debug; +pub mod elaborator; pub mod graph; pub mod lexer; pub mod monomorphization; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs index 88adc7a9414..faf89016f96 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs @@ -532,7 +532,7 @@ impl NodeInterner { self.id_to_type.insert(expr_id.into(), typ); } - /// Store the type for an interned expression + /// Store the type for a definition pub fn push_definition_type(&mut self, definition_id: DefinitionId, typ: Type) { self.definition_to_type.insert(definition_id, typ); } @@ -696,7 +696,7 @@ impl NodeInterner { let statement = self.push_stmt(HirStatement::Error); let span = name.span(); let id = self.push_global(name, local_id, statement, file, attributes, mutable); - self.push_statement_location(statement, span, file); + self.push_stmt_location(statement, span, file); id } @@ -942,7 +942,7 @@ impl NodeInterner { 
self.id_location(stmt_id) } - pub fn push_statement_location(&mut self, id: StmtId, span: Span, file: FileId) { + pub fn push_stmt_location(&mut self, id: StmtId, span: Span, file: FileId) { self.id_to_location.insert(id.into(), Location::new(span, file)); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index 6f7470807be..fb80a7d8018 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -81,6 +81,7 @@ pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(Compilation &mut context, program.clone().into_sorted(), root_file_id, + false, &[], // No macro processors )); } diff --git a/noir/noir-repo/test_programs/execution_success/no_predicates_brillig/Nargo.toml b/noir/noir-repo/test_programs/execution_success/no_predicates_brillig/Nargo.toml new file mode 100644 index 00000000000..328d78c8f99 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/no_predicates_brillig/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "no_predicates_brillig" +type = "bin" +authors = [""] +compiler_version = ">=0.27.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/no_predicates_brillig/Prover.toml b/noir/noir-repo/test_programs/execution_success/no_predicates_brillig/Prover.toml new file mode 100644 index 00000000000..93a825f609f --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/no_predicates_brillig/Prover.toml @@ -0,0 +1,2 @@ +x = "10" +y = "20" diff --git a/noir/noir-repo/test_programs/execution_success/no_predicates_brillig/src/main.nr b/noir/noir-repo/test_programs/execution_success/no_predicates_brillig/src/main.nr new file mode 100644 index 00000000000..1d088473aa7 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/no_predicates_brillig/src/main.nr @@ -0,0 +1,12 @@ +unconstrained fn main(x: u32, y: pub u32) { + basic_checks(x, y); +} + 
+#[no_predicates] +fn basic_checks(x: u32, y: u32) { + if x > y { + assert(x == 10); + } else { + assert(y == 20); + } +} diff --git a/noir/noir-repo/tooling/lsp/src/lib.rs b/noir/noir-repo/tooling/lsp/src/lib.rs index be9b83e02f6..05345b96c80 100644 --- a/noir/noir-repo/tooling/lsp/src/lib.rs +++ b/noir/noir-repo/tooling/lsp/src/lib.rs @@ -345,7 +345,7 @@ fn prepare_package_from_source_string() { let mut state = LspState::new(&client, acvm::blackbox_solver::StubbedBlackBoxSolver); let (mut context, crate_id) = crate::prepare_source(source.to_string(), &mut state); - let _check_result = noirc_driver::check_crate(&mut context, crate_id, false, false); + let _check_result = noirc_driver::check_crate(&mut context, crate_id, false, false, false); let main_func_id = context.get_main_function(&crate_id); assert!(main_func_id.is_some()); } diff --git a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs index 355bb7832c4..3856bdc79e9 100644 --- a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs @@ -56,7 +56,7 @@ pub(super) fn on_did_change_text_document( state.input_files.insert(params.text_document.uri.to_string(), text.clone()); let (mut context, crate_id) = prepare_source(text, state); - let _ = check_crate(&mut context, crate_id, false, false); + let _ = check_crate(&mut context, crate_id, false, false, false); let workspace = match resolve_workspace_for_source_path( params.text_document.uri.to_file_path().unwrap().as_path(), @@ -139,7 +139,7 @@ fn process_noir_document( let (mut context, crate_id) = prepare_package(&workspace_file_manager, &parsed_files, package); - let file_diagnostics = match check_crate(&mut context, crate_id, false, false) { + let file_diagnostics = match check_crate(&mut context, crate_id, false, false, false) { Ok(((), warnings)) => warnings, Err(errors_and_warnings) => errors_and_warnings, }; diff --git 
a/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs b/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs index 893ba33d845..744bddedd9d 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs @@ -67,7 +67,7 @@ fn on_code_lens_request_inner( let (mut context, crate_id) = prepare_source(source_string, state); // We ignore the warnings and errors produced by compilation for producing code lenses // because we can still get the test functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false, false); + let _ = check_crate(&mut context, crate_id, false, false, false); let collected_lenses = collect_lenses_for_package(&context, crate_id, &workspace, package, None); diff --git a/noir/noir-repo/tooling/lsp/src/requests/goto_declaration.rs b/noir/noir-repo/tooling/lsp/src/requests/goto_declaration.rs index 8e6d519b895..5cff16b2348 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/goto_declaration.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/goto_declaration.rs @@ -46,7 +46,7 @@ fn on_goto_definition_inner( interner = def_interner; } else { // We ignore the warnings and errors produced by compilation while resolving the definition - let _ = noirc_driver::check_crate(&mut context, crate_id, false, false); + let _ = noirc_driver::check_crate(&mut context, crate_id, false, false, false); interner = &context.def_interner; } diff --git a/noir/noir-repo/tooling/lsp/src/requests/goto_definition.rs b/noir/noir-repo/tooling/lsp/src/requests/goto_definition.rs index 88bb667f2e8..32e13ce00f6 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/goto_definition.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/goto_definition.rs @@ -54,7 +54,7 @@ fn on_goto_definition_inner( interner = def_interner; } else { // We ignore the warnings and errors produced by compilation while resolving the definition - let _ = noirc_driver::check_crate(&mut context, 
crate_id, false, false); + let _ = noirc_driver::check_crate(&mut context, crate_id, false, false, false); interner = &context.def_interner; } diff --git a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs index 1844a3d9bf0..83b05ba06a2 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs @@ -60,7 +60,7 @@ fn on_test_run_request_inner( Some(package) => { let (mut context, crate_id) = prepare_package(&workspace_file_manager, &parsed_files, package); - if check_crate(&mut context, crate_id, false, false).is_err() { + if check_crate(&mut context, crate_id, false, false, false).is_err() { let result = NargoTestRunResult { id: params.id.clone(), result: "error".to_string(), diff --git a/noir/noir-repo/tooling/lsp/src/requests/tests.rs b/noir/noir-repo/tooling/lsp/src/requests/tests.rs index 5b78fcc65c3..cdf4ad338c4 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/tests.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/tests.rs @@ -61,7 +61,7 @@ fn on_tests_request_inner( prepare_package(&workspace_file_manager, &parsed_files, package); // We ignore the warnings and errors produced by compilation for producing tests // because we can still get the test functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false, false); + let _ = check_crate(&mut context, crate_id, false, false, false); // We don't add test headings for a package if it contains no `#[test]` functions get_package_tests_in_crate(&context, &crate_id, &package.name) diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs index 208379b098d..d5313d96076 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs @@ -87,6 +87,7 @@ fn check_package( compile_options.deny_warnings, compile_options.disable_macros, 
compile_options.silence_warnings, + compile_options.use_elaborator, )?; if package.is_library() || package.is_contract() { @@ -173,8 +174,9 @@ pub(crate) fn check_crate_and_report_errors( deny_warnings: bool, disable_macros: bool, silence_warnings: bool, + use_elaborator: bool, ) -> Result<(), CompileError> { - let result = check_crate(context, crate_id, deny_warnings, disable_macros); + let result = check_crate(context, crate_id, deny_warnings, disable_macros, use_elaborator); report_errors(result, &context.file_manager, deny_warnings, silence_warnings) } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs index a61f3ccfc02..324eed340ad 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs @@ -89,6 +89,7 @@ fn compile_exported_functions( compile_options.deny_warnings, compile_options.disable_macros, compile_options.silence_warnings, + compile_options.use_elaborator, )?; let exported_functions = context.get_all_exported_functions_in_crate(&crate_id); diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs index 967d4c87e6d..51e21248afd 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs @@ -175,6 +175,7 @@ fn run_test( crate_id, compile_options.deny_warnings, compile_options.disable_macros, + compile_options.use_elaborator, ) .expect("Any errors should have occurred when collecting test functions"); @@ -208,6 +209,7 @@ fn get_tests_in_package( compile_options.deny_warnings, compile_options.disable_macros, compile_options.silence_warnings, + compile_options.use_elaborator, )?; Ok(context diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs index 9d377cfaee9..70a9354f50a 100644 --- 
a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs @@ -10,8 +10,7 @@ use nargo::{ parse_all, prepare_package, }; -#[test] -fn stdlib_noir_tests() { +fn run_stdlib_tests(use_elaborator: bool) { let mut file_manager = file_manager_with_stdlib(&PathBuf::from(".")); file_manager.add_file_with_source_canonical_path(&PathBuf::from("main.nr"), "".to_owned()); let parsed_files = parse_all(&file_manager); @@ -30,7 +29,7 @@ fn stdlib_noir_tests() { let (mut context, dummy_crate_id) = prepare_package(&file_manager, &parsed_files, &dummy_package); - let result = check_crate(&mut context, dummy_crate_id, true, false); + let result = check_crate(&mut context, dummy_crate_id, true, false, use_elaborator); report_errors(result, &context.file_manager, true, false) .expect("Error encountered while compiling standard library"); @@ -60,3 +59,15 @@ fn stdlib_noir_tests() { assert!(!test_report.is_empty(), "Could not find any tests within the stdlib"); assert!(test_report.iter().all(|(_, status)| !status.failed())); } + +#[test] +fn stdlib_noir_tests() { + run_stdlib_tests(false) +} + +// Once this no longer panics we can use the elaborator by default and remove the old passes +#[test] +#[should_panic] +fn stdlib_elaborator_tests() { + run_stdlib_tests(true) +} diff --git a/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts b/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts index dcf9f489003..d047e35035f 100644 --- a/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts +++ b/noir/noir-repo/tooling/noir_js/test/node/execute.test.ts @@ -81,3 +81,51 @@ it('circuit with a raw assert payload should fail with the decoded payload', asy }); } }); + +it('successfully executes a program with multiple acir circuits', async () => { + const inputs = { + x: '10', + }; + try { + await new Noir(fold_fibonacci_program).execute(inputs); + } catch (error) { + const knownError = error as Error; + 
expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); + } +}); + +it('successfully executes a program with multiple acir circuits', async () => { + const inputs = { + x: '10', + }; + try { + await new Noir(fold_fibonacci_program).execute(inputs); + } catch (error) { + const knownError = error as Error; + expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); + } +}); + +it('successfully executes a program with multiple acir circuits', async () => { + const inputs = { + x: '10', + }; + try { + await new Noir(fold_fibonacci_program).execute(inputs); + } catch (error) { + const knownError = error as Error; + expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); + } +}); + +it('successfully executes a program with multiple acir circuits', async () => { + const inputs = { + x: '10', + }; + try { + await new Noir(fold_fibonacci_program).execute(inputs); + } catch (error) { + const knownError = error as Error; + expect(knownError.message).to.equal('Circuit execution failed: Error: Cannot satisfy constraint'); + } +});